diff --git a/.circleci/config.yml b/.circleci/config.yml index 0351375869..e2be3f6528 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,185 +1,80 @@ defaults: &defaults working_directory: ~/build - docker: - - image: alanz/haskell-hie-ci + resource_class: large steps: - checkout - run: - command: git submodule sync --recursive - - run: - command: git submodule update --recursive --init - - run: - name: Write provided stack.yaml with predictable name - command: cp ${STACK_FILE} stack-build.txt - - - run: - name: Figure out resolver for better caching - command: grep '^resolver:' stack-build.txt > resolver.txt - - - run: - name: Create a composite cabal file for changes detection - command: find . -name "*.cabal" | grep -v -e "stack-work" -e "dist-newstyle" -e "submodules" -e "testdata" | sort | xargs cat > all-cabal.txt + name: Save resolver field into file + command: grep '^resolver:' ${STACK_FILE} > resolver.txt - restore_cache: keys: - - stack-cache-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "stack-build.txt" }}-{{ checksum "all-cabal.txt" }} - - stack-cache-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "stack-build.txt" }} - - stack-cache-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "resolver.txt" }} + - v4-stack-cache-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "resolver.txt" }} + # Looking for the string [skip circleci] in all the json returned by https://docs.github.com/en/rest/reference/pulls#get-a-pull-request - run: - name: Stack upgrade - command: stack upgrade + name: Skip ci setup + command: | + set +e + echo "CIRCLE_PULL_REQUEST: $CIRCLE_PULL_REQUEST" + if [[ ! -z $CIRCLE_PULL_REQUEST ]]; then + PULL_REQUEST_URL=${CIRCLE_PULL_REQUEST/github.com/api.github.com/repos} + PULL_REQUEST_URL=${PULL_REQUEST_URL/pull/pulls} + echo "PULL_REQUEST_URL: $PULL_REQUEST_URL" + SKIP_CI=$(curl $PULL_REQUEST_URL | grep -o "\\[skip circleci\\]") + if [[ -z "$SKIP_CI" ]]; then + PULL_REQUEST_COMMENTS_URL=${PULL_REQUEST_URL/pulls/issues}/comments + echo "PULL_REQUEST_COMMENTS_URL: $PULL_REQUEST_COMMENTS_URL" + SKIP_CI=$(curl $PULL_REQUEST_COMMENTS_URL | grep -o "\\[skip circleci\\]") + fi + echo "SKIP_CI: $SKIP_CI" + fi + echo "export SKIP_CI=$SKIP_CI" >> $BASH_ENV - run: - name: Stack setup - command: stack -j 2 --stack-yaml=${STACK_FILE} setup - - - run: - name: Install happy - command: stack --stack-yaml=${STACK_FILE} install happy - - - run: - name: Install Hoogle - command: stack -j 1 --stack-yaml=${STACK_FILE} install hoogle - - - run: - name: Build (we need the exe for tests) - command: stack -j 1 --stack-yaml=${STACK_FILE} install - # need j1, else ghc-lib-parser triggers OOM + name: Build + command: | + if [[ -z "$SKIP_CI" ]]; then + stack -j4 --stack-yaml=${STACK_FILE} install --system-ghc --no-terminal + fi no_output_timeout: 30m - run: name: Build Testsuite without running it - command: stack -j 2 --stack-yaml=${STACK_FILE} build --test --no-run-tests + command: | + if [[ -z "$SKIP_CI" ]]; then + stack -j4 --stack-yaml=${STACK_FILE} build --system-ghc --test --no-run-tests --no-terminal + fi no_output_timeout: 30m - store_artifacts: path: ~/.local/bin destination: bin - - run: - name: Generate Hoogle database - command: if [ ! 
-d ~/.hoogle ]; then stack --stack-yaml=${STACK_FILE} exec hoogle generate; fi - - - run: - name: Clear cabal-helper cache - command: rm -fr ~/.cache/cabal-helper - - save_cache: - key: stack-cache-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "resolver.txt" }} - paths: &cache_paths + key: v4-stack-cache-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "resolver.txt" }} + paths: - ~/.stack - - ~/.cache - - ~/.hoogle - - ~/build/.stack-work - - ~/build/ghcide/.stack-work - - - run: - name: Test ghcide - # Tests MUST run with -j1, since multiple ghc-mod sessions are not allowed - # command: stack -j 1 --stack-yaml=${STACK_FILE} test ghcide --dump-logs - command: echo "ghcide tests disabled until they got fixed, see https://github.com/mpickering/ghcide/issues/25" - no_output_timeout: 120m - - - run: - name: Test haskell-language-server - # Tests MUST run with -j1, since multiple ghc-mod sessions are not allowed - command: stack -j 1 --stack-yaml=${STACK_FILE} test haskell-language-server --dump-logs - no_output_timeout: 120m - - - store_test_results: - path: test-results - - - store_artifacts: - path: test-logs - - - save_cache: - key: stack-cache-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "stack-build.txt" }} - paths: *cache_paths - - - save_cache: - key: stack-cache-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }}-{{ checksum "stack-build.txt" }}-{{ checksum "all-cabal.txt" }} - paths: *cache_paths version: 2 jobs: - ghc-8.6.4: - environment: - - STACK_FILE: "stack-8.6.4.yaml" - <<: *defaults - - ghc-8.6.5: - environment: - - STACK_FILE: "stack-8.6.5.yaml" - <<: *defaults - - ghc-8.8.2: - environment: - - STACK_FILE: "stack-8.8.2.yaml" - <<: *defaults - - ghc-8.8.3: + stackage-lts22: + docker: + - image: haskell:9.6.6-slim-bullseye environment: - - STACK_FILE: "stack-8.8.3.yaml" + - STACK_FILE: "stack-lts22.yaml" <<: *defaults - ghc-8.10.1: + stackage-lts23: + docker: + - image: haskell:9.8.4-slim-bullseye environment: - - STACK_FILE: "stack-8.10.1.yaml" + - STACK_FILE: "stack.yaml" <<: *defaults - # ghc-nightly: - # environment: - # - STACK_FILE: "stack.yaml" - # <<: *defaults - - cabal: - working_directory: ~/build - docker: - - image: haskell:8.10.1 - steps: - - checkout - - run: - name: Sync submodules - command: git submodule sync --recursive - - run: - name: Update submodules - command: git submodule update --recursive --init - - restore-cache: - keys: - - cabal-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }} - - run: - name: Update - command: cabal new-update - - run: - name: Configure - command: cabal new-configure --enable-tests - - run: - name: Build dependencies - command: cabal new-build -j1 --dependencies-only # need j1, else ghc-lib-parser triggers OOM - no_output_timeout: 30m - - save_cache: - key: cabal-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }} - paths: - - ~/.cabal - - run: - name: Build - command: cabal new-build -j1 # need j1, else ghc-lib-parser triggers OOM - no_output_timeout: 30m - - save_cache: - key: cabal-{{ .Environment.HIE_CACHE }}-{{ arch }}-{{ .Environment.CIRCLE_JOB }} - paths: - - ~/.cabal - - workflows: version: 2 multiple-ghcs: jobs: - - ghc-8.6.4 - - ghc-8.6.5 - - ghc-8.8.2 - - ghc-8.8.3 - - ghc-8.10.1 - # - ghc-nightly - - cabal + - stackage-lts22 + - stackage-lts23 diff --git a/.editorconfig b/.editorconfig index c277dff9a5..4b0c4d1d26 100644 --- a/.editorconfig +++ b/.editorconfig @@ 
-5,10 +5,10 @@ root = true [*] end_of_line = LF +trim_trailing_whitespace = true +insert_final_newline = true -[*] +[*.{hs,lhs}] indent_style = space indent_size = 4 -trim_trailing_whitespace = true -insert_final_newline = true max_line_length = 80 diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..7dc09e9c88 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,47 @@ +--- +name: Bug report +about: I've spotted something specific that's going wrong +title: '' +labels: 'status: needs triage, type: bug' +assignees: '' + +--- + + + +### Your environment + + + +Which OS do you use? + +Which version of GHC do you use and how did you install it? + +How is your project built (alternative: link to the project)? + +Which LSP client (editor/plugin) do you use? + +Which version of HLS do you use and how did you install it? + +Have you configured HLS in any way (especially: a `hie.yaml` file)? + +### Steps to reproduce + + + +### Expected behaviour + + + +### Actual behaviour + + + +### Debug information + + diff --git a/.github/ISSUE_TEMPLATE/enhancement_request.md b/.github/ISSUE_TEMPLATE/enhancement_request.md new file mode 100644 index 0000000000..c22b63bc52 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/enhancement_request.md @@ -0,0 +1,24 @@ +--- +name: Enhancement request +about: I have an idea for how to make things better +title: '' +labels: 'status: needs triage, type: enhancement' +assignees: '' + +--- + +## Is your enhancement request related to a problem? Please describe. + + + +## Describe the solution you'd like + + + +## Describe alternatives you've considered + + + +## Additional context + + diff --git a/.github/ISSUE_TEMPLATE/support.md b/.github/ISSUE_TEMPLATE/support.md new file mode 100644 index 0000000000..768e42ba90 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/support.md @@ -0,0 +1,39 @@ +--- +name: Support request +about: Help, something isn't working and I'm stuck! +title: '' +labels: 'status: needs triage, type: support' +assignees: '' + +--- + + + +## Your environment + + + +Which OS do you use? + +Which version of GHC do you use and how did you install it? + +How is your project built (alternative: link to the project)? + +Which LSP client (editor/plugin) do you use? + +Which version of HLS do you use and how did you install it? + +Have you configured HLS in any way (especially: a `hie.yaml` file)? + +## What's wrong? 
+ + + +### Debug information + + diff --git a/.github/actions/bindist-actions/action-deb10/action.yaml b/.github/actions/bindist-actions/action-deb10/action.yaml new file mode 100644 index 0000000000..da96b04669 --- /dev/null +++ b/.github/actions/bindist-actions/action-deb10/action.yaml @@ -0,0 +1,21 @@ +description: Container for deb10 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb10 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:10 + using: docker diff --git a/.github/actions/bindist-actions/action-deb11/action.yaml b/.github/actions/bindist-actions/action-deb11/action.yaml new file mode 100644 index 0000000000..8ffe78e1db --- /dev/null +++ b/.github/actions/bindist-actions/action-deb11/action.yaml @@ -0,0 +1,21 @@ +description: Container for deb11 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb11 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:11 + using: docker diff --git a/.github/actions/bindist-actions/action-deb12/action.yaml b/.github/actions/bindist-actions/action-deb12/action.yaml new file mode 100644 index 0000000000..20bcc6a157 --- /dev/null +++ b/.github/actions/bindist-actions/action-deb12/action.yaml @@ -0,0 +1,21 @@ +description: Container for deb12 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb12 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:12 + using: docker diff --git a/.github/actions/bindist-actions/action-deb9/action.yaml b/.github/actions/bindist-actions/action-deb9/action.yaml new file mode 100644 index 0000000000..693e3845a5 --- /dev/null +++ b/.github/actions/bindist-actions/action-deb9/action.yaml @@ -0,0 +1,24 @@ +description: Container for deb9 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-deb9 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && + sed 
-i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && + sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install + -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: debian:9 + using: docker diff --git a/.github/actions/bindist-actions/action-fedora33/action.yaml b/.github/actions/bindist-actions/action-fedora33/action.yaml new file mode 100644 index 0000000000..d20c8feccd --- /dev/null +++ b/.github/actions/bindist-actions/action-fedora33/action.yaml @@ -0,0 +1,21 @@ +description: Container for fedora33 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-fedora33 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: dnf install -y + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: fedora:33 + using: docker diff --git a/.github/actions/bindist-actions/action-fedora40/action.yaml b/.github/actions/bindist-actions/action-fedora40/action.yaml new file mode 100644 index 0000000000..83f23b23c8 --- /dev/null +++ b/.github/actions/bindist-actions/action-fedora40/action.yaml @@ -0,0 +1,21 @@ +description: Container for fedora40 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-fedora40 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: dnf install -y + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: fedora:40 + using: docker diff --git a/.github/actions/bindist-actions/action-mint193/action.yaml b/.github/actions/bindist-actions/action-mint193/action.yaml new file mode 100644 index 0000000000..e1269e0e56 --- /dev/null +++ b/.github/actions/bindist-actions/action-mint193/action.yaml @@ -0,0 +1,21 @@ +description: Container for mint193 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-mint193 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: linuxmintd/mint19.3-amd64 + using: docker diff --git a/.github/actions/bindist-actions/action-mint202/action.yaml b/.github/actions/bindist-actions/action-mint202/action.yaml new file mode 100644 index 
0000000000..adea7272f1 --- /dev/null +++ b/.github/actions/bindist-actions/action-mint202/action.yaml @@ -0,0 +1,21 @@ +description: Container for mint202 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-mint202 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: linuxmintd/mint20.2-amd64 + using: docker diff --git a/.github/actions/bindist-actions/action-mint213/action.yaml b/.github/actions/bindist-actions/action-mint213/action.yaml new file mode 100644 index 0000000000..bd09dc0e97 --- /dev/null +++ b/.github/actions/bindist-actions/action-mint213/action.yaml @@ -0,0 +1,21 @@ +description: Container for mint213 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-mint213 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: linuxmintd/mint21.3-amd64 + using: docker diff --git a/.github/actions/bindist-actions/action-ubuntu1804/action.yaml b/.github/actions/bindist-actions/action-ubuntu1804/action.yaml new file mode 100644 index 0000000000..6a6f4662a0 --- /dev/null +++ b/.github/actions/bindist-actions/action-ubuntu1804/action.yaml @@ -0,0 +1,21 @@ +description: Container for ubuntu1804 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-ubuntu1804 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: ubuntu:18.04 + using: docker diff --git a/.github/actions/bindist-actions/action-ubuntu2004/action.yaml b/.github/actions/bindist-actions/action-ubuntu2004/action.yaml new file mode 100644 index 0000000000..3a5b57a370 --- /dev/null +++ b/.github/actions/bindist-actions/action-ubuntu2004/action.yaml @@ -0,0 +1,21 @@ +description: Container for ubuntu2004 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-ubuntu2004 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git 
wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: ubuntu:20.04 + using: docker diff --git a/.github/actions/bindist-actions/action-ubuntu2204/action.yaml b/.github/actions/bindist-actions/action-ubuntu2204/action.yaml new file mode 100644 index 0000000000..857776507d --- /dev/null +++ b/.github/actions/bindist-actions/action-ubuntu2204/action.yaml @@ -0,0 +1,21 @@ +description: Container for ubuntu2204 +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-ubuntu2204 +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: apt-get update && apt-get install -y + STAGE: ${{ inputs.stage }} + TOOLS: libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev + git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc + autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 + libtinfo5 patchelf + image: ubuntu:22.04 + using: docker diff --git a/.github/actions/bindist-actions/action-unknown/action.yaml b/.github/actions/bindist-actions/action-unknown/action.yaml new file mode 100644 index 0000000000..96cf0593e9 --- /dev/null +++ b/.github/actions/bindist-actions/action-unknown/action.yaml @@ -0,0 +1,21 @@ +description: Container for unknown +inputs: + stage: + description: which stage to build + required: true + version: + description: which GHC version to build/test + required: false +name: action-unknown +runs: + entrypoint: .github/scripts/entrypoint.sh + env: + GHC_VERSION: ${{ inputs.version }} + INSTALL: yum -y install epel-release && yum install -y --allowerasing + STAGE: ${{ inputs.stage }} + TOOLS: autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs + findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs + ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which + xz zlib-devel patchelf + image: rockylinux:8 + using: docker diff --git a/.github/actions/setup-build/action.yml b/.github/actions/setup-build/action.yml new file mode 100644 index 0000000000..11f32c09db --- /dev/null +++ b/.github/actions/setup-build/action.yml @@ -0,0 +1,133 @@ +name: "Cached build" +description: "Setup the build using cache" +inputs: + ghc: + description: "GHC version" + required: true + cabal: + description: "Cabal version" + required: false + default: "3.14.2.0" + os: + description: "Operating system: Linux, Windows or macOS" + required: true + cache-prefix: + description: "Prefix to make the cache key unique" + required: false + default: "" + shorten-hls: + description: "Whether to shorten haskell-language-server to hls" + required: false + default: "true" + +runs: + using: "composite" + steps: + - name: Workaround runner image issue + if: runner.os == 'Linux' + # https://github.com/actions/runner-images/issues/7061 + run: | + sudo mkdir -p /usr/local/.ghcup + sudo chown -R $USER /usr/local/.ghcup + shell: bash + + - uses: haskell-actions/setup@v2.8.1 + id: HaskEnvSetup + with: + ghc-version : ${{ inputs.ghc }} + cabal-version: ${{ inputs.cabal }} + enable-stack: false + + - if: inputs.os == 'Windows' + name: (Windows) Platform config + run: | + echo "CABAL_PKGS_DIR=C:\\cabal\\packages" >> $GITHUB_ENV + shell: bash + + - if: ( inputs.os == 
'Linux' ) || ( inputs.os == 'macOS' ) + name: (Linux,macOS) Platform config + run: | + echo "CABAL_PKGS_DIR=~/.cabal/packages" >> $GITHUB_ENV + shell: bash + + # Shorten binary names as a workaround for filepath length limits in Windows, + # but since tests are hardcoded on this workaround all platforms need it. + # All workflows which distinguish the cache on `cabal.project` need this. + # Except the hackage one, which needs the original name + - if: inputs.shorten-hls == 'true' + name: Workaround shorten binary names + run: | + sed -i.bak -e 's/haskell-language-server/hls/g' \ + -e 's/haskell_language_server/hls/g' \ + haskell-language-server.cabal cabal.project + sed -i.bak -e 's/Paths_haskell_language_server/Paths_hls/g' \ + src/**/*.hs exe/*.hs + shell: bash + + - name: Retrieving `cabal.project` Hackage timestamp + run: | + # Form: index-state: 2021-11-29T08:11:08Z + INDEX_STATE_ENTRY=$(grep index-state cabal.project) + # Form: 2021-11-29T08-11-08Z + INDEX_STATE1=$(echo "$INDEX_STATE_ENTRY" | cut -d' ' -f2 | tr ':' '-') + echo "INDEX_STATE=$INDEX_STATE1" >> $GITHUB_ENV + shell: bash + + # We have to restore package sources before `cabal update` + # because it overwrites the hackage index with the cached one + - name: Hackage sources cache + uses: actions/cache@v3 + env: + cache-name: hackage-sources + with: + path: ${{ env.CABAL_PKGS_DIR }} + key: ${{ env.cache-name }}-${{ env.INDEX_STATE }} + restore-keys: ${{ env.cache-name }}- + + # To ensure we get the latest hackage index without relying on the haskell action logic. + # It has to be done before `cabal freeze` to make it aware of the new index + - run: cabal update + shell: bash + + - name: Form the package list ('cabal.project.freeze') + run: | + rm cabal.project.freeze || \ + cabal v2-freeze && \ + echo "" && \ + echo 'Output:' && \ + echo "" && \ + cat 'cabal.project.freeze' + shell: bash + + - name: Compiled deps cache + id: compiled-deps + uses: actions/cache@v3 + env: + cache-name: compiled-deps + with: + path: ${{ steps.HaskEnvSetup.outputs.cabal-store }} + key: ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}-${{ env.INDEX_STATE }}-${{ hashFiles('cabal.project.freeze') }} + restore-keys: | + ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}-${{ env.INDEX_STATE }}- + ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}- + ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}- + + # We remove the freeze file because it could interfere with the build + - name: "Remove freeze file" + run: rm -f cabal.project.freeze + shell: bash + + # Make sure to clear all unneeded `ghcup` caches. + # At some point, we were running out of disk space, see issue + # https://github.com/haskell/haskell-language-server/issues/4386 for details. + # + # Using "printf" debugging (`du -sh *` and `df -h /`) and binary searching, + # we figured out that `ghcup` caches are taking up a sizable portion of the + # disk space. + # Thus, we remove anything we don't need, especially caches and temporary files. + # For good measure, we also make sure no other tooling versions are + # installed besides the ones we explicitly want. 
+ - name: "Remove ghcup caches" + if: runner.os == 'Linux' + run: ghcup gc --ghc-old --share-dir --hls-no-ghc --cache --tmpdirs --unset + shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..cfee701dc1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,16 @@ +version: 2 +updates: + + # NOTE: Dependabot official configuration documentation: + # https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates#package-ecosystem + + # Maintain dependencies for internal GitHub Actions CI for pull requests + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "github-actions" + directory: ".github/actions/setup-build" + schedule: + interval: "weekly" diff --git a/.github/generate-ci/LICENSE b/.github/generate-ci/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/.github/generate-ci/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.github/generate-ci/README.mkd b/.github/generate-ci/README.mkd new file mode 100644 index 0000000000..fef645ea12 --- /dev/null +++ b/.github/generate-ci/README.mkd @@ -0,0 +1,5 @@ +# generate-ci + +This is the generator for the release bindist CI. 
+ +Edit ./gen_ci.hs to change configuration and run "./generate-jobs" to regenerate diff --git a/test/testdata/HaReGA1/cabal.project b/.github/generate-ci/cabal.project similarity index 100% rename from test/testdata/HaReGA1/cabal.project rename to .github/generate-ci/cabal.project diff --git a/.github/generate-ci/gen_ci.hs b/.github/generate-ci/gen_ci.hs new file mode 100644 index 0000000000..28a81d8576 --- /dev/null +++ b/.github/generate-ci/gen_ci.hs @@ -0,0 +1,618 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +import Control.Monad +import Data.Maybe + +import Data.Aeson hiding (encode) +import qualified Data.Aeson.Key as K +import Data.Aeson.Types (Pair) +import Data.Yaml + +import qualified Data.ByteString as BS + +import qualified Data.List as L + +import System.Directory +import System.Environment +import System.FilePath + +------------------------------------------------------------------------------- +-- Configuration parameters +------------------------------------------------------------------------------- + +data Opsys + = Linux Distro + | Darwin + | Windows deriving (Eq) + +osName :: Opsys -> String +osName Darwin = "mac" +osName Windows = "windows" +osName (Linux d) = "linux-" ++ distroName d + +data Distro + = Debian9 + | Debian10 + | Debian11 + | Debian12 + | Ubuntu1804 + | Ubuntu2004 + | Ubuntu2204 + | Mint193 + | Mint202 + | Mint213 + | Fedora33 + | Fedora40 + | Rocky8 + deriving (Eq, Enum, Bounded) + +allDistros :: [Distro] +allDistros = [minBound .. maxBound] + +data Arch = Amd64 | AArch64 +archName :: Arch -> String +archName Amd64 = "x86_64" +archName AArch64 = "aarch64" + +artifactName :: Arch -> Opsys -> String +artifactName arch opsys = archName arch ++ "-" ++ case opsys of + Linux distro -> "linux-" ++ distroName distro + Darwin -> "apple-darwin" + Windows -> "mingw64" + +data GHC + = GHC967 + | GHC984 + | GHC9102 + | GHC9122 + deriving (Eq, Enum, Bounded) + +ghcVersion :: GHC -> String +ghcVersion GHC967 = "9.6.7" +ghcVersion GHC984 = "9.8.4" +ghcVersion GHC9102 = "9.10.2" +ghcVersion GHC9122 = "9.12.2" + +ghcVersionIdent :: GHC -> String +ghcVersionIdent = filter (/= '.') . ghcVersion + +allGHCs :: [GHC] +allGHCs = [minBound .. 
maxBound] + +data Stage = Build GHC | Bindist | Test + +------------------------------------------------------------------------------- +-- Distro Configuration +------------------------------------------------------------------------------- + +distroImage :: Distro -> String +distroImage Debian9 = "debian:9" +distroImage Debian10 = "debian:10" +distroImage Debian11 = "debian:11" +distroImage Debian12 = "debian:12" +distroImage Ubuntu1804 = "ubuntu:18.04" +distroImage Ubuntu2004 = "ubuntu:20.04" +distroImage Ubuntu2204 = "ubuntu:22.04" +distroImage Mint193 = "linuxmintd/mint19.3-amd64" +distroImage Mint202 = "linuxmintd/mint20.2-amd64" +distroImage Mint213 = "linuxmintd/mint21.3-amd64" +distroImage Fedora33 = "fedora:33" +distroImage Fedora40 = "fedora:40" +distroImage Rocky8 = "rockylinux:8" + +distroName :: Distro -> String +distroName Debian9 = "deb9" +distroName Debian10 = "deb10" +distroName Debian11 = "deb11" +distroName Debian12 = "deb12" +distroName Ubuntu1804 = "ubuntu1804" +distroName Ubuntu2004 = "ubuntu2004" +distroName Ubuntu2204 = "ubuntu2204" +distroName Mint193 = "mint193" +distroName Mint202 = "mint202" +distroName Mint213 = "mint213" +distroName Fedora33 = "fedora33" +distroName Fedora40 = "fedora40" +distroName Rocky8 = "unknown" + +distroInstall :: Distro -> String +distroInstall Debian9 = "sed -i s/deb.debian.org/archive.debian.org/g /etc/apt/sources.list && sed -i 's|security.debian.org|archive.debian.org/|g' /etc/apt/sources.list && sed -i /-updates/d /etc/apt/sources.list && apt-get update && apt-get install -y" +distroInstall Debian10 = "apt-get update && apt-get install -y" +distroInstall Debian11 = "apt-get update && apt-get install -y" +distroInstall Debian12 = "apt-get update && apt-get install -y" +distroInstall Ubuntu1804 = "apt-get update && apt-get install -y" +distroInstall Ubuntu2004 = "apt-get update && apt-get install -y" +distroInstall Ubuntu2204 = "apt-get update && apt-get install -y" +distroInstall Mint193 = "apt-get update && apt-get install -y" +distroInstall Mint202 = "apt-get update && apt-get install -y" +distroInstall Mint213 = "apt-get update && apt-get install -y" +distroInstall Fedora33 = "dnf install -y" +distroInstall Fedora40 = "dnf install -y" +distroInstall Rocky8 = "yum -y install epel-release && yum install -y --allowerasing" + +distroTools :: Distro -> String +distroTools Debian9 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Debian10 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Debian11 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Debian12 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Ubuntu1804 = "libnuma-dev zlib1g-dev 
libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Ubuntu2004 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Ubuntu2204 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Mint193 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Mint202 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Mint213 = "libnuma-dev zlib1g-dev libgmp-dev libgmp10 libssl-dev liblzma-dev libbz2-dev git wget lsb-release software-properties-common gnupg2 apt-transport-https gcc autoconf automake build-essential curl ghc gzip libffi-dev libncurses-dev libncurses5 libtinfo5 patchelf" +distroTools Fedora33 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" +distroTools Fedora40 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" +distroTools Rocky8 = "autoconf automake binutils bzip2 coreutils curl elfutils-devel elfutils-libs findutils gcc gcc-c++ git gmp gmp-devel jq lbzip2 make ncurses ncurses-compat-libs ncurses-devel openssh-clients patch perl pxz python3 sqlite sudo wget which xz zlib-devel patchelf" + +------------------------------------------------------------------------------- +-- OS/runner Config +------------------------------------------------------------------------------- + +baseEnv :: [(Key,Value)] +baseEnv = [ "AWS_SECRET_ACCESS_KEY" .= str "${{ secrets.AWS_SECRET_ACCESS_KEY }}" + , "AWS_ACCESS_KEY_ID" .= str "${{ secrets.AWS_ACCESS_KEY_ID }}" + , "S3_HOST" .= str "${{ secrets.S3_HOST }}" + , "TZ" .= str "Asia/Singapore" + ] + +-- | Environment configuration +envVars :: Arch -> Opsys -> Value +envVars arch os = object $ + baseEnv + ++ [ "TARBALL_EXT" .= str (case os of + Windows -> "zip" + _ -> "tar.xz") + , "ARCH" .= str (case arch of + Amd64 -> "64" + AArch64 -> "ARM64") + , "ADD_CABAL_ARGS" .= str (case (os,arch) of + (Linux _, Amd64) -> "--enable-split-sections" + _ -> "") + , "ARTIFACT" .= artifactName arch os + ] + ++ [ "DEBIAN_FRONTEND" .= str "noninteractive" + | Linux _ <- [os] + ] + ++ [ "MACOSX_DEPLOYMENT_TARGET" .= str "10.13" + | Darwin <- [os] + ] + ++ [ 
"HOMEBREW_CHANGE_ARCH_TO_ARM" .= str "1" + | Darwin <- [os], AArch64 <- [arch] + ] + +-- | Runner selection +runner :: Arch -> Opsys -> [Value] +runner Amd64 (Linux _) = ["ubuntu-latest"] +runner AArch64 (Linux _) = ["self-hosted", "Linux", "ARM64", "maerwald"] +runner Amd64 Darwin = ["macOS-13"] +runner AArch64 Darwin = ["self-hosted", "macOS", "ARM64"] +runner Amd64 Windows = ["windows-latest"] +runner AArch64 Windows = error "aarch64 windows not supported" + +-- | Runner selection for bindist jobs +bindistRunner :: Arch -> Opsys -> [Value] +bindistRunner Amd64 (Linux _) = ["self-hosted", "linux-space", "maerwald"] +bindistRunner AArch64 (Linux _) = ["self-hosted", "Linux", "ARM64", "maerwald"] +bindistRunner Amd64 Darwin = ["macOS-13"] +bindistRunner AArch64 Darwin = ["self-hosted", "macOS", "ARM64"] +bindistRunner Amd64 Windows = ["windows-latest"] +bindistRunner AArch64 Windows = error "aarch64 windows not supported" + +------------------------------------------------------------------------------- +-- Action generatation +------------------------------------------------------------------------------- +-- Each x86-linux job has its own action, living in a separate file +-- The contents of the file are derived from the 'Action' datatype +-- +-- We do this so that we can run the build in the right kind of OS container, +-- but not be forced to run the checkout and upload artifact in the same container +-- +-- This is because we want to use container images that are not supported by +-- github provided actions, see for instance https://github.com/actions/upload-artifact/issues/489 +------------------------------------------------------------------------------- + +-- | Container actions for x86-linux runners. +-- Each of these corresponds to a separate action file, +-- called 'actionName', located at 'actionPath' +data Action + = Action + { actionName :: String + , actionDistro :: Distro + } + +actionDir :: FilePath +actionDir = "./.github/actions/bindist-actions/" + +actionPath :: Distro -> FilePath +actionPath d = actionDir ++ distroActionName d + +instance ToJSON Action where + toJSON Action{..} = object + [ "name" .= actionName + , "description" .= str ("Container for " ++ distroName actionDistro) + , "inputs" .= object + [ "stage" .= object + [ "description" .= str "which stage to build" + , "required" .= True + ] + , "version" .= object + [ "description" .= str "which GHC version to build/test" + , "required" .= False + ] + ] + , "runs" .= object + [ "using" .= str "docker" + , "image" .= distroImage actionDistro + , "entrypoint" .= str ".github/scripts/entrypoint.sh" + , "env" .= object + [ "STAGE" .= str "${{ inputs.stage }}" + , "INSTALL" .= distroInstall actionDistro + , "TOOLS" .= distroTools actionDistro + , "GHC_VERSION" .= str "${{ inputs.version }}" + ] + ] + ] + +configAction :: Config -> Maybe Action +configAction (MkConfig Amd64 (Linux d) _) = Just $ Action (distroActionName d) d +configAction _ = Nothing + +distroActionName :: Distro -> String +distroActionName d = "action-" ++ distroName d + +customAction :: Distro -> Stage -> Value +customAction d st = flip (ghAction stepName (actionPath d)) [] $ case st of + Build v -> + [ "stage" .= str "BUILD" + , "version" .= ghcVersion v + ] + Test -> + [ "stage" .= str "TEST" + ] + Bindist -> + [ "stage" .= str "BINDIST" + ] + where + stepName = case st of + Build v -> "Build " ++ ghcVersion v + Test -> "Test" + Bindist -> "Bindist" + 
+------------------------------------------------------------------------------- +-- CI generation +------------------------------------------------------------------------------- +-- This is the code that generates the bindist workflow + +-- | Global CI config type +data CI = CI [Config] + +data Config = MkConfig Arch Opsys [GHC] + +instance ToJSON CI where + toJSON (CI cs) = object + [ "name" .= str "Build and release" + , "on" .= object [ "push" .= object ["tags" .= [str "*"]] + , "schedule" .= [object ["cron" .= str "0 2 * * 1"]] + ] + , "env" .= object + [ "CABAL_CACHE_DISABLE" .= str "${{ vars.CABAL_CACHE_DISABLE }}" + , "CABAL_CACHE_NONFATAL" .= str "${{ vars.CABAL_CACHE_NONFATAL }}" + ] + , "jobs" .= object (concatMap (getConfigJobs . makeJobs) cs ++ [releaseJob cs]) + ] + +type Job = Pair + +data ConfigJobs = ConfigJobs { buildJobs :: [Job], bindistJob :: Job, testJob :: Job} + +getConfigJobs :: ConfigJobs -> [Job] +getConfigJobs ConfigJobs{..} = buildJobs ++ [bindistJob, testJob] + +makeJobs :: Config -> ConfigJobs +makeJobs (MkConfig arch os vs) = + ConfigJobs + { buildJobs = [ buildJob arch os ver | ver <- vs ] + , bindistJob = mkBindistJob arch os vs + , testJob = mkTestJob arch os + } + +buildJobName :: Arch -> Opsys -> GHC -> String +buildJobName arch os version = L.intercalate "-" ["build",archName arch, osName os, ghcVersionIdent version] + +testJobName :: Arch -> Opsys -> String +testJobName arch os = L.intercalate "-" ["test",archName arch, osName os] + +bindistJobName :: Arch -> Opsys -> String +bindistJobName arch os = L.intercalate "-" ["bindist",archName arch, osName os] + +bindistName :: Arch -> Opsys -> String +bindistName arch os = "bindist-" ++ artifactName arch os + +setupAction :: Arch -> Opsys -> [Value] +-- some +setupAction AArch64 (Linux Ubuntu2004) = + [ ghRun "clean and git config for aarch64-linux" "bash" [] $ unlines + [ "find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +" + , "git config --global --get-all safe.directory | grep '^\\*$' || git config --global --add safe.directory \"*\"" + ] + ] +setupAction _ _ = [] + +releaseJob :: [Config] -> Job +releaseJob cs = + "release" .= object + [ "name" .= str "release" + , "runs-on" .= str "ubuntu-latest" + , "needs" .= [testJobName arch os | MkConfig arch os _ <- cs] + , "if" .= str "startsWith(github.ref, 'refs/tags/')" + , "steps" .= ( [ checkoutAction ] + ++ [ downloadArtifacts (bindistName arch os) "./out" | MkConfig arch os _ <- cs] + ++ [ ghRun "Prepare release" "bash" [] $ unlines + [ "sudo apt-get update && sudo apt-get install -y tar xz-utils" + , "cd out/plan.json" + , "tar cf plan_json.tar *" + , "mv plan_json.tar ../" + , "cd ../.." 
+ , "export RELEASE=$GITHUB_REF_NAME" + , "git archive --format=tar.gz -o \"out/haskell-language-server-${RELEASE}-src.tar.gz\" --prefix=\"haskell-language-server-${RELEASE}/\" HEAD" + ] + , ghAction "Release" "softprops/action-gh-release@v2" + [ "draft" .= True + , "files" .= unlines + [ "./out/*.zip" + , "./out/*.tar.xz" + , "./out/*.tar.gz" + , "./out/*.tar" + ] + ] [] + ]) + ] + + + +buildJob :: Arch -> Opsys -> GHC -> Job +buildJob arch os v = + K.fromString (buildJobName arch os v) .= object + [ "runs-on" .= runner arch os + , "name" .= str (buildJobName arch os v ++ " (Build binaries)") + , "environment" .= str "CI" + , "env" .= thisEnv + , "steps" .= + ( setupAction arch os + ++ [ checkoutAction ] + ++ buildStep arch os + ++ [uploadArtifacts ("artifacts-"++buildJobName arch os v) outputname]) + ] + + where thisEnv = envVars arch os + art = artifactName arch os + outputname + | Windows <- os = "./out/*" + | otherwise = ("out-"++art++"-"++ghcVersion v++".tar") + buildStep Amd64 (Linux d) = [customAction d (Build v)] + buildStep AArch64 (Linux Ubuntu2004) = + [ ghAction "Build aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/build.sh" ] + [ "GHC_VERSION" .= ghcVersion v ] + , ghAction "Tar aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/tar.sh" ] + [ "GHC_VERSION" .= ghcVersion v ] + ] + buildStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported" + + buildStep Amd64 Darwin = [ghRun "Run build" "sh" ["GHC_VERSION" .= ghcVersion v] $ unlines $ + [ "brew install coreutils tree" + , "bash .github/scripts/build.sh" + , "tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/" + ] + ] + buildStep AArch64 Darwin = [ghRun "Run build" "sh" ["GHC_VERSION" .= ghcVersion v] $ unlines $ + [ "bash .github/scripts/brew.sh git coreutils autoconf automake tree" + , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$PATH\"" + , "export LD=ld" + , "bash .github/scripts/build.sh" + , "tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/" + ] + ] + + buildStep Amd64 Windows = [ghRun "Run build" "pwsh" ["GHC_VERSION" .= ghcVersion v] $ unlines $ + [ "$env:CHERE_INVOKING = 1" + , "$env:MSYS2_PATH_TYPE = \"inherit\"" + , "$ErrorActionPreference = \"Stop\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/build.sh\"" + ] + ] + buildStep AArch64 Windows = error "aarch64 windows not supported" + +mkBindistJob :: Arch -> Opsys -> [GHC] -> Job +mkBindistJob arch os vs = + K.fromString (bindistJobName arch os) .= object + [ "runs-on" .= bindistRunner arch os + , "name" .= (bindistJobName arch os ++ " (Prepare bindist)") + , "needs" .= [buildJobName arch os ver | ver <- vs] + , "env" .= thisEnv + , "steps" .= + ( setupAction arch os + ++ [ checkoutAction ] + ++ [downloadArtifacts ("artifacts-"++buildJobName arch os v) outputPath | v <- vs] + ++ bindistStep arch os + ++ [ uploadArtifacts (bindistName arch os) "./out/*.tar.xz\n./out/plan.json/*\n./out/*.zip" ]) + ] + where thisEnv = envVars arch os + + outputPath + | Windows <- os = "./out" + | otherwise = "./" + + bindistStep Amd64 (Linux d) = [customAction d Bindist] + bindistStep AArch64 (Linux Ubuntu2004) = + [ ghAction "Unpack aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/untar.sh" ] + [ ] + , ghAction "Tar aarch64-linux binaries" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/bindist.sh" ] + [ ] + ] + 
bindistStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported" + + bindistStep Amd64 Darwin = [ghRun "Create bindist" "sh" [] $ unlines $ + [ "brew install coreutils tree" + , "for bindist in out-*.tar ; do" + , " tar xf \"${bindist}\"" + , "done" + , "unset bindist" + , "bash .github/scripts/bindist.sh" + ] + ] + bindistStep AArch64 Darwin = [ghRun "Run build" "sh" [] $ unlines $ + [ "bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree" + , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH\"" + , "export CC=\"$HOME/.brew/opt/llvm@13/bin/clang\"" + , "export CXX=\"$HOME/.brew/opt/llvm@13/bin/clang++\"" + , "export LD=ld" + , "export AR=\"$HOME/.brew/opt/llvm@13/bin/llvm-ar\"" + , "export RANLIB=\"$HOME/.brew/opt/llvm@13/bin/llvm-ranlib\"" + , "for bindist in out-*.tar ; do" + , " tar xf \"${bindist}\"" + , "done" + , "unset bindist" + , "bash .github/scripts/bindist.sh" + ] + ] + + bindistStep Amd64 Windows = [ghRun "Run build" "pwsh" [] $ unlines $ + [ "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -S unzip zip git\"" + , "taskkill /F /FI \"MODULES eq msys-2.0.dll\"" + , "$env:CHERE_INVOKING = 1" + , "$env:MSYS2_PATH_TYPE = \"inherit\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/bindist.sh\"" + ] + ] + bindistStep AArch64 Windows = error "aarch64 windows not supported" + +mkTestJob :: Arch -> Opsys -> Job +mkTestJob arch os = + K.fromString (testJobName arch os) .= object + [ "runs-on" .= runner arch os + , "name" .= str (testJobName arch os ++ " (Test binaries)") + , "needs" .= [bindistJobName arch os] + , "environment" .= str "CI" + , "env" .= thisEnv + , "steps" .= + ( setupAction arch os + ++ [ checkoutAction , downloadArtifacts (bindistName arch os) "./out" ] + ++ testStep arch os) + ] + where thisEnv = envVars arch os + + testStep Amd64 (Linux d) = [customAction d Test] + testStep AArch64 (Linux Ubuntu2004) = + [ ghAction "Run test" "docker://hasufell/arm64v8-ubuntu-haskell:focal" + [ "args" .= str "bash .github/scripts/test.sh" ] + [ ] + ] + testStep AArch64 (Linux _) = error "aarch64-linux non-ubuntu not supported" + + testStep Amd64 Darwin = [ghRun "Run test" "sh" [] $ unlines $ + [ "brew install coreutils tree" + , "bash .github/scripts/test.sh" + ] + ] + testStep AArch64 Darwin = [ghRun "Run test" "sh" [] $ unlines $ + [ "bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree" + , "export PATH=\"$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH\"" + , "export CC=\"$HOME/.brew/opt/llvm@13/bin/clang\"" + , "export CXX=\"$HOME/.brew/opt/llvm@13/bin/clang++\"" + , "export LD=ld" + , "export AR=\"$HOME/.brew/opt/llvm@13/bin/llvm-ar\"" + , "export RANLIB=\"$HOME/.brew/opt/llvm@13/bin/llvm-ranlib\"" + , "bash .github/scripts/test.sh" + ] + ] + + testStep Amd64 Windows = + [ ghRun "install windows deps" "pwsh" [] $ unlines $ + [ "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -Syuu\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python p7zip patch unzip zip 
git\"" + , "taskkill /F /FI \"MODULES eq msys-2.0.dll\"" + ] + , ghRun "Run test" "pwsh" [] $ unlines $ + [ "$env:CHERE_INVOKING = 1" + , "$env:MSYS2_PATH_TYPE = \"inherit\"" + , "C:\\msys64\\usr\\bin\\bash -lc \"bash .github/scripts/test.sh\"" + ] + ] + testStep AArch64 Windows = error "aarch64 windows not supported" + + +ciConfigs :: [Config] +ciConfigs = + [ MkConfig Amd64 Darwin allGHCs + , MkConfig AArch64 Darwin allGHCs + , MkConfig Amd64 Windows allGHCs + , MkConfig AArch64 (Linux Ubuntu2004) allGHCs] + ++ [ MkConfig Amd64 (Linux distro) allGHCs | distro <- allDistros ] + +main :: IO () +main = do + [root] <- getArgs + setCurrentDirectory root + removeDirectoryRecursive actionDir + createDirectoryIfMissing True actionDir + forM_ (mapMaybe configAction ciConfigs) $ \a -> do + let path = actionPath (actionDistro a) + createDirectoryIfMissing True path + BS.writeFile (path "action.yaml") $ encode a + BS.putStr "### DO NOT EDIT - GENERATED FILE\n" + BS.putStr "### This file was generated by ./.github/generate-ci/gen_ci.hs\n" + BS.putStr "### Edit that file and run ./.github/generate-ci/generate-jobs to regenerate\n" + BS.putStr $ encode $ CI ciConfigs + + +------------------------------------------------------------------------------- +-- Utils +------------------------------------------------------------------------------- + +str :: String -> String +str = id + +ghAction :: String -> String -> [(Key,Value)] -> [(Key,Value)] -> Value +ghAction name uses args env = object $ + [ "name" .= name + , "uses" .= uses + ] + ++ case args of + [] -> [] + xs -> [ "with" .= object xs ] + ++ case env of + [] -> [] + xs -> [ "env" .= object xs ] + +ghRun :: String -> String -> [(Key,Value)] -> String -> Value +ghRun name shell env script = object $ + [ "name" .= name + , "shell" .= shell + , "run" .= script + ] + ++ case env of + [] -> [] + xs -> [ "env" .= object xs ] + +checkoutAction :: Value +checkoutAction = ghAction "Checkout" "actions/checkout@v4" [] [] + +uploadArtifacts :: String -> String -> Value +uploadArtifacts name path = ghAction "Upload artifact" "actions/upload-artifact@v4" + [ "if-no-files-found" .= str "error" + , "retention-days" .= (2 :: Int) + , "name" .= name + , "path" .= path + ] [] + +downloadArtifacts :: String -> String -> Value +downloadArtifacts name path = ghAction "Download artifacts" "actions/download-artifact@v4" [ "name" .= name, "path" .= path ] [] diff --git a/.github/generate-ci/generate-ci.cabal b/.github/generate-ci/generate-ci.cabal new file mode 100644 index 0000000000..ae9e9d3f52 --- /dev/null +++ b/.github/generate-ci/generate-ci.cabal @@ -0,0 +1,18 @@ +cabal-version: 3.0 +name: generate-ci +version: 0.1.0.0 +license: Apache-2.0 +license-file: LICENSE +build-type: Simple + +executable generate-ci + main-is: gen_ci.hs + ghc-options: -Wall + build-depends: base, + bytestring, + containers, + directory, + filepath, + aeson, + yaml >= 0.11.11.2 + default-language: Haskell2010 diff --git a/.github/generate-ci/generate-jobs b/.github/generate-ci/generate-jobs new file mode 100755 index 0000000000..4cffc82d2a --- /dev/null +++ b/.github/generate-ci/generate-jobs @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e + +root="$(git rev-parse --show-toplevel)/" +cd "$root/.github/generate-ci/" + +cabal run -v0 generate-ci "$root" > ../workflows/release.yaml + diff --git a/.github/mergify.yml b/.github/mergify.yml new file mode 100644 index 0000000000..c0b76f7eec --- /dev/null +++ b/.github/mergify.yml @@ -0,0 +1,20 @@ +queue_rules: + - name: default + # Mergify always 
respects the branch protection settings,
+    # so we can leave Mergify's own conditions empty
+    queue_conditions:
+      - label=merge me
+      - '#approved-reviews-by>=1'
+    merge_conditions: []
+    merge_method: squash
+
+pull_request_rules:
+  - name: Automatically update pull requests
+    conditions:
+      - label=merge me
+    actions:
+      update:
+  - name: refactored queue action rule
+    conditions: []
+    actions:
+      queue:
diff --git a/.github/scripts/bindist.sh b/.github/scripts/bindist.sh
new file mode 100644
index 0000000000..b50aeb2aca
--- /dev/null
+++ b/.github/scripts/bindist.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -eux
+
+. .github/scripts/env.sh
+. .github/scripts/common.sh
+
+install_ghcup
+
+# create tarball/zip
+case "${TARBALL_EXT}" in
+    zip)
+        HLS_VERSION="$(grep '^version:' haskell-language-server.cabal | awk '{ print $2 }')"
+        (
+            cd "$CI_PROJECT_DIR/out/${ARTIFACT}"
+            zip "$CI_PROJECT_DIR/out/haskell-language-server-${HLS_VERSION}-${ARTIFACT}.zip" haskell-language-server-*
+        )
+        ;;
+    tar.xz)
+        # we need to control the order, so the hls wrapper binary is installed
+        # from the oldest version in the list
+        : "${GHCS:="$(cd "$CI_PROJECT_DIR/out/${ARTIFACT}" && rm -f ./*.json && for ghc in * ; do printf "%s\n" "$ghc" ; done | sort -r | tr '\n' ' ')"}"
+        emake --version
+        emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" GHCS="${GHCS}" bindist || fail_with_ghcup_logs "make bindist failed"
+        emake GHCUP=ghcup ARTIFACT="${ARTIFACT}" bindist-tar || fail_with_ghcup_logs "make bindist failed"
+        ;;
+    *)
+        fail "Unknown TARBALL_EXT: ${TARBALL_EXT}"
+        ;;
+esac
diff --git a/.github/scripts/brew.sh b/.github/scripts/brew.sh
new file mode 100644
index 0000000000..4066dfb885
--- /dev/null
+++ b/.github/scripts/brew.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+set -eux
+
+. .github/scripts/env.sh
+
+if [ -e "$HOME/.brew" ] ; then
+    (
+        cd "$HOME/.brew"
+        git fetch --depth 1
+        git reset --hard origin/master
+    )
+else
+    git clone --depth=1 https://github.com/Homebrew/brew "$HOME/.brew"
+fi
+export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH"
+
+mkdir -p $CI_PROJECT_DIR/.brew_cache
+export HOMEBREW_CACHE=$CI_PROJECT_DIR/.brew_cache
+mkdir -p $CI_PROJECT_DIR/.brew_logs
+export HOMEBREW_LOGS=$CI_PROJECT_DIR/.brew_logs
+export HOMEBREW_TEMP=$(mktemp -d)
+
+#brew update
+brew install ${1+"$@"}
diff --git a/.github/scripts/build.sh b/.github/scripts/build.sh
new file mode 100644
index 0000000000..1c0eae6252
--- /dev/null
+++ b/.github/scripts/build.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+
+set -eux
+
+. .github/scripts/env.sh
+. .github/scripts/common.sh
+
+uname -a
+uname -p
+uname
+pwd
+env
+
+# Ensure ghcup is present and properly configured.
+# Sets up the vanilla channel, as HLS CI provides binaries
+# for GHCup's vanilla channel.
+install_ghcup + +# ensure cabal-cache +download_cabal_cache "$HOME/.local/bin/cabal-cache" + + +# build +ghcup install ghc "${GHC_VERSION}" || fail_with_ghcup_logs "install ghc" +ghcup set ghc "${GHC_VERSION}" +sed -i.bak -e '/DELETE MARKER FOR CI/,/END DELETE/d' cabal.project # see comment in cabal.project +ecabal update +ecabal user-config diff +ecabal user-config init -f +"ghc-${GHC_VERSION}" --info +"ghc" --info + +mkdir -p "$CI_PROJECT_DIR/out/${ARTIFACT}" +mkdir -p "$CI_PROJECT_DIR/out/plan.json" + +case "$(uname)" in + MSYS_*|MINGW*) + # cat "C:\Users\runneradmin\AppData\Roaming\cabal\config" + # sed -ic "/extra-include-dirs/d" "C:\Users\runneradmin\AppData\Roaming\cabal\config" + # sed -ic "/extra-lib-dirs/d" "C:\Users\runneradmin\AppData\Roaming\cabal\config" + cat "C:\Users\runneradmin\AppData\Roaming\cabal\config" + args=( -O2 -w "ghc-$GHC_VERSION" --project-file cabal.project --disable-profiling --disable-tests --enable-executable-stripping ${ADD_CABAL_ARGS}) + + # Shorten binary names + # due to MAX_PATH issues on windows + sed -i.bak -e 's/haskell-language-server/hls/g' \ + -e 's/haskell_language_server/hls/g' \ + haskell-language-server.cabal cabal.project + sed -i.bak -e 's/Paths_haskell_language_server/Paths_hls/g' \ + src/**/*.hs exe/*.hs + + # shellcheck disable=SC2068 + build_with_cache ${args[@]} exe:hls exe:hls-wrapper + cp dist-newstyle/cache/plan.json "$CI_PROJECT_DIR/out/plan.json/${ARTIFACT}-ghc-${GHC_VERSION}-plan.json" + + # shellcheck disable=SC2068 + cp "$(cabal list-bin -v0 ${args[@]} exe:hls)" "$CI_PROJECT_DIR/out/${ARTIFACT}/haskell-language-server-${GHC_VERSION}${ext}" + # shellcheck disable=SC2068 + cp "$(cabal list-bin -v0 ${args[@]} exe:hls-wrapper)" "$CI_PROJECT_DIR/out/${ARTIFACT}/haskell-language-server-wrapper${ext}" + ;; + *) + emake --version + emake GHCUP=ghcup CABAL_CACHE_BIN=cabal-cache.sh S3_HOST="${S3_HOST}" S3_KEY="${ARTIFACT}" GHC_VERSION="${GHC_VERSION}" hls-ghc + ;; +esac + + diff --git a/.github/scripts/cabal-cache.sh b/.github/scripts/cabal-cache.sh new file mode 100755 index 0000000000..94b3001130 --- /dev/null +++ b/.github/scripts/cabal-cache.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +case "$(uname -s)" in + MSYS_*|MINGW*) + ext=".exe" + ;; + *) + ext="" + ;; +esac + +if [ "${CABAL_CACHE_DISABLE}" = "yes" ] ; then + echo "cabal-cache disabled (CABAL_CACHE_DISABLE set)" +elif [ "${CABAL_CACHE_NONFATAL}" = "yes" ] ; then + time "cabal-cache${ext}" "$@" || echo "cabal-cache failed (CABAL_CACHE_NONFATAL set)" +else + time "cabal-cache${ext}" "$@" + exit $? +fi + diff --git a/.github/scripts/common.sh b/.github/scripts/common.sh new file mode 100644 index 0000000000..a10d84045e --- /dev/null +++ b/.github/scripts/common.sh @@ -0,0 +1,213 @@ +#!/bin/bash + +. 
.github/scripts/env.sh + +# Colors +RED="0;31" +LT_BROWN="1;33" +LT_BLUE="1;34" + +ecabal() { + cabal "$@" +} + +nonfatal() { + "$@" || "$* failed" +} + +# sync the relevant parts of cabal artifacts that are part of +# the current plan.json from an S3 bucket +sync_from() { + if [ "${RUNNER_OS}" != "Windows" ] ; then + cabal_store_path="$(dirname "$(cabal help user-config | tail -n 1 | xargs)")/store" + fi + + cabal-cache.sh sync-from-archive \ + --host-name-override="${S3_HOST}" \ + --host-port-override=443 \ + --host-ssl-override=True \ + --region us-west-2 \ + $([ "${RUNNER_OS}" != "Windows" ] && echo --store-path="$cabal_store_path") \ + --archive-uri "s3://haskell-language-server/${ARTIFACT}" +} + +# sync the relevant parts of cabal artifacts that are part of +# the current plan.json to an S3 bucket +sync_to() { + if [ "${RUNNER_OS}" != "Windows" ] ; then + cabal_store_path="$(dirname "$(cabal help user-config | tail -n 1 | xargs)")/store" + fi + + cabal-cache.sh sync-to-archive \ + --host-name-override="${S3_HOST}" \ + --host-port-override=443 \ + --host-ssl-override=True \ + --region us-west-2 \ + $([ "${RUNNER_OS}" != "Windows" ] && echo --store-path="$cabal_store_path") \ + --archive-uri "s3://haskell-language-server/${ARTIFACT}" +} + +sha_sum() { + if [ "${RUNNER_OS}" = "FreeBSD" ] ; then + sha256 "$@" + else + sha256sum "$@" + fi +} + +git_describe() { + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + git describe --always +} + +download_cabal_cache() { + ( + set -e + dest="$HOME/.local/bin/cabal-cache" + url="" + exe="" + cd /tmp + case "${RUNNER_OS}" in + "Linux") + case "${ARCH}" in + "32") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/i386-linux-cabal-cache + ;; + "64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/x86_64-linux-cabal-cache + ;; + "ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/aarch64-linux-cabal-cache + ;; + "ARM") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/armv7-linux-cabal-cache + ;; + esac + ;; + "FreeBSD") + url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/x86_64-portbld-freebsd-cabal-cache + ;; + "Windows") + exe=".exe" + url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/x86_64-mingw64-cabal-cache + ;; + "macOS") + case "${ARCH}" in + "ARM64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/aarch64-apple-darwin-cabal-cache + ;; + "64") url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/experimental5/x86_64-apple-darwin-cabal-cache + ;; + esac + ;; + esac + + if [ -n "${url}" ] ; then + case "${url##*.}" in + "gz") + curl -L -o - "${url}" | gunzip > cabal-cache${exe} + ;; + *) + curl -o cabal-cache${exe} -L "${url}" + ;; + esac + sha_sum cabal-cache${exe} + mv "cabal-cache${exe}" "${dest}${exe}" + chmod +x "${dest}${exe}" + fi + + # install shell wrapper + cp "${CI_PROJECT_DIR}"/.github/scripts/cabal-cache.sh "$HOME"/.local/bin/ + chmod +x "$HOME"/.local/bin/cabal-cache.sh + ) +} + +build_with_cache() { + ecabal configure "$@" + ecabal build --dependencies-only "$@" --dry-run + nonfatal sync_from + ecabal build "$@" + 
nonfatal sync_to +} + +install_ghcup() { + # find "$GHCUP_INSTALL_BASE_PREFIX" + mkdir -p "$GHCUP_BIN" + mkdir -p "$GHCUP_BIN"/../cache + + if [ "${RUNNER_OS}" = "FreeBSD" ] ; then + curl -o ghcup https://downloads.haskell.org/ghcup/tmp/x86_64-portbld-freebsd-ghcup-0.1.18.1 + chmod +x ghcup + mv ghcup "$HOME/.local/bin/ghcup" + else + curl --proto '=https' --tlsv1.2 -sSf https://get-ghcup.haskell.org | BOOTSTRAP_HASKELL_MINIMAL=1 sh + source "$(dirname "${GHCUP_BIN}")/env" + # make sure we use the vanilla channel for installing binaries + # see https://github.com/haskell/ghcup-metadata/pull/166#issuecomment-1893075575 + ghcup config set url-source https://raw.githubusercontent.com/haskell/ghcup-metadata/refs/heads/master/ghcup-vanilla-0.0.9.yaml + ghcup install cabal --set "${BOOTSTRAP_HASKELL_CABAL_VERSION}" + fi +} + +strip_binary() { + ( + set -e + local binary=$1 + case "$(uname -s)" in + "Darwin"|"darwin") + ;; + MSYS_*|MINGW*) + ;; + *) + strip -s "${binary}" + ;; + esac + ) +} + +# GitLab Pipelines log section delimiters +# https://gitlab.com/gitlab-org/gitlab-foss/issues/14664 +start_section() { + name="$1" + echo -e "section_start:$(date +%s):$name\015\033[0K" +} + +end_section() { + name="$1" + echo -e "section_end:$(date +%s):$name\015\033[0K" +} + +echo_color() { + local color="$1" + local msg="$2" + echo -e "\033[${color}m${msg}\033[0m" +} + +error() { echo_color "${RED}" "$1"; } +warn() { echo_color "${LT_BROWN}" "$1"; } +info() { echo_color "${LT_BLUE}" "$1"; } + +fail_with_ghcup_logs() { + cat /github/workspace/.ghcup/logs/* + fail "$!" +} +fail() { error "error: $1"; exit 1; } + +run() { + info "Running $*..." + "$@" || ( error "$* failed"; return 1; ) +} + +emake() { + if command -v gmake >/dev/null 2>&1 ; then + gmake "$@" + else + make "$@" + fi +} + +mktempdir() { + case "$(uname -s)" in + "Darwin"|"darwin") + mktemp -d -t hls_ci.XXXXXXX + ;; + *) + mktemp -d + ;; + esac +} diff --git a/.github/scripts/entrypoint.sh b/.github/scripts/entrypoint.sh new file mode 100755 index 0000000000..f02e4ec17a --- /dev/null +++ b/.github/scripts/entrypoint.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +set -x + +bash -c "$INSTALL curl bash git tree $TOOLS" + +unset INSTALL +unset TOOLS + +if [ "${ARTIFACT}" = "x86_64-linux-unknown" ]; then + echo "NAME=Linux" > /etc/os-release + echo "ID=linux" >> /etc/os-release + echo "PRETTY_NAME=Linux" >> /etc/os-release +fi + +case "$STAGE" in + "BUILD") + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + ;; + "BINDIST") + set -eux + for bindist in out-*.tar ; do + tar -xf "${bindist}" + done + unset bindist + bash .github/scripts/bindist.sh + ;; + "TEST") + bash .github/scripts/test.sh +esac + diff --git a/.github/scripts/env.sh b/.github/scripts/env.sh new file mode 100644 index 0000000000..2f6eaa3c48 --- /dev/null +++ b/.github/scripts/env.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +mkdir -p "$HOME"/.local/bin + +if [ "${RUNNER_OS}" = "Windows" ] ; then + ext=".exe" +else + ext='' +fi + +export PATH="$HOME/.local/bin:$PATH" + +export BOOTSTRAP_HASKELL_NONINTERACTIVE=1 +export BOOTSTRAP_HASKELL_CABAL_VERSION="${CABAL_VER:-3.10.3.0}" +export BOOTSTRAP_HASKELL_ADJUST_CABAL_CONFIG=no +export BOOTSTRAP_HASKELL_INSTALL_NO_STACK=yes +export BOOTSTRAP_HASKELL_ADJUST_BASHRC=1 + +if [ "${RUNNER_OS}" = "Windows" ] ; then + # on windows use pwd to get unix style path + CI_PROJECT_DIR="$(pwd)" + export CI_PROJECT_DIR + export 
GHCUP_INSTALL_BASE_PREFIX="/c" + export GHCUP_BIN="$GHCUP_INSTALL_BASE_PREFIX/ghcup/bin" + export PATH="$GHCUP_BIN:$PATH" + export CABAL_DIR="C:\\Users\\runneradmin\\AppData\\Roaming\\cabal" +else + export CI_PROJECT_DIR="${GITHUB_WORKSPACE}" + export GHCUP_INSTALL_BASE_PREFIX="$CI_PROJECT_DIR" + export GHCUP_BIN="$GHCUP_INSTALL_BASE_PREFIX/.ghcup/bin" + export PATH="$GHCUP_BIN:$PATH" + export CABAL_DIR="$CI_PROJECT_DIR/cabal" + export CABAL_CACHE="$CI_PROJECT_DIR/cabal-cache" +fi + +export DEBIAN_FRONTEND=noninteractive +export TZ=Asia/Singapore +export LANG=en_US.UTF-8 +export LC_ALL=C.UTF-8 diff --git a/.github/scripts/tar.sh b/.github/scripts/tar.sh new file mode 100644 index 0000000000..e00522f01a --- /dev/null +++ b/.github/scripts/tar.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -eux + +. .github/scripts/env.sh + +ls -lah +ls -lah out/ +ls -lah store/ + +tar cvf "out-${ARTIFACT}-${GHC_VERSION}.tar" out/ store/ diff --git a/.github/scripts/test.sh b/.github/scripts/test.sh new file mode 100644 index 0000000000..00638dca62 --- /dev/null +++ b/.github/scripts/test.sh @@ -0,0 +1,130 @@ +#!/bin/bash + +# Test installing HLS bindist and then run +# every HLS-GHC version on a test module. + +set -eux + +. .github/scripts/env.sh +. .github/scripts/common.sh + +test_package="text-2.1.2" +test_module="src/Data/Text.hs" + +create_cradle() { + echo "cradle:" > hie.yaml + echo " cabal:" >> hie.yaml +} + +# Tests and benchmarks can't be built on some GHC versions, such as GHC 9.10.1 on Windows. +# Disable these packages for now, building bytestring-0.12.1.0 works completely fine. +create_cabal_project() { + echo "packages: ./" > cabal.project + echo "" >> cabal.project + echo "tests: False" >> cabal.project + echo "benchmarks: False" >> cabal.project + + echo "flags: -simdutf -pure-haskell" >> cabal.project +} + +enter_test_package() { + local tmp_dir + tmp_dir=$(mktempdir) + cd "$tmp_dir" + cabal unpack "${test_package}" + cd "${test_package}" +} + +# For all HLS GHC versions and the wrapper, run 'typecheck' +# over the $test_module +test_all_hls() { + local bin + local bin_noexe + local bindir + local hls + bindir=$1 + + for hls in "${bindir}/"haskell-language-server-* ; do + bin=${hls##*/} + bin_noexe=${bin/.exe/} + if ! [[ "${bin_noexe}" =~ "haskell-language-server-wrapper" ]] && ! [[ "${bin_noexe}" =~ "~" ]] ; then + if ghcup install ghc --set "${bin_noexe/haskell-language-server-/}" ; then + "${hls}" --debug typecheck "${test_module}" || fail "failed to typecheck with HLS for GHC ${bin_noexe/haskell-language-server-/}" + + # After running the test, free up disk space by deleting the unneeded GHC version. + # Helps us staying beneath the 14GB SSD disk limit. + ghcup rm ghc "${bin_noexe/haskell-language-server-/}" + else + fail "GHCup failed to install GHC ${bin_noexe/haskell-language-server-/}" + fi + fi + done + # install the recommended GHC version so the wrapper can launch HLS + ghcup install ghc --set 9.10.2 + "$bindir/haskell-language-server-wrapper${ext}" typecheck "${test_module}" || fail "failed to typecheck with HLS wrapper" +} + +uname -a +uname -p +uname +env + +# ensure ghcup +install_ghcup +ghcup install ghc --set 9.4.8 + +(cd .. && ecabal update) # run cabal update outside project dir + +# unpack +TARBALL_PREFIX="haskell-language-server" +mkdir -p "${GHCUP_BIN}" + +case "${TARBALL_EXT}" in + zip) + cp "$CI_PROJECT_DIR/out/${TARBALL_PREFIX}"-*-"${ARTIFACT}.zip" . 
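+    # unpack the zip bindist and move the hls binaries into GHCUP_BIN (put on PATH by env.sh)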
+ unzip ./*.zip + rm ./*.zip + mv haskell-language-server-* "${GHCUP_BIN}/" + + enter_test_package + create_cradle + create_cabal_project + test_all_hls "$GHCUP_BIN" + + ;; + tar.xz) + hls_bin=$(ls "$CI_PROJECT_DIR/out/${TARBALL_PREFIX}"-*-"${ARTIFACT}.tar.xz") + hls_ver_=${hls_bin#*haskell-language-server-} + hls_ver=${hls_ver_%-"${ARTIFACT}"*} + ghcup install hls -u "file://${hls_bin}" "${hls_ver}" --force + + # cleanup from previous dirty runs + rm -rf "$HOME"/.local/lib/haskell-language-server-* || true + + # print rpaths and libdirs + case "$(uname -s)" in + "Darwin"|"darwin") + otool -l "$(ghcup whereis basedir)/hls/${hls_ver}/lib/haskell-language-server-${hls_ver}/bin/"haskell-language-server-* + ;; + "FreeBSD") + readelf -Ws "$(ghcup whereis basedir)/hls/${hls_ver}/lib/haskell-language-server-${hls_ver}/bin/"haskell-language-server-* + ;; + *) + objdump -x "$(ghcup whereis basedir)/hls/${hls_ver}/lib/haskell-language-server-${hls_ver}/bin/"haskell-language-server-* + ;; + esac + tree "$(ghcup whereis basedir)/hls/${hls_ver}/lib/haskell-language-server-${hls_ver}/bin/" + tree "$GHCUP_BIN" + + enter_test_package + create_cradle + create_cabal_project + test_all_hls "$(ghcup whereis bindir)" + + ;; + *) + fail "Unknown TARBALL_EXT: ${TARBALL_EXT}" + ;; +esac + + diff --git a/.github/scripts/untar.sh b/.github/scripts/untar.sh new file mode 100644 index 0000000000..580c69a5ed --- /dev/null +++ b/.github/scripts/untar.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -eux + +for bindist in out-*.tar ; do + tar xf "${bindist}" +done diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml new file mode 100644 index 0000000000..ba39a21058 --- /dev/null +++ b/.github/workflows/bench.yml @@ -0,0 +1,193 @@ +name: Benchmark + +defaults: + run: + shell: bash + +# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency. +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +on: + pull_request: + branches: + - '**' + +jobs: + pre_job: + runs-on: ubuntu-latest + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@v5.3.1 + with: + cancel_others: false + paths_ignore: '[ "**/docs/**" + , "**.md" + , "**/LICENSE" + , ".circleci/**" + , "**.nix" + , "**/test/**" + , "flake.lock" + , "**/README.md" + , "FUNDING.yml" + , "**/stack*.yaml" + , ".gitlab-ci.yaml" + , ".gitlab/**" + ]' + + bench_init: + if: needs.pre_job.outputs.should_skip != 'true' + needs: pre_job + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + # benching the two latest GHCs we support now + # since benchmark are expansive. + # choosing the two latest are easier to maintain and more forward looking + # see discussion https://github.com/haskell/haskell-language-server/pull/4118 + # also possible to add more GHCs if we performs better in the future. + ghc: + - '9.8' + - '9.10' + os: + - ubuntu-latest + + # This code is fitted to the strategy: assumes Linux is used ... etc, + # change of the strategy may require changing the bootstrapping/run code + + steps: + - uses: actions/checkout@v4 + with: + + # By default, the `pull_request` event has a `GITHUB_SHA` env variable + # set to the "last merge commit on the GITHUB_REF branch" (see + # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request). 
+ # But we want to check out the latest commit on the branch whether or + # not it is a merge commit, so this is how we do that. + ref: "${{ github.event.pull_request.head.sha }}" + + - run: git fetch origin master # check the master branch for benchmarking + + - uses: ./.github/actions/setup-build + with: + ghc: ${{ matrix.ghc }} + os: ${{ runner.os }} + shorten-hls: "false" + + # max-backjumps is increased as a temporary solution + # for dependency resolution failure + - run: cabal configure --enable-benchmarks --max-backjumps 12000 + + - name: Build + run: cabal build haskell-language-server:benchmark + + - name: Bench init + run: cabal bench -j --benchmark-options="all-binaries" + + # tar is required to preserve file permissions + # compression speeds up upload/download nicely + - name: tar workspace + run: tar -czf workspace.tar.gz * .git + + - name: tar cabal + run: | + cd ~/.cabal + tar -czf cabal.tar.gz * + + - name: Upload workspace + uses: actions/upload-artifact@v4 + with: + name: workspace-${{ matrix.ghc }}-${{ matrix.os }} + retention-days: 1 + path: workspace.tar.gz + + - name: Upload .cabal + uses: actions/upload-artifact@v4 + with: + name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} + retention-days: 1 + path: ~/.cabal/cabal.tar.gz + + bench_example: + if: contains(github.event.pull_request.labels.*.name, 'performance') + needs: [bench_init, pre_job] + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + ghc: ['9.8', '9.10'] + os: [ubuntu-latest] + cabal: ['3.14'] + example: ['cabal', 'lsp-types'] + + steps: + - uses: haskell-actions/setup@v2.8.1 + with: + ghc-version : ${{ matrix.ghc }} + cabal-version: ${{ matrix.cabal }} + enable-stack: false + + - name: Download cabal home + uses: actions/download-artifact@v4 + with: + name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} + path: . + + - name: Download workspace + uses: actions/download-artifact@v4 + with: + name: workspace-${{ matrix.ghc }}-${{ matrix.os }} + path: . 
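+      # workspace.tar.gz and cabal.tar.gz were produced by bench_init; the next step unpacks them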
+ + - name: untar + run: | + mkdir -p ~/.cabal + tar xzf workspace.tar.gz + tar xzf cabal.tar.gz --directory ~/.cabal + + - name: Bench + run: cabal bench -j --benchmark-options="${{ matrix.example }}" + + - name: Display results + run: | + column -s, -t < bench-results/unprofiled/${{ matrix.example }}/results.csv | tee bench-results/unprofiled/${{ matrix.example }}/results.txt + echo + echo "Performance Diff(comparing to its previous Version):" + column -s, -t < bench-results/unprofiled/${{ matrix.example }}/resultDiff.csv | tee bench-results/unprofiled/${{ matrix.example }}/resultDiff.txt + + - name: tar benchmarking artifacts + run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz + + - name: Archive benchmarking artifacts + uses: actions/upload-artifact@v4 + with: + name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} + path: benchmark-artifacts.tar.gz + + - name: tar benchmarking logs + # We dont' store the eventlogs because the CI workers risk running out of disk space + run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz + + - name: Archive benchmark logs + uses: actions/upload-artifact@v4 + with: + name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} + path: benchmark-logs.tar.gz + + bench_post_job: + if: always() + runs-on: ubuntu-latest + needs: [pre_job, bench_init, bench_example] + steps: + - run: | + echo "jobs info: ${{ toJSON(needs) }}" + - if: contains(needs.*.result, 'failure') + run: exit 1 + - if: contains(needs.*.result, 'cancelled') && needs.pre_job.outputs.should_skip != 'true' + run: exit 1 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index 3f05374956..0000000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,116 +0,0 @@ -name: Builds - -on: - release: - types: [created] -jobs: - - build: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - ghc: ['8.10.1', '8.8.3', '8.8.2', '8.6.5', '8.6.4'] - os: [ubuntu-latest, macOS-latest, windows-latest] - exclude: - - os: windows-latest - ghc: '8.8.3' # fails due to segfault - - os: windows-latest - ghc: '8.8.2' # fails due to error with Cabal - - steps: - - uses: actions/checkout@v2 - with: - submodules: true - - uses: actions/setup-haskell@v1.1.1 - with: - ghc-version: ${{ matrix.ghc }} - cabal-version: '3.2' - - - name: Cache Cabal - uses: actions/cache@v1.2.0 - with: - path: ~/.cabal - key: ${{ runner.OS }}-${{ matrix.ghc }}-${{ hashFiles('**/*.cabal') }} - - - name: Shorten binary names - shell: bash - run: | - sed -i.bak -e 's/haskell-language-server/hls/g' \ - -e 's/haskell_language_server/hls/g' \ - haskell-language-server.cabal - sed -i.bak -e 's/Paths_haskell_language_server/Paths_hls/g' \ - src/**/*.hs exe/*.hs - - - name: Set some window specific things - if: matrix.os == 'windows-latest' - shell: bash - run: | - echo '::set-env name=EXE_EXT::.exe' - - - name: Set some linux specific things - if: matrix.os == 'ubuntu-latest' - run: | - echo '::set-env name=LINUX_CABAL_ARGS::--enable-executable-static --ghc-options=-split-sections' - - - name: Build Server - shell: bash - # Try building it twice in case of flakey builds on Windows - run: | - cabal build exe:hls -O2 --disable-documentation $LINUX_CABAL_ARGS || \ - cabal build exe:hls -O2 --disable-documentation $LINUX_CABAL_ARGS - - - name: Find Server Binary - id: find_server_binary - shell: bash - run: | - HLS=$(find dist-newstyle \( -name 
'hls' -o -name 'hls.exe' \) -type f)
-          gzip --best $HLS
-          echo ::set-output name=hls_binary::$HLS.gz
-
-      - name: Upload Server Binary
-        uses: actions/upload-release-asset@v1.0.2
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ github.event.release.upload_url }}
-          asset_path: ${{ steps.find_server_binary.outputs.hls_binary }}
-          asset_name: haskell-language-server-${{ runner.OS }}-${{ matrix.ghc }}${{env.EXE_EXT}}.gz
-          asset_content_type: application/gzip
-
-      - uses: actions/upload-artifact@v2
-        with:
-          name: haskell-language-server-${{ runner.OS }}-${{ matrix.ghc }}${{env.EXE_EXT}}.gz
-          path: ${{ steps.find_server_binary.outputs.hls_binary }}
-
-      - name: Build Wrapper
-        if: matrix.ghc == '8.10.1'
-        run: cabal build exe:hls-wrapper -O2 --disable-documentation $WIN_CABAL_ARGS $LINUX_CABAL_ARGS
-
-      - name: Find Wrapper Binary
-        if: matrix.ghc == '8.10.1'
-        id: find_wrapper_binary
-        shell: bash
-        run: |
-          HLS_WRAPPER=$(find dist-newstyle \( -name 'hls-wrapper' -o -name 'hls-wrapper.exe' \) -type f)
-          gzip --best $HLS_WRAPPER
-          echo ::set-output name=hls_wrapper_binary::$HLS_WRAPPER.gz
-
-      - name: Upload Wrapper
-        if: matrix.ghc == '8.10.1'
-        uses: actions/upload-release-asset@v1.0.2
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ github.event.release.upload_url }}
-          asset_path: ${{ steps.find_wrapper_binary.outputs.hls_wrapper_binary }}
-          asset_name: haskell-language-server-wrapper-${{ runner.OS }}${{env.EXE_EXT}}.gz
-          asset_content_type: application/gzip
-
-      - uses: actions/upload-artifact@v2
-        if: matrix.ghc == '8.10.1'
-        with:
-          name: haskell-language-server-wrapper-${{ runner.OS }}${{env.EXE_EXT}}.gz
-          path: ${{ steps.find_wrapper_binary.outputs.hls_wrapper_binary }}
-
diff --git a/.github/workflows/caching.yml b/.github/workflows/caching.yml
new file mode 100644
index 0000000000..569d380951
--- /dev/null
+++ b/.github/workflows/caching.yml
@@ -0,0 +1,122 @@
+name: Caching
+
+# Docs on the workflow:
+# 1. GitHub cache scoping goes:
+# [main branch -> PR main branch -> PR. (essentially in a tree fashion)](https://web.archive.org/web/20211125171853/https://docs.github.com/en/actions/advanced-guides/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache).
+# Building & keeping caches on `master` allows the main project state cache
+# to be shared with the whole tree.
+# 2. GitHub has a [default 10G cache pool limit](https://web.archive.org/web/20211125171853/https://docs.github.com/en/actions/advanced-guides/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy) per repo.
+# HLS is a big monorepo codebase, which means easy cache pool
+# invalidation & exhaustion because of the pool limit.
+# To keep caches useful, the main state of the main branch should stay cached
+# & so caching in the repo should be kept well below the limit.
+# That means preferring main branch caches to the PR caches
+# (especially internal branch ones), since PRs from internal branches
+# count into the repo 10G pool, while that cache gets used only inside of the PR,
+# and exhausting the pool would bork the cache for the rest of the community.
+# That is a short story why `dist-newstyle` (especially full) currently is not
+# included into `master` or PR caches.
+
+defaults:
+  run:
+    shell: bash
+
+# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency.
+concurrency:
+  group: ${{ github.head_ref }}-${{ github.workflow }}
+  cancel-in-progress: true
+
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - master
+  schedule:
+    # Refresh snapshot every (02+8*x):25 UTC
+    # When the cache is present it is a light check workflow with early termination.
+    # When the primary cache is not hit - runs the cache generation.
+    # Why: the GitHub repo has a 10G pool & on overflow GitHub removes caches in FIFO manner.
+    # When internal branch PRs save into the same pool -
+    # their cache is accessible only inside of the scope of the PR.
+    # If the main cache is forced out - there is no cache shared between PRs,
+    # which implies all PRs would start to create & save their own cache.
+    # Reinstitution of the main cache puts it back into FIFO
+    # & so it gets shared across all PRs.
+    - cron: "25 2/8 * * *"
+
+# Not using an explicit target to build the top level haskell-language-server package,
+# which makes the build cover the rest of the subpackage *libs* (except shake-bench)
env:
+  cabalBuild: "v2-build --keep-going"
+
+jobs:
+  pre_job:
+    runs-on: ubuntu-latest
+    outputs:
+      ghcs: ${{ steps.ghcs.outputs.ghcs }}
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      # Need the repo checked out in order to read the file
+      - uses: actions/checkout@v3
+      - id: ghcs
+        run: echo "ghcs=$(cat ./.github/workflows/supported-ghc-versions.json)" >> $GITHUB_OUTPUT
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v5.3.1
+        with:
+          cancel_others: false
+          paths_ignore: '["**/docs/**"
+                         , "**.md"
+                         , "**/LICENSE"
+                         , "**.nix"
+                         , "flake.lock"
+                         , "**/README.md"
+                         , "FUNDING.yml"
+                         , ".circleci/**"
+                         , "**/stack*.yaml"
+                         , ".gitlab-ci.yaml"
+                         , ".gitlab/**"
+                         ]'
+
+  caching:
+    if: needs.pre_job.outputs.should_skip != 'true'
+    needs: pre_job
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        ghc: ${{ fromJSON(needs.pre_job.outputs.ghcs) }}
+        os:
+          - ubuntu-latest
+          - macOS-latest
+          - windows-latest
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: ./.github/actions/setup-build
+        with:
+          ghc: ${{ matrix.ghc }}
+          os: ${{ runner.os }}
+
+      # Download sources for feeding the build sources cache
+      # Fetching from the GitHub cache is faster than doing it from Hackage
+      # Sources do not change per GHC version, so only doing it
+      # for one matrix job (it is arbitrary)
+      - if: steps.compiled-deps.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ghc == '9.6'
+        name: Download sources
+        run: |
+          cabal $cabalBuild --only-download --enable-benchmarks --enable-tests
+
+      # The aim of this build is not to have working code but to produce as much cache as possible
+      - if: steps.compiled-deps.outputs.cache-hit != 'true'
+        name: Build haskell-language-server
+        run: |
+          # repeating builds to work around segfaults in windows and ghc-8.8.4
+          cabal $cabalBuild || cabal $cabalBuild || cabal $cabalBuild
+
+      # We build ghcide with benchmarks and tests enabled to include its dependencies in the cache
+      # (including shake-bench)
+      # Only for the same ghc and os used in the bench workflow, so we save cache space
+      - if: steps.compiled-deps.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ghc == '9.6'
+        name: Build ghcide benchmark
+        run: |
+          cabal $cabalBuild ghcide --enable-benchmarks --enable-tests
diff --git a/.github/workflows/flags.yml b/.github/workflows/flags.yml
new file mode 100644
index 0000000000..111dbd40a7
--- /dev/null
+++ b/.github/workflows/flags.yml
@@ -0,0 +1,98 @@
+name: Flags
+
+defaults:
+  run:
+    shell: bash
+
+# See: 
https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency. +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +on: + pull_request: + branches: + - '**' + +jobs: + pre_job: + runs-on: ubuntu-latest + outputs: + ghcs: ${{ steps.ghcs.outputs.ghcs }} + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + # Need the repo checked out in order to read the file + - uses: actions/checkout@v3 + - id: ghcs + run: echo "ghcs=$(cat ./.github/workflows/supported-ghc-versions.json)" >> $GITHUB_OUTPUT + - id: skip_check + uses: fkirc/skip-duplicate-actions@v5.3.1 + with: + cancel_others: false + paths_ignore: '[ "**/docs/**" + , "**.md" + , "**/LICENSE" + , "**.nix" + , "flake.lock" + , "**/README.md" + , "FUNDING.yml" + , ".circleci/**" + , "**/stack*.yaml" + , ".gitlab-ci.yaml" + , ".gitlab/**" + ]' + + flags: + if: needs.pre_job.outputs.should_skip != 'true' + needs: pre_job + runs-on: ${{ matrix.os }} + strategy: + fail-fast: true + matrix: + ghc: ${{ fromJSON(needs.pre_job.outputs.ghcs) }} + os: + - ubuntu-latest + + steps: + - uses: actions/checkout@v3 + with: + + # By default, the `pull_request` event has a `GITHUB_SHA` env variable + # set to the "last merge commit on the GITHUB_REF branch" (see + # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request). + # But we want to check out the latest commit on the branch whether or + # not it is a merge commit, so this is how we do that. + ref: "${{ github.event.pull_request.head.sha }}" + + - uses: ./.github/actions/setup-build + with: + ghc: ${{ matrix.ghc }} + os: ${{ runner.os }} + + # The purpose of this job is to ensure that the build works even with flags + # in their non-default settings. Below we: + # - enable flags that are off by default + # - disable flags that are on by default + - name: Configue non-default flags for all components + run: | + cabal configure \ + --constraint "haskell-language-server +pedantic" \ + --constraint "hls-graph +embed-files +pedantic +stm-stats" \ + --constraint "ghcide +ekg +executable +test-exe" \ + --constraint "hls-plugin-api +pedantic -use-fingertree" + cat cabal.project.local + + - name: Build everything with non-default flags + run: cabal build all + + flags_post_job: + if: always() + runs-on: ubuntu-latest + needs: [pre_job, flags] + steps: + - run: | + echo "jobs info: ${{ toJSON(needs) }}" + - if: contains(needs.*.result, 'failure') + run: exit 1 + - if: contains(needs.*.result, 'cancelled') && needs.pre_job.outputs.should_skip != 'true' + run: exit 1 diff --git a/.github/workflows/hlint.yml b/.github/workflows/hlint.yml new file mode 100644 index 0000000000..c17bfec921 --- /dev/null +++ b/.github/workflows/hlint.yml @@ -0,0 +1,26 @@ +name: "HLint check" + +on: + pull_request: + branches: + - '**' + +jobs: + hlint: + name: "Hlint check run" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: 'Installing' + uses: rwe/actions-hlint-setup@v1 + with: + version: '3.8' + + - name: 'Checking code' + uses: rwe/actions-hlint-run@v2 + with: + hlint-bin: "hlint --with-group=extra --ignore-glob=**/testdata/** --ignore-glob=**/test/data/**" + fail-on: error + path: . 
+ diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml new file mode 100644 index 0000000000..bdd770acd0 --- /dev/null +++ b/.github/workflows/nix.yml @@ -0,0 +1,79 @@ +name: Nix + +# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency. +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +on: + pull_request: + branches: + - '**' + push: + branches: + - master + +jobs: + pre_job: + runs-on: ubuntu-latest + outputs: + should_skip_develop: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@v5.3.1 + with: + cancel_others: false + paths_ignore: '[ "**/docs/**" + , "**.md" + , "**/LICENSE" + , ".circleci/**" + , "**/README.md" + , "FUNDING.yml" + , "**/stack*.yaml" + , "**/stack*.yaml" + , ".gitlab-ci.yaml" + , ".gitlab/**" + ]' + + # Enter the development shell and run `cabal build` + develop: + if: needs.pre_job.outputs.should_skip_develop != 'true' + needs: pre_job + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + # TODO: Fix compilation problems on macOS. + # os: [ubuntu-latest, macOS-latest] + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v3 + + - uses: cachix/install-nix-action@v31 + with: + extra_nix_config: | + experimental-features = nix-command flakes + nix_path: nixpkgs=channel:nixos-unstable + - uses: cachix/cachix-action@v16 + with: + name: haskell-language-server + authToken: ${{ secrets.HLS_CACHIX_AUTH_TOKEN }} + # Don't try and run the build (although that would be a good + # test), it takes a long time and we have sometimes had + # glibc incompatibilities with the runners + - run: | + nix develop --print-build-logs --command true + + nix_post_job: + if: always() + runs-on: ubuntu-latest + needs: [pre_job, develop] + steps: + - run: | + echo "jobs info: ${{ toJSON(needs) }}" + - if: contains(needs.*.result, 'failure') + run: exit 1 + - if: contains(needs.*.result, 'cancelled') && needs.pre_job.outputs.should_skip != 'true' + run: exit 1 diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 0000000000..40d79afbf2 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,59 @@ +name: pre-commit + +on: + pull_request: + push: + branches: [master] + +jobs: + file-diff: + runs-on: ubuntu-latest + outputs: + git-diff: ${{ steps.git-diff.outputs.diff }} + steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Find changed files + uses: technote-space/get-diff-action@v6.1.2 + id: git-diff + with: + PATTERNS: | + +(src|exe|test|ghcide|plugins|hls-plugin-api|hie-compat|hls-graph|hls-test-utils)/**/*.hs + pre-commit: + runs-on: ubuntu-latest + needs: file-diff + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-build + with: + # select a stable GHC version + ghc: 9.6 + os: ${{ runner.os }} + shorten-hls: false + + - name: "Install stylish-haskell" + run: | + cabal install stylish-haskell + echo "${HOME}/.cabal/bin" >> $GITHUB_PATH + + - name: "Check stylish-haskell is available" + run: | + echo $(which stylish-haskell) + + - name: Compiled deps cache + id: stylish-haskell-compiled-cache + uses: actions/cache@v3 + env: + cache-name: stylish-haskell-compiled-cache + with: + path: ${{ env.CABAL_PKGS_DIR }} + key: ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}-${{ env.INDEX_STATE }}-${{ hashFiles('cabal.project.freeze') }} + restore-keys: | + ${{ 
inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}-${{ env.INDEX_STATE }}- + ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}-${{ inputs.ghc }}- + ${{ inputs.cache-prefix }}${{ env.cache-name }}-${{ inputs.os }}- + + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.1 + with: + extra_args: --files ${{ needs.file-diff.outputs.git-diff }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000000..30c55d375a --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,3833 @@ +### DO NOT EDIT - GENERATED FILE +### This file was generated by ./.github/generate-ci/gen_ci.hs +### Edit that file and run ./.github/generate-ci/generate-jobs to regenerate +env: + CABAL_CACHE_DISABLE: ${{ vars.CABAL_CACHE_DISABLE }} + CABAL_CACHE_NONFATAL: ${{ vars.CABAL_CACHE_NONFATAL }} +jobs: + bindist-aarch64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-aarch64-linux-ubuntu2004 (Prepare bindist) + needs: + - build-aarch64-linux-ubuntu2004-967 + - build-aarch64-linux-ubuntu2004-984 + - build-aarch64-linux-ubuntu2004-9102 + - build-aarch64-linux-ubuntu2004-9122 + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-linux-ubuntu2004-9122 + path: ./ + - name: Unpack aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/untar.sh + - name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/bindist.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-aarch64-linux-ubuntu2004 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-aarch64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-aarch64-mac (Prepare bindist) + needs: + - build-aarch64-mac-967 + - build-aarch64-mac-984 + - build-aarch64-mac-9102 + - build-aarch64-mac-9122 + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: 
actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-aarch64-mac-9122 + path: ./ + - name: Run build + run: | + bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH" + export CC="$HOME/.brew/opt/llvm@13/bin/clang" + export CXX="$HOME/.brew/opt/llvm@13/bin/clang++" + export LD=ld + export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar" + export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib" + for bindist in out-*.tar ; do + tar xf "${bindist}" + done + unset bindist + bash .github/scripts/bindist.sh + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-aarch64-apple-darwin + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb10: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb10 (Prepare bindist) + needs: + - build-x86_64-linux-deb10-967 + - build-x86_64-linux-deb10-984 + - build-x86_64-linux-deb10-9102 + - build-x86_64-linux-deb10-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb10-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-deb10 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb11: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb11 (Prepare bindist) + needs: + - build-x86_64-linux-deb11-967 + - build-x86_64-linux-deb11-984 + - build-x86_64-linux-deb11-9102 + - build-x86_64-linux-deb11-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-967 + path: ./ + - name: Download artifacts + uses: 
actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb11-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-deb11 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb12: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb12 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb12 (Prepare bindist) + needs: + - build-x86_64-linux-deb12-967 + - build-x86_64-linux-deb12-984 + - build-x86_64-linux-deb12-9102 + - build-x86_64-linux-deb12-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb12-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb12-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb12-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb12-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb12 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-deb12 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-deb9: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-deb9 (Prepare bindist) + needs: + - build-x86_64-linux-deb9-967 + - build-x86_64-linux-deb9-984 + - build-x86_64-linux-deb9-9102 + - build-x86_64-linux-deb9-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-deb9-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: 
bindist-x86_64-linux-deb9 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-fedora33: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-fedora33 (Prepare bindist) + needs: + - build-x86_64-linux-fedora33-967 + - build-x86_64-linux-fedora33-984 + - build-x86_64-linux-fedora33-9102 + - build-x86_64-linux-fedora33-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora33-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-fedora33 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-fedora40: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora40 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-fedora40 (Prepare bindist) + needs: + - build-x86_64-linux-fedora40-967 + - build-x86_64-linux-fedora40-984 + - build-x86_64-linux-fedora40-9102 + - build-x86_64-linux-fedora40-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora40-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora40-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora40-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-fedora40-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-fedora40 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-fedora40 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-mint193: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: 
bindist-x86_64-linux-mint193 (Prepare bindist) + needs: + - build-x86_64-linux-mint193-967 + - build-x86_64-linux-mint193-984 + - build-x86_64-linux-mint193-9102 + - build-x86_64-linux-mint193-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint193-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-mint193 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-mint202: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-mint202 (Prepare bindist) + needs: + - build-x86_64-linux-mint202-967 + - build-x86_64-linux-mint202-984 + - build-x86_64-linux-mint202-9102 + - build-x86_64-linux-mint202-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint202-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-mint202 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-mint213: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint213 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-mint213 (Prepare bindist) + needs: + - build-x86_64-linux-mint213-967 + - build-x86_64-linux-mint213-984 + - build-x86_64-linux-mint213-9102 + - build-x86_64-linux-mint213-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint213-967 + path: ./ + - name: Download artifacts + uses: 
actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint213-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint213-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-mint213-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-mint213 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-mint213 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-ubuntu1804: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-ubuntu1804 (Prepare bindist) + needs: + - build-x86_64-linux-ubuntu1804-967 + - build-x86_64-linux-ubuntu1804-984 + - build-x86_64-linux-ubuntu1804-9102 + - build-x86_64-linux-ubuntu1804-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu1804-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-ubuntu1804 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-ubuntu2004 (Prepare bindist) + needs: + - build-x86_64-linux-ubuntu2004-967 + - build-x86_64-linux-ubuntu2004-984 + - build-x86_64-linux-ubuntu2004-9102 + - build-x86_64-linux-ubuntu2004-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2004-9122 + path: ./ + - name: Bindist + uses: 
./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-ubuntu2004 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-ubuntu2204: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-ubuntu2204 (Prepare bindist) + needs: + - build-x86_64-linux-ubuntu2204-967 + - build-x86_64-linux-ubuntu2204-984 + - build-x86_64-linux-ubuntu2204-9102 + - build-x86_64-linux-ubuntu2204-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-ubuntu2204-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-ubuntu2204 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-linux-unknown: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-linux-unknown (Prepare bindist) + needs: + - build-x86_64-linux-unknown-967 + - build-x86_64-linux-unknown-984 + - build-x86_64-linux-unknown-9102 + - build-x86_64-linux-unknown-9122 + runs-on: + - self-hosted + - linux-space + - maerwald + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-linux-unknown-9122 + path: ./ + - name: Bindist + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BINDIST + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-linux-unknown + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + 
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + name: bindist-x86_64-mac (Prepare bindist) + needs: + - build-x86_64-mac-967 + - build-x86_64-mac-984 + - build-x86_64-mac-9102 + - build-x86_64-mac-9122 + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-967 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-984 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-9102 + path: ./ + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-mac-9122 + path: ./ + - name: Create bindist + run: | + brew install coreutils tree + for bindist in out-*.tar ; do + tar xf "${bindist}" + done + unset bindist + bash .github/scripts/bindist.sh + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-apple-darwin + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + bindist-x86_64-windows: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + name: bindist-x86_64-windows (Prepare bindist) + needs: + - build-x86_64-windows-967 + - build-x86_64-windows-984 + - build-x86_64-windows-9102 + - build-x86_64-windows-9122 + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-967 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-984 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-9102 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: artifacts-build-x86_64-windows-9122 + path: ./out + - name: Run build + run: | + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S unzip zip git" + taskkill /F /FI "MODULES eq msys-2.0.dll" + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/bindist.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: bindist-x86_64-mingw64 + path: |- + ./out/*.tar.xz + ./out/plan.json/* + ./out/*.zip + retention-days: 2 + build-aarch64-linux-ubuntu2004-9102: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-9102 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + 
- name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.2 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.10.2 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-9102 + path: out-aarch64-linux-ubuntu2004-9.10.2.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-9122: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-9122 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.12.2 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-9122 + path: out-aarch64-linux-ubuntu2004-9.12.2.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-967: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-967 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . 
-mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.6.7 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-967 + path: out-aarch64-linux-ubuntu2004-9.6.7.tar + retention-days: 2 + build-aarch64-linux-ubuntu2004-984: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-linux-ubuntu2004-984 (Build binaries) + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Build aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/build.sh + - env: + GHC_VERSION: 9.8.4 + name: Tar aarch64-linux binaries + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/tar.sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-linux-ubuntu2004-984 + path: out-aarch64-linux-ubuntu2004-9.8.4.tar + retention-days: 2 + build-aarch64-mac-9102: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-9102 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.2 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-9102 + path: out-aarch64-apple-darwin-9.10.2.tar + retention-days: 2 + build-aarch64-mac-9122: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-9122 (Build binaries) + 
runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-9122 + path: out-aarch64-apple-darwin-9.12.2.tar + retention-days: 2 + build-aarch64-mac-967: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-967 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-967 + path: out-aarch64-apple-darwin-9.6.7.tar + retention-days: 2 + build-aarch64-mac-984: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-aarch64-mac-984 (Build binaries) + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Run build + run: | + bash .github/scripts/brew.sh git coreutils autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$PATH" + export LD=ld + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-aarch64-mac-984 + path: out-aarch64-apple-darwin-9.8.4.tar + retention-days: 2 + build-x86_64-linux-deb10-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-9102 + path: out-x86_64-linux-deb10-9.10.2.tar + retention-days: 2 + build-x86_64-linux-deb10-9122: + env: + ADD_CABAL_ARGS: 
--enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-9122 + path: out-x86_64-linux-deb10-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb10-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-967 + path: out-x86_64-linux-deb10-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb10-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb10-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb10-984 + path: out-x86_64-linux-deb10-9.8.4.tar + retention-days: 2 + build-x86_64-linux-deb11-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-9102 + path: out-x86_64-linux-deb11-9.10.2.tar + retention-days: 2 + build-x86_64-linux-deb11-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + 
S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-9122 + path: out-x86_64-linux-deb11-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb11-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-967 + path: out-x86_64-linux-deb11-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb11-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb11-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb11-984 + path: out-x86_64-linux-deb11-9.8.4.tar + retention-days: 2 + build-x86_64-linux-deb12-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb12 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb12-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-deb12 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb12-9102 + path: out-x86_64-linux-deb12-9.10.2.tar + retention-days: 2 + build-x86_64-linux-deb12-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb12 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb12-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: 
actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb12 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb12-9122 + path: out-x86_64-linux-deb12-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb12-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb12 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb12-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb12 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb12-967 + path: out-x86_64-linux-deb12-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb12-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb12 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb12-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb12 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb12-984 + path: out-x86_64-linux-deb12-9.8.4.tar + retention-days: 2 + build-x86_64-linux-deb9-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-9102 + path: out-x86_64-linux-deb9-9.10.2.tar + retention-days: 2 + build-x86_64-linux-deb9-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: 
error + name: artifacts-build-x86_64-linux-deb9-9122 + path: out-x86_64-linux-deb9-9.12.2.tar + retention-days: 2 + build-x86_64-linux-deb9-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-967 + path: out-x86_64-linux-deb9-9.6.7.tar + retention-days: 2 + build-x86_64-linux-deb9-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-deb9-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-deb9-984 + path: out-x86_64-linux-deb9-9.8.4.tar + retention-days: 2 + build-x86_64-linux-fedora33-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-9102 + path: out-x86_64-linux-fedora33-9.10.2.tar + retention-days: 2 + build-x86_64-linux-fedora33-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-9122 + path: out-x86_64-linux-fedora33-9.12.2.tar + retention-days: 2 + build-x86_64-linux-fedora33-967: + env: + ADD_CABAL_ARGS: --enable-split-sections 
+ ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-967 + path: out-x86_64-linux-fedora33-9.6.7.tar + retention-days: 2 + build-x86_64-linux-fedora33-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora33-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora33-984 + path: out-x86_64-linux-fedora33-9.8.4.tar + retention-days: 2 + build-x86_64-linux-fedora40-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora40 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora40-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-fedora40 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora40-9102 + path: out-x86_64-linux-fedora40-9.10.2.tar + retention-days: 2 + build-x86_64-linux-fedora40-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora40 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora40-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-fedora40 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora40-9122 + path: out-x86_64-linux-fedora40-9.12.2.tar + retention-days: 2 + build-x86_64-linux-fedora40-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora40 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ 
secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora40-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-fedora40 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora40-967 + path: out-x86_64-linux-fedora40-9.6.7.tar + retention-days: 2 + build-x86_64-linux-fedora40-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora40 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-fedora40-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-fedora40 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-fedora40-984 + path: out-x86_64-linux-fedora40-9.8.4.tar + retention-days: 2 + build-x86_64-linux-mint193-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-9102 + path: out-x86_64-linux-mint193-9.10.2.tar + retention-days: 2 + build-x86_64-linux-mint193-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-9122 + path: out-x86_64-linux-mint193-9.12.2.tar + retention-days: 2 + build-x86_64-linux-mint193-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + 
name: build-x86_64-linux-mint193-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-967 + path: out-x86_64-linux-mint193-9.6.7.tar + retention-days: 2 + build-x86_64-linux-mint193-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint193-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint193-984 + path: out-x86_64-linux-mint193-9.8.4.tar + retention-days: 2 + build-x86_64-linux-mint202-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-9102 + path: out-x86_64-linux-mint202-9.10.2.tar + retention-days: 2 + build-x86_64-linux-mint202-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-9122 + path: out-x86_64-linux-mint202-9.12.2.tar + retention-days: 2 + build-x86_64-linux-mint202-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: 
./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-967 + path: out-x86_64-linux-mint202-9.6.7.tar + retention-days: 2 + build-x86_64-linux-mint202-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint202-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint202-984 + path: out-x86_64-linux-mint202-9.8.4.tar + retention-days: 2 + build-x86_64-linux-mint213-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint213 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint213-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-mint213 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint213-9102 + path: out-x86_64-linux-mint213-9.10.2.tar + retention-days: 2 + build-x86_64-linux-mint213-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint213 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint213-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-mint213 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint213-9122 + path: out-x86_64-linux-mint213-9.12.2.tar + retention-days: 2 + build-x86_64-linux-mint213-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint213 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint213-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-mint213 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + 
if-no-files-found: error + name: artifacts-build-x86_64-linux-mint213-967 + path: out-x86_64-linux-mint213-9.6.7.tar + retention-days: 2 + build-x86_64-linux-mint213-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint213 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-mint213-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-mint213 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-mint213-984 + path: out-x86_64-linux-mint213-9.8.4.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-9102 + path: out-x86_64-linux-ubuntu1804-9.10.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-9122 + path: out-x86_64-linux-ubuntu1804-9.12.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu1804-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-967 + path: out-x86_64-linux-ubuntu1804-9.6.7.tar + 
retention-days: 2 + build-x86_64-linux-ubuntu1804-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu1804-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu1804-984 + path: out-x86_64-linux-ubuntu1804-9.8.4.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-9102 + path: out-x86_64-linux-ubuntu2004-9.10.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-9122 + path: out-x86_64-linux-ubuntu2004-9.12.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-967 + path: out-x86_64-linux-ubuntu2004-9.6.7.tar + retention-days: 2 + build-x86_64-linux-ubuntu2004-984: + env: + ADD_CABAL_ARGS: 
--enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2004-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2004-984 + path: out-x86_64-linux-ubuntu2004-9.8.4.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-9102 + path: out-x86_64-linux-ubuntu2204-9.10.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-9122 + path: out-x86_64-linux-ubuntu2204-9.12.2.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-967 + path: out-x86_64-linux-ubuntu2204-9.6.7.tar + retention-days: 2 + build-x86_64-linux-ubuntu2204-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ 
secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-ubuntu2204-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-ubuntu2204-984 + path: out-x86_64-linux-ubuntu2204-9.8.4.tar + retention-days: 2 + build-x86_64-linux-unknown-9102: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-9102 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.10.2 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.10.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-9102 + path: out-x86_64-linux-unknown-9.10.2.tar + retention-days: 2 + build-x86_64-linux-unknown-9122: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-9122 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.12.2 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.12.2 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-9122 + path: out-x86_64-linux-unknown-9.12.2.tar + retention-days: 2 + build-x86_64-linux-unknown-967: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-967 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.6.7 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.6.7 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-967 + path: out-x86_64-linux-unknown-9.6.7.tar + retention-days: 2 + build-x86_64-linux-unknown-984: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + 
TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-linux-unknown-984 (Build binaries) + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Build 9.8.4 + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: BUILD + version: 9.8.4 + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-linux-unknown-984 + path: out-x86_64-linux-unknown-9.8.4.tar + retention-days: 2 + build-x86_64-mac-9102: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-9102 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.2 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-9102 + path: out-x86_64-apple-darwin-9.10.2.tar + retention-days: 2 + build-x86_64-mac-9122: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-9122 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-9122 + path: out-x86_64-apple-darwin-9.12.2.tar + retention-days: 2 + build-x86_64-mac-967: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-967 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-967 + path: out-x86_64-apple-darwin-9.6.7.tar + retention-days: 2 + build-x86_64-mac-984: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: build-x86_64-mac-984 (Build binaries) + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: 
actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Run build + run: | + brew install coreutils tree + bash .github/scripts/build.sh + tar cf out-${ARTIFACT}-${GHC_VERSION}.tar out/ store/ + shell: sh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-mac-984 + path: out-x86_64-apple-darwin-9.8.4.tar + retention-days: 2 + build-x86_64-windows-9102: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-9102 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.10.2 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-9102 + path: ./out/* + retention-days: 2 + build-x86_64-windows-9122: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-9122 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.12.2 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-9122 + path: ./out/* + retention-days: 2 + build-x86_64-windows-967: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-967 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.6.7 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash -lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-967 + path: ./out/* + retention-days: 2 + build-x86_64-windows-984: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: build-x86_64-windows-984 (Build binaries) + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - env: + GHC_VERSION: 9.8.4 + name: Run build + run: | + $env:CHERE_INVOKING = 1 + $env:MSYS2_PATH_TYPE = "inherit" + $ErrorActionPreference = "Stop" + C:\msys64\usr\bin\bash 
-lc "bash .github/scripts/build.sh" + shell: pwsh + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: artifacts-build-x86_64-windows-984 + path: ./out/* + retention-days: 2 + release: + if: startsWith(github.ref, 'refs/tags/') + name: release + needs: + - test-x86_64-mac + - test-aarch64-mac + - test-x86_64-windows + - test-aarch64-linux-ubuntu2004 + - test-x86_64-linux-deb9 + - test-x86_64-linux-deb10 + - test-x86_64-linux-deb11 + - test-x86_64-linux-deb12 + - test-x86_64-linux-ubuntu1804 + - test-x86_64-linux-ubuntu2004 + - test-x86_64-linux-ubuntu2204 + - test-x86_64-linux-mint193 + - test-x86_64-linux-mint202 + - test-x86_64-linux-mint213 + - test-x86_64-linux-fedora33 + - test-x86_64-linux-fedora40 + - test-x86_64-linux-unknown + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-apple-darwin + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-apple-darwin + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-mingw64 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-linux-ubuntu2004 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb9 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb10 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb11 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb12 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu1804 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2004 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2204 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint193 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint202 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint213 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora33 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora40 + path: ./out + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-unknown + path: ./out + - name: Prepare release + run: | + sudo apt-get update && sudo apt-get install -y tar xz-utils + cd out/plan.json + tar cf plan_json.tar * + mv plan_json.tar ../ + cd ../.. 
+ export RELEASE=$GITHUB_REF_NAME + git archive --format=tar.gz -o "out/haskell-language-server-${RELEASE}-src.tar.gz" --prefix="haskell-language-server-${RELEASE}/" HEAD + shell: bash + - name: Release + uses: softprops/action-gh-release@v2 + with: + draft: true + files: | + ./out/*.zip + ./out/*.tar.xz + ./out/*.tar.gz + ./out/*.tar + test-aarch64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-aarch64-linux-ubuntu2004 (Test binaries) + needs: + - bindist-aarch64-linux-ubuntu2004 + runs-on: + - self-hosted + - Linux + - ARM64 + - maerwald + steps: + - name: clean and git config for aarch64-linux + run: | + find . -mindepth 1 -maxdepth 1 -exec rm -rf -- {} + + git config --global --get-all safe.directory | grep '^\*$' || git config --global --add safe.directory "*" + shell: bash + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-linux-ubuntu2004 + path: ./out + - name: Run test + uses: docker://hasufell/arm64v8-ubuntu-haskell:focal + with: + args: bash .github/scripts/test.sh + test-aarch64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: ARM64 + ARTIFACT: aarch64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HOMEBREW_CHANGE_ARCH_TO_ARM: '1' + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-aarch64-mac (Test binaries) + needs: + - bindist-aarch64-mac + runs-on: + - self-hosted + - macOS + - ARM64 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-aarch64-apple-darwin + path: ./out + - name: Run test + run: | + bash .github/scripts/brew.sh git coreutils llvm@13 autoconf automake tree + export PATH="$HOME/.brew/bin:$HOME/.brew/sbin:$HOME/.brew/opt/llvm@13/bin:$PATH" + export CC="$HOME/.brew/opt/llvm@13/bin/clang" + export CXX="$HOME/.brew/opt/llvm@13/bin/clang++" + export LD=ld + export AR="$HOME/.brew/opt/llvm@13/bin/llvm-ar" + export RANLIB="$HOME/.brew/opt/llvm@13/bin/llvm-ranlib" + bash .github/scripts/test.sh + shell: sh + test-x86_64-linux-deb10: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb10 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb10 (Test binaries) + needs: + - bindist-x86_64-linux-deb10 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb10 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb10 + with: + stage: TEST + test-x86_64-linux-deb11: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb11 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + 
S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb11 (Test binaries) + needs: + - bindist-x86_64-linux-deb11 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb11 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb11 + with: + stage: TEST + test-x86_64-linux-deb12: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb12 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb12 (Test binaries) + needs: + - bindist-x86_64-linux-deb12 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb12 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb12 + with: + stage: TEST + test-x86_64-linux-deb9: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-deb9 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-deb9 (Test binaries) + needs: + - bindist-x86_64-linux-deb9 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-deb9 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-deb9 + with: + stage: TEST + test-x86_64-linux-fedora33: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora33 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-fedora33 (Test binaries) + needs: + - bindist-x86_64-linux-fedora33 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora33 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-fedora33 + with: + stage: TEST + test-x86_64-linux-fedora40: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-fedora40 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-fedora40 (Test binaries) + needs: + - bindist-x86_64-linux-fedora40 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-fedora40 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-fedora40 + with: + stage: TEST + test-x86_64-linux-mint193: + 
env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint193 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-mint193 (Test binaries) + needs: + - bindist-x86_64-linux-mint193 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint193 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-mint193 + with: + stage: TEST + test-x86_64-linux-mint202: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint202 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-mint202 (Test binaries) + needs: + - bindist-x86_64-linux-mint202 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint202 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-mint202 + with: + stage: TEST + test-x86_64-linux-mint213: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-mint213 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-mint213 (Test binaries) + needs: + - bindist-x86_64-linux-mint213 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-mint213 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-mint213 + with: + stage: TEST + test-x86_64-linux-ubuntu1804: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu1804 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-ubuntu1804 (Test binaries) + needs: + - bindist-x86_64-linux-ubuntu1804 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu1804 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-ubuntu1804 + with: + stage: TEST + test-x86_64-linux-ubuntu2004: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2004 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-ubuntu2004 (Test binaries) + needs: + - bindist-x86_64-linux-ubuntu2004 + runs-on: + - ubuntu-latest + 
steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2004 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-ubuntu2004 + with: + stage: TEST + test-x86_64-linux-ubuntu2204: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-ubuntu2204 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-ubuntu2204 (Test binaries) + needs: + - bindist-x86_64-linux-ubuntu2204 + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-ubuntu2204 + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-ubuntu2204 + with: + stage: TEST + test-x86_64-linux-unknown: + env: + ADD_CABAL_ARGS: --enable-split-sections + ARCH: '64' + ARTIFACT: x86_64-linux-unknown + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DEBIAN_FRONTEND: noninteractive + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-linux-unknown (Test binaries) + needs: + - bindist-x86_64-linux-unknown + runs-on: + - ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-linux-unknown + path: ./out + - name: Test + uses: ./.github/actions/bindist-actions/action-unknown + with: + stage: TEST + test-x86_64-mac: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-apple-darwin + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + MACOSX_DEPLOYMENT_TARGET: '10.13' + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: tar.xz + TZ: Asia/Singapore + environment: CI + name: test-x86_64-mac (Test binaries) + needs: + - bindist-x86_64-mac + runs-on: + - macOS-13 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-apple-darwin + path: ./out + - name: Run test + run: | + brew install coreutils tree + bash .github/scripts/test.sh + shell: sh + test-x86_64-windows: + env: + ADD_CABAL_ARGS: '' + ARCH: '64' + ARTIFACT: x86_64-mingw64 + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_HOST: ${{ secrets.S3_HOST }} + TARBALL_EXT: zip + TZ: Asia/Singapore + environment: CI + name: test-x86_64-windows (Test binaries) + needs: + - bindist-x86_64-windows + runs-on: + - windows-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: bindist-x86_64-mingw64 + path: ./out + - name: install windows deps + run: | + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -Syuu" + C:\msys64\usr\bin\bash -lc "pacman --disable-download-timeout --noconfirm -S make mingw-w64-x86_64-clang curl autoconf mingw-w64-x86_64-pkgconf ca-certificates base-devel gettext autoconf make libtool automake python 
p7zip patch unzip zip git"
+          taskkill /F /FI "MODULES eq msys-2.0.dll"
+        shell: pwsh
+      - name: Run test
+        run: |
+          $env:CHERE_INVOKING = 1
+          $env:MSYS2_PATH_TYPE = "inherit"
+          C:\msys64\usr\bin\bash -lc "bash .github/scripts/test.sh"
+        shell: pwsh
+name: Build and release
+'on':
+  push:
+    tags:
+      - '*'
+  schedule:
+    - cron: 0 2 * * 1
diff --git a/.github/workflows/s3-cache.yaml b/.github/workflows/s3-cache.yaml
new file mode 100644
index 0000000000..fa2e97deed
--- /dev/null
+++ b/.github/workflows/s3-cache.yaml
@@ -0,0 +1,43 @@
+name: Cache eviction
+
+on:
+  workflow_dispatch:
+    inputs:
+      key:
+        description: Which cache to evict
+        required: true
+        default: '/'
+        type: choice
+        options:
+          - aarch64-apple-darwin
+          - aarch64-linux-ubuntu20
+          - x86_64-apple-darwin
+          - x86_64-freebsd
+          - x86_64-linux-centos7
+          - x86_64-linux-deb10
+          - x86_64-linux-deb11
+          - x86_64-linux-deb9
+          - x86_64-linux-fedora27
+          - x86_64-linux-fedora33
+          - x86_64-linux-mint19.3
+          - x86_64-linux-mint20.2
+          - x86_64-linux-ubuntu18.04
+          - x86_64-linux-ubuntu20.04
+          - x86_64-linux-ubuntu22.04
+          - /
+jobs:
+  evict:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Remove from S3
+        uses: vitorsgomes/s3-rm-action@master
+        with:
+          args: --recursive
+        env:
+          AWS_S3_ENDPOINT: https://${{ secrets.S3_HOST }}
+          AWS_S3_BUCKET: haskell-language-server
+          AWS_REGION: us-west-2
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          PATH_TO_DELETE: ${{ github.event.inputs.key }}
diff --git a/.github/workflows/supported-ghc-versions.json b/.github/workflows/supported-ghc-versions.json
new file mode 100644
index 0000000000..35a3bd4ac4
--- /dev/null
+++ b/.github/workflows/supported-ghc-versions.json
@@ -0,0 +1 @@
+["9.12", "9.10", "9.8", "9.6"]
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000000..984758a310
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,274 @@
+name: Testing
+
+defaults:
+  run:
+    shell: bash
+
+# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency.
+concurrency:
+  group: ${{ github.head_ref }}-${{ github.workflow }}
+  cancel-in-progress: true
+
+on:
+  pull_request:
+    branches:
+      - '**'
+
+jobs:
+  pre_job:
+    runs-on: ubuntu-latest
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+      should_skip_ghcide: ${{ steps.skip_ghcide_check.outputs.should_skip }}
+      ghcs: ${{ steps.ghcs.outputs.ghcs }}
+    steps:
+      # Need the repo checked out in order to read the file
+      - uses: actions/checkout@v3
+      - id: ghcs
+        run: echo "ghcs=$(cat ./.github/workflows/supported-ghc-versions.json)" >> $GITHUB_OUTPUT
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v5.3.1
+        with:
+          cancel_others: false
+          paths_ignore: '[ "**/docs/**"
+                         , "**.md"
+                         , "**/LICENSE"
+                         , "**.nix"
+                         , "flake.lock"
+                         , "**/README.md"
+                         , "FUNDING.yml"
+                         , ".circleci/**"
+                         , "**/stack*.yaml"
+                         , ".gitlab-ci.yaml"
+                         , ".gitlab/**"
+                         , "CODEOWNERS"
+                         ]'
+      # If we only change packages downstream of ghcide, we do not need to test ghcide itself
+      - id: skip_ghcide_check
+        uses: fkirc/skip-duplicate-actions@v5.3.1
+        with:
+          cancel_others: false
+          paths_ignore: '[ "hls-test-utils/**"
+                         , "plugins/**"
+                         , "src/**"
+                         , "exe/**"
+                         , "test/**"
+                         , "shake-bench/**"
+                         ]'
+
+  test:
+    if: needs.pre_job.outputs.should_skip != 'true'
+    needs:
+      - pre_job
+    runs-on: ${{ matrix.os }}
+    strategy:
+      # We don't want to fail fast.
+ # We used to fail fast, to avoid caches of failing PRs to overpopulate the CI + # caches, evicting known good build caches. + # However, PRs do not cache anything any more, and can only use the caches from current master. + # See 'caching.yml' for our caching set up. + fail-fast: false + matrix: + ghc: ${{ fromJSON(needs.pre_job.outputs.ghcs) }} + os: + - ubuntu-latest + - macOS-latest + - windows-latest + test: + - true + - false + exclude: + # Exclude the test configuration on macos, it's sufficiently similar to other OSs + # that it mostly just burns CI time. Buiding is still useful since it catches + # solver issues. + - os: macOS-latest + test: true + # Exclude the build-only configurations for windows and ubuntu + - os: windows-latest + test: false + - os: ubuntu-latest + test: false + + steps: + - uses: actions/checkout@v3 + + - uses: ./.github/actions/setup-build + with: + ghc: ${{ matrix.ghc }} + os: ${{ runner.os }} + + - name: Build + run: cabal build all + + - name: Set test options + # See https://github.com/ocharles/tasty-rerun/issues/22 for why we need + # to include 'new' in the filters, since many of our test suites are in the + # same package. + run: | + cabal configure --test-options="--rerun-update --rerun-filter failures,exceptions,new" + + - if: matrix.test + name: Test hls-graph + run: cabal test hls-graph + + - if: needs.pre_job.outputs.should_skip_ghcide != 'true' && matrix.test + name: Test ghcide + # run the tests without parallelism to avoid running out of memory + run: cabal test ghcide-tests || cabal test ghcide-tests + + - if: matrix.test + name: Test hls-plugin-api + run: cabal test hls-plugin-api || cabal test hls-plugin-api + + - if: matrix.test + name: Test func-test suite + env: + HLS_TEST_EXE: hls + HLS_WRAPPER_TEST_EXE: hls-wrapper + run: cabal test func-test || cabal test func-test + + - if: matrix.test + name: Test wrapper-test suite + env: + HLS_TEST_EXE: hls + HLS_WRAPPER_TEST_EXE: hls-wrapper + run: cabal test wrapper-test + + - if: matrix.test + name: Test hls-refactor-plugin + run: cabal test hls-refactor-plugin-tests || cabal test hls-refactor-plugin-tests + + # TODO enable when it supports 9.10 + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' + name: Test hls-floskell-plugin + run: cabal test hls-floskell-plugin-tests || cabal test hls-floskell-plugin-tests + + - if: matrix.test + name: Test hls-class-plugin + run: cabal test hls-class-plugin-tests || cabal test hls-class-plugin-tests + + - if: matrix.test + name: Test hls-pragmas-plugin + run: cabal test hls-pragmas-plugin-tests || cabal test hls-pragmas-plugin-tests + + - if: matrix.test + name: Test hls-eval-plugin + run: cabal test hls-eval-plugin-tests || cabal test hls-eval-plugin-tests + + # TODO enable when it supports 9.10 + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' + name: Test hls-splice-plugin + run: cabal test hls-splice-plugin-tests || cabal test hls-splice-plugin-tests + + - if: matrix.test && matrix.ghc != '9.12' + name: Test hls-stan-plugin + run: cabal test hls-stan-plugin-tests || cabal test hls-stan-plugin-tests + + - if: matrix.test + name: Test hls-stylish-haskell-plugin + run: cabal test hls-stylish-haskell-plugin-tests || cabal test hls-stylish-haskell-plugin-tests + + - if: matrix.test + name: Test hls-ormolu-plugin + run: cabal test hls-ormolu-plugin-tests || cabal test hls-ormolu-plugin-tests + + - if: matrix.test + name: Test hls-fourmolu-plugin + run: cabal test hls-fourmolu-plugin-tests || cabal 
test hls-fourmolu-plugin-tests + + - if: matrix.test + name: Test hls-explicit-imports-plugin test suite + run: cabal test hls-explicit-imports-plugin-tests || cabal test hls-explicit-imports-plugin-tests + + - if: matrix.test + name: Test hls-call-hierarchy-plugin test suite + run: cabal test hls-call-hierarchy-plugin-tests || cabal test hls-call-hierarchy-plugin-tests + + - if: matrix.test && matrix.os != 'windows-latest' + name: Test hls-rename-plugin test suite + run: cabal test hls-rename-plugin-tests || cabal test hls-rename-plugin-tests + + # TODO enable when it supports 9.10 + - if: matrix.test && matrix.ghc != '9.10' + name: Test hls-hlint-plugin test suite + run: cabal test hls-hlint-plugin-tests || cabal test hls-hlint-plugin-tests + + - if: matrix.test + name: Test hls-module-name-plugin test suite + run: cabal test hls-module-name-plugin-tests || cabal test hls-module-name-plugin-tests + + - if: matrix.test + name: Test hls-alternate-number-format-plugin test suite + run: cabal test hls-alternate-number-format-plugin-tests || cabal test hls-alternate-number-format-plugin-tests + + - if: matrix.test + name: Test hls-qualify-imported-names-plugin test suite + run: cabal test hls-qualify-imported-names-plugin-tests || cabal test hls-qualify-imported-names-plugin-tests + + - if: matrix.test + name: Test hls-code-range-plugin test suite + run: cabal test hls-code-range-plugin-tests || cabal test hls-code-range-plugin-tests + + - if: matrix.test + name: Test hls-change-type-signature test suite + run: cabal test hls-change-type-signature-plugin-tests || cabal test hls-change-type-signature-plugin-tests + + - if: matrix.test + name: Test hls-gadt-plugin test suit + run: cabal test hls-gadt-plugin-tests || cabal test hls-gadt-plugin-tests + + - if: matrix.test + name: Test hls-explicit-fixity-plugin test suite + run: cabal test hls-explicit-fixity-plugin-tests || cabal test hls-explicit-fixity-plugin-tests + + - if: matrix.test + name: Test hls-explicit-record-fields-plugin test suite + run: cabal test hls-explicit-record-fields-plugin-tests || cabal test hls-explicit-record-fields-plugin-tests + + # versions need to be limited since the tests depend on cabal-fmt which only builds with ghc <9.10 + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' + name: Test hls-cabal-fmt-plugin test suite + run: cabal test hls-cabal-fmt-plugin-tests --flag=isolateCabalfmtTests || cabal test hls-cabal-fmt-plugin-tests --flag=isolateCabalfmtTests + + - if: matrix.test && matrix.ghc != '9.12' + name: Test hls-cabal-gild-plugin test suite + run: cabal test hls-cabal-gild-plugin-tests --flag=isolateCabalGildTests || cabal test hls-cabal-gild-plugin-tests --flag=isolateCabalGildTests + + - if: matrix.test + name: Test hls-cabal-plugin test suite + run: cabal test hls-cabal-plugin-tests || cabal test hls-cabal-plugin-tests + + # TODO enable when it supports 9.10 + - if: matrix.test && matrix.ghc != '9.10' && matrix.ghc != '9.12' + name: Test hls-retrie-plugin test suite + run: cabal test hls-retrie-plugin-tests || cabal test hls-retrie-plugin-tests + + - if: matrix.test + name: Test hls-overloaded-record-dot-plugin test suite + run: cabal test hls-overloaded-record-dot-plugin-tests || cabal test hls-overloaded-record-dot-plugin-tests + + - if: matrix.test + name: Test hls-semantic-tokens-plugin test suite + run: cabal test hls-semantic-tokens-plugin-tests || cabal test hls-semantic-tokens-plugin-tests + + - if: matrix.test + name: Test hls-notes-plugin test suite + run: cabal test 
hls-notes-plugin-tests || cabal test hls-notes-plugin-tests + + # The plugin tutorial is only compatible with 9.6 and 9.8. + # No particular reason, just to avoid excessive CPP. + - if: matrix.test && matrix.ghc != '9.4' && matrix.ghc != '9.10' && matrix.ghc != '9.12' + name: Compile the plugin-tutorial + run: cabal build plugin-tutorial + + test_post_job: + if: always() + runs-on: ubuntu-latest + needs: [pre_job, test] + steps: + - run: | + echo "jobs info: ${{ toJSON(needs) }}" + - if: contains(needs.*.result, 'failure') + run: exit 1 + - if: contains(needs.*.result, 'cancelled') && needs.pre_job.outputs.should_skip != 'true' + run: exit 1 diff --git a/.gitignore b/.gitignore index 2b61164aa0..2413a1fcf5 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,11 @@ cabal.project.local *~ *.lock +.tasty-rerun-log + +# emacs +/.dir-locals.el + # shake build information _build/ @@ -24,3 +29,25 @@ test/testdata/**/hie.yaml # metadata files on macOS .DS_Store + +# shake build folder (used in benchmark suite) +.shake/ + +# direnv +/.direnv/ +/.envrc + +# bench +*.identifierPosition +/bench/example +/bench-results + +# nix +result +result-doc + +out/ +store/ +gh-release-artifacts/ + +.hls/ diff --git a/.gitlab/darwin/nix/sources.json b/.gitlab/darwin/nix/sources.json new file mode 100644 index 0000000000..a6ff5dc415 --- /dev/null +++ b/.gitlab/darwin/nix/sources.json @@ -0,0 +1,26 @@ +{ + "niv": { + "branch": "master", + "description": "Easy dependency management for Nix projects", + "homepage": "https://github.com/nmattia/niv", + "owner": "nmattia", + "repo": "niv", + "rev": "e0ca65c81a2d7a4d82a189f1e23a48d59ad42070", + "sha256": "1pq9nh1d8nn3xvbdny8fafzw87mj7gsmp6pxkdl65w2g18rmcmzx", + "type": "tarball", + "url": "https://github.com/nmattia/niv/archive/e0ca65c81a2d7a4d82a189f1e23a48d59ad42070.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + }, + "nixpkgs": { + "branch": "wip/ghc-8.10.7-darwin", + "description": "Nix Packages collection", + "homepage": "", + "owner": "bgamari", + "repo": "nixpkgs", + "rev": "37c60356e3f83c708a78a96fdd914b5ffc1f551c", + "sha256": "0i5j7nwk4ky0fg4agla3aznadpxz0jyrdwp2q92hyxidra987syn", + "type": "tarball", + "url": "https://github.com/bgamari/nixpkgs/archive/37c60356e3f83c708a78a96fdd914b5ffc1f551c.tar.gz", + "url_template": "https://github.com///archive/.tar.gz" + } +} diff --git a/.gitlab/darwin/nix/sources.nix b/.gitlab/darwin/nix/sources.nix new file mode 100644 index 0000000000..1938409ddd --- /dev/null +++ b/.gitlab/darwin/nix/sources.nix @@ -0,0 +1,174 @@ +# This file has been generated by Niv. + +let + + # + # The fetchers. fetch_ fetches specs of type . + # + + fetch_file = pkgs: name: spec: + let + name' = sanitizeName name + "-src"; + in + if spec.builtin or true then + builtins_fetchurl { inherit (spec) url sha256; name = name'; } + else + pkgs.fetchurl { inherit (spec) url sha256; name = name'; }; + + fetch_tarball = pkgs: name: spec: + let + name' = sanitizeName name + "-src"; + in + if spec.builtin or true then + builtins_fetchTarball { name = name'; inherit (spec) url sha256; } + else + pkgs.fetchzip { name = name'; inherit (spec) url sha256; }; + + fetch_git = name: spec: + let + ref = + if spec ? ref then spec.ref else + if spec ? branch then "refs/heads/${spec.branch}" else + if spec ? 
tag then "refs/tags/${spec.tag}" else + abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!"; + in + builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; }; + + fetch_local = spec: spec.path; + + fetch_builtin-tarball = name: throw + ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. + $ niv modify ${name} -a type=tarball -a builtin=true''; + + fetch_builtin-url = name: throw + ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. + $ niv modify ${name} -a type=file -a builtin=true''; + + # + # Various helpers + # + + # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695 + sanitizeName = name: + ( + concatMapStrings (s: if builtins.isList s then "-" else s) + ( + builtins.split "[^[:alnum:]+._?=-]+" + ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name) + ) + ); + + # The set of packages used when specs are fetched using non-builtins. + mkPkgs = sources: system: + let + sourcesNixpkgs = + import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; }; + hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; + hasThisAsNixpkgsPath = == ./.; + in + if builtins.hasAttr "nixpkgs" sources + then sourcesNixpkgs + else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then + import {} + else + abort + '' + Please specify either (through -I or NIX_PATH=nixpkgs=...) or + add a package called "nixpkgs" to your sources.json. + ''; + + # The actual fetching function. + fetch = pkgs: name: spec: + + if ! builtins.hasAttr "type" spec then + abort "ERROR: niv spec ${name} does not have a 'type' attribute" + else if spec.type == "file" then fetch_file pkgs name spec + else if spec.type == "tarball" then fetch_tarball pkgs name spec + else if spec.type == "git" then fetch_git name spec + else if spec.type == "local" then fetch_local spec + else if spec.type == "builtin-tarball" then fetch_builtin-tarball name + else if spec.type == "builtin-url" then fetch_builtin-url name + else + abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; + + # If the environment variable NIV_OVERRIDE_${name} is set, then use + # the path directly as opposed to the fetched source. + replace = name: drv: + let + saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name; + ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}"; + in + if ersatz == "" then drv else + # this turns the string into an actual Nix path (for both absolute and + # relative paths) + if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. 
+ builtins.getEnv "PWD" + "/${ersatz}"; + + # Ports of functions for older nix versions + + # a Nix version of mapAttrs if the built-in doesn't exist + mapAttrs = builtins.mapAttrs or ( + f: set: with builtins; + listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) + ); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 + range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 + stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); + + # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 + stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); + concatMapStrings = f: list: concatStrings (map f list); + concatStrings = builtins.concatStringsSep ""; + + # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331 + optionalAttrs = cond: as: if cond then as else {}; + + # fetchTarball version that is compatible between all the versions of Nix + builtins_fetchTarball = { url, name ? null, sha256 }@attrs: + let + inherit (builtins) lessThan nixVersion fetchTarball; + in + if lessThan nixVersion "1.12" then + fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; })) + else + fetchTarball attrs; + + # fetchurl version that is compatible between all the versions of Nix + builtins_fetchurl = { url, name ? null, sha256 }@attrs: + let + inherit (builtins) lessThan nixVersion fetchurl; + in + if lessThan nixVersion "1.12" then + fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; })) + else + fetchurl attrs; + + # Create the final "sources" from the config + mkSources = config: + mapAttrs ( + name: spec: + if builtins.hasAttr "outPath" spec + then abort + "The values in sources.json should not have an 'outPath' attribute" + else + spec // { outPath = replace name (fetch config.pkgs name spec); } + ) config.sources; + + # The "config" used by the fetchers + mkConfig = + { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null + , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile) + , system ? builtins.currentSystem + , pkgs ? mkPkgs sources system + }: rec { + # The sources, i.e. the attribute set of spec name to spec + inherit sources; + + # The "pkgs" (evaluated nixpkgs) to use for e.g. 
non-builtin fetchers + inherit pkgs; + }; + +in +mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); } diff --git a/.gitlab/darwin/toolchain.nix b/.gitlab/darwin/toolchain.nix new file mode 100644 index 0000000000..2c65f92b3c --- /dev/null +++ b/.gitlab/darwin/toolchain.nix @@ -0,0 +1,34 @@ +{ system }: + +let + sources = import ./nix/sources.nix; + nixpkgsSrc = sources.nixpkgs; + pkgs = import nixpkgsSrc { inherit system; }; +in + +let + hsPkgs = pkgs.haskellPackages; + alex = hsPkgs.alex; + happy = hsPkgs.happy; + targetTriple = pkgs.stdenv.targetPlatform.config; + + llvm = pkgs.llvm_11; +in +pkgs.writeTextFile { + name = "toolchain"; + text = '' + export PATH + PATH="${pkgs.autoconf}/bin:$PATH" + PATH="${pkgs.automake}/bin:$PATH" + PATH="${pkgs.tree}/bin:$PATH" + export HAPPY="${happy}/bin/happy" + export ALEX="${alex}/bin/alex" + export LLC="${llvm}/bin/llc" + export OPT="${llvm}/bin/opt" + export SPHINXBUILD="${pkgs.python3Packages.sphinx}/bin/sphinx-build" + export CABAL_INSTALL="${pkgs.cabal-install}/bin/cabal" + export CABAL="$CABAL_INSTALL" + + sdk_path="$(xcrun --sdk macosx --show-sdk-path)" + ''; +} diff --git a/.gitlab/setup.sh b/.gitlab/setup.sh new file mode 100644 index 0000000000..ec4402edc4 --- /dev/null +++ b/.gitlab/setup.sh @@ -0,0 +1,11 @@ +case "$(uname -s)" in + "Darwin"|"darwin") + nix build -f $CI_PROJECT_DIR/.gitlab/darwin/toolchain.nix --argstr system "$NIX_SYSTEM" -o toolchain.sh + cat toolchain.sh + source toolchain.sh + unset MACOSX_DEPLOYMENT_TARGET + # Precautious since we want to use ghc from ghcup + unset GHC + ;; +esac + diff --git a/.gitmodules b/.gitmodules index 839d96ebdc..7856aaec36 100644 --- a/.gitmodules +++ b/.gitmodules @@ -8,10 +8,3 @@ # Commit git commit -m "Removed submodule " # Delete the now untracked submodule files # rm -rf path_to_submodule -[submodule "ghcide"] - path = ghcide - # url = https://github.com/digital-asset/ghcide.git - # url = https://github.com/alanz/ghcide.git - # url = https://github.com/wz1000/ghcide.git - # url = https://github.com/fendor/ghcide.git - url = https://github.com/bubba/ghcide.git diff --git a/.gitpod.Dockerfile b/.gitpod.Dockerfile new file mode 100644 index 0000000000..b35e86ebe1 --- /dev/null +++ b/.gitpod.Dockerfile @@ -0,0 +1,22 @@ +FROM gitpod/workspace-full + +RUN sudo install-packages build-essential curl libffi-dev libffi7 libgmp-dev libgmp10 \ + libncurses-dev libncurses5 libtinfo5 && \ + curl --proto '=https' --tlsv1.2 -sSf https://get-ghcup.haskell.org | BOOTSTRAP_HASKELL_NONINTERACTIVE=1 BOOTSTRAP_HASKELL_MINIMAL=1 sh && \ + echo 'source $HOME/.ghcup/env' >> $HOME/.bashrc && \ + echo 'export PATH=$HOME/.cabal/bin:$HOME/.local/bin:$PATH' >> $HOME/.bashrc && \ + . /home/gitpod/.ghcup/env && \ + # Install all verions of GHC that HLS supports. Putting GHC into Docker image makes workspace start much faster. 
+ ghcup install ghc 8.10.7 && \ + ghcup install ghc 9.0.2 && \ + ghcup install ghc 9.2.3 && \ + ghcup install ghc 9.2.4 --set && \ + ghcup install hls --set && \ + ghcup install cabal --set && \ + ghcup install stack --set && \ + cabal update && \ + cabal install --disable-executable-dynamic --install-method copy --constraint "stylish-haskell +ghc-lib" \ + stylish-haskell implicit-hie hoogle && \ + rm -rf $HOME/.cabal/store && \ + pip install pre-commit && \ + npm install -g http-server diff --git a/.gitpod.yml b/.gitpod.yml new file mode 100644 index 0000000000..ae2cf47a3c --- /dev/null +++ b/.gitpod.yml @@ -0,0 +1,76 @@ +image: + file: .gitpod.Dockerfile +# List the start up tasks. Learn more https://www.gitpod.io/docs/config-start-tasks/ +tasks: + - name: Setup + before: | + # Make sure some folders not in /workspace persist between worksapce restarts. + # You may add additional directories to this list. + declare -a CACHE_DIRS=( + $HOME/.local + $HOME/.cabal + $HOME/.stack + $HOME/.cache/ghcide + $HOME/.cache/hie-bios + /nix + ) + for DIR in "${CACHE_DIRS[@]}"; do + mkdir -p $(dirname /workspace/cache$DIR) + mkdir -p $DIR # in case $DIR doesn't already exist + # On a fresh start with no prebuilds, we move existing directory + # to /workspace. 'sudo mv' fails with 'no permission', I don't know why + if [ ! -d /workspace/cache$DIR ]; then + sudo cp -rp $DIR /workspace/cache$DIR + sudo rm -rf $DIR/* + fi + mkdir -p /workspace/cache$DIR # make sure it exists even if cp fails + # Now /workspace/cache$DIR exists. + # Use bind mount to make $DIR backed by /workspace/cache$DIR + sudo mount --bind /workspace/cache$DIR $DIR + done + + # Install pre-commit hook + pre-commit install + + # Configure VSCode to use the locally built version of HLS + mkdir -p .vscode + if [ ! -f .vscode/settings.json ]; then + # Only write to .vscode/settings.json if it doesn't exist. + echo '{' > .vscode/settings.json + echo ' "haskell.serverExecutablePath": "/home/gitpod/.cabal/bin/haskell-language-server",' >> .vscode/settings.json + echo ' "haskell.formattingProvider": "stylish-haskell"' >> .vscode/settings.json + echo '}' >> .vscode/settings.json + fi + + pip install -r docs/requirements.txt + init: | + cabal update + cabal configure --enable-executable-dynamic + cabal build --enable-tests all + cabal install exe:haskell-language-server + +# List the ports to expose. 
Learn more https://www.gitpod.io/docs/config-ports/ +ports: [] + +github: + prebuilds: + # enable for the master/default branch (defaults to true) + master: true + # enable for all branches in this repo (defaults to false) + branches: false + # enable for pull requests coming from this repo (defaults to true) + pullRequests: true + # enable for pull requests coming from forks (defaults to false) + pullRequestsFromForks: true + # add a "Review in Gitpod" button as a comment to pull requests (defaults to true) + addComment: false + # add a "Review in Gitpod" button to pull requests (defaults to false) + addBadge: true + # add a label once the prebuild is ready to pull requests (defaults to false) + addLabel: false + +vscode: + extensions: + - "haskell.haskell" + - "justusadam.language-haskell" + - "EditorConfig.EditorConfig" diff --git a/.hlint.yaml b/.hlint.yaml new file mode 100644 index 0000000000..edc6886871 --- /dev/null +++ b/.hlint.yaml @@ -0,0 +1,238 @@ +# HLint configuration file +# https://github.com/ndmitchell/hlint +########################## + +# To run HLint do: +# $ hlint --git -j4 + +# Warnings currently triggered by our code +- ignore: {name: "Use <$>"} +- ignore: {name: "Use :"} +- ignore: {name: "Redundant do"} +- ignore: {name: "Avoid lambda"} +- ignore: {name: "Use newtype instead of data"} +- ignore: {name: "Use unless"} +- ignore: {name: "Move brackets to avoid $"} +- ignore: {name: "Eta reduce"} +- ignore: {name: "Parse error"} +- ignore: {name: "Reduce duplication"} +- ignore: {name: "Use ++"} +- ignore: {name: "Use $>"} +- ignore: {name: "Use section"} +- ignore: {name: "Use record patterns"} +- ignore: {name: "Use camelCase"} +- ignore: {name: "Use uncurry"} +- ignore: {name: "Avoid lambda using `infix`"} +- ignore: {name: "Replace case with fromMaybe"} + +# Gives at least one suggestion we don't like. +- ignore: {name: "Use <=<"} +- ignore: {name: "Use zipFrom"} +- ignore: {name: "Use zipWithFrom"} + +# We are using the "redundant" return/pure to assign a name. We do not want to +# delete it. 
In particular, this is not an improvement:
+#   Found:
+#     do options <- somethingComplicated
+#     pure options
+#   Perhaps:
+#     do somethingComplicated
+- ignore: {name: "Redundant return"}
+- ignore: {name: "Redundant pure"}
+
+# Off by default hints we like
+- warn: {name: Use module export list}
+
+# Condemn nub and friends
+- warn: {lhs: nub (sort x), rhs: Data.List.Extra.nubSort x}
+- warn: {lhs: nub, rhs: Data.List.Extra.nubOrd}
+- warn: {lhs: nubBy, rhs: Data.List.Extra.nubOrdBy}
+- warn: {lhs: Data.List.Extra.nubOn, rhs: Data.List.Extra.nubOrdOn}
+
+- functions:
+  # Things that are unsafe in Haskell base library
+  - name: unsafePerformIO
+    within:
+    - Development.IDE.Core.Shake
+    - Development.IDE.GHC.Util
+    - Development.IDE.Core.FileStore
+    - Development.IDE.Plugin.CodeAction.Util
+    - Development.IDE.Graph.Internal.Database
+    - Development.IDE.Graph.Internal.Paths
+    - Development.IDE.Graph.Internal.Profile
+    - Development.IDE.Graph.Internal.Key
+    - Ide.Types
+    - Test.Hls
+    - Test.Hls.Command
+    - AutoTupleSpec
+  - name: unsafeInterleaveIO
+    within:
+    - Development.IDE.LSP.LanguageServer
+  - {name: unsafeDupablePerformIO, within: []}
+  - name: unsafeCoerce
+    within:
+    - Ide.Plugin.Eval.Code
+    - Development.IDE.Core.Compile
+    - Development.IDE.Types.Shake
+    - Ide.Plugin.Properties
+
+  # Things that are a bit dangerous in the GHC API
+  - name: nameModule
+    within:
+    - Development.IDE.GHC.CoreFile
+    - Ide.Plugin.CallHierarchy.Internal
+    - Ide.Plugin.Rename
+    - Compat.HieBin
+
+  # Partial functions
+
+  # We need to check functions which
+  # are typically exported multiple ways under both names,
+  # see https://github.com/ndmitchell/hlint/issues/1389
+  - name: [Prelude.head, Data.List.head]
+    within:
+    - Main
+    - Experiments
+    - Development.Benchmark.Rules
+    - Development.IDE.Plugin.Completions
+    - Development.IDE.Plugin.CodeAction.ExactPrint
+    - Development.IDE.Spans.Documentation
+    - Development.IDE.Session
+    - Ide.Plugin.CallHierarchy.Internal
+    - Ide.Plugin.Eval.Code
+    - Ide.Plugin.Eval.Util
+    - Ide.Plugin.Class.ExactPrint
+    - TExpectedActual
+    - TRigidType
+    - TRigidType2
+    - RightToLeftFixities
+    - Typeclass
+    - CompletionTests #Previously part of GHCIDE Main tests
+    - DiagnosticTests #Previously part of GHCIDE Main tests
+    - FindDefinitionAndHoverTests #Previously part of GHCIDE Main tests
+    - FindImplementationAndHoverTests #Previously part of GHCIDE Main tests
+    - TestUtils #Previously part of GHCIDE Main tests
+    - CodeLensTests #Previously part of GHCIDE Main tests
+
+  - name: [Prelude.tail, Data.List.tail]
+    within:
+    - Main
+    - Development.Benchmark.Rules
+    - Development.IDE.Plugin.CodeAction
+    - Development.IDE.Plugin.CodeAction.ExactPrint
+    - Development.IDE.Session
+    - UnificationSpec
+    - WatchedFileTests #Previously part of GHCIDE Main tests
+
+  - name: [Prelude.last, Data.List.last]
+    within:
+    - Main
+    - Development.IDE.Plugin.CodeAction
+    - Development.IDE.Plugin.CodeAction.ExactPrint
+    - Development.IDE.Spans.Common
+    - Development.IDE.Graph.Internal.Types
+    - Ide.PluginUtils
+    - Ide.Plugin.Eval.Parse.Comments
+    - Ide.Plugin.Eval.CodeLens
+    - FindDefinitionAndHoverTests #Previously part of GHCIDE Main tests
+    - FindImplementationAndHoverTests #Previously part of GHCIDE Main tests
+
+  - name: [Prelude.init, Data.List.init]
+    within:
+    - Main
+    - Development.IDE.Spans.Common
+    - Ide.PluginUtils
+    - Development.Benchmark.Rules
+    - TErrorGivenPartialSignature
+    - IfaceTests #Previously part of GHCIDE Main tests
+    - THTests #Previously part of GHCIDE Main
tests + - WatchedFileTests #Previously part of GHCIDE Main tests + + - name: Data.List.foldl1' + within: [] + + - name: Data.List.foldr1' + within: [] + + - name: ["Prelude.!!", "Data.List.!!"] + within: + - Main + - Experiments + - FunctionalCodeAction + - Development.IDE.Plugin.CodeAction + - Development.IDE.Plugin.Completions.Logic + - Development.IDE.Spans.Documentation + - TErrorGivenPartialSignature + - InitializeResponseTests #Previously part of GHCIDE Main tests + - PositionMappingTests #Previously part of GHCIDE Main tests + + - name: Data.Text.head + within: + - Development.IDE.Plugin.CodeAction + - Development.IDE.Plugin.Completions.Logic + + - name: Data.Foldable.foldl1 + within: [] + + - name: Data.Foldable.foldr1 + within: [] + + - name: Data.Maybe.fromJust + within: + - Experiments + - Main + - Progress + - Development.IDE.Core.Compile + - Development.IDE.Core.Rules + - Development.IDE.Core.Shake + - Development.IDE.Test + - Development.IDE.Graph.Internal.Profile + - Development.IDE.Graph.Internal.Rules + - CodeLensTests #Previously part of GHCIDE Main tests + + - name: "Data.Map.!" + within: [] + + - name: "Data.IntMap.!" + within: [] + + - name: "Data.Vector.!" + within: [] + + - name: "GHC.Arr.!" + within: [] + + # We do not want to use functions from the + # GHC driver. Instead use hls rules to construct + # an appropriate GHC session + - name: "load" + within: [] + - name: "load'" + within: [] + - name: "loadWithCache" + within: [] + + # Tracing functions + # We ban an explicit list rather than the + # Debug.Trace, because that module also + # includes the eventlog tracing functions, + # which are legitimate to use. + - name: + - Debug.Trace.trace + - Debug.Trace.traceId + - Debug.Trace.traceShow + - Debug.Trace.traceShowId + - Debug.Trace.traceStack + - Debug.Trace.traceIO + - Debug.Trace.traceM + - Debug.Trace.traceShowM + - Debug.Trace.putTraceMsg + within: + - Development.IDE.Core.Compile + - Development.IDE.Graph.Internal.Database + - Development.IDE.GHC.Util + - Development.IDE.Plugin.CodeAction.Util + +# We really do not want novel usages of restricted functions, and mere +# Warning is not enough to prevent those consistently; you need a build failure. 
+- error: {name: Avoid restricted function} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..03edd673b7 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,23 @@ +# https://pre-commit.com/ +# https://github.com/pre-commit/pre-commit +repos: + - hooks: + - entry: stylish-haskell --inplace + exclude: >- + (^Setup.hs$|test/testdata/.*$|test/data/.*$|test/manual/lhs/.*$|^hie-compat/.*$|^plugins/hls-tactics-plugin/.*$|^ghcide/src/Development/IDE/GHC/Compat.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs$|^ghcide/src/Development/IDE/GHC/Compat/Core.hs$|^ghcide/src/Development/IDE/Spans/Pragmas.hs$|^ghcide/src/Development/IDE/LSP/Outline.hs$|^plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs$|^ghcide/src/Development/IDE/Core/Rules.hs$|^ghcide/src/Development/IDE/Core/Compile.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs$|^plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs$|^plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs$) + files: \.l?hs$ + id: stylish-haskell + language: system + name: stylish-haskell + pass_filenames: true + types: + - file + repo: local + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: mixed-line-ending + args: + - '--fix' + - lf + exclude: test/testdata/.*CRLF.*?\.hs$ diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..f5135a9af1 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,15 @@ +version: 2 + +sphinx: + builder: "html" + configuration: docs/conf.py + +build: + os: "ubuntu-22.04" + tools: + # this means the latest python 3 + python: "3" + +python: + install: + - requirements: docs/requirements.txt diff --git a/.stylish-haskell.yaml b/.stylish-haskell.yaml new file mode 100644 index 0000000000..76840c7497 --- /dev/null +++ b/.stylish-haskell.yaml @@ -0,0 +1,78 @@ +# See https://github.com/jaspervdj/stylish-haskell/blob/main/data/stylish-haskell.yaml +# for reference. 
+
+steps:
+  # - unicode_syntax:
+  #     add_language_pragma: true
+
+  # - module_header:
+  #     indent: 4
+  #     sort: true
+  #     separate_lists: true
+
+  # - records:
+  #     equals: "indent 2"
+  #     first_field: "indent 2"
+  #     field_comment: 2
+  #     deriving: 2
+  #     via: "indent 2"
+  #     sort_deriving: true
+  #     break_enums: false
+  #     break_single_constructors: true
+  #     curried_context: false
+
+  - simple_align:
+      cases: always
+      top_level_patterns: always
+      records: always
+      multi_way_if: always
+
+  - imports:
+      align: global
+      list_align: after_alias
+      pad_module_names: true
+      long_list_align: inline
+      empty_list_align: inherit
+      list_padding: 4
+      separate_lists: true
+      space_surround: false
+      ghc_lib_parser: false
+
+  - language_pragmas:
+      style: vertical
+      align: true
+      remove_redundant: true
+      language_prefix: LANGUAGE
+
+  # - tabs:
+  #     spaces: 8
+
+  - trailing_whitespace: {}
+
+  # - squash: {}
+
+columns: 80
+
+newline: lf
+
+language_extensions:
+  - BangPatterns
+  - CPP
+  - DataKinds
+  - DeriveFunctor
+  - DeriveGeneric
+  - FlexibleContexts
+  - GeneralizedNewtypeDeriving
+  - KindSignatures
+  - LambdaCase
+  - NamedFieldPuns
+  - OverloadedStrings
+  - RecordWildCards
+  - ScopedTypeVariables
+  - StandaloneDeriving
+  - TupleSections
+  - TypeApplications
+  - TypeOperators
+  - ViewPatterns
+
+cabal: true
diff --git a/CITATION.cff b/CITATION.cff
new file mode 100644
index 0000000000..2de4a478b3
--- /dev/null
+++ b/CITATION.cff
@@ -0,0 +1,36 @@
+cff-version: 1.2.0
+message: "If you need to cite Haskell Language Server, please cite the paper described in this metadata, which introduces the current design."
+repository-code: "https://github.com/haskell/haskell-language-server"
+title: Haskell Language Server
+preferred-citation:
+  type: article
+  authors:
+  - family-names: Mitchell
+    given-names: Neil
+    orcid: "https://orcid.org/0000-0001-5171-9726"
+  - family-names: Kiefer
+    given-names: Moritz
+  - family-names: Iborra
+    given-names: Pepe
+  - family-names: Lau
+    given-names: Luke
+  - family-names: Duggal
+    given-names: Zubin
+  - family-names: Siebenhandl
+    given-names: Hannes
+  - family-names: Sanchez
+    given-names: Javier Neira
+  - family-names: Pickering
+    given-names: Matthew
+  - family-names: Zimmerman
+    given-names: Alan
+  doi: "10.1145/3462172"
+  journal: "IFL 2020: Proceedings of the 32nd Symposium on Implementation and Application of Functional Languages"
+  month: 9
+  day: 2
+  isbn: 978-1-4503-8963-1
+  publisher: ACM
+  title: "Building an Integrated Development Environment (IDE) on top of a Build System"
+  year: 2020
+  url: https://ndmitchell.com/downloads/paper-building_an_ide_on_top_of_a_build_system_revised-04_sep_2020.pdf
+  abstract: "When developing a Haskell IDE we hit upon an idea - why not base an IDE on a build system? In this paper we'll explain how to go from that idea to a usable IDE, including the difficulties imposed by reusing a build system, and those imposed by technical details specific to Haskell. Our design has been successful, and hopefully provides a blue-print for others writing IDEs."
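The .hlint.yaml added above pairs replacement hints for nub and friends with a list of restricted functions, and its final rule promotes "Avoid restricted function" from a warning to an error, so any use of a restricted function outside the listed modules fails the lint run. Purely as a rough illustration (the module and names below are hypothetical and not part of this change), code in an unlisted module would be flagged along these lines:

    -- Scratch.hs (hypothetical module, not in the repository)
    module Scratch (uniqueNames, firstName) where

    import Data.List  (nub)
    import Data.Maybe (fromJust)

    -- Matches the custom hint above: hlint suggests Data.List.Extra.nubOrd.
    uniqueNames :: [String] -> [String]
    uniqueNames xs = nub xs

    -- Scratch is not in the 'within' list for Data.Maybe.fromJust, so the
    -- "Avoid restricted function" hint applies, and its error severity makes
    -- hlint exit non-zero instead of merely warning.
    firstName :: [String] -> String
    firstName = fromJust . safeHead
      where
        safeHead (x:_) = Just x
        safeHead []    = Nothing
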
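The .stylish-haskell.yaml settings above are easiest to read by example. The following sketch is not taken from the repository and the module is made up; it only shows, approximately, the layout those options aim for: language_pragmas with style vertical and align true gives one pragma per line with aligned closing braces, and imports with align global plus pad_module_names lines up the module names and import lists:

    {-# LANGUAGE OverloadedStrings   #-}
    {-# LANGUAGE ScopedTypeVariables #-}

    module Layout (lookupText) where

    import           Data.Map  (Map)
    import qualified Data.Map  as Map
    import           Data.Text (Text)

    -- A trivial definition so the example compiles.
    lookupText :: Text -> Map Text Text -> Maybe Text
    lookupText = Map.lookup
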
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000000..820661ceeb
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1,59 @@
+# Core
+/ghcide @wz1000
+/ghcide/session-loader @wz1000 @fendor
+/hls-graph @wz1000
+/hls-plugin-api @michaelpj @fendor
+/hls-test-utils @fendor
+
+# HLS main
+/src @fendor
+/exe @fendor
+/test @fendor
+
+# Plugins
+/plugins/hls-alternate-number-format-plugin @drsooch
+/plugins/hls-cabal-fmt-plugin @VeryMilkyJoe @fendor
+/plugins/hls-cabal-gild-plugin @fendor
+/plugins/hls-cabal-plugin @fendor
+/plugins/hls-call-hierarchy-plugin @July541
+/plugins/hls-change-type-signature-plugin
+/plugins/hls-class-plugin
+/plugins/hls-code-range-plugin @kokobd
+/plugins/hls-eval-plugin
+/plugins/hls-explicit-fixity-plugin
+/plugins/hls-explicit-imports-plugin
+/plugins/hls-explicit-record-fields-plugin @ozkutuk
+/plugins/hls-floskell-plugin @peterbecich
+/plugins/hls-fourmolu-plugin @georgefst
+/plugins/hls-gadt-plugin @July541
+/plugins/hls-hlint-plugin @eddiemundo
+/plugins/hls-module-name-plugin
+/plugins/hls-notes-plugin @jvanbruegge
+/plugins/hls-ormolu-plugin @georgefst
+/plugins/hls-overloaded-record-dot-plugin @joyfulmantis
+/plugins/hls-pragmas-plugin @eddiemundo
+/plugins/hls-qualify-imported-names-plugin @eddiemundo
+/plugins/hls-refactor-plugin @santiweight
+/plugins/hls-rename-plugin
+/plugins/hls-retrie-plugin @wz1000
+/plugins/hls-semantic-tokens-plugin @soulomoon
+/plugins/hls-splice-plugin @konn
+/plugins/hls-stan-plugin @0rphee
+/plugins/hls-stylish-haskell-plugin @michaelpj
+
+# Benchmarking
+/shake-bench
+/bench
+
+# Docs
+/docs @michaelpj
+
+# CI
+/.circleci
+/.github @michaelpj @fendor
+
+# Build
+*.nix @berberman @michaelpj @guibou
+*.project @michaelpj
+*.stack* @michaelpj
+.gitpod.* @kokobd
diff --git a/ChangeLog.md b/ChangeLog.md
index a6e412113d..65000395e2 100644
--- a/ChangeLog.md
+++ b/ChangeLog.md
@@ -1,5 +1,4011 @@
 # Changelog for haskell-language-server
+
+## 2.11.0.0
+
+- Bindists for GHC 9.12.2
+  - Full plugin support, including refactor plugin
+- Bindists for GHC 9.10.2
+- Bindists for GHC 9.8.4
+- Bindists for GHC 9.6.7
+- Bindists for GHC 9.4.8
+- Dropped support for CentOS 7 as this platform is no longer supported by GHC
+- Improved import suggestions for constructors and OverloadedRecordDot fields
+
+### Pull Requests
+
+- Add doc for project-wide renaming
+  ([#4584](https://github.com/haskell/haskell-language-server/pull/4584)) by @jian-lin
+- Use hie-bios 0.15.0
+  ([#4582](https://github.com/haskell/haskell-language-server/pull/4582)) by @fendor
+- Allow building HLS with GHC 9.10.2
+  ([#4581](https://github.com/haskell/haskell-language-server/pull/4581)) by @fendor
+- Fix Plugin support table for 9.12.2
+  ([#4580](https://github.com/haskell/haskell-language-server/pull/4580)) by @fendor
+- Fix misplaced inlay hints by applying PositionMapping
+  ([#4571](https://github.com/haskell/haskell-language-server/pull/4571)) by @jetjinser
+- Enable hls-plugin-gadt for ghc-9.12
+  ([#4568](https://github.com/haskell/haskell-language-server/pull/4568)) by @GuillaumedeVolpiano
+- Remove no longer needed allow-newer
+  ([#4566](https://github.com/haskell/haskell-language-server/pull/4566)) by @jhrcek
+- Add missing golden files for GHC 9.10 config tests
+  ([#4563](https://github.com/haskell/haskell-language-server/pull/4563)) by @jian-lin
+- updating the plugins support
table for refactor + ([#4560](https://github.com/haskell/haskell-language-server/pull/4560)) by @GuillaumedeVolpiano +- Enable stylish-haskell for ghc-9.10 and ghc-9.12 + ([#4559](https://github.com/haskell/haskell-language-server/pull/4559)) by @GuillaumedeVolpiano +- Bump haskell-actions/setup from 2.7.10 to 2.7.11 + ([#4557](https://github.com/haskell/haskell-language-server/pull/4557)) by @dependabot[bot] +- Provide code action in hls-eval-plugin + ([#4556](https://github.com/haskell/haskell-language-server/pull/4556)) by @jian-lin +- enable hlint for ghc-9.12 + ([#4555](https://github.com/haskell/haskell-language-server/pull/4555)) by @GuillaumedeVolpiano +- Enable fourmolu and ormolu for GHC 9.12 + ([#4554](https://github.com/haskell/haskell-language-server/pull/4554)) by @fendor +- Enable hls-cabal-gild-plugin for GHC 9.12.2 + ([#4553](https://github.com/haskell/haskell-language-server/pull/4553)) by @fendor +- Update plugin support table for GHC 9.12.2 + ([#4552](https://github.com/haskell/haskell-language-server/pull/4552)) by @fendor +- Remove allow-newer for hiedb + ([#4551](https://github.com/haskell/haskell-language-server/pull/4551)) by @jhrcek +- Fix typo of rename plugin config + ([#4546](https://github.com/haskell/haskell-language-server/pull/4546)) by @jian-lin +- Update the ghcup-metadata generation script + ([#4545](https://github.com/haskell/haskell-language-server/pull/4545)) by @fendor +- porting hls-refactor to ghc-9.12 + ([#4543](https://github.com/haskell/haskell-language-server/pull/4543)) by @GuillaumedeVolpiano +- add ghcide-bench flag to .cabal file + ([#4542](https://github.com/haskell/haskell-language-server/pull/4542)) by @juhp +- Revert "link executables dynamically to speed up linking (#4423)" + ([#4541](https://github.com/haskell/haskell-language-server/pull/4541)) by @fendor +- Support PackageImports in hiddenPackageSuggestion + ([#4537](https://github.com/haskell/haskell-language-server/pull/4537)) by @jian-lin +- Improve FreeBSD installation docs + ([#4536](https://github.com/haskell/haskell-language-server/pull/4536)) by @arrowd +- reinstating ignore-plugins-ghc-bounds + ([#4532](https://github.com/haskell/haskell-language-server/pull/4532)) by @GuillaumedeVolpiano +- Simplify FuzzySearch test (avoid dependency on /usr/share/dict/words) + ([#4531](https://github.com/haskell/haskell-language-server/pull/4531)) by @jhrcek +- Import suggestion for missing newtype constructor, all types constructor and indirect overloadedrecorddot fields + ([#4516](https://github.com/haskell/haskell-language-server/pull/4516)) by @guibou + +## 2.10.0.0 + +- Bindists for GHC 9.12.2 + - This is only basic support, many plugins are not yet compatible. 
+- Bindists for GHC 9.8.4 +- Bindists for GHC 9.6.7 +- `hls-cabal-plugin` features + - Support for `cabal-add` + - Goto Definition for common sections + - Outline of .cabal files +- Fix handling of LSP resolve requests +- Display Inlay Hints + - Records + - Imports + +### Pull Requests + +- Fix cabal check for Hackage release + ([#4528](https://github.com/haskell/haskell-language-server/pull/4528)) by @fendor +- GHC 9.12 support + ([#4527](https://github.com/haskell/haskell-language-server/pull/4527)) by @wz1000 +- Bump cachix/install-nix-action from 30 to 31 + ([#4525](https://github.com/haskell/haskell-language-server/pull/4525)) by @dependabot[bot] +- Bump cachix/cachix-action from 15 to 16 + ([#4523](https://github.com/haskell/haskell-language-server/pull/4523)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.9 to 2.7.10 + ([#4522](https://github.com/haskell/haskell-language-server/pull/4522)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.9 to 2.7.10 in /.github/actions/setup-build + ([#4521](https://github.com/haskell/haskell-language-server/pull/4521)) by @dependabot[bot] +- Move ghcide-test to stand alone dir + ([#4520](https://github.com/haskell/haskell-language-server/pull/4520)) by @soulomoon +- refactor: remove unnecessary instance and use of unsafeCoerce + ([#4518](https://github.com/haskell/haskell-language-server/pull/4518)) by @MangoIV +- convert `pre-commit-config.yaml` from JSON to YAML + ([#4513](https://github.com/haskell/haskell-language-server/pull/4513)) by @peterbecich +- Enable bench for 9.10 + ([#4512](https://github.com/haskell/haskell-language-server/pull/4512)) by @soulomoon +- Bugfix: Explicit record fields inlay hints for polymorphic records + ([#4510](https://github.com/haskell/haskell-language-server/pull/4510)) by @wczyz +- Capitalization of "Replace" + ([#4509](https://github.com/haskell/haskell-language-server/pull/4509)) by @dschrempf +- document eval plugin not supporting multiline expressions + ([#4495](https://github.com/haskell/haskell-language-server/pull/4495)) by @noughtmare +- Documentation: Imrpove "Contributing" (and amend Sphinx builders) + ([#4494](https://github.com/haskell/haskell-language-server/pull/4494)) by @dschrempf +- Documentation: HLS plugin tutorial improvements + ([#4491](https://github.com/haskell/haskell-language-server/pull/4491)) by @dschrempf +- Nix tooling (minor changes) + ([#4490](https://github.com/haskell/haskell-language-server/pull/4490)) by @dschrempf +- Bump haskell-actions/setup from 2.7.8 to 2.7.9 + ([#4483](https://github.com/haskell/haskell-language-server/pull/4483)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.8 to 2.7.9 in /.github/actions/setup-build + ([#4482](https://github.com/haskell/haskell-language-server/pull/4482)) by @dependabot[bot] +- Rework bindist CI + ([#4481](https://github.com/haskell/haskell-language-server/pull/4481)) by @wz1000 +- Remove Unsafe Dynflags deadcode, they don't exist any more! 
+ ([#4480](https://github.com/haskell/haskell-language-server/pull/4480)) by @fendor +- Implement fallback handler for `*/resolve` requests + ([#4478](https://github.com/haskell/haskell-language-server/pull/4478)) by @fendor +- Bump haskell-actions/setup from 2.7.7 to 2.7.8 + ([#4477](https://github.com/haskell/haskell-language-server/pull/4477)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.7 to 2.7.8 in /.github/actions/setup-build + ([#4476](https://github.com/haskell/haskell-language-server/pull/4476)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.6 to 2.7.7 + ([#4471](https://github.com/haskell/haskell-language-server/pull/4471)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.6 to 2.7.7 in /.github/actions/setup-build + ([#4470](https://github.com/haskell/haskell-language-server/pull/4470)) by @dependabot[bot] +- Allow building with GHC 9.8.4 + ([#4459](https://github.com/haskell/haskell-language-server/pull/4459)) by @fendor +- Update python read-the-docs dependencies to latest + ([#4457](https://github.com/haskell/haskell-language-server/pull/4457)) by @fendor +- More tests and better docs for cabal-add + ([#4455](https://github.com/haskell/haskell-language-server/pull/4455)) by @VenInf +- ci(mergify): upgrade configuration to current format + ([#4454](https://github.com/haskell/haskell-language-server/pull/4454)) by @mergify[bot] +- Support record positional construction inlay hints + ([#4447](https://github.com/haskell/haskell-language-server/pull/4447)) by @jetjinser +- Build HLS with GHC 9.8.3 + ([#4444](https://github.com/haskell/haskell-language-server/pull/4444)) by @fendor +- Don't suggest -Wno-deferred-out-of-scope-variables + ([#4441](https://github.com/haskell/haskell-language-server/pull/4441)) by @jeukshi +- Enable hls-stan-plugin for GHC 9.10.1 + ([#4437](https://github.com/haskell/haskell-language-server/pull/4437)) by @fendor +- Enhance formatting of the `cabal-version` error message + ([#4436](https://github.com/haskell/haskell-language-server/pull/4436)) by @fendor +- Support structured diagnostics 2 + ([#4433](https://github.com/haskell/haskell-language-server/pull/4433)) by @noughtmare +- Cabal ignore if for completions (#4289) + ([#4427](https://github.com/haskell/haskell-language-server/pull/4427)) by @SamuelLess +- Fix cabal-add testdata for hls-cabal-plugin-tests + ([#4426](https://github.com/haskell/haskell-language-server/pull/4426)) by @fendor +- gracefully handle errors for unsupported cabal version + ([#4425](https://github.com/haskell/haskell-language-server/pull/4425)) by @fridewald +- Fix pre-commit in CI + ([#4424](https://github.com/haskell/haskell-language-server/pull/4424)) by @fendor +- link executables dynamically to speed up linking + ([#4423](https://github.com/haskell/haskell-language-server/pull/4423)) by @develop7 +- Cabal plugin: implement check for package.yaml in a stack project + ([#4422](https://github.com/haskell/haskell-language-server/pull/4422)) by @JMoss-dev +- Fix exporting operator pattern synonym + 
([#4420](https://github.com/haskell/haskell-language-server/pull/4420)) by @pbrinkmeier +- Add docs about running tests for new contributors + ([#4418](https://github.com/haskell/haskell-language-server/pull/4418)) by @pbrinkmeier +- Bump cachix/install-nix-action from 29 to 30 + ([#4413](https://github.com/haskell/haskell-language-server/pull/4413)) by @dependabot[bot] +- Bump cachix/install-nix-action from V27 to 29 + ([#4411](https://github.com/haskell/haskell-language-server/pull/4411)) by @dependabot[bot] +- Avoid expectFail in the test suite + ([#4402](https://github.com/haskell/haskell-language-server/pull/4402)) by @sgillespie +- Fix typos in hls-cabal-fmt-plugin + ([#4399](https://github.com/haskell/haskell-language-server/pull/4399)) by @fendor +- Jump to instance definition and explain typeclass evidence + ([#4392](https://github.com/haskell/haskell-language-server/pull/4392)) by @fendor +- Update cabal-add dependency + ([#4389](https://github.com/haskell/haskell-language-server/pull/4389)) by @VenInf +- Improve error message for `--probe-tools` + ([#4387](https://github.com/haskell/haskell-language-server/pull/4387)) by @sgillespie +- Documentation for build-depends on hover + ([#4385](https://github.com/haskell/haskell-language-server/pull/4385)) by @VenInf +- Bump haskell-actions/setup from 2.7.3 to 2.7.6 + ([#4384](https://github.com/haskell/haskell-language-server/pull/4384)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.5 to 2.7.6 in /.github/actions/setup-build + ([#4383](https://github.com/haskell/haskell-language-server/pull/4383)) by @dependabot[bot] +- Clear GHCup caches in CI to not run out of space in CI + ([#4382](https://github.com/haskell/haskell-language-server/pull/4382)) by @fendor +- Cabal go to module's definition + ([#4380](https://github.com/haskell/haskell-language-server/pull/4380)) by @VenInf +- Add Goto Definition for cabal common sections + ([#4375](https://github.com/haskell/haskell-language-server/pull/4375)) by @ChristophHochrainer +- cabal-add integration as a CodeAction + ([#4360](https://github.com/haskell/haskell-language-server/pull/4360)) by @VenInf +- Bump haskell-actions/setup from 2.7.3 to 2.7.5 in /.github/actions/setup-build + ([#4354](https://github.com/haskell/haskell-language-server/pull/4354)) by @dependabot[bot] +- Support Inlay hints for record wildcards + ([#4351](https://github.com/haskell/haskell-language-server/pull/4351)) by @jetjinser +- Remove componentInternalUnits + ([#4350](https://github.com/haskell/haskell-language-server/pull/4350)) by @soulomoon +- Fix core file location in `GetLinkable` + ([#4347](https://github.com/haskell/haskell-language-server/pull/4347)) by @soulomoon +- Release 2.9.0.1 + ([#4346](https://github.com/haskell/haskell-language-server/pull/4346)) by @wz1000 +- Using captureKicksDiagnostics to speed up multiple plugin tests + ([#4339](https://github.com/haskell/haskell-language-server/pull/4339)) by @komikat +- Get files from Shake VFS from within plugin handlers + 
([#4328](https://github.com/haskell/haskell-language-server/pull/4328)) by @awjchen +- Cabal plugin outline view + ([#4323](https://github.com/haskell/haskell-language-server/pull/4323)) by @VenInf +- Add missing documentation for cabal formatters + ([#4322](https://github.com/haskell/haskell-language-server/pull/4322)) by @fendor +- Provide explicit import in inlay hints + ([#4235](https://github.com/haskell/haskell-language-server/pull/4235)) by @jetjinser +- Add codeactions for cabal field names + ([#3273](https://github.com/haskell/haskell-language-server/pull/3273)) by @dyniec + +## 2.9.0.1 + +- Bindists for GHC 9.6.6 + +## 2.9.0.0 + +- Bindists for GHC 9.10.1 by @wz1000, @jhrcek, @michaelpj +- More hls-graph reliability improvements by @soulomoon +- Refactoring of test suite runners by @soulomoon +- Fixes in multiple home units support by @wz1000 + +### Pull Requests + +- Fix quadratic memory usage in GetLocatedImports + ([#4318](https://github.com/haskell/haskell-language-server/pull/4318)) by @mpickering +- Bump stack configs + CI to 9.6.5 and 9.8.2 + ([#4316](https://github.com/haskell/haskell-language-server/pull/4316)) by @jhrcek +- Add support for Fourmolu 0.16 + ([#4314](https://github.com/haskell/haskell-language-server/pull/4314)) by @ brandonchinn178 +- Code action to remove redundant record field import (fixes #4220) + ([#4308](https://github.com/haskell/haskell-language-server/pull/4308)) by @battermann +- Use restricted monad for plugins (#4057) + ([#4304](https://github.com/haskell/haskell-language-server/pull/4304)) by @awjchen +- 4301 we need to implement utility to wait for all runnning keys in hls graph done + ([#4302](https://github.com/haskell/haskell-language-server/pull/4302)) by @soulomoon +- Call useWithStale instead of useWithStaleFast when calling ParseCabalFields + ([#4294](https://github.com/haskell/haskell-language-server/pull/4294)) by @VeryMilkyJoe +- test: add test documenting #806 + ([#4292](https://github.com/haskell/haskell-language-server/pull/4292)) by @develop7 +- ghcide: drop ghc-check and ghc-paths dependency + ([#4291](https://github.com/haskell/haskell-language-server/pull/4291)) by @wz1000 +- Limit number of valid hole fits to 10 + ([#4288](https://github.com/haskell/haskell-language-server/pull/4288)) by @akshaymankar +- Add common stanza to completion data + ([#4286](https://github.com/haskell/haskell-language-server/pull/4286)) by @VeryMilkyJoe +- FindImports: ThisPkg means some home unit, not "this" unit + ([#4284](https://github.com/haskell/haskell-language-server/pull/4284)) by @wz1000 +- Remove redudant absolutization in session loader + ([#4280](https://github.com/haskell/haskell-language-server/pull/4280)) by @soulomoon +- Bump to new lsp versions + ([#4279](https://github.com/haskell/haskell-language-server/pull/4279)) by @michaelpj +- Put more test code into pre-commit + ([#4275](https://github.com/haskell/haskell-language-server/pull/4275)) by @soulomoon +- Delete library ghcide test utils + ([#4274](https://github.com/haskell/haskell-language-server/pull/4274)) by @soulomoon 
+- Delete testUtil from ghcide-tests + ([#4272](https://github.com/haskell/haskell-language-server/pull/4272)) by @soulomoon +- CI change, only run bench on performance label + ([#4271](https://github.com/haskell/haskell-language-server/pull/4271)) by @soulomoon +- Migrate WatchedFileTests + ([#4269](https://github.com/haskell/haskell-language-server/pull/4269)) by @soulomoon +- Migrate UnitTests + ([#4268](https://github.com/haskell/haskell-language-server/pull/4268)) by @soulomoon +- Migrate SafeTests + ([#4267](https://github.com/haskell/haskell-language-server/pull/4267)) by @soulomoon +- Migrate SymlinkTests + ([#4266](https://github.com/haskell/haskell-language-server/pull/4266)) by @soulomoon +- Remove unused and outdated CHANGELOG files + ([#4264](https://github.com/haskell/haskell-language-server/pull/4264)) by @fendor +- Enable cabal flaky test + ([#4263](https://github.com/haskell/haskell-language-server/pull/4263)) by @soulomoon +- Migrate RootUriTests + ([#4261](https://github.com/haskell/haskell-language-server/pull/4261)) by @soulomoon +- Migrate PreprocessorTests + ([#4260](https://github.com/haskell/haskell-language-server/pull/4260)) by @soulomoon +- Migrate PluginSimpleTests + ([#4259](https://github.com/haskell/haskell-language-server/pull/4259)) by @soulomoon +- Migrate ClientSettingsTests + ([#4258](https://github.com/haskell/haskell-language-server/pull/4258)) by @soulomoon +- Unify critical session running in hls + ([#4256](https://github.com/haskell/haskell-language-server/pull/4256)) by @soulomoon +- Bump cachix/cachix-action from 14 to 15 + ([#4255](https://github.com/haskell/haskell-language-server/pull/4255)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.2 to 2.7.3 + ([#4254](https://github.com/haskell/haskell-language-server/pull/4254)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.2 to 2.7.3 in /.github/actions/setup-build + ([#4253](https://github.com/haskell/haskell-language-server/pull/4253)) by @dependabot[bot] +- Shorter file names completion + ([#4252](https://github.com/haskell/haskell-language-server/pull/4252)) by @VenInf +- Fix progress start delay + ([#4249](https://github.com/haskell/haskell-language-server/pull/4249)) by @michaelpj +- Bump cachix/install-nix-action from 26 to 27 + ([#4245](https://github.com/haskell/haskell-language-server/pull/4245)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.1 to 2.7.2 + ([#4244](https://github.com/haskell/haskell-language-server/pull/4244)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.1 to 2.7.2 in /.github/actions/setup-build + ([#4243](https://github.com/haskell/haskell-language-server/pull/4243)) by @dependabot[bot] +- Enable test for #717 + ([#4241](https://github.com/haskell/haskell-language-server/pull/4241)) by @soulomoon +- Remove Pepe from CODEOWNERS + ([#4239](https://github.com/haskell/haskell-language-server/pull/4239)) by @michaelpj +- Fix resultBuilt(dirty mechanism) in hls-graph + 
([#4238](https://github.com/haskell/haskell-language-server/pull/4238)) by @soulomoon +- Support for 9.10 + ([#4233](https://github.com/haskell/haskell-language-server/pull/4233)) by @wz1000 +- Refactor hls-test-util and reduce getCurrentDirectory after initilization + ([#4231](https://github.com/haskell/haskell-language-server/pull/4231)) by @soulomoon +- [Migrate BootTests] part of #4173 Migrate ghcide tests to hls test utils + ([#4227](https://github.com/haskell/haskell-language-server/pull/4227)) by @soulomoon +- Actually enable pedantic flag in ci flags job + ([#4224](https://github.com/haskell/haskell-language-server/pull/4224)) by @jhrcek +- Cleanup cabal files, ghc compat code, fix ghc warnings + ([#4222](https://github.com/haskell/haskell-language-server/pull/4222)) by @jhrcek +- Another attempt at using the lsp API for some progress reporting + ([#4218](https://github.com/haskell/haskell-language-server/pull/4218)) by @michaelpj +- [Migrate diagnosticTests] part of #4173 Migrate ghcide tests to hls test utils + ([#4207](https://github.com/haskell/haskell-language-server/pull/4207)) by @soulomoon +- Prepare release 2.8.0.0 + ([#4191](https://github.com/haskell/haskell-language-server/pull/4191)) by @wz1000 +- Stabilize the build system by correctly house keeping the dirtykeys and rule values [flaky test #4185 #4093] + ([#4190](https://github.com/haskell/haskell-language-server/pull/4190)) by @soulomoon +- hls-cabal-plugin: refactor context search to use `readFields` + ([#4186](https://github.com/haskell/haskell-language-server/pull/4186)) by @fendor +- 3944 extend the properties api to better support nested configuration + ([#3952](https://github.com/haskell/haskell-language-server/pull/3952)) by @soulomoon + +## 2.8.0.0 + +- Bindists for GHC 9.6.5 +- New hls-notes plugin (#4126, @jvanbruegge) +- Floskell, hlint and stylish-haskell plugins enabled for GHC 9.8 +- Improvements for hls-graph increasing robustness (#4087, @soulomoon) +- Improvements to multi-component support (#4096, #4109, #4179, @wz1000, @fendor) + +### Pull Requests + +- Bump haskell-actions/setup from 2.7.0 to 2.7.1 + ([#4189](https://github.com/haskell/haskell-language-server/pull/4189)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.7.0 to 2.7.1 in /.github/actions/setup-build + ([#4188](https://github.com/haskell/haskell-language-server/pull/4188)) by @dependabot[bot] +- Fix ghcdie-tests CI + ([#4184](https://github.com/haskell/haskell-language-server/pull/4184)) by @soulomoon +- Fix ghc and hlint warnings, fix formatting + ([#4181](https://github.com/haskell/haskell-language-server/pull/4181)) by @jhrcek +- Allow users to specify whether to use `cabal`'s multi-repl feature + ([#4179](https://github.com/haskell/haskell-language-server/pull/4179)) by @fendor +- Improve parsing of import suggestions extending multiple multiline imports (fixes #4175) + ([#4177](https://github.com/haskell/haskell-language-server/pull/4177)) by @jhrcek +- move ghcide-tests to haskell-language-server.cabal and make it depend on hls-test-utils + 
([#4176](https://github.com/haskell/haskell-language-server/pull/4176)) by @soulomoon +- enable ThreadId for when testing + ([#4174](https://github.com/haskell/haskell-language-server/pull/4174)) by @soulomoon +- Drop Legacy Logger from Codebase + ([#4171](https://github.com/haskell/haskell-language-server/pull/4171)) by @fendor +- get rid of the `unsafeInterleaveIO` at start up + ([#4167](https://github.com/haskell/haskell-language-server/pull/4167)) by @soulomoon +- Remove EKG + ([#4163](https://github.com/haskell/haskell-language-server/pull/4163)) by @michaelpj +- Mark plugins as not buildable if the flag is disabled + ([#4160](https://github.com/haskell/haskell-language-server/pull/4160)) by @michaelpj +- Fix references to old CPP names in tests, update tests + ([#4159](https://github.com/haskell/haskell-language-server/pull/4159)) by @jhrcek +- Bump haskell-actions/setup from 2.6.3 to 2.7.0 + ([#4158](https://github.com/haskell/haskell-language-server/pull/4158)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.6.3 to 2.7.0 in /.github/actions/setup-build + ([#4157](https://github.com/haskell/haskell-language-server/pull/4157)) by @dependabot[bot] +- Remove dead code in ghcide and hls-graph for priority + ([#4151](https://github.com/haskell/haskell-language-server/pull/4151)) by @soulomoon +- Bump haskell-actions/setup from 2.6.2 to 2.6.3 in /.github/actions/setup-build + ([#4150](https://github.com/haskell/haskell-language-server/pull/4150)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.6.2 to 2.6.3 + ([#4149](https://github.com/haskell/haskell-language-server/pull/4149)) by @dependabot[bot] +- Run ExceptionTests in temporary directory + ([#4146](https://github.com/haskell/haskell-language-server/pull/4146)) by @fendor +- hls-eval-plugin: Replicate #4139 + ([#4140](https://github.com/haskell/haskell-language-server/pull/4140)) by @mattapet +- Update comment in refactor tests + ([#4138](https://github.com/haskell/haskell-language-server/pull/4138)) by @jhrcek +- Update contact info in docs + ([#4137](https://github.com/haskell/haskell-language-server/pull/4137)) by @jhrcek +- hls-notes-plugin: Do not error if no note is under the cursor + ([#4136](https://github.com/haskell/haskell-language-server/pull/4136)) by @jvanbruegge +- improve logging in semantic tokens rule + ([#4135](https://github.com/haskell/haskell-language-server/pull/4135)) by @soulomoon +- Bump softprops/action-gh-release from 1 to 2 + ([#4133](https://github.com/haskell/haskell-language-server/pull/4133)) by @dependabot[bot] +- Bump cachix/install-nix-action from 25 to 26 + ([#4132](https://github.com/haskell/haskell-language-server/pull/4132)) by @dependabot[bot] +- Use Set.member instead of Foldable.elem + ([#4128](https://github.com/haskell/haskell-language-server/pull/4128)) by @jhrcek +- hls-notes-plugin: Initial implementation + ([#4126](https://github.com/haskell/haskell-language-server/pull/4126)) by @jvanbruegge +- Enable floskell and hlint plugins for ghc 9.8 + 
([#4125](https://github.com/haskell/haskell-language-server/pull/4125)) by @jhrcek +- Integrate stylish-haskell into hls executable with ghc 9.8 + ([#4124](https://github.com/haskell/haskell-language-server/pull/4124)) by @jhrcek +- Reduce usage of partial functions + ([#4123](https://github.com/haskell/haskell-language-server/pull/4123)) by @jhrcek +- Benchmark: Enable 9.6, 9.8 + ([#4118](https://github.com/haskell/haskell-language-server/pull/4118)) by @soulomoon +- Bump haskell-actions/setup from 2.6.1 to 2.6.2 in /.github/actions/setup-build + ([#4116](https://github.com/haskell/haskell-language-server/pull/4116)) by @dependabot[bot] +- Bump haskell-actions/setup from 2.6.1 to 2.6.2 + ([#4115](https://github.com/haskell/haskell-language-server/pull/4115)) by @dependabot[bot] +- eval: more robust way to extract comments from ParsedModule + ([#4113](https://github.com/haskell/haskell-language-server/pull/4113)) by @jhrcek +- Improve isolation of build artefacts of test runs + ([#4112](https://github.com/haskell/haskell-language-server/pull/4112)) by @fendor +- Improve handling of nonsense rename attempts + ([#4111](https://github.com/haskell/haskell-language-server/pull/4111)) by @jhrcek +- Exit with non-zero exitcode if wrapper fails to launch + ([#4110](https://github.com/haskell/haskell-language-server/pull/4110)) by @fendor +- Replace checkHomeUnitsClosed with a faster implementation + ([#4109](https://github.com/haskell/haskell-language-server/pull/4109)) by @wz1000 +- Don't distribute gifs or plugin readmes + ([#4107](https://github.com/haskell/haskell-language-server/pull/4107)) by @fendor +- Remove locale workaround for Module name that conatins non-ascii characters + ([#4106](https://github.com/haskell/haskell-language-server/pull/4106)) by @fendor +- Track extra-source-files of plugins more accurately + ([#4105](https://github.com/haskell/haskell-language-server/pull/4105)) by @fendor +- remove non-ascii name + ([#4103](https://github.com/haskell/haskell-language-server/pull/4103)) by @soulomoon +- Add cabal-gild as a cabal file formatter plugin + ([#4101](https://github.com/haskell/haskell-language-server/pull/4101)) by @fendor +- Remove more workarounds for GHCs < 9.2 (#4092) + ([#4098](https://github.com/haskell/haskell-language-server/pull/4098)) by @jhrcek +- session-loader: Don't loop forever when we don't find a file in any multi component + ([#4096](https://github.com/haskell/haskell-language-server/pull/4096)) by @wz1000 +- Prepare release 2.7.0.0 + ([#4095](https://github.com/haskell/haskell-language-server/pull/4095)) by @fendor +- Remove more workarounds for GHCs < 9.0 + ([#4092](https://github.com/haskell/haskell-language-server/pull/4092)) by @jhrcek +- Fix hls-graph: phantom dependencies invoke in branching deps (resolve #3423) + ([#4087](https://github.com/haskell/haskell-language-server/pull/4087)) by @soulomoon +- Rename only if the current module compiles (#3799) + ([#3848](https://github.com/haskell/haskell-language-server/pull/3848)) by @sgillespie +- Reintroduce ghc-lib flag for 
hlint plugin + ([#3757](https://github.com/haskell/haskell-language-server/pull/3757)) by @RaoulHC + +## 2.7.0.0 + +- Bindists for GHC 9.8.2 + - Enable many more plugins, making GHC 9.8.2 fully supported +- Fix refactor code actions for vim +- Preserve HLint's diagnostic severity +- Many other bug fixes. + +### Pull Requests + +- Enable pedantic for remaining plugins + ([#4091](https://github.com/haskell/haskell-language-server/pull/4091)) by @jhrcek +- Add support for fourmolu 0.15 + ([#4086](https://github.com/haskell/haskell-language-server/pull/4086)) by @brandonchinn178 +- refactor plugin: fix regex for extracting import suggestions + ([#4080](https://github.com/haskell/haskell-language-server/pull/4080)) by @jhrcek +- Bump to hiedb 0.6.0.0 + ([#4077](https://github.com/haskell/haskell-language-server/pull/4077)) by @jhrcek +- ghcide: Only try `stat`ing a core file after we ensure it actually exists + ([#4076](https://github.com/haskell/haskell-language-server/pull/4076)) by @wz1000 +- Fix small typo in Retrie error message + ([#4075](https://github.com/haskell/haskell-language-server/pull/4075)) by @iustin +- add Method_TextDocumentSemanticTokensFullDelta + ([#4073](https://github.com/haskell/haskell-language-server/pull/4073)) by @soulomoon +- Fix -Wall in retrie plugin + ([#4071](https://github.com/haskell/haskell-language-server/pull/4071)) by @jhrcek +- Fix -Wall in qualified imported names plugin + ([#4070](https://github.com/haskell/haskell-language-server/pull/4070)) by @jhrcek +- benchmarks: switch from deprecated haskell/actions/setup to haskell-actions/setup + ([#4068](https://github.com/haskell/haskell-language-server/pull/4068)) by @jhrcek +- Bump pre-commit/action from 3.0.0 to 3.0.1 + ([#4066](https://github.com/haskell/haskell-language-server/pull/4066)) by @dependabot[bot] +- Fix -Wall in refactor plugin + ([#4065](https://github.com/haskell/haskell-language-server/pull/4065)) by @jhrcek +- Redundant imports/exports: use range only to determine which code actions are in scope + ([#4063](https://github.com/haskell/haskell-language-server/pull/4063)) by @keithfancher +- Bump haskell-actions/setup to get GHC 9.6.4 in CI + ([#4062](https://github.com/haskell/haskell-language-server/pull/4062)) by @jhrcek +- Enable pedantic for more components + ([#4061](https://github.com/haskell/haskell-language-server/pull/4061)) by @jhrcek +- stack CI: switch to offic. 
haskell images, bump to lts-22.9 (ghc 9.6.4) + ([#4060](https://github.com/haskell/haskell-language-server/pull/4060)) by @jhrcek +- Improve hls class plugin test + ([#4059](https://github.com/haskell/haskell-language-server/pull/4059)) by @soulomoon +- Bump ghcide-test-utils to 2.0.0.0 + ([#4058](https://github.com/haskell/haskell-language-server/pull/4058)) by @wz1000 +- Promote more warnings to errors in ghcide + ([#4054](https://github.com/haskell/haskell-language-server/pull/4054)) by @jhrcek +- Add -Wunused-packages to common warnings + ([#4053](https://github.com/haskell/haskell-language-server/pull/4053)) by @jhrcek +- Bump lsp versions + ([#4052](https://github.com/haskell/haskell-language-server/pull/4052)) by @michaelpj +- Optimize semantic token extraction logic + ([#4050](https://github.com/haskell/haskell-language-server/pull/4050)) by @soulomoon +- Fix warnings in hls-graph, enable pedantic in CI + ([#4047](https://github.com/haskell/haskell-language-server/pull/4047)) by @jhrcek +- Fix -Wredundant-constraints + ([#4044](https://github.com/haskell/haskell-language-server/pull/4044)) by @jhrcek +- Disable caching job with ghc 9.2 on windows + ([#4043](https://github.com/haskell/haskell-language-server/pull/4043)) by @jhrcek +- fix token omitting problem if multiple tokens are connected. + ([#4041](https://github.com/haskell/haskell-language-server/pull/4041)) by @soulomoon +- Set test options via cabal.project + ([#4039](https://github.com/haskell/haskell-language-server/pull/4039)) by @michaelpj +- Fix document version test in hls-class-plugin + ([#4038](https://github.com/haskell/haskell-language-server/pull/4038)) by @July541 +- Fix -Wunused-imports + ([#4037](https://github.com/haskell/haskell-language-server/pull/4037)) by @jhrcek +- Use GHC2021 + ([#4033](https://github.com/haskell/haskell-language-server/pull/4033)) by @michaelpj +- Remove ghcide-test-utils as a separate package + ([#4032](https://github.com/haskell/haskell-language-server/pull/4032)) by @michaelpj +- Fix weird behavior of OPTIONS_GHC completions (fixes #3908) + ([#4031](https://github.com/haskell/haskell-language-server/pull/4031)) by @jhrcek +- semantic tokens: add infix operator + ([#4030](https://github.com/haskell/haskell-language-server/pull/4030)) by @soulomoon +- fix: a typo in docs/configuration.md + ([#4029](https://github.com/haskell/haskell-language-server/pull/4029)) by @kkweon +- Turn off tasty-rerun + ([#4028](https://github.com/haskell/haskell-language-server/pull/4028)) by @michaelpj +- Reduce the number of ad-hoc helper test functions in refactor plugin tests + ([#4027](https://github.com/haskell/haskell-language-server/pull/4027)) by @jhrcek +- Fix documentation/image links + ([#4025](https://github.com/haskell/haskell-language-server/pull/4025)) by @jhrcek +- Fix various issues + ([#4024](https://github.com/haskell/haskell-language-server/pull/4024)) by @michaelpj +- Use relative file paths for HIE files and Stan's config maps + 
([#4023](https://github.com/haskell/haskell-language-server/pull/4023)) by @keithfancher +- fix isClassNodeIdentifier in hls-class-plugin + ([#4020](https://github.com/haskell/haskell-language-server/pull/4020)) by @soulomoon +- Fix -Wall and -Wunused-packages in hlint plugin + ([#4019](https://github.com/haskell/haskell-language-server/pull/4019)) by @jhrcek +- update hlint to 3.8 and prevent linting on testdata dir + ([#4018](https://github.com/haskell/haskell-language-server/pull/4018)) by @soulomoon +- refactor plugin: add reproducer and fix for #3795 + ([#4016](https://github.com/haskell/haskell-language-server/pull/4016)) by @jhrcek +- Fix -Wall and -Wunused-packages in stylish-haskell plugin + ([#4015](https://github.com/haskell/haskell-language-server/pull/4015)) by @jhrcek +- Fix -Wall and -Wunused-packages in stan plugin + ([#4014](https://github.com/haskell/haskell-language-server/pull/4014)) by @jhrcek +- fix doc for semantic token + ([#4011](https://github.com/haskell/haskell-language-server/pull/4011)) by @soulomoon +- Fix -Wall and -Wunused-packages in module name and overloaded record dot plugins + ([#4009](https://github.com/haskell/haskell-language-server/pull/4009)) by @jhrcek +- Fix -Wall and -Wunused-package in gadt plugin + ([#4008](https://github.com/haskell/haskell-language-server/pull/4008)) by @jhrcek +- Fix -Wall and -Wunused-packages in fourmolu and ormolu plugins + ([#4007](https://github.com/haskell/haskell-language-server/pull/4007)) by @jhrcek +- Fix -Wall and -Wunused-packages in plugins api and floskell + ([#4005](https://github.com/haskell/haskell-language-server/pull/4005)) by @jhrcek +- Fix -Wunused-packages in test utils + ([#4004](https://github.com/haskell/haskell-language-server/pull/4004)) by @jhrcek +- Update base lower bounds for HLS + ([#4000](https://github.com/haskell/haskell-language-server/pull/4000)) by @fendor +- Various 9.8 compat + ([#3998](https://github.com/haskell/haskell-language-server/pull/3998)) by @michaelpj +- Fix -Wall and -Wunused-packages in explicit-record-fields plugin + ([#3996](https://github.com/haskell/haskell-language-server/pull/3996)) by @jhrcek +- Fix -Wall and -Wunused-packages in explicit fixity plugin + ([#3995](https://github.com/haskell/haskell-language-server/pull/3995)) by @jhrcek +- Remove an allow-newer + ([#3989](https://github.com/haskell/haskell-language-server/pull/3989)) by @michaelpj +- chore: Fix typo s/occured/occurred + ([#3988](https://github.com/haskell/haskell-language-server/pull/3988)) by @hugo-syn +- Update support tables + ([#3987](https://github.com/haskell/haskell-language-server/pull/3987)) by @michaelpj +- Fix most -Wall in ghcide + ([#3984](https://github.com/haskell/haskell-language-server/pull/3984)) by @jhrcek +- Fix -Wall and -Wunused-packages in pragmas plugin + ([#3982](https://github.com/haskell/haskell-language-server/pull/3982)) by @jhrcek +- Fix -Wall and -Wunused-packages in eval plugin + ([#3981](https://github.com/haskell/haskell-language-server/pull/3981)) by @jhrcek +- Fix 
-Wall and -Wunused-packages in code-range plugin + ([#3980](https://github.com/haskell/haskell-language-server/pull/3980)) by @jhrcek +- Fix -Wall, -Wunused-packages and hlint warnings in call-hierarchy plugin + ([#3979](https://github.com/haskell/haskell-language-server/pull/3979)) by @jhrcek +- Fix -Wunused-packages in hls-cabal-plugin + ([#3977](https://github.com/haskell/haskell-language-server/pull/3977)) by @jhrcek +- Merge plugins into the HLS package + ([#3976](https://github.com/haskell/haskell-language-server/pull/3976)) by @michaelpj +- Fix most hlint warnings in ghcide + ([#3975](https://github.com/haskell/haskell-language-server/pull/3975)) by @jhrcek +- Remove allow-newer for ghc-trace-events + ([#3974](https://github.com/haskell/haskell-language-server/pull/3974)) by @jhrcek +- Exactprint plugins for 9.8 + ([#3973](https://github.com/haskell/haskell-language-server/pull/3973)) by @wz1000 +- Fix -Wall and -Wunused-packages in hls-class-plugin + ([#3972](https://github.com/haskell/haskell-language-server/pull/3972)) by @jhrcek +- Document cabal diagnostic options + ([#3971](https://github.com/haskell/haskell-language-server/pull/3971)) by @fendor +- Fix -Wall and -Wunused-packages in change-type-signature plugin + ([#3970](https://github.com/haskell/haskell-language-server/pull/3970)) by @jhrcek +- Semantic tokens: expand type synonym to checkout forall function type when possible + ([#3967](https://github.com/haskell/haskell-language-server/pull/3967)) by @soulomoon +- Fix -Wunused-packages in hls-cabal-fmt-plugin + ([#3965](https://github.com/haskell/haskell-language-server/pull/3965)) by @jhrcek +- Fix -Wall and -Wunused-packages in hls-alternate-number-format-plugin + ([#3964](https://github.com/haskell/haskell-language-server/pull/3964)) by @jhrcek +- Prepare release 2.6.0.0 + ([#3959](https://github.com/haskell/haskell-language-server/pull/3959)) by @wz1000 +- Semantic tokens: add module name support and improve performance and accuracy by traversing the hieAst along with source code + ([#3958](https://github.com/haskell/haskell-language-server/pull/3958)) by @soulomoon +- Bump cachix/cachix-action from 13 to 14 + ([#3956](https://github.com/haskell/haskell-language-server/pull/3956)) by @dependabot[bot] +- Bump cachix/install-nix-action from 24 to 25 + ([#3955](https://github.com/haskell/haskell-language-server/pull/3955)) by @dependabot[bot] +- Remove unused dependencies in hls-refactor-plugin + ([#3953](https://github.com/haskell/haskell-language-server/pull/3953)) by @jhrcek +- Cleanup conditional build logic pertaining to pre 9.2 GHCs + ([#3948](https://github.com/haskell/haskell-language-server/pull/3948)) by @jhrcek +- Fix issue: HLS HLint plugin doesn't preserve HLint's severities #3881 + ([#3902](https://github.com/haskell/haskell-language-server/pull/3902)) by @IAmPara0x +- Don't run hlint on testdata directories + ([#3901](https://github.com/haskell/haskell-language-server/pull/3901)) by @fendor +- Add option for setting manual path to Fourmolu binary + 
([#3860](https://github.com/haskell/haskell-language-server/pull/3860)) by @georgefst + +## 2.6.0.0 + +- Bindists for GHC 9.6.4 +- A new semantic tokens plugin (#3892, @soulomoon). +- Improvements to multiple home unit support with GHC 9.4. When HLS is used with cabal 3.11+ it will + load proper multiple home unit sessions by default, fixing a lot of issues with + loading and reloading projects that have more than one component (#3462, @wz1000). +- Removed implicit-hie, resulting in better behaviour for projects without cradles. +- Don't produce diagnostics for disabled plugins (#3941, @fendor). +- Many other bug fixes. + +### Pull Requests + +- fix: semantic token omitting record field in `{-# LANGUAGE DuplicateRecordFields #-}` #3950 + ([#3951](https://github.com/haskell/haskell-language-server/pull/3951)) by @soulomoon +- Properties API: Remove unsafe coerce in favor of type class based method in + ([#3947](https://github.com/haskell/haskell-language-server/pull/3947)) by @soulomoon +- Bump to hiedb 0.5.0.0 to fix #3542 + ([#3943](https://github.com/haskell/haskell-language-server/pull/3943)) by @wz1000 +- Don't produce diagnostics if plugin is turned off + ([#3941](https://github.com/haskell/haskell-language-server/pull/3941)) by @fendor +- add config for semantic-tokens-plugin for mapping from hs token type to LSP default token type + ([#3940](https://github.com/haskell/haskell-language-server/pull/3940)) by @soulomoon +- add doc and ci test for semantic tokens + ([#3938](https://github.com/haskell/haskell-language-server/pull/3938)) by @soulomoon +- update Floskell to 0.11.* + ([#3933](https://github.com/haskell/haskell-language-server/pull/3933)) by @peterbecich +- Remove some people from CODEOWNERS + ([#3930](https://github.com/haskell/haskell-language-server/pull/3930)) by @michaelpj +- Adapt to minor API change for 9.6.4 compatibility + ([#3929](https://github.com/haskell/haskell-language-server/pull/3929)) by @wz1000 +- Fix multi unit session when some packages have reexported modules. 
+ ([#3928](https://github.com/haskell/haskell-language-server/pull/3928)) by @wz1000 +- Switch to haskell-actions/setup since haskell/actions is deprecated + ([#3926](https://github.com/haskell/haskell-language-server/pull/3926)) by @fendor +- Make vscode-extension-schema honour default values + ([#3925](https://github.com/haskell/haskell-language-server/pull/3925)) by @fendor +- Add golden tests for public configs + ([#3922](https://github.com/haskell/haskell-language-server/pull/3922)) by @fendor +- Bump geekyeggo/delete-artifact from 2 to 4 + ([#3921](https://github.com/haskell/haskell-language-server/pull/3921)) by @dependabot[bot] +- Fix positionMapping in stale data + ([#3920](https://github.com/haskell/haskell-language-server/pull/3920)) by @soulomoon +- Disable stan plugin by default + ([#3917](https://github.com/haskell/haskell-language-server/pull/3917)) by @fendor +- Use stan config files for stan plugin (#3904) + ([#3914](https://github.com/haskell/haskell-language-server/pull/3914)) by @0rphee +- Bump both upload and download artifact + ([#3913](https://github.com/haskell/haskell-language-server/pull/3913)) by @michaelpj +- Update ghc-version-support.md for 2.5.0 + ([#3909](https://github.com/haskell/haskell-language-server/pull/3909)) by @lehmacdj +- Give plugins descriptions, include versions of key dependencies + ([#3903](https://github.com/haskell/haskell-language-server/pull/3903)) by @michaelpj +- Remove some buildability blockers that aren't needed + ([#3899](https://github.com/haskell/haskell-language-server/pull/3899)) by @michaelpj +- Bump actions/setup-python from 4 to 5 + ([#3895](https://github.com/haskell/haskell-language-server/pull/3895)) by @dependabot[bot] +- Update index-state to get latest stan version + ([#3894](https://github.com/haskell/haskell-language-server/pull/3894)) by @0rphee +- Generate FileTarget for all possible targetLocations + ([#3893](https://github.com/haskell/haskell-language-server/pull/3893)) by @fendor +- Implement semantic tokens plugin to support semantic highlighting(textDocument/semanticTokens/full) + ([#3892](https://github.com/haskell/haskell-language-server/pull/3892)) by @soulomoon +- session-loader: Set working directory on GHC 9.4+ + ([#3891](https://github.com/haskell/haskell-language-server/pull/3891)) by @wz1000 +- Demote home unit closure errors to warnings. 
+ ([#3890](https://github.com/haskell/haskell-language-server/pull/3890)) by @wz1000 +- Bump cachix/install-nix-action from 23 to 24 + ([#3889](https://github.com/haskell/haskell-language-server/pull/3889)) by @dependabot[bot] +- Bump cachix/cachix-action from 12 to 13 + ([#3888](https://github.com/haskell/haskell-language-server/pull/3888)) by @dependabot[bot] +- Add more docs for implicit discovery + ([#3887](https://github.com/haskell/haskell-language-server/pull/3887)) by @fendor +- Prepare release 2.5.0.0 + ([#3879](https://github.com/haskell/haskell-language-server/pull/3879)) by @wz1000 +- Improve no plugin messages + ([#3864](https://github.com/haskell/haskell-language-server/pull/3864)) by @joyfulmantis +- Add support for multi unit argument syntax + ([#3462](https://github.com/haskell/haskell-language-server/pull/3462)) by @wz1000 +- Fix completion for qualified import + ([#2838](https://github.com/haskell/haskell-language-server/pull/2838)) by @xsebek + +## 2.5.0.0 + +- Bindists for GHC 9.4.8 +- Drop support for GHC 9.0 +- Re-add stan plugin +- Load default operator fixities in Fourmolu plugin + +### Pull Requests + +- Drop support for GHC 9.0 + ([#3875](https://github.com/haskell/haskell-language-server/pull/3875)) by @michaelpj +- Fix support tables + ([#3874](https://github.com/haskell/haskell-language-server/pull/3874)) by @michaelpj +- Prefer hls-test-utils functions over code duplication + ([#3870](https://github.com/haskell/haskell-language-server/pull/3870)) by @fendor +- Make sure running tests locally pick up the correct cradle type + ([#3869](https://github.com/haskell/haskell-language-server/pull/3869)) by @fendor +- Some versions of stylish-haskell do need the ghc-lib flag + ([#3868](https://github.com/haskell/haskell-language-server/pull/3868)) by @michaelpj +- Remove head.hackage + ([#3867](https://github.com/haskell/haskell-language-server/pull/3867)) by @wz1000 +- Load default operator fixities in Fourmolu plugin non-CLI mode + ([#3855](https://github.com/haskell/haskell-language-server/pull/3855)) by @georgefst +- Fix #3847 + ([#3854](https://github.com/haskell/haskell-language-server/pull/3854)) by @BurningLutz +- Re-add hls-stan-plugin + ([#3851](https://github.com/haskell/haskell-language-server/pull/3851)) by @0rphee +- Bump fkirc/skip-duplicate-actions from 5.3.0 to 5.3.1 + ([#3850](https://github.com/haskell/haskell-language-server/pull/3850)) by @dependabot[bot] +- Merge definitions from all plugins for Document(Type)Definition message + ([#3846](https://github.com/haskell/haskell-language-server/pull/3846)) by @JiriLojda +- Simplify cabal.project + ([#3836](https://github.com/haskell/haskell-language-server/pull/3836)) by @michaelpj +- Set the root for tests to the test directory + ([#3830](https://github.com/haskell/haskell-language-server/pull/3830)) by @fendor +- Reduce Nix support + ([#3804](https://github.com/haskell/haskell-language-server/pull/3804)) by @michaelpj + +## 2.4.0.0 + +* Initial support for GHC 9.8.1, without plugins dependent on `ghc-exactprint` +* Fix broken 
Windows binaries (#3822) + +### Pull Requests + +- Remove constraint on stm-hamt + ([#3829](https://github.com/haskell/haskell-language-server/pull/3829)) by @iMichka +- Cleanup func-test suite + ([#3828](https://github.com/haskell/haskell-language-server/pull/3828)) by @fendor +- Bump haskell/actions from 2.4.6 to 2.4.7 in /.github/actions/setup-build + ([#3824](https://github.com/haskell/haskell-language-server/pull/3824)) by @dependabot[bot] +- Bump haskell/actions from 2.4.6 to 2.4.7 + ([#3823](https://github.com/haskell/haskell-language-server/pull/3823)) by @dependabot[bot] +- Release 2.3.0.0 + ([#3818](https://github.com/haskell/haskell-language-server/pull/3818)) by @wz1000 +- GHC 9.8 support + ([#3727](https://github.com/haskell/haskell-language-server/pull/3727)) by @wz1000 + +## 2.3.0.0 + +* Binaries for GHC 9.6.3 +* Drop support for GHC 8.10 +* Remove `hls-haddock-comments-plugin`, `hls-stan-plugin`, and `hls-tactics-plugin` +* Don't suggest bogus module names in `hls-module-name-plugin` (#3784) +* Add support for external Ormolu (#3771) +* Improve refine imports behaviour for qualified imports (#3806) + +### Pull Requests + +- Switch chat room to matrix + ([#3817](https://github.com/haskell/haskell-language-server/pull/3817)) by @July541 +- Fix flaky hie bios test + ([#3814](https://github.com/haskell/haskell-language-server/pull/3814)) by @fendor +- Revert "Bump actions/checkout from 3 to 4" + ([#3813](https://github.com/haskell/haskell-language-server/pull/3813)) by @wz1000 +- Add test directories to hls-retrie-plugin + ([#3808](https://github.com/haskell/haskell-language-server/pull/3808)) by @Vekhir +- Change refine imports behaviour for qualified imports + ([#3806](https://github.com/haskell/haskell-language-server/pull/3806)) by @joyfulmantis +- Update links to Nix documentation + ([#3805](https://github.com/haskell/haskell-language-server/pull/3805)) by @maralorn +- Bump actions/checkout from 3 to 4 + ([#3802](https://github.com/haskell/haskell-language-server/pull/3802)) by @dependabot[bot] +- Bump cachix/install-nix-action from 22 to 23 + ([#3801](https://github.com/haskell/haskell-language-server/pull/3801)) by @dependabot[bot] +- Add support for Fourmolu 0.14.0.0 + ([#3796](https://github.com/haskell/haskell-language-server/pull/3796)) by @brandonchinn178 +- Add code lens and fix code actions experiments + ([#3791](https://github.com/haskell/haskell-language-server/pull/3791)) by @joyfulmantis +- Bump lsp versions in flake + ([#3790](https://github.com/haskell/haskell-language-server/pull/3790)) by @colonelpanic8 +- Clean up Release CI + ([#3787](https://github.com/haskell/haskell-language-server/pull/3787)) by @fendor +- Do not suggest bogus module names + ([#3784](https://github.com/haskell/haskell-language-server/pull/3784)) by @Bodigrim +- Delete `hls-haddock-comments-plugin`, `hls-stan-plugin`, and `hls-tactics-plugin` + ([#3782](https://github.com/haskell/haskell-language-server/pull/3782)) by @michaelpj +- Enhance/releasing checklist +
([#3781](https://github.com/haskell/haskell-language-server/pull/3781)) by @fendor +- Add cradle dependencies to session loading errors + ([#3779](https://github.com/haskell/haskell-language-server/pull/3779)) by @VeryMilkyJoe +- Prepare release 2.2.0.0 + ([#3775](https://github.com/haskell/haskell-language-server/pull/3775)) by @fendor +- Add support for external Ormolu + ([#3771](https://github.com/haskell/haskell-language-server/pull/3771)) by @sir4ur0n +- Support for resolve for class-plugin lenses + ([#3769](https://github.com/haskell/haskell-language-server/pull/3769)) by @joyfulmantis +- Introduce declarative test project definition for plugin tests + ([#3767](https://github.com/haskell/haskell-language-server/pull/3767)) by @fendor +- Use latest version of fourmolu possible + ([#3764](https://github.com/haskell/haskell-language-server/pull/3764)) by @brandonchinn178 +- Drop support for GHC 8.10 + ([#3434](https://github.com/haskell/haskell-language-server/pull/3434)) by @michaelpj + + +## 2.2.0.0 + +* Binaries for GHC 9.4.7 +* Forward compatibility with latest VSCode client changes + +### Pull Requests + +- hls-cabal-fmt-plugin: Use the file contents of the LSP request + ([#3776](https://github.com/haskell/haskell-language-server/pull/3776)) by @fendor +- Adapt to lsp changes for workspace/configuration + ([#3773](https://github.com/haskell/haskell-language-server/pull/3773)) by @michaelpj +- Rework "Configuration" and "Manually testing HLS" documentations + ([#3772](https://github.com/haskell/haskell-language-server/pull/3772)) by @sir4ur0n +- Fix `main-is` completion suggestions not being relative to `hs-source-dirs` + ([#3766](https://github.com/haskell/haskell-language-server/pull/3766)) by @VeryMilkyJoe +- Remove suggestion of stanzas inside of stanza context + ([#3761](https://github.com/haskell/haskell-language-server/pull/3761)) by @VeryMilkyJoe +- Pedantic ghcide + ([#3751](https://github.com/haskell/haskell-language-server/pull/3751)) by @joyfulmantis +- Fix #3574 and support resolve in explicit records + ([#3750](https://github.com/haskell/haskell-language-server/pull/3750)) by @joyfulmantis + +## 2.1.0.0 + +* Binaries for GHC 9.4.6 +* Completions for .cabal files +* Performance improvements +* Show package name and its version while hovering on import statements + ([#3691](https://github.com/haskell/haskell-language-server/pull/3691)) +* Fix code edits in lsp spec compliant editors like helix. 
+ ([#3643](https://github.com/haskell/haskell-language-server/pull/3643)) + +### Pull requests merged + +- Update to latest lsp packages + ([#3747](https://github.com/haskell/haskell-language-server/pull/3747)) by @joyfulmantis +- Remove unnecessary allow-newer in stack.yaml + ([#3746](https://github.com/haskell/haskell-language-server/pull/3746)) by @July541 +- Log fourmolu and ormolu version that hls using + ([#3744](https://github.com/haskell/haskell-language-server/pull/3744)) by @July541 +- Various PluginError PR suggestions I missed earlier + ([#3737](https://github.com/haskell/haskell-language-server/pull/3737)) by @joyfulmantis +- Add resolve support in refine imports by merging it with explicit imports + ([#3729](https://github.com/haskell/haskell-language-server/pull/3729)) by @joyfulmantis +- Fix other file goto definition + ([#3725](https://github.com/haskell/haskell-language-server/pull/3725)) by @nlander +- Fix Nix builds + ([#3724](https://github.com/haskell/haskell-language-server/pull/3724)) by @cydparser +- Better plugin error infrastructure + ([#3717](https://github.com/haskell/haskell-language-server/pull/3717)) by @joyfulmantis +- Move Recorder to hls-plugin-api + ([#3714](https://github.com/haskell/haskell-language-server/pull/3714)) by @joyfulmantis +- Actually force usages + ([#3713](https://github.com/haskell/haskell-language-server/pull/3713)) by @wz1000 +- Best-effort support of Qualified Imports in GHC 9.4 + ([#3712](https://github.com/haskell/haskell-language-server/pull/3712)) by @konn +- Skip test if only CODEOWNERS changed + ([#3707](https://github.com/haskell/haskell-language-server/pull/3707)) by @July541 +- Update stack stuff + ([#3706](https://github.com/haskell/haskell-language-server/pull/3706)) by @July541 +- Mark hls-floskell-plugin as tier 3 + ([#3705](https://github.com/haskell/haskell-language-server/pull/3705)) by @July541 +- Remove isovector as an owner + ([#3700](https://github.com/haskell/haskell-language-server/pull/3700)) by @isovector +- Bump haskell/actions from 2.4.3 to 2.4.4 in /.github/actions/setup-build + ([#3699](https://github.com/haskell/haskell-language-server/pull/3699)) by @dependabot[bot] +- Bump haskell/actions from 2.4.3 to 2.4.4 + ([#3698](https://github.com/haskell/haskell-language-server/pull/3698)) by @dependabot[bot] +- Catch exceptions in commands and use lsp null + ([#3696](https://github.com/haskell/haskell-language-server/pull/3696)) by @joyfulmantis +- Show package name and its version while hovering on import statements + ([#3691](https://github.com/haskell/haskell-language-server/pull/3691)) by @July541 +- Resolve refactoring + ([#3688](https://github.com/haskell/haskell-language-server/pull/3688)) by @joyfulmantis +- Prefer non-boot files when creating the FinderCache. + ([#3687](https://github.com/haskell/haskell-language-server/pull/3687)) by @wz1000 +- Some fixes for multi component stuff + ([#3686](https://github.com/haskell/haskell-language-server/pull/3686)) by @wz1000 +- Further hlint resolve changes. 
+ ([#3685](https://github.com/haskell/haskell-language-server/pull/3685)) by @joyfulmantis +- docs (plugin-support): fix plugin name typo + ([#3683](https://github.com/haskell/haskell-language-server/pull/3683)) by @PiDelport +- Resolve for explicit-imports + ([#3682](https://github.com/haskell/haskell-language-server/pull/3682)) by @joyfulmantis +- Hls 2.0.0.1 forward port + ([#3680](https://github.com/haskell/haskell-language-server/pull/3680)) by @hasufell +- Resolve 2: Support for resolve in hls-hlint-plugin + ([#3679](https://github.com/haskell/haskell-language-server/pull/3679)) by @joyfulmantis +- Resolve 0: Generic support for resolve in hls packages + ([#3678](https://github.com/haskell/haskell-language-server/pull/3678)) by @joyfulmantis +- Ship hls-hlint-plugin for ghc-9.6 + ([#3677](https://github.com/haskell/haskell-language-server/pull/3677)) by @July541 +- Remove extra call to newHscEnvEqWithImportPaths + ([#3676](https://github.com/haskell/haskell-language-server/pull/3676)) by @nlander +- Fixes pragma plugin offering incorrect code actions #3673 + ([#3674](https://github.com/haskell/haskell-language-server/pull/3674)) by @joyfulmantis +- Restore short option for logfile + ([#3672](https://github.com/haskell/haskell-language-server/pull/3672)) by @michaelpj +- Enable stylish-haskell for 9.6 + ([#3670](https://github.com/haskell/haskell-language-server/pull/3670)) by @michaelpj +- Bump supported ormolu, allow for 9.6 + ([#3668](https://github.com/haskell/haskell-language-server/pull/3668)) by @michaelpj +- Bump cachix/install-nix-action from 21 to 22 + ([#3666](https://github.com/haskell/haskell-language-server/pull/3666)) by @dependabot[bot] +- Add arguments to direct logs to various locations + ([#3665](https://github.com/haskell/haskell-language-server/pull/3665)) by @michaelpj +- Support fourmolu 0.13 + ([#3662](https://github.com/haskell/haskell-language-server/pull/3662)) by @brandonchinn178 +- Resolve 1: Support for resolve in overloaded-record-dot + ([#3658](https://github.com/haskell/haskell-language-server/pull/3658)) by @joyfulmantis +- fix ISO8601 related deprecation in time + ([#3654](https://github.com/haskell/haskell-language-server/pull/3654)) by @HugoPeters1024 +- Add a log-level argument to set the log level + ([#3651](https://github.com/haskell/haskell-language-server/pull/3651)) by @michaelpj +- Update Contributing.md + ([#3650](https://github.com/haskell/haskell-language-server/pull/3650)) by @VeryMilkyJoe +- Commit to prettyprinter >= 1.7 + ([#3649](https://github.com/haskell/haskell-language-server/pull/3649)) by @michaelpj +- Add missing Monad constraint in the eval plugin + ([#3648](https://github.com/haskell/haskell-language-server/pull/3648)) by @sandydoo +- hls-pragmas-plugin: Reduce noisy completions + ([#3647](https://github.com/haskell/haskell-language-server/pull/3647)) by @akshaymankar +- Correctly pass VersionedTextDocumentIdentifier through hls + ([#3643](https://github.com/haskell/haskell-language-server/pull/3643)) by @maralorn +- Add an 
assist for importing record fields when using OverloadedRecordDot + ([#3642](https://github.com/haskell/haskell-language-server/pull/3642)) by @simmsb +- update flakes to compile with ghc928 and ghc962 + ([#3641](https://github.com/haskell/haskell-language-server/pull/3641)) by @smunix +- Split pragmas plugin by providers + decrease disable-warning priority + ([#3640](https://github.com/haskell/haskell-language-server/pull/3640)) by @mrcjkb +- Reintroduce cabal-install in flake.nix + ([#3637](https://github.com/haskell/haskell-language-server/pull/3637)) by @cgeorgii +- Delete dead cbits + ([#3635](https://github.com/haskell/haskell-language-server/pull/3635)) by @michaelpj +- Simplify selection of GHCs to build on + ([#3633](https://github.com/haskell/haskell-language-server/pull/3633)) by @michaelpj +- Support fourmolu 0.13.0.0 + ([#3631](https://github.com/haskell/haskell-language-server/pull/3631)) by @brandonchinn178 +- Bump haskell/actions from 2.4.1 to 2.4.3 in /.github/actions/setup-build + ([#3627](https://github.com/haskell/haskell-language-server/pull/3627)) by @dependabot[bot] +- Bump haskell/actions from 2.4.1 to 2.4.3 + ([#3626](https://github.com/haskell/haskell-language-server/pull/3626)) by @dependabot[bot] +- remove ghc minor versions in nix flake package builds + ([#3625](https://github.com/haskell/haskell-language-server/pull/3625)) by @smunix +- HLS for the new generated LSP 2 release + ([#3621](https://github.com/haskell/haskell-language-server/pull/3621)) by @joyfulmantis +- Keep plugin id of cabal-fmt in sync with default config id + ([#3615](https://github.com/haskell/haskell-language-server/pull/3615)) by @VeryMilkyJoe +- Fix some grammar mistakes and typos + ([#3614](https://github.com/haskell/haskell-language-server/pull/3614)) by @VeryMilkyJoe +- Bump cachix/install-nix-action from 20 to 21 + ([#3612](https://github.com/haskell/haskell-language-server/pull/3612)) by @dependabot[bot] +- fix: remove the `Indexing` progress message when exeption in withHieDb + ([#3610](https://github.com/haskell/haskell-language-server/pull/3610)) by @guibou +- Bump haskell/actions from 2.4.0 to 2.4.1 in /.github/actions/setup-build + ([#3604](https://github.com/haskell/haskell-language-server/pull/3604)) by @dependabot[bot] +- Bump haskell/actions from 2.4.0 to 2.4.1 + ([#3603](https://github.com/haskell/haskell-language-server/pull/3603)) by @dependabot[bot] +- Cabal file completions + ([#3268](https://github.com/haskell/haskell-language-server/pull/3268)) by @VeryMilkyJoe +- Share ModuleGraphs for all files + ([#3232](https://github.com/haskell/haskell-language-server/pull/3232)) by @wz1000 + +## 2.0.0.1 + +- Add overloaded record dot plugin initial version (closes #3350) (#3560) +- Binaries for GHC 9.2.8 and GHC 9.6.2 + +## 2.0.0.0 + +- New versioning scheme for all packages distributed as part of HLS, + versioning them in lockstep for each release. 
+- Binaries for GHC 9.4.5 +- Keep instance lenses stable even if parsed results are unavailable (#3545) +- Keep stale lenses for module name (#3570) +- Keep type lenses stable (#3558) + +## 1.10.0.0 + +- Support for GHC 9.6 +- Binaries for GHC 9.2.7 and GHC 9.6.1 +- Eval plugin support for GHC 9.4+ (#3391) +- Don't show lenses for TH generated instances when using hls-class-plugin (#3531) + +### Pull requests merged for 1.10.0.0 +- Support fourmolu 0.11 +([#3533](https://github.com/haskell/haskell-language-server/pull/3533)) by @brandonchinn178 +- Don't show lenses for TH generated instances +([#3531](https://github.com/haskell/haskell-language-server/pull/3531)) by @July541 +- Bump haskell/actions from 2.3.3 to 2.3.6 +([#3529](https://github.com/haskell/haskell-language-server/pull/3529)) by @dependabot[bot] +- Use GHC 9.2.7 in flake +([#3527](https://github.com/haskell/haskell-language-server/pull/3527)) by @cydparser +- Remove HsLogger +([#3526](https://github.com/haskell/haskell-language-server/pull/3526)) by @fendor +- Use hie-bios 0.12 +([#3524](https://github.com/haskell/haskell-language-server/pull/3524)) by @wz1000 +- Bump haskell/actions +([#3520](https://github.com/haskell/haskell-language-server/pull/3520)) by @michaelpj +- Bump cachix/install-nix-action from 19 to 20 +([#3514](https://github.com/haskell/haskell-language-server/pull/3514)) by @dependabot[bot] +- Docs: update Emacs section: add eglot with config example +([#3509](https://github.com/haskell/haskell-language-server/pull/3509)) by @m4lvin +- Eval plugin is now supported in 9.4 +([#3508](https://github.com/haskell/haskell-language-server/pull/3508)) by @michaelpj +- Update flake to GHC 9.2.6 and 9.4.4 +([#3503](https://github.com/haskell/haskell-language-server/pull/3503)) by @cydparser +- Fix lower ghcide bounds of rename and fourmolu plugins +([#3501](https://github.com/haskell/haskell-language-server/pull/3501)) by @pepeiborra +- Add 9.2.6 to ghc-version-support.md +([#3494](https://github.com/haskell/haskell-language-server/pull/3494)) by @wz1000 +- Bump versions and add changelogs for 1.9.1.0 +([#3493](https://github.com/haskell/haskell-language-server/pull/3493)) by @hasufell +- Bump cachix/install-nix-action from 18 to 19 +([#3490](https://github.com/haskell/haskell-language-server/pull/3490)) by @dependabot[bot] +- Experiment with loading matrix values from a file +([#3481](https://github.com/haskell/haskell-language-server/pull/3481)) by @michaelpj +- 9.6 support for HLS +([#3480](https://github.com/haskell/haskell-language-server/pull/3480)) by @wz1000 +- Make the Ormolu plugin respect `.ormolu` fixity files when Ormolu ≥0.5.3.0 +([#3449](https://github.com/haskell/haskell-language-server/pull/3449)) by @amesgen +- Migrate release CI to github +([#3406](https://github.com/haskell/haskell-language-server/pull/3406)) by @hasufell +- Eval plugin for GHC 9.4 +([#3391](https://github.com/haskell/haskell-language-server/pull/3391)) by @wz1000 + +## 1.9.1.0 + +- Binaries for GHC 9.2.6. 
+- Fix for `hls-hlint-plugin` being unable to apply fixes due to the GHC libdir from CI machines (#3241) +- Improvements for recompilation avoidance, particularly for non-vscode editors + which don't support the LSP `workspace/didChangeWatchedFiles` method (#3458) + +## 1.9.0.0 + +- Binaries for GHC 9.4.3, GHC 9.4.4 and GHC 9.2.5. +- Dropped support for GHC 8.8 and GHC 8.10. +- New plugins including: + - Expanding record wild cards using hls-explicit-record-fields-plugin (#3304). + - Formatting cabal fields using cabal-fmt via hls-cabal-fmt-plugin (#2047). + - Warnings and errors for cabal files using hls-cabal-plugin (#2954). + - Folding ranges using hls-code-range-plugin (#3058). +- Support for many plugins, including the refactor, splice, retrie, gadt, hlint, fourmolu and class plugins. +- Completion for record dot fields (#3080). +- Performance and memory usage improvements. +- And many more bug fixes and improvements! + +### Pull requests merged for 1.9.0.0 + +- Enable plugin tests in CI for GHC 9.4 +([#3420](https://github.com/haskell/haskell-language-server/pull/3420)) by @wz1000 +- Add a mergify action to update PRs +([#3418](https://github.com/haskell/haskell-language-server/pull/3418)) by @michaelpj +- GHC 9.4: Compute the source hash before the preprocessor +([#3415](https://github.com/haskell/haskell-language-server/pull/3415)) by @wz1000 +- Clear the mi_globals field when generating an iface +([#3414](https://github.com/haskell/haskell-language-server/pull/3414)) by @wz1000 +- Various strictness improvements +([#3413](https://github.com/haskell/haskell-language-server/pull/3413)) by @wz1000 +- Remove unused GHC compat code +([#3412](https://github.com/haskell/haskell-language-server/pull/3412)) by @fendor +- Bump shake-bench to v0.2.0.0 +([#3411](https://github.com/haskell/haskell-language-server/pull/3411)) by @pepeiborra +- Support fourmolu 0.10 +([#3410](https://github.com/haskell/haskell-language-server/pull/3410)) by @brandonchinn178 +- Fix nix build CI +([#3404](https://github.com/haskell/haskell-language-server/pull/3404)) by @wavewave +- Fix fourmolu with -f-fixity-th in nix env +([#3400](https://github.com/haskell/haskell-language-server/pull/3400)) by @wavewave +- Correct list of GHC versions in caching.yml to match test.yml +([#3397](https://github.com/haskell/haskell-language-server/pull/3397)) by @fendor +- Add CI flows for 9.4.3 +([#3396](https://github.com/haskell/haskell-language-server/pull/3396)) by @pepeiborra +- Bump technote-space/get-diff-action from 6.1.1 to 6.1.2 +([#3392](https://github.com/haskell/haskell-language-server/pull/3392)) by @dependabot[bot] +- Unload once per linkable instead of once per splice +([#3390](https://github.com/haskell/haskell-language-server/pull/3390)) by @wz1000 +- Fix table in ghc-version-support.md +([#3389](https://github.com/haskell/haskell-language-server/pull/3389)) by @k4z4n0v4 +- Fix Nix CI, probably +([#3388](https://github.com/haskell/haskell-language-server/pull/3388)) by @lf- +- [hls-explicit-record-fields-plugin] Expand used fields only +([#3386](https://github.com/haskell/haskell-language-server/pull/3386)) by @ozkutuk +- Bump hlint version CI flow
+([#3384](https://github.com/haskell/haskell-language-server/pull/3384)) by @fendor +- Bump fkirc/skip-duplicate-actions from 5.2.0 to 5.3.0 +([#3381](https://github.com/haskell/haskell-language-server/pull/3381)) by @dependabot[bot] +- Reword intro section in releases.md +([#3378](https://github.com/haskell/haskell-language-server/pull/3378)) by @fendor +- Make redundant import removal work on PatSyn imports +([#3377](https://github.com/haskell/haskell-language-server/pull/3377)) by @ozkutuk +- Add CI flows for GHC 9.2.5 +([#3376](https://github.com/haskell/haskell-language-server/pull/3376)) by @fendor +- Delete dead code in hls-test-utils +([#3368](https://github.com/haskell/haskell-language-server/pull/3368)) by @fendor +- Bump gha versions in setup-build/action.yml +([#3366](https://github.com/haskell/haskell-language-server/pull/3366)) by @fendor +- Wingman copy old to new +([#3363](https://github.com/haskell/haskell-language-server/pull/3363)) by @santiweight +- Cleanup Development.IDE.CodeAction +([#3360](https://github.com/haskell/haskell-language-server/pull/3360)) by @santiweight +- Use latest GHC 9.2 and 9.4 in flake.nix +([#3354](https://github.com/haskell/haskell-language-server/pull/3354)) by @cydparser +- wingman: move wingman to new directory +([#3352](https://github.com/haskell/haskell-language-server/pull/3352)) by @santiweight +- Introduce common code for Recorders in Plugin Tests +([#3347](https://github.com/haskell/haskell-language-server/pull/3347)) by @fendor +- Add `RangeMap` for unified "in-range" filtering +([#3343](https://github.com/haskell/haskell-language-server/pull/3343)) by @ozkutuk +- Docs: update and split neovim/vim configurations +([#3342](https://github.com/haskell/haskell-language-server/pull/3342)) by @MrcJkb +- Extract AddArgument modules +([#3339](https://github.com/haskell/haskell-language-server/pull/3339)) by @santiweight +- Add hls-cabal-fmt-plugin to hackage release CI script and HLS library +([#3335](https://github.com/haskell/haskell-language-server/pull/3335)) by @fendor +- Ensure at least 1 capability +([#3334](https://github.com/haskell/haskell-language-server/pull/3334)) by @pepeiborra +- Add support for Fourmolu 0.9 +([#3331](https://github.com/haskell/haskell-language-server/pull/3331)) by @brandonchinn178 +- [skip ci] Add myself to CODEOWNERS +([#3329](https://github.com/haskell/haskell-language-server/pull/3329)) by @ozkutuk +- Typo fixes +([#3325](https://github.com/haskell/haskell-language-server/pull/3325)) by @Deltaspace0 +- Gitlab CI improvements +([#3324](https://github.com/haskell/haskell-language-server/pull/3324)) by @wz1000 +- Refactor overlay composition +([#3323](https://github.com/haskell/haskell-language-server/pull/3323)) by @Gabriella439 +- Add support for `.env` shells to `flake.nix` +([#3322](https://github.com/haskell/haskell-language-server/pull/3322)) by @Gabriella439 +- feat: update type signature during add argument action 
+([#3321](https://github.com/haskell/haskell-language-server/pull/3321)) by @santiweight +- Update refactor/splice/hlint/fourmolu/retrie/gadt plugin for GHC 9.4 +([#3317](https://github.com/haskell/haskell-language-server/pull/3317)) by @9999years +- Remove stack from installation docs since it is not supported anymore +([#3314](https://github.com/haskell/haskell-language-server/pull/3314)) by @fendor +- Bump cachix/cachix-action from 11 to 12 +([#3310](https://github.com/haskell/haskell-language-server/pull/3310)) by @dependabot[bot] +- Restore ability to run source plugins +([#3309](https://github.com/haskell/haskell-language-server/pull/3309)) by @JakobBruenker +- New plugin: Explicit record fields +([#3304](https://github.com/haskell/haskell-language-server/pull/3304)) by @ozkutuk +- support haddock-library 1.11 +([#3303](https://github.com/haskell/haskell-language-server/pull/3303)) by @kokobd +- Record diagnostics source rule when testing +([#3301](https://github.com/haskell/haskell-language-server/pull/3301)) by @pepeiborra +- Make a test more reliable +([#3300](https://github.com/haskell/haskell-language-server/pull/3300)) by @pepeiborra +- Change default cabal install target name on docs/installation.md +([#3298](https://github.com/haskell/haskell-language-server/pull/3298)) by @caiquefigueiredo +- Bump technote-space/get-diff-action from 6.1.0 to 6.1.1 +([#3293](https://github.com/haskell/haskell-language-server/pull/3293)) by @dependabot[bot] +- Bump cachix/install-nix-action from 17 to 18 +([#3292](https://github.com/haskell/haskell-language-server/pull/3292)) by @dependabot[bot] +- Bump cachix/cachix-action from 10 to 11 +([#3291](https://github.com/haskell/haskell-language-server/pull/3291)) by @dependabot[bot] +- Purge GHC 8.8 +([#3287](https://github.com/haskell/haskell-language-server/pull/3287)) by @michaelpj +- Bump partial ghc support warning to 9.4 +([#3286](https://github.com/haskell/haskell-language-server/pull/3286)) by @andys8 +- Improved message for missing command or plugin +([#3285](https://github.com/haskell/haskell-language-server/pull/3285)) by @andys8 +- Register Fourmolu plugin properties +([#3284](https://github.com/haskell/haskell-language-server/pull/3284)) by @georgefst +- Cleanup GHC macros (because min version is 8.8.4) +([#3281](https://github.com/haskell/haskell-language-server/pull/3281)) by @andys8 +- Remove unlawful Ord instance and replace it by a compare function +([#3279](https://github.com/haskell/haskell-language-server/pull/3279)) by @ChristophHochrainer +- Exclude the implicit prelude import (#2798) +([#3277](https://github.com/haskell/haskell-language-server/pull/3277)) by @ChristophHochrainer +- Fix typos in documentation +([#3274](https://github.com/haskell/haskell-language-server/pull/3274)) by @bendo +- Use an importance score to order the suggested import code action +([#3271](https://github.com/haskell/haskell-language-server/pull/3271)) by @ChristophHochrainer +- Update plugin tutorial 
+([#3266](https://github.com/haskell/haskell-language-server/pull/3266)) by @dyniec +- configuration-ghc-94.nix: Fix references to lsp and lsp-types source +([#3265](https://github.com/haskell/haskell-language-server/pull/3265)) by @akshaymankar +- Add suggestions about licenses in cabal file +([#3261](https://github.com/haskell/haskell-language-server/pull/3261)) by @dyniec +- Fix action removes ticks from TemplateHaskellQuotes (#628) +([#3260](https://github.com/haskell/haskell-language-server/pull/3260)) by @bendo +- Hlint: A handful of fixes to hints +([#3259](https://github.com/haskell/haskell-language-server/pull/3259)) by @andys8 +- Support ghc 9.4 for hls-class-plugin +([#3258](https://github.com/haskell/haskell-language-server/pull/3258)) by @July541 +- Fix nix developement shell +([#3257](https://github.com/haskell/haskell-language-server/pull/3257)) by @akshaymankar +- GCH -> GHC +([#3252](https://github.com/haskell/haskell-language-server/pull/3252)) by @michaelpj +- Docs: Plugin Support hls-explicit-fixity-plugin +([#3251](https://github.com/haskell/haskell-language-server/pull/3251)) by @andys8 +- Fix dead link to supported GHC versions +([#3244](https://github.com/haskell/haskell-language-server/pull/3244)) by @buggymcbugfix +- Update link to supported versions in README +([#3242](https://github.com/haskell/haskell-language-server/pull/3242)) by @citrusmunch +- Bump fkirc/skip-duplicate-actions from 5.1.0 to 5.2.0 +([#3239](https://github.com/haskell/haskell-language-server/pull/3239)) by @dependabot[bot] +- Move new imports down the code action list +([#3235](https://github.com/haskell/haskell-language-server/pull/3235)) by @kokobd +- Improve memory characteristics of ExportsMap +([#3231](https://github.com/haskell/haskell-language-server/pull/3231)) by @wz1000 +- [skip ci] Remove myself from codeowners +([#3230](https://github.com/haskell/haskell-language-server/pull/3230)) by @jneira +- Fix error in code range +([#3229](https://github.com/haskell/haskell-language-server/pull/3229)) by @kokobd +- Use nixpkgs variants of Sphinx packages +([#3227](https://github.com/haskell/haskell-language-server/pull/3227)) by @ozkutuk +- Bump fkirc/skip-duplicate-actions from 4.0.0 to 5.1.0 +([#3226](https://github.com/haskell/haskell-language-server/pull/3226)) by @dependabot[bot] +- Add `source-repository` to all cabal files +([#3219](https://github.com/haskell/haskell-language-server/pull/3219)) by @hololeap +- hls-hlint-plugin: Update README.md +([#3216](https://github.com/haskell/haskell-language-server/pull/3216)) by @hololeap +- wrapper.in: Require runtime ghc-pkgs to be an abi compatible superset of bootpkgs +([#3214](https://github.com/haskell/haskell-language-server/pull/3214)) by @maralorn +- Add diagnostics to Stan descriptor +([#3213](https://github.com/haskell/haskell-language-server/pull/3213)) by @pepeiborra +- Document the `stack` requirement in wrapper tests +([#3212](https://github.com/haskell/haskell-language-server/pull/3212)) by @ozkutuk +- Improve haddock comments 
+([#3207](https://github.com/haskell/haskell-language-server/pull/3207)) by @kokobd +- Implement sharing for hls-graph Keys +([#3206](https://github.com/haskell/haskell-language-server/pull/3206)) by @wz1000 +- Improve hls-fixity-plugin +([#3205](https://github.com/haskell/haskell-language-server/pull/3205)) by @wz1000 +- Implement completionItem/resolve +([#3204](https://github.com/haskell/haskell-language-server/pull/3204)) by @wz1000 +- Sort vscode extension schema json by keys +([#3203](https://github.com/haskell/haskell-language-server/pull/3203)) by @fendor +- docs/supported-versions: Fix typo and more precise brittany support +([#3201](https://github.com/haskell/haskell-language-server/pull/3201)) by @maralorn +- Stylish Haskell: CPP parse issues +([#3199](https://github.com/haskell/haskell-language-server/pull/3199)) by @andys8 +- Bump technote-space/get-diff-action from 4.0.1 to 6.1.0 +([#3198](https://github.com/haskell/haskell-language-server/pull/3198)) by @dependabot[bot] +- Log plugin name and attribute errors to plugins +([#3194](https://github.com/haskell/haskell-language-server/pull/3194)) by @pepeiborra +- Support optional plugins +([#3193](https://github.com/haskell/haskell-language-server/pull/3193)) by @pepeiborra +- Add policy on plugin support tiers +([#3189](https://github.com/haskell/haskell-language-server/pull/3189)) by @michaelpj +- Fix broken call-hierarchy-plugin-tests for type signatures +([#3188](https://github.com/haskell/haskell-language-server/pull/3188)) by @July541 +- Update supported GHC versions doc +([#3186](https://github.com/haskell/haskell-language-server/pull/3186)) by @michaelpj +- Docs: Fix checkParents documentation +([#3184](https://github.com/haskell/haskell-language-server/pull/3184)) by @andys8 +- Configuration: more advanced Vim / Coc example (suggestion) +([#3181](https://github.com/haskell/haskell-language-server/pull/3181)) by @andys8 +- Docs: List stan plugin +([#3180](https://github.com/haskell/haskell-language-server/pull/3180)) by @andys8 +- Stan: Respect plugin configuration globalOn +([#3179](https://github.com/haskell/haskell-language-server/pull/3179)) by @andys8 +- Solve formatting issues (stylish-haskell, pre-commit CI) +([#3171](https://github.com/haskell/haskell-language-server/pull/3171)) by @andys8 +- remove manual heap profiling from ghcide +([#3168](https://github.com/haskell/haskell-language-server/pull/3168)) by @pepeiborra +- Refactor plugin: Prefer code action +([#3167](https://github.com/haskell/haskell-language-server/pull/3167)) by @andys8 +- Fixes the flake deps to align with cabal bounds +([#3163](https://github.com/haskell/haskell-language-server/pull/3163)) by @mjrussell +- Remove unused build-depends and install warnings +([#3155](https://github.com/haskell/haskell-language-server/pull/3155)) by @pepeiborra +- Release script fixes +([#3154](https://github.com/haskell/haskell-language-server/pull/3154)) by @wz1000 +- Allow hackage upload 
+([#3153](https://github.com/haskell/haskell-language-server/pull/3153)) by @wz1000 +- support add-argument action +([#3149](https://github.com/haskell/haskell-language-server/pull/3149)) by @santiweight +- Only run the pre-commit hook on changed files +([#3145](https://github.com/haskell/haskell-language-server/pull/3145)) by @drsooch +- unescape printable characters +([#3140](https://github.com/haskell/haskell-language-server/pull/3140)) by @kokobd +- nix: fix nix environment for GHC 9.4 +([#3133](https://github.com/haskell/haskell-language-server/pull/3133)) by @guibou +- Drop compatibility with GHC 8.6.5 +([#3101](https://github.com/haskell/haskell-language-server/pull/3101)) by @pepeiborra +- Feat: basic record dot completions +([#3080](https://github.com/haskell/haskell-language-server/pull/3080)) by @coltenwebb +- Feat: Folding Ranges +([#3058](https://github.com/haskell/haskell-language-server/pull/3058)) by @sloorush +- Parse .cabal files; show error and warning diagnostics +([#2954](https://github.com/haskell/haskell-language-server/pull/2954)) by @runeksvendsen +- Make splice plugin compatible with GHC 9.2 +([#2816](https://github.com/haskell/haskell-language-server/pull/2816)) by @eddiejessup +- Add formatting plugin for cabal files which uses cabal-fmt +([#2047](https://github.com/haskell/haskell-language-server/pull/2047)) by @VeryMilkyJoe + +## 1.8.0.0 + +- Binaries for GHC 9.2.3 and GHC 9.2.4 +- Initial support for GHC 9.4 with binaries for GHC 9.4.1 and GHC 9.4.2 +- Startup time and performance improvements on projects using Template Haskell by serializing intermediate core (#2813) +- Memory usage improvements due to using a packed representation for filepaths (#3067, @kokobd) +- Improvements for hls-class-plugin (#2920, @July541) +- The new hls-gadt-plugin (#2899, @July541) +- Moving code actions from ghcide to the new hls-refactor-plugin (#3091, @wz1000) +- Many more improvements and bug fixes thanks to our contributors! + +### Pull requests merged for 1.8.0.0 + +- Alternate Number Format Plugin buildable with GHC 9.4 +([#3138](https://github.com/haskell/haskell-language-server/pull/3138)) by @drsooch +- Enable a bunch of plugins that build with ghc 9.4 +([#3136](https://github.com/haskell/haskell-language-server/pull/3136)) by @pepeiborra +- Enable support for 9.4 on windows +([#3132](https://github.com/haskell/haskell-language-server/pull/3132)) by @wz1000 +- flake.nix Add ghcide-bench to sourceDirs +([#3125](https://github.com/haskell/haskell-language-server/pull/3125)) by @akshaymankar +- Update hls-retrie-plugin to be usable with 9.2.4. 
+([#3120](https://github.com/haskell/haskell-language-server/pull/3120)) by @drsooch +- Add link to homepage and issues for `hie-compat` +([#3119](https://github.com/haskell/haskell-language-server/pull/3119)) by @parsonsmatt +- Remove pluginId from getNormalizedFilePath error message +([#3118](https://github.com/haskell/haskell-language-server/pull/3118)) by @drsooch +- HLS benchmarks +([#3117](https://github.com/haskell/haskell-language-server/pull/3117)) by @pepeiborra +- Fix --testing +([#3113](https://github.com/haskell/haskell-language-server/pull/3113)) by @pepeiborra +- Deduplicate HLS plugins +([#3112](https://github.com/haskell/haskell-language-server/pull/3112)) by @pepeiborra +- Do not send Heap Stats to the LSP log +([#3111](https://github.com/haskell/haskell-language-server/pull/3111)) by @pepeiborra +- Send begin progress message synchronously +([#3110](https://github.com/haskell/haskell-language-server/pull/3110)) by @pepeiborra +- Remove unused config in hls-class-plugin +([#3107](https://github.com/haskell/haskell-language-server/pull/3107)) by @July541 +- Support fourmolu-0.8.1.0 +([#3103](https://github.com/haskell/haskell-language-server/pull/3103)) by @brandonchinn178 +- Probe-tools: Print stack ghc version +([#3093](https://github.com/haskell/haskell-language-server/pull/3093)) by @andys8 +- Fix #3047 +([#3092](https://github.com/haskell/haskell-language-server/pull/3092)) by @July541 +- Remove exactprint dependencies from ghcide by introducing hls-refactor-plugin. +([#3091](https://github.com/haskell/haskell-language-server/pull/3091)) by @wz1000 +- Stan: Avoid terminal colors in messages +([#3090](https://github.com/haskell/haskell-language-server/pull/3090)) by @andys8 +- Support ghc-9.2.4 +([#3085](https://github.com/haskell/haskell-language-server/pull/3085)) by @July541 +- Bump Nix flake GHC 9.2.3 to 9.2.4 +([#3081](https://github.com/haskell/haskell-language-server/pull/3081)) by @cydparser +- fix lsp-types benchmark +([#3079](https://github.com/haskell/haskell-language-server/pull/3079)) by @pepeiborra +- Add support for Fourmolu 0.8 +([#3078](https://github.com/haskell/haskell-language-server/pull/3078)) by @brandonchinn178 +- upgrade lsp to 1.5 +([#3072](https://github.com/haskell/haskell-language-server/pull/3072)) by @kokobd +- Bump actions/cache from 2 to 3 +([#3071](https://github.com/haskell/haskell-language-server/pull/3071)) by @dependabot[bot] +- Bump actions/setup-python from 3 to 4 +([#3070](https://github.com/haskell/haskell-language-server/pull/3070)) by @dependabot[bot] +- Run the benchmark suite on GHC 9.2.3 +([#3069](https://github.com/haskell/haskell-language-server/pull/3069)) by @pepeiborra +- Simplify instructions about 'ghcup compile hls' +([#3068](https://github.com/haskell/haskell-language-server/pull/3068)) by @hasufell +- Improve performance of NormalizedFilePath +([#3067](https://github.com/haskell/haskell-language-server/pull/3067)) by @kokobd +- add a prefix to plugin CPP definitions 
+([#3065](https://github.com/haskell/haskell-language-server/pull/3065)) by @kokobd +- Add Github precommit workflow +([#3060](https://github.com/haskell/haskell-language-server/pull/3060)) by @lunaticare +- Run pre-commit hooks +([#3059](https://github.com/haskell/haskell-language-server/pull/3059)) by @lunaticare +- Fix grammar and spelling errors in configuration.md +([#3056](https://github.com/haskell/haskell-language-server/pull/3056)) by @arsenkhy +- Remove redundant WARNING prefix +([#3055](https://github.com/haskell/haskell-language-server/pull/3055)) by @michaelpj +- fix a typo in src/Ide/Plugin/Class/CodeLens.hs +([#3053](https://github.com/haskell/haskell-language-server/pull/3053)) by @tensorknower69 +- fix record-dot-syntax test +([#3051](https://github.com/haskell/haskell-language-server/pull/3051)) by @coltenwebb +- build(nix): ghc922 -> ghc923 +([#3049](https://github.com/haskell/haskell-language-server/pull/3049)) by @teto +- build(nix): bumped gitignore dependency +([#3048](https://github.com/haskell/haskell-language-server/pull/3048)) by @teto +- Update issue templates +([#3044](https://github.com/haskell/haskell-language-server/pull/3044)) by @michaelpj +- Simplify hlint config +([#3038](https://github.com/haskell/haskell-language-server/pull/3038)) by @michaelpj +- handle trailing comma in import list properly +([#3035](https://github.com/haskell/haskell-language-server/pull/3035)) by @kokobd +- upgrade ghc-check to fix #3002 +([#3034](https://github.com/haskell/haskell-language-server/pull/3034)) by @kokobd +- Fix Stack build with Nix on macOS +([#3031](https://github.com/haskell/haskell-language-server/pull/3031)) by @lunaticare +- haskell-language-server: add lower bound for githash +([#3030](https://github.com/haskell/haskell-language-server/pull/3030)) by @Bodigrim +- hls-eval-plugin: add lower bound for parser-combinators +([#3029](https://github.com/haskell/haskell-language-server/pull/3029)) by @Bodigrim +- hls-fourmolu-plugin: add lower bound for process-extras +([#3028](https://github.com/haskell/haskell-language-server/pull/3028)) by @Bodigrim +- ghcide: lower bounds +([#3025](https://github.com/haskell/haskell-language-server/pull/3025)) by @Bodigrim +- remove all usages of pre-commit-check in nix +([#3024](https://github.com/haskell/haskell-language-server/pull/3024)) by @kokobd +- hls-plugin-api: add lower bounds +([#3022](https://github.com/haskell/haskell-language-server/pull/3022)) by @Bodigrim +- hls-graph: add lower bound for async +([#3021](https://github.com/haskell/haskell-language-server/pull/3021)) by @Bodigrim +- Hlint: CodeAction with isPreferred +([#3018](https://github.com/haskell/haskell-language-server/pull/3018)) by @andys8 +- Record Dot Hover Types +([#3016](https://github.com/haskell/haskell-language-server/pull/3016)) by @coltenwebb +- re-enable haddock +([#3015](https://github.com/haskell/haskell-language-server/pull/3015)) by @kokobd +- add Helix to configuration.md 
+([#3014](https://github.com/haskell/haskell-language-server/pull/3014)) by @0rphee +- Renaming of indirect references (RecordFieldPuns) +([#3013](https://github.com/haskell/haskell-language-server/pull/3013)) by @OliverMadine +- Revert back to Warning not Error in Logging `ResponseErrors` +([#3009](https://github.com/haskell/haskell-language-server/pull/3009)) by @drsooch +- Disable flaky test on Windows +([#3008](https://github.com/haskell/haskell-language-server/pull/3008)) by @michaelpj +- Improve troubleshooting and installation docs a bit +([#3004](https://github.com/haskell/haskell-language-server/pull/3004)) by @michaelpj +- refactor selection range plugin +([#3003](https://github.com/haskell/haskell-language-server/pull/3003)) by @kokobd +- Hlint more partial functions, and Debug.Trace +([#3000](https://github.com/haskell/haskell-language-server/pull/3000)) by @michaelpj +- Don't use typecheck rule for non FOIs in refine imports plugin +([#2995](https://github.com/haskell/haskell-language-server/pull/2995)) by @wz1000 +- GHC 9.4 compatibility + Multiple Home Units +([#2994](https://github.com/haskell/haskell-language-server/pull/2994)) by @wz1000 +- unify pre-commit hook & update Gitpod config +([#2991](https://github.com/haskell/haskell-language-server/pull/2991)) by @kokobd +- Log response errors returned from Plugins +([#2988](https://github.com/haskell/haskell-language-server/pull/2988)) by @drsooch +- Add associated type families to local completions +([#2987](https://github.com/haskell/haskell-language-server/pull/2987)) by @gasparattila +- Remove some partial functions from Shake.hs +([#2986](https://github.com/haskell/haskell-language-server/pull/2986)) by @michaelpj +- Clean up ghc-9.0 partial support contents +([#2983](https://github.com/haskell/haskell-language-server/pull/2983)) by @July541 +- fix new import position +([#2981](https://github.com/haskell/haskell-language-server/pull/2981)) by @kokobd +- Implement PluginMethod for hard-wired in handlers +([#2977](https://github.com/haskell/haskell-language-server/pull/2977)) by @fendor +- Set up partial functions ratchet +([#2974](https://github.com/haskell/haskell-language-server/pull/2974)) by @michaelpj +- Turn HLS-wrapper into an LSP Server +([#2960](https://github.com/haskell/haskell-language-server/pull/2960)) by @smatting +- More Fourmolu improvements +([#2959](https://github.com/haskell/haskell-language-server/pull/2959)) by @georgefst +- hls-class-plugin: Only create placeholders for unimplemented methods +([#2956](https://github.com/haskell/haskell-language-server/pull/2956)) by @akshaymankar +- Fix Fourmolu 0.7 support +([#2950](https://github.com/haskell/haskell-language-server/pull/2950)) by @georgefst +- Teach HLS about different file extensions +([#2945](https://github.com/haskell/haskell-language-server/pull/2945)) by @fendor +- Support `fourmolu ^>= 0.7` +([#2944](https://github.com/haskell/haskell-language-server/pull/2944)) by @parsonsmatt +- hls-explicit-fixity-plugin 
+([#2941](https://github.com/haskell/haskell-language-server/pull/2941)) by @July541 +- chore(nix): bump nixpkgs to prevent glibc issues +([#2937](https://github.com/haskell/haskell-language-server/pull/2937)) by @teto +- Support ghc-9.2.3 +([#2936](https://github.com/haskell/haskell-language-server/pull/2936)) by @July541 +- Typo fix, dependecies -> dependencies +([#2934](https://github.com/haskell/haskell-language-server/pull/2934)) by @vikrem +- Update Archlinux installation section +([#2933](https://github.com/haskell/haskell-language-server/pull/2933)) by @marcin-rzeznicki +- docs/installation: Remove unused clone with submodule command +([#2930](https://github.com/haskell/haskell-language-server/pull/2930)) by @sloorush +- Omit more parens for wildcard type signature +([#2929](https://github.com/haskell/haskell-language-server/pull/2929)) by @sergv +- Add `throwPluginError` to Plugin Utilities +([#2924](https://github.com/haskell/haskell-language-server/pull/2924)) by @drsooch +- hls-class-plugin enhancement +([#2920](https://github.com/haskell/haskell-language-server/pull/2920)) by @July541 +- Bump documentation requirements +([#2918](https://github.com/haskell/haskell-language-server/pull/2918)) by @xsebek +- Document eval plugin limitations +([#2917](https://github.com/haskell/haskell-language-server/pull/2917)) by @xsebek +- Replace TextDocumentIdentifier with Uri in getNormalizedFilePath +([#2912](https://github.com/haskell/haskell-language-server/pull/2912)) by @July541 +- Fix hover format +([#2911](https://github.com/haskell/haskell-language-server/pull/2911)) by @July541 +- Fix multiline eval plugin padding +([#2910](https://github.com/haskell/haskell-language-server/pull/2910)) by @xsebek +- Stan integration #258 +([#2908](https://github.com/haskell/haskell-language-server/pull/2908)) by @uhbif19 +- A plugin for GADT syntax converter +([#2899](https://github.com/haskell/haskell-language-server/pull/2899)) by @July541 +- Fix DisplayTHWarning error +([#2895](https://github.com/haskell/haskell-language-server/pull/2895)) by @pepeiborra +- Enable hls-eval-plugin test on ghc-9.2.2 +([#2893](https://github.com/haskell/haskell-language-server/pull/2893)) by @July541 +- nix update +([#2892](https://github.com/haskell/haskell-language-server/pull/2892)) by @michaelpj +- Build hls-alternate-number-format-plugin with stack.yaml +([#2891](https://github.com/haskell/haskell-language-server/pull/2891)) by @July541 +- Modify ghcide requirements of hls-change-type-signature-plugin +([#2889](https://github.com/haskell/haskell-language-server/pull/2889)) by @July541 +- Fix hls-call-hierarchy-plugin tests +([#2888](https://github.com/haskell/haskell-language-server/pull/2888)) by @July541 +- Add .txt files as extra-source-files for hls-change-type-signature-plugin +([#2887](https://github.com/haskell/haskell-language-server/pull/2887)) by @cdepillabout +- Prefer Data.HashSet.member to Data.Foldable.elem +([#2886](https://github.com/haskell/haskell-language-server/pull/2886)) 
by @sergv +- no longer disable -dynamic in CI +([#2885](https://github.com/haskell/haskell-language-server/pull/2885)) by @pepeiborra +- hls-pragmas-plugin requires ghcide >= 1.7 +([#2884](https://github.com/haskell/haskell-language-server/pull/2884)) by @Bodigrim +- Make iface-error-test-1 less flaky +([#2882](https://github.com/haskell/haskell-language-server/pull/2882)) by @pepeiborra +- hls-haddock-comments does not support ghc-exactprint >= 1.0 +([#2878](https://github.com/haskell/haskell-language-server/pull/2878)) by @Bodigrim +- Restore compat. with prettyprinter 1.6 +([#2877](https://github.com/haskell/haskell-language-server/pull/2877)) by @pepeiborra +- ghcide requires ghc-exactprint >= 1.4 +([#2876](https://github.com/haskell/haskell-language-server/pull/2876)) by @Bodigrim +- ghcide needs prettyprinter-1.7 to build +([#2875](https://github.com/haskell/haskell-language-server/pull/2875)) by @juhp +- Review project stack descriptors according to #2533 +([#2874](https://github.com/haskell/haskell-language-server/pull/2874)) by @pepeiborra +- hls-call-hierarchy-plugin Patch release +([#2873](https://github.com/haskell/haskell-language-server/pull/2873)) by @pepeiborra +- Expand input to pragma if available +([#2871](https://github.com/haskell/haskell-language-server/pull/2871)) by @July541 +- Fix hanging redundant import on Unicode function +([#2870](https://github.com/haskell/haskell-language-server/pull/2870)) by @drsooch +- Compatibility with older aeson releases +([#2868](https://github.com/haskell/haskell-language-server/pull/2868)) by @pepeiborra +- simplify hlint plugin Cabal descriptor +([#2867](https://github.com/haskell/haskell-language-server/pull/2867)) by @pepeiborra +- Consolidate all cabal.project files +([#2866](https://github.com/haskell/haskell-language-server/pull/2866)) by @pepeiborra +- release script fixes +([#2861](https://github.com/haskell/haskell-language-server/pull/2861)) by @wz1000 +- Support hls-hlint-plugin and hls-stylish-plugin for ghc9.0 and ghc9.2 +([#2854](https://github.com/haskell/haskell-language-server/pull/2854)) by @July541 +- Bump haskell/actions from 1 to 2 +([#2852](https://github.com/haskell/haskell-language-server/pull/2852)) by @dependabot[bot] +- Add scripts for releases and final 1.7 tweaks +([#2850](https://github.com/haskell/haskell-language-server/pull/2850)) by @wz1000 +- Fix Completion document format +([#2848](https://github.com/haskell/haskell-language-server/pull/2848)) by @July541 +- Improve name export code action +([#2847](https://github.com/haskell/haskell-language-server/pull/2847)) by @sergv +- Update plugin support table +([#2840](https://github.com/haskell/haskell-language-server/pull/2840)) by @michaelpj +- Unify showSDocUnsafe +([#2830](https://github.com/haskell/haskell-language-server/pull/2830)) by @July541 +- ghcide: remove redundant env NIX_GHC_LIBDIR +([#2819](https://github.com/haskell/haskell-language-server/pull/2819)) by @sloorush +- Serialize Core 
+([#2813](https://github.com/haskell/haskell-language-server/pull/2813)) by @wz1000 +- Expose runtime metrics via EKG +([#2267](https://github.com/haskell/haskell-language-server/pull/2267)) by @pepeiborra + +## 1.7.0.0 + +- Distribute dynamically linked binaries for HLS to avoid statically linking against GLIBC + and system libraries, and to avoid unpredictable failures due to subtle differences + between the GHC used to compile HLS and the GHC installed on the user's machine + (@hasufell, #2675, #2431) + +- Improved recompilation avoidance in projects that make use of Template Haskell (#2316). See + the [blog post](https://well-typed.com/blog/2022/04/hls-performance/) for more details. + This release includes the `avoid-recompile` set of commits described in the blog post. + +- Support for GHC 9.2.2 + +- Removal of the HLS installer scripts, as mentioned in the deprecation notice of the last release (#2773) + +- Many more improvements and bugs fixed, thanks to our contributors! + +### Pull requests merged for 1.7.0.0 + +- Restore concise type variables in ghc-9.2 +([#2828](https://github.com/haskell/haskell-language-server/pull/2828)) by @July541 +- Should no related code lens if the module name is correct +([#2826](https://github.com/haskell/haskell-language-server/pull/2826)) by @July541 +- Bump cachix/install-nix-action from 16 to 17 +([#2823](https://github.com/haskell/haskell-language-server/pull/2823)) by @dependabot[bot] +- Bump actions/upload-artifact from 2 to 3 +([#2822](https://github.com/haskell/haskell-language-server/pull/2822)) by @dependabot[bot] +- Bump actions/download-artifact from 2 to 3 +([#2821](https://github.com/haskell/haskell-language-server/pull/2821)) by @dependabot[bot] +- bench: Add more metrics +([#2814](https://github.com/haskell/haskell-language-server/pull/2814)) by @wz1000 +- Enable rename plugin +([#2809](https://github.com/haskell/haskell-language-server/pull/2809)) by @OliverMadine +- Fix `cabal install` commands for local HLS build in docs +([#2807](https://github.com/haskell/haskell-language-server/pull/2807)) by @9999years +- Bump actions/cache from 2 to 3 +([#2806](https://github.com/haskell/haskell-language-server/pull/2806)) by @dependabot[bot] +- [hls-graph] Optimise waitConcurrently +([#2805](https://github.com/haskell/haskell-language-server/pull/2805)) by @pepeiborra +- [bench] track changes to hls-* projects +([#2803](https://github.com/haskell/haskell-language-server/pull/2803)) by @pepeiborra +- Fix Show instance +([#2802](https://github.com/haskell/haskell-language-server/pull/2802)) by @pepeiborra +- Provide all format suggestions in AlternatFormat Code Action +([#2790](https://github.com/haskell/haskell-language-server/pull/2790)) by @drsooch +- Avoid race conditions with VFS and VFS versions +([#2789](https://github.com/haskell/haskell-language-server/pull/2789)) by @wz1000 +- Don't show the redundant space +([#2788](https://github.com/haskell/haskell-language-server/pull/2788)) by @July541 +- Target GHC 9.2.2 +([#2787](https://github.com/haskell/haskell-language-server/pull/2787)) by @pepeiborra +- Allow import all constructors
+([#2782](https://github.com/haskell/haskell-language-server/pull/2782)) by @July541 +- Customizable TH warning +([#2781](https://github.com/haskell/haskell-language-server/pull/2781)) by @pepeiborra +- Fix #2693 +([#2780](https://github.com/haskell/haskell-language-server/pull/2780)) by @wz1000 +- Add Gentoo installation details +([#2778](https://github.com/haskell/haskell-language-server/pull/2778)) by @paul-jewell +- Eval plugin: mark exceptions +([#2775](https://github.com/haskell/haskell-language-server/pull/2775)) by @xsebek +- Fix 2 space leaks +([#2774](https://github.com/haskell/haskell-language-server/pull/2774)) by @pepeiborra +- Delete HLS installer scripts +([#2773](https://github.com/haskell/haskell-language-server/pull/2773)) by @fendor +- Purge some more hslogger +([#2770](https://github.com/haskell/haskell-language-server/pull/2770)) by @michaelpj +- Abbreviate explicit import code lenses +([#2769](https://github.com/haskell/haskell-language-server/pull/2769)) by @michaelpj +- Review masking and add traces when things don't cancel timely +([#2768](https://github.com/haskell/haskell-language-server/pull/2768)) by @pepeiborra +- Upgrade to hie-bios 0.9.1 +([#2766](https://github.com/haskell/haskell-language-server/pull/2766)) by @fendor +- Avoid extra parens for wildcard type signature +([#2764](https://github.com/haskell/haskell-language-server/pull/2764)) by @xsebek +- Add an option to run Fourmolu via the CLI interface of a separate binary, rather than the bundled library +([#2763](https://github.com/haskell/haskell-language-server/pull/2763)) by @georgefst +- Fix Change Type Signature Plugin test suite for 9.2.1 +([#2761](https://github.com/haskell/haskell-language-server/pull/2761)) by @drsooch +- Bump actions/checkout from 2 to 3 +([#2759](https://github.com/haskell/haskell-language-server/pull/2759)) by @dependabot[bot] +- Refactor LSP logger and log via window/logMessage also +([#2758](https://github.com/haskell/haskell-language-server/pull/2758)) by @michaelpj +- Fix the tower of Babel +([#2757](https://github.com/haskell/haskell-language-server/pull/2757)) by @hasufell +- Implement cycle detection in hls-graph +([#2756](https://github.com/haskell/haskell-language-server/pull/2756)) by @pepeiborra +- Adjust rendering of error logs and drop unneeded MonadUnliftIO instance +([#2755](https://github.com/haskell/haskell-language-server/pull/2755)) by @pepeiborra +- Estimate file versions safely +([#2753](https://github.com/haskell/haskell-language-server/pull/2753)) by @pepeiborra +- Fix test failure for AlternateNumberFormat +([#2752](https://github.com/haskell/haskell-language-server/pull/2752)) by @drsooch +- LSP window message log recorder +([#2750](https://github.com/haskell/haskell-language-server/pull/2750)) by @pepeiborra +- Fix FreeBSD bindist build +([#2748](https://github.com/haskell/haskell-language-server/pull/2748)) by @hasufell +- Improve bindist makefile +([#2746](https://github.com/haskell/haskell-language-server/pull/2746)) by 
@hasufell +- Fix flake.lock +([#2743](https://github.com/haskell/haskell-language-server/pull/2743)) by @michaelpj +- Add failing test for variables in hovers +([#2742](https://github.com/haskell/haskell-language-server/pull/2742)) by @michaelpj +- Update Define Function Code Action to have knowledge of comments +([#2740](https://github.com/haskell/haskell-language-server/pull/2740)) by @drsooch +- Upgrade to hie-bios 0.9.0 +([#2738](https://github.com/haskell/haskell-language-server/pull/2738)) by @fendor +- Track file versions accurately. +([#2735](https://github.com/haskell/haskell-language-server/pull/2735)) by @wz1000 +- Fix hls-class-plugin on ghc-9.2 +([#2733](https://github.com/haskell/haskell-language-server/pull/2733)) by @July541 +- Bump actions/github-script from 2 to 6 +([#2730](https://github.com/haskell/haskell-language-server/pull/2730)) by @dependabot[bot] +- Delete the Telemetry log level +([#2727](https://github.com/haskell/haskell-language-server/pull/2727)) by @michaelpj +- Tone down logging of plugin rules +([#2723](https://github.com/haskell/haskell-language-server/pull/2723)) by @pepeiborra +- Troubleshooting: GHC 9.2 partial support +([#2722](https://github.com/haskell/haskell-language-server/pull/2722)) by @andys8 +- Remove `getHspecFormattedConfig` which is no longer used +([#2721](https://github.com/haskell/haskell-language-server/pull/2721)) by @hololeap +- Fix crash for non-LSP modes wrt #2627 +([#2719](https://github.com/haskell/haskell-language-server/pull/2719)) by @hasufell +- Wingman: Don't use keywords for variable names +([#2717](https://github.com/haskell/haskell-language-server/pull/2717)) by @isovector +- Expose DisplayTHWarning (backport #2712) +([#2714](https://github.com/haskell/haskell-language-server/pull/2714)) by @mergify[bot] +- Send LSP error when GHC cannot be found +([#2713](https://github.com/haskell/haskell-language-server/pull/2713)) by @hasufell +- Expose DisplayTHWarning +([#2712](https://github.com/haskell/haskell-language-server/pull/2712)) by @pepeiborra +- Improve wrapper cradle errors +([#2711](https://github.com/haskell/haskell-language-server/pull/2711)) by @hasufell +- Fix min bound for ghc-exactprint dependency in hls-class-plugin +([#2710](https://github.com/haskell/haskell-language-server/pull/2710)) by @pepeiborra +- Remove duplicate help messages & format CRLF to LF +([#2709](https://github.com/haskell/haskell-language-server/pull/2709)) by @July541 +- Add @July541 for call-hierarchy-plugin +([#2708](https://github.com/haskell/haskell-language-server/pull/2708)) by @July541 +- Fix releasing +([#2707](https://github.com/haskell/haskell-language-server/pull/2707)) by @hasufell +- Print info message when ignoring a file due to a none cradle +([#2701](https://github.com/haskell/haskell-language-server/pull/2701)) by @ThomasCrevoisier +- fix: handle comma in extend import list with ghc 9.2 +([#2697](https://github.com/haskell/haskell-language-server/pull/2697)) by @guibou +- Build Alternate Number Format Plugin with GHC 9.2 
+([#2696](https://github.com/haskell/haskell-language-server/pull/2696)) by @drsooch +- Optionally publish packages definitely in the hackage workflow +([#2689](https://github.com/haskell/haskell-language-server/pull/2689)) by @jneira +- Set -dynamic in cabal.project +([#2688](https://github.com/haskell/haskell-language-server/pull/2688)) by @jneira +- Multi component issues in GHC 9.2 +([#2687](https://github.com/haskell/haskell-language-server/pull/2687)) by @pepeiborra +- Fix flaky boot def test +([#2686](https://github.com/haskell/haskell-language-server/pull/2686)) by @eddiemundo +- Fix typos in troubleshooting.md +([#2680](https://github.com/haskell/haskell-language-server/pull/2680)) by @visortelle +- Add pre-commit hook for cleaning up mixed-line endings +([#2679](https://github.com/haskell/haskell-language-server/pull/2679)) by @drsooch +- Add a test for #2673 +([#2676](https://github.com/haskell/haskell-language-server/pull/2676)) by @pepeiborra +- Implement distribution of dynamic builds +([#2675](https://github.com/haskell/haskell-language-server/pull/2675)) by @hasufell +- Restore eval plugin build for GHC 9.2 +([#2669](https://github.com/haskell/haskell-language-server/pull/2669)) by @guibou +- Change Type Signature Plugin +([#2660](https://github.com/haskell/haskell-language-server/pull/2660)) by @drsooch +- Nix flake fix dev shells +([#2655](https://github.com/haskell/haskell-language-server/pull/2655)) by @guibou +- Speed up fuzzy search +([#2639](https://github.com/haskell/haskell-language-server/pull/2639)) by @Bodigrim +- Improve logging +([#2558](https://github.com/haskell/haskell-language-server/pull/2558)) by @eddiemundo +- Improve recompilation avoidance in the presence of TH +([#2316](https://github.com/haskell/haskell-language-server/pull/2316)) by @wz1000 + +## 1.6.1.1 (*only hackage release*) + +- A release to update haskell-language-server.cabal on Hackage, setting the executable component to be built dynamically linked + - The motivation is to build, by default, an hls executable that works with Template Haskell + - This doesn't need a full release because it does not affect the release executables, which remain fully static + +### Pull requests merged for 1.6.1.1 + +- Prepare 1.6.1.1 (only hackage release) +([#2681](https://github.com/haskell/haskell-language-server/pull/2681)) by @jneira +- Add the -dynamic flag and update build instructions +([#2668](https://github.com/haskell/haskell-language-server/pull/2668)) by @pepeiborra + +## 1.6.1.0 + +This is a bug fix release to restore a fully statically linked haskell-language-server-wrapper executable.
+ +- It has been reported [here](https://github.com/haskell/haskell-language-server/issues/2650) + - Thanks to all reporters for the fast feedback +- The bug has been traced [here](https://github.com/haskell/haskell-language-server/pull/2615#discussion_r795059782) +- And the fix is in [this PR](https://github.com/haskell/haskell-language-server/pull/2647) + +### Pull requests merged for 1.6.1.0 + +- Post 1.6.0.0 fixes and prepare 1.6.1.0 bug fix release +([#2647](https://github.com/haskell/haskell-language-server/pull/2647)) by @jneira +- Move hackage back to flake.nix +([#2652](https://github.com/haskell/haskell-language-server/pull/2652)) by @guibou +- Wingman: Fix #1879 +([#2644](https://github.com/haskell/haskell-language-server/pull/2644)) by @MorrowM + + +## 1.6.0.0 + +Time for a new and exciting hls release: + +- It includes *three* brand new plugins: + - [Alternate number literals](https://haskell-language-server.readthedocs.io/en/latest/features.html#convert-numbers-to-alternative-formats) thanks to @drsooch + - [Qualify imported names](https://haskell-language-server.readthedocs.io/en/latest/features.html#qualify-imported-names) thanks to @eddiemundo + - New plugin to support [selection range](https://haskell-language-server.readthedocs.io/en/latest/features.html#selection-range) (aka double click text selection) thanks to @kokobd +- Finally hls [supports *ghc 9.2.1*](https://github.com/haskell/haskell-language-server/issues/2179) + - Including core features and [many plugins](https://haskell-language-server.readthedocs.io/en/latest/supported-versions.html#plugins-support-by-ghc-version) + - Thanks to a great collective effort coordinated by @pepeiborra and with the help of @wz1000, @mpickering and @alanz among others +- Hls now also [supports *ghc 9.0.2*](https://github.com/haskell/haskell-language-server/issues/297) with all plugins but the stylish-haskell formatter + - Including the [wingman plugin](https://github.com/haskell/haskell-language-server/tree/master/plugins/hls-tactics-plugin) thanks to @isovector and @anka-213 +- And many, many fixes and performance improvements, thanks to all contributors! + +### Deprecation notice for 1.6.0 + +- As we noted in the previous release, we have dropped support for ghc versions 8.10.5 and 8.8.3 in *this release* + - We recommend upgrading ghc to the latest minor version: 8.8.4 or 8.10.7 + - You can read more about the ghc deprecation policy and schedule [here](https://haskell-language-server.readthedocs.io/en/latest/supported-versions.html) +- *After this release*: + - [We will remove all project stack.yaml's](https://github.com/haskell/haskell-language-server/issues/2533) but two: one for the latest LTS and another for nightly. Temporarily, we could keep one more stack.yaml when nightly upgrades the ghc version, to help with the transition + - [We will remove the install script](https://github.com/haskell/haskell-language-server/issues/2491) which lives [here](https://github.com/haskell/haskell-language-server/tree/master/install) + - If you want to install hls from source, we recommend using `ghcup`. Download it and run `ghcup compile hls --help` for more details.
+ +### Pull requests merged for 1.6.0 + +- Prepare 1.6.0 release +([#2642](https://github.com/haskell/haskell-language-server/pull/2642)) by @jneira +- Implement stripPrefix via T.stripPrefix +([#2645](https://github.com/haskell/haskell-language-server/pull/2645)) by @Bodigrim +- Change Type Family Export pattern +([#2643](https://github.com/haskell/haskell-language-server/pull/2643)) by @drsooch +- Disable alpine build by default +([#2638](https://github.com/haskell/haskell-language-server/pull/2638)) by @jneira +- Use T.decodeUtf8 + BS.readFile instead of T.readFile +([#2637](https://github.com/haskell/haskell-language-server/pull/2637)) by @Bodigrim +- Add ghc 9.2.1 to gitlab ci +([#2636](https://github.com/haskell/haskell-language-server/pull/2636)) by @jneira +- Specialize ghcide indent style to .hs +([#2631](https://github.com/haskell/haskell-language-server/pull/2631)) by @mrgutkun +- Fix off by one indexing error in openingBacktick +([#2629](https://github.com/haskell/haskell-language-server/pull/2629)) by @pepeiborra +- Drop bytestring-encoding +([#2628](https://github.com/haskell/haskell-language-server/pull/2628)) by @pepeiborra +- fix positionInRange +([#2625](https://github.com/haskell/haskell-language-server/pull/2625)) by @kokobd +- Fix #2612 - hlint plugin - Apply fixities to parsed source before sending to apply-refact +([#2624](https://github.com/haskell/haskell-language-server/pull/2624)) by @eddiemundo +- Flake ghc 92 +([#2621](https://github.com/haskell/haskell-language-server/pull/2621)) by @guibou +- Use ghc+integer-gmp for alpine linux build release +([#2615](https://github.com/haskell/haskell-language-server/pull/2615)) by @jneira +- Use helpers from lsp to do code action prefixing +([#2614](https://github.com/haskell/haskell-language-server/pull/2614)) by @michaelpj +- Wingman: Fix fundeps +([#2611](https://github.com/haskell/haskell-language-server/pull/2611)) by @isovector +- Wingman idioms +([#2607](https://github.com/haskell/haskell-language-server/pull/2607)) by @isovector +- Make work stack-9.2.1.yaml and enable `pedantic` (`-WError`) for cabal +([#2606](https://github.com/haskell/haskell-language-server/pull/2606)) by @jneira +- Improve qualified import plugin readme +([#2605](https://github.com/haskell/haskell-language-server/pull/2605)) by @eddiemundo +- Correct typo in Ide.Arguments:listPluginsParser +([#2604](https://github.com/haskell/haskell-language-server/pull/2604)) by @tombusby +- Rework features documentation +([#2603](https://github.com/haskell/haskell-language-server/pull/2603)) by @michaelpj +- [ghc-9.2] Fix refine-imports plugin +([#2601](https://github.com/haskell/haskell-language-server/pull/2601)) by @mrgutkun +- [ghc-9.2] Fix qualify-imported-names plugin +([#2600](https://github.com/haskell/haskell-language-server/pull/2600)) by @mrgutkun +- Correct issues with pre-commit hook +([#2597](https://github.com/haskell/haskell-language-server/pull/2597)) by @bradrn +- Fix some import module completions being dropped (and fix flaky test too) 
+([#2595](https://github.com/haskell/haskell-language-server/pull/2595)) by @eddiemundo +- Fix module-name plugin on ghc-9.2.1 +([#2594](https://github.com/haskell/haskell-language-server/pull/2594)) by @mrgutkun +- [ghc-9.2] Fix rename plugin +([#2593](https://github.com/haskell/haskell-language-server/pull/2593)) by @pepeiborra +- Fix progress eval test randomly failing +([#2590](https://github.com/haskell/haskell-language-server/pull/2590)) by @eddiemundo +- More work around next ghc-9.2.1 support +([#2587](https://github.com/haskell/haskell-language-server/pull/2587)) by @jneira +- Post ghc-9.2.1 config cleanup +([#2582](https://github.com/haskell/haskell-language-server/pull/2582)) by @jneira +- GHC-9.0 support for hls-tactics-plugin +([#2581](https://github.com/haskell/haskell-language-server/pull/2581)) by @isovector +- Wingman: Fix TODO(sandy) when performing subsequent actions +([#2580](https://github.com/haskell/haskell-language-server/pull/2580)) by @isovector +- Bump Ormolu and Fourmolu to GHC-9.2-compatible versions +([#2579](https://github.com/haskell/haskell-language-server/pull/2579)) by @georgefst +- test: Add regression tests for #2403 +([#2576](https://github.com/haskell/haskell-language-server/pull/2576)) by @guibou +- Fix crash on completion with type family +([#2569](https://github.com/haskell/haskell-language-server/pull/2569)) by @guibou +- Add support for ghc 9.0.2 +([#2567](https://github.com/haskell/haskell-language-server/pull/2567)) by @jneira +- support selection range lsp feature +([#2565](https://github.com/haskell/haskell-language-server/pull/2565)) by @kokobd +- Reuse build setup using a dedicated github action +([#2563](https://github.com/haskell/haskell-language-server/pull/2563)) by @jneira +- Fix ci update hackage index +([#2562](https://github.com/haskell/haskell-language-server/pull/2562)) by @jneira +- Enable `aarch64-darwin` in `flake.nix` +([#2561](https://github.com/haskell/haskell-language-server/pull/2561)) by @Gabriel439 +- Fix freeze cache key correctly +([#2560](https://github.com/haskell/haskell-language-server/pull/2560)) by @jneira +- Fix nix flake by explicit version for `lsp-xxx` packages +([#2557](https://github.com/haskell/haskell-language-server/pull/2557)) by @guibou +- Apply missing update for stack-9.0.1.yaml +([#2556](https://github.com/haskell/haskell-language-server/pull/2556)) by @Ailrun +- doc: Enable relative links with anchors +([#2555](https://github.com/haskell/haskell-language-server/pull/2555)) by @sir4ur0n +- Fix space leak where EPS retained HPTs from old HscEnv +([#2553](https://github.com/haskell/haskell-language-server/pull/2553)) by @mpickering +- Remove cabal.project.freeze files in workflows after computing the cache key +([#2552](https://github.com/haskell/haskell-language-server/pull/2552)) by @jneira +- Add support for brittany (needs aeson-2) and floskell with ghc-9.0.1 +([#2551](https://github.com/haskell/haskell-language-server/pull/2551)) by @jneira +- Restore TemplateHaskell pragma in hls-graph 
+([#2549](https://github.com/haskell/haskell-language-server/pull/2549)) by @pepeiborra +- Add space after comma when exporting a name +([#2547](https://github.com/haskell/haskell-language-server/pull/2547)) by @sergv +- Set an unique name for Hlint job +([#2544](https://github.com/haskell/haskell-language-server/pull/2544)) by @jneira +- Fix ghcide handling project root +([#2543](https://github.com/haskell/haskell-language-server/pull/2543)) by @drsooch +- CI: linting +([#2538](https://github.com/haskell/haskell-language-server/pull/2538)) by @Anton-Latukha +- CI: add hlint workflow +([#2537](https://github.com/haskell/haskell-language-server/pull/2537)) by @Anton-Latukha +- CI: caching: closer match work/CI guarantees +([#2536](https://github.com/haskell/haskell-language-server/pull/2536)) by @Anton-Latukha +- CI: caching: keep-going +([#2535](https://github.com/haskell/haskell-language-server/pull/2535)) by @Anton-Latukha +- CI: {caching,test,bench}: mk cache aware of package dep versions +([#2532](https://github.com/haskell/haskell-language-server/pull/2532)) by @Anton-Latukha +- Test hls-pragmas-plugin in ci +([#2530](https://github.com/haskell/haskell-language-server/pull/2530)) by @jneira +- Enable manual run for caching, hackage and build workflows +([#2528](https://github.com/haskell/haskell-language-server/pull/2528)) by @jneira +- Fix random SQLite busy database is locked errors +([#2527](https://github.com/haskell/haskell-language-server/pull/2527)) by @eddiemundo +- Fix some hlint warnings +([#2523](https://github.com/haskell/haskell-language-server/pull/2523)) by @jhrcek +- Improve action for fixing import typo +([#2522](https://github.com/haskell/haskell-language-server/pull/2522)) by @jhrcek +- CI: caching: fix early termination expression check & cabal.project replacement +([#2520](https://github.com/haskell/haskell-language-server/pull/2520)) by @Anton-Latukha +- Solve crash with module name plugin under certain circumstances +([#2518](https://github.com/haskell/haskell-language-server/pull/2518)) by @ttylec +- Rework troubleshooting section, add basic explainer +([#2517](https://github.com/haskell/haskell-language-server/pull/2517)) by @michaelpj +- Refactor collectLiterals in AlternateNumberFormat. 
+([#2516](https://github.com/haskell/haskell-language-server/pull/2516)) by @drsooch +- cabal-*.project: index-state +1s +([#2515](https://github.com/haskell/haskell-language-server/pull/2515)) by @Anton-Latukha +- Bump up retrie +([#2513](https://github.com/haskell/haskell-language-server/pull/2513)) by @jneira +- Sort out some compatibility issues +([#2511](https://github.com/haskell/haskell-language-server/pull/2511)) by @alanz +- Fix ci cache for windows +([#2507](https://github.com/haskell/haskell-language-server/pull/2507)) by @jneira +- CI: caching: add early termination & run check on schedule +([#2506](https://github.com/haskell/haskell-language-server/pull/2506)) by @Anton-Latukha +- Fix tracing of recordDirtyKeys +([#2505](https://github.com/haskell/haskell-language-server/pull/2505)) by @pepeiborra +- Unhandled exceptions fixed +([#2504](https://github.com/haskell/haskell-language-server/pull/2504)) by @pepeiborra +- Build with GHC 9.2 +([#2503](https://github.com/haskell/haskell-language-server/pull/2503)) by @pepeiborra +- Ignore stack.yamls in test cabal workflow +([#2502](https://github.com/haskell/haskell-language-server/pull/2502)) by @jneira +- small stack yaml updates to ease maintenance +([#2501](https://github.com/haskell/haskell-language-server/pull/2501)) by @simonmichael +- Automatically read in the doc version from the cabal file +([#2500](https://github.com/haskell/haskell-language-server/pull/2500)) by @michaelpj +- Disable alternate numbers format plugin temporary +([#2498](https://github.com/haskell/haskell-language-server/pull/2498)) by @jneira +- Revert "Send unhandled exceptions to the user (#2484)" +([#2497](https://github.com/haskell/haskell-language-server/pull/2497)) by @jneira +- Upgrade to new version of lsp libraries +([#2494](https://github.com/haskell/haskell-language-server/pull/2494)) by @michaelpj +- Fail if main or pre jobs are cancelled +([#2493](https://github.com/haskell/haskell-language-server/pull/2493)) by @jneira +- stack-9.0.1: update/cleanup +([#2489](https://github.com/haskell/haskell-language-server/pull/2489)) by @simonmichael +- Correctly handle LSP shutdown/exit +([#2486](https://github.com/haskell/haskell-language-server/pull/2486)) by @pepeiborra +- Fix hls-graph ide build with embed-files +([#2485](https://github.com/haskell/haskell-language-server/pull/2485)) by @pepeiborra +- Send unhandled exceptions to the user +([#2484](https://github.com/haskell/haskell-language-server/pull/2484)) by @pepeiborra +- Fix redundant import actions for names starting with _ +([#2483](https://github.com/haskell/haskell-language-server/pull/2483)) by @Ailrun +- Update flake to use fourmolu plugin in GHC 9 +([#2482](https://github.com/haskell/haskell-language-server/pull/2482)) by @Ailrun +- Delete some dead or deprecated settings +([#2481](https://github.com/haskell/haskell-language-server/pull/2481)) by @michaelpj +- Class plugin bump up +([#2475](https://github.com/haskell/haskell-language-server/pull/2475)) by @Ailrun +- Fix 
some pragma completion cases +([#2474](https://github.com/haskell/haskell-language-server/pull/2474)) by @Ailrun +- Minor org to contribution doc +([#2472](https://github.com/haskell/haskell-language-server/pull/2472)) by @Anton-Latukha +- Warn if TH and Mac and static binary +([#2470](https://github.com/haskell/haskell-language-server/pull/2470)) by @pepeiborra +- Lock-less debouncer (minimal change) +([#2469](https://github.com/haskell/haskell-language-server/pull/2469)) by @pepeiborra +- Handle re-exported modules when constructing ExportsMap +([#2468](https://github.com/haskell/haskell-language-server/pull/2468)) by @jhrcek +- Caching process update +([#2467](https://github.com/haskell/haskell-language-server/pull/2467)) by @Anton-Latukha +- #2418 Also use .hlint.yaml fixity rules when HLINT_ON_LIB_GHC not defined +([#2464](https://github.com/haskell/haskell-language-server/pull/2464)) by @eddiemundo +- Build linux binaries in alpine container +([#2463](https://github.com/haskell/haskell-language-server/pull/2463)) by @pepeiborra +- Lockless iorefs +([#2460](https://github.com/haskell/haskell-language-server/pull/2460)) by @pepeiborra +- Join nested IO actions of the form `IO (IO ())` +([#2459](https://github.com/haskell/haskell-language-server/pull/2459)) by @fendor +- #600 Code action to ignore hlint hints module wide +([#2458](https://github.com/haskell/haskell-language-server/pull/2458)) by @eddiemundo +- lock-less progress-reporting +([#2453](https://github.com/haskell/haskell-language-server/pull/2453)) by @pepeiborra +- Fix the nix build +([#2452](https://github.com/haskell/haskell-language-server/pull/2452)) by @michaelpj +- Fix rerun log cache handling +([#2450](https://github.com/haskell/haskell-language-server/pull/2450)) by @jneira +- Make heavy use of common sections +([#2447](https://github.com/haskell/haskell-language-server/pull/2447)) by @fendor +- CI: organizing bootstraping +([#2446](https://github.com/haskell/haskell-language-server/pull/2446)) by @Anton-Latukha +- Describe hls installed binaries +([#2445](https://github.com/haskell/haskell-language-server/pull/2445)) by @jneira +- Remove support for ghc 8.8.3/8.10.5 +([#2444](https://github.com/haskell/haskell-language-server/pull/2444)) by @jneira +- CI: cabal 3.6 use & clean-up 8.10.5 builds +([#2443](https://github.com/haskell/haskell-language-server/pull/2443)) by @Anton-Latukha +- Lockless FileExistsMap and position mapping +([#2442](https://github.com/haskell/haskell-language-server/pull/2442)) by @pepeiborra +- Fix regression in Eval plugin and add test +([#2441](https://github.com/haskell/haskell-language-server/pull/2441)) by @pepeiborra +- Makes local record field completion respects the fields sharing one single type signature +([#2439](https://github.com/haskell/haskell-language-server/pull/2439)) by @konn +- Enable top-level hover signature test +([#2435](https://github.com/haskell/haskell-language-server/pull/2435)) by @jneira +- Lockless diagnostics 
+([#2434](https://github.com/haskell/haskell-language-server/pull/2434)) by @pepeiborra +- Move Common Plugin Functions into PluginUtils +([#2433](https://github.com/haskell/haskell-language-server/pull/2433)) by @drsooch +- lock-less Values state +([#2429](https://github.com/haskell/haskell-language-server/pull/2429)) by @pepeiborra +- Extract the pre-decl pragma parsing to its own module +([#2428](https://github.com/haskell/haskell-language-server/pull/2428)) by @eddiemundo +- CI: cache-deps: rm pull request hook +([#2426](https://github.com/haskell/haskell-language-server/pull/2426)) by @Anton-Latukha +- Add known broken tests for import placement +([#2425](https://github.com/haskell/haskell-language-server/pull/2425)) by @nini-faroux +- Use stm-stats to reduce contention in hls-graph +([#2421](https://github.com/haskell/haskell-language-server/pull/2421)) by @pepeiborra +- Build on FreeBSD12 only +([#2420](https://github.com/haskell/haskell-language-server/pull/2420)) by @hasufell +- Centralized caching workflow +([#2419](https://github.com/haskell/haskell-language-server/pull/2419)) by @Anton-Latukha +- Configuration docs: Typo +([#2417](https://github.com/haskell/haskell-language-server/pull/2417)) by @andys8 +- Use dependent-sum from hackage +([#2412](https://github.com/haskell/haskell-language-server/pull/2412)) by @jneira +- Lock-less hls-graph +([#2411](https://github.com/haskell/haskell-language-server/pull/2411)) by @pepeiborra +- hls-graph.cabal: link to actual readme +([#2404](https://github.com/haskell/haskell-language-server/pull/2404)) by @juhp +- Disable check project in the ghcide test suite +([#2397](https://github.com/haskell/haskell-language-server/pull/2397)) by @pepeiborra +- Add modern issue templates +([#2394](https://github.com/haskell/haskell-language-server/pull/2394)) by @jneira +- Fix extension pragma inserted below ghc options pragma #2364 +([#2392](https://github.com/haskell/haskell-language-server/pull/2392)) by @eddiemundo +- Avoid unnecessary Target canonicalisation in Session setup +([#2359](https://github.com/haskell/haskell-language-server/pull/2359)) by @fendor +- Decrease contention in Progress reporting +([#2357](https://github.com/haskell/haskell-language-server/pull/2357)) by @pepeiborra +- Qualify imported names plugin +([#2355](https://github.com/haskell/haskell-language-server/pull/2355)) by @eddiemundo +- HLS Plugin to provide Alternate Literal Formats. 
+([#2350](https://github.com/haskell/haskell-language-server/pull/2350)) by @drsooch +- Log live_bytes and heap_size as reported by GHC.Stats +([#1508](https://github.com/haskell/haskell-language-server/pull/1508)) by @mpickering + +## 1.5.1 + +This is a bug fix release for two regressions found after releasing 1.5.0: + +- [#2365](https://github.com/haskell/haskell-language-server/issues/2365): hs-boot files not handled correctly, discovered in the ghc codebase and fixed with [#2377](https://github.com/haskell/haskell-language-server/pull/2377) +- [#2379](https://github.com/haskell/haskell-language-server/issues/2379): `tried to look at linkable for GetModIfaceWithoutLinkable for NormalizedFilePath...` error when handling Template Haskell, fixed with [#2380](https://github.com/haskell/haskell-language-server/pull/2380) + +Both quick fixes are thanks to @pepeiborra + +It also fixes some long-standing bugs in the hlint plugin caused by comments being ignored (see [#2366](https://github.com/haskell/haskell-language-server/pull/2366)) + +### Pull requests merged for 1.5.1 + +- Fix hls-graph build with embed-files flag +([#2395](https://github.com/haskell/haskell-language-server/pull/2395)) by @pepeiborra +- Prepare 1.5.1 +([#2393](https://github.com/haskell/haskell-language-server/pull/2393)) by @jneira +- Revert "Update to latest prettyprinter API (#2352)" +([#2389](https://github.com/haskell/haskell-language-server/pull/2389)) by @pepeiborra +- Add extra logging around build queue +([#2388](https://github.com/haskell/haskell-language-server/pull/2388)) by @pepeiborra +- docs: Fix typo +([#2386](https://github.com/haskell/haskell-language-server/pull/2386)) by @nh2 +- Update release instructions +([#2384](https://github.com/haskell/haskell-language-server/pull/2384)) by @jneira +- ghcide: Update dependency on `hls-plugin-api` +([#2382](https://github.com/haskell/haskell-language-server/pull/2382)) by @hololeap +- Fix regression in GhcSessionDeps +([#2380](https://github.com/haskell/haskell-language-server/pull/2380)) by @pepeiborra +- Boot files +([#2377](https://github.com/haskell/haskell-language-server/pull/2377)) by @pepeiborra +- hls-module-name-plugin: Add missing golden file to hackage tarball +([#2374](https://github.com/haskell/haskell-language-server/pull/2374)) by @maralorn +- hls-explicit-imports-plugin: Add golden files to hackage tarball +([#2373](https://github.com/haskell/haskell-language-server/pull/2373)) by @maralorn +- Update ghcide dependency for various plugins +([#2368](https://github.com/haskell/haskell-language-server/pull/2368)) by @hololeap +- Fix several hlint issues related with the use of parsed module without comments +([#2366](https://github.com/haskell/haskell-language-server/pull/2366)) by @jneira + +## 1.5.0 + +Time for another hls release: + +- @pepeiborra has done epic work to improve performance, redefining some of the core pieces of HLS + - You can take an overall look at the improvements in [these slides](https://drive.google.com/file/d/16FpmiHXX_rd2gAf5XVgWAIr4kg-AkUqX/view) +- We have fourmolu support for 
ghc-9.0.1 thanks to @georgefst +- We have improvements to import suggestions thanks to @yoshitsugu and @alexnaspo +- Completions have also been improved in general thanks to @pepeiborra +- There have been lots of documentation updates by several contributors, thanks to all of you as well +- In this release we still don't have full support for all plugins on ghc-9.0.1 + - Missing plugins for ghc-9.0.1 are: hls-class-plugin, hls-tactics-plugin (wingman), hls-brittany-plugin and hls-stylish-haskell-plugin + +### Deprecation notice for 1.5.0 + +- As we noted in the previous release, we have dropped support for ghc versions 8.6.4, 8.10.2, 8.10.3, 8.10.4 in *this release* +- We will drop support for ghc versions 8.10.5 and 8.8.3 *after this release* +- The advice is to upgrade ghc to the latest minor version: 8.6.5, 8.8.4 or 8.10.7 +- You can read more about the ghc deprecation policy and schedule [here](https://haskell-language-server.readthedocs.io/en/latest/supported-versions.html) + +### Pull requests merged for 1.5.0 + +- Prepare 1.5.0 +([#2361](https://github.com/haskell/haskell-language-server/pull/2361)) by @jneira +- More completion fixes +([#2354](https://github.com/haskell/haskell-language-server/pull/2354)) by @pepeiborra +- Update to latest prettyprinter API +([#2352](https://github.com/haskell/haskell-language-server/pull/2352)) by @fendor +- Use hackage version of czipwith +([#2346](https://github.com/haskell/haskell-language-server/pull/2346)) by @jneira +- Show build graph statistics in ghcide-bench +([#2343](https://github.com/haskell/haskell-language-server/pull/2343)) by @pepeiborra +- contributing: add implicit-hie gen-hie > hie.yaml note +([#2341](https://github.com/haskell/haskell-language-server/pull/2341)) by @Anton-Latukha +- add dependabot: add Actions CI merge requests automation +([#2339](https://github.com/haskell/haskell-language-server/pull/2339)) by @Anton-Latukha +- Skip parsing without haddock for above GHC9.0 +([#2338](https://github.com/haskell/haskell-language-server/pull/2338)) by @yoshitsugu +- Give unique names to post-jobs +([#2337](https://github.com/haskell/haskell-language-server/pull/2337)) by @jneira +- Cancel prev runs for bench and nix +([#2335](https://github.com/haskell/haskell-language-server/pull/2335)) by @jneira +- Trace diagnostics +([#2333](https://github.com/haskell/haskell-language-server/pull/2333)) by @pepeiborra +- Include sortText in completions and improve suggestions +([#2332](https://github.com/haskell/haskell-language-server/pull/2332)) by @pepeiborra +- Not suggest exported imports +([#2329](https://github.com/haskell/haskell-language-server/pull/2329)) by @yoshitsugu +- Update troubleshooting section +([#2326](https://github.com/haskell/haskell-language-server/pull/2326)) by @jneira +- Remove automatic comment to [skip circleci] +([#2325](https://github.com/haskell/haskell-language-server/pull/2325)) by @jneira +- Add README.md in install/ subproject +([#2324](https://github.com/haskell/haskell-language-server/pull/2324)) by @sir4ur0n +- Improve the performance of GetModIfaceFromDisk in large repos and delete GetDependencies
+([#2323](https://github.com/haskell/haskell-language-server/pull/2323)) by @pepeiborra +- Add support for install hls from hackage using ghc 9.0.1 +([#2322](https://github.com/haskell/haskell-language-server/pull/2322)) by @jneira +- Rename hlint test data files and add regression tests +([#2321](https://github.com/haskell/haskell-language-server/pull/2321)) by @jneira +- Suggest hiding imports when local definition exists +([#2320](https://github.com/haskell/haskell-language-server/pull/2320)) by @yoshitsugu +- Improve trace readability +([#2319](https://github.com/haskell/haskell-language-server/pull/2319)) by @pepeiborra +- Sir4ur0n/doc/cleanup hie +([#2311](https://github.com/haskell/haskell-language-server/pull/2311)) by @sir4ur0n +- Add option to effectively cancel prev runs +([#2310](https://github.com/haskell/haskell-language-server/pull/2310)) by @jneira +- Separate features from demos +([#2307](https://github.com/haskell/haskell-language-server/pull/2307)) by @jneira +- Prevent Tactics hover provider from blocking at startup +([#2306](https://github.com/haskell/haskell-language-server/pull/2306)) by @pepeiborra +- Fix defaultIdeOptions to use the initial config settings +([#2302](https://github.com/haskell/haskell-language-server/pull/2302)) by @pepeiborra +- Use new queue rules for mergify bot +([#2301](https://github.com/haskell/haskell-language-server/pull/2301)) by @jneira +- Fix reverse dep. tracking for alwaysRerun rules +([#2298](https://github.com/haskell/haskell-language-server/pull/2298)) by @pepeiborra +- Reorganize github workflows and use specific label [skip circleci] +([#2297](https://github.com/haskell/haskell-language-server/pull/2297)) by @jneira +- Enable func-test suite for windows +([#2296](https://github.com/haskell/haskell-language-server/pull/2296)) by @jneira +- Generate linkables in the Eval plugin +([#2295](https://github.com/haskell/haskell-language-server/pull/2295)) by @pepeiborra +- [hls-graph] clean up databaseDirtySet +([#2294](https://github.com/haskell/haskell-language-server/pull/2294)) by @pepeiborra +- Update link to supported platforms by ghcup +([#2293](https://github.com/haskell/haskell-language-server/pull/2293)) by @chshersh +- Make circleci honour [skip ci] wherever is placed in the pr info (title, description) +([#2289](https://github.com/haskell/haskell-language-server/pull/2289)) by @jneira +- Note in the install script that listed ghcs are the supported ones +([#2286](https://github.com/haskell/haskell-language-server/pull/2286)) by @jneira +- Move hlint tests to its own package (and other clean ups) +([#2284](https://github.com/haskell/haskell-language-server/pull/2284)) by @jneira +- Trace rebuilds +([#2283](https://github.com/haskell/haskell-language-server/pull/2283)) by @pepeiborra +- Fix excessive interface recompilation caused by the Tactics plugin +([#2282](https://github.com/haskell/haskell-language-server/pull/2282)) by @pepeiborra +- Preserve dirty set and add dirtiness assertion 
+([#2279](https://github.com/haskell/haskell-language-server/pull/2279)) by @pepeiborra +- Ignore null WatchedFile events +([#2278](https://github.com/haskell/haskell-language-server/pull/2278)) by @pepeiborra +- Trace log events and fix ghcide logger +([#2277](https://github.com/haskell/haskell-language-server/pull/2277)) by @pepeiborra +- Point to GitHub from Contributing.md +([#2275](https://github.com/haskell/haskell-language-server/pull/2275)) by @georgefst +- installation.md: add Fedora copr repo +([#2274](https://github.com/haskell/haskell-language-server/pull/2274)) by @juhp +- avoid double rebuilds for FOIs +([#2266](https://github.com/haskell/haskell-language-server/pull/2266)) by @pepeiborra +- Update installation on ArchLinux - new package +([#2265](https://github.com/haskell/haskell-language-server/pull/2265)) by @marcin-rzeznicki +- Garbage collection of dirty keys +([#2263](https://github.com/haskell/haskell-language-server/pull/2263)) by @pepeiborra +- Add lsp-mode links +([#2260](https://github.com/haskell/haskell-language-server/pull/2260)) by @jneira +- Add more features and demos in docs +([#2257](https://github.com/haskell/haskell-language-server/pull/2257)) by @jneira +- Add nix installation section +([#2256](https://github.com/haskell/haskell-language-server/pull/2256)) by @jneira +- Bump Fourmolu to 0.4 +([#2254](https://github.com/haskell/haskell-language-server/pull/2254)) by @georgefst +- Remove custom version of operational +([#2249](https://github.com/haskell/haskell-language-server/pull/2249)) by @jneira +- Generate custom source tarball +([#2248](https://github.com/haskell/haskell-language-server/pull/2248)) by @jneira +- Enable the ghcide test plugin in HLS test suites +([#2243](https://github.com/haskell/haskell-language-server/pull/2243)) by @pepeiborra +- Partial sort of fuzzy filtering results +([#2240](https://github.com/haskell/haskell-language-server/pull/2240)) by @pepeiborra +- Fix build with fbghc +([#2234](https://github.com/haskell/haskell-language-server/pull/2234)) by @pepeiborra +- Tweaks to GHC support docs +([#2232](https://github.com/haskell/haskell-language-server/pull/2232)) by @michaelpj +- Add ghc deprecation policy to documentation +([#2231](https://github.com/haskell/haskell-language-server/pull/2231)) by @jneira +- Add ghcup compile option +([#2230](https://github.com/haskell/haskell-language-server/pull/2230)) by @jneira +- Parallel fuzzy filtering +([#2225](https://github.com/haskell/haskell-language-server/pull/2225)) by @pepeiborra +- Revert "Inline Text.Fuzzy to add INLINABLE pragmas" +([#2223](https://github.com/haskell/haskell-language-server/pull/2223)) by @pepeiborra +- feat(flake): expose hie-bios +([#2221](https://github.com/haskell/haskell-language-server/pull/2221)) by @teto +- flake: remove the 'follows' directive +([#2218](https://github.com/haskell/haskell-language-server/pull/2218)) by @teto +- Return completions lazily for massive savings 
+([#2217](https://github.com/haskell/haskell-language-server/pull/2217)) by @pepeiborra
+- Inline Text.Fuzzy to add INLINABLE pragmas
+([#2215](https://github.com/haskell/haskell-language-server/pull/2215)) by @pepeiborra
+- Add chat on irc badge
+([#2214](https://github.com/haskell/haskell-language-server/pull/2214)) by @jneira
+- ghcide: Add flags to toggle building each executable
+([#2212](https://github.com/haskell/haskell-language-server/pull/2212)) by @hololeap
+- Add matrix haskell-tooling channel
+([#2210](https://github.com/haskell/haskell-language-server/pull/2210)) by @jneira
+- Relax upper bounds over ormolu and stylish-haskell
+([#2207](https://github.com/haskell/haskell-language-server/pull/2207)) by @jneira
+- Add missing config options in documentation
+([#2203](https://github.com/haskell/haskell-language-server/pull/2203)) by @jneira
+- Add gitlab CI
+([#2200](https://github.com/haskell/haskell-language-server/pull/2200)) by @hasufell
+- Apply workaround for 8.8.4 and windows to enable it in ci
+([#2199](https://github.com/haskell/haskell-language-server/pull/2199)) by @jneira
+- Drop ghc support for 8.6.4, 8.10.2, 8.10.3, 8.10.4
+([#2197](https://github.com/haskell/haskell-language-server/pull/2197)) by @jneira
+- Consider all root paths when suggesting module name change.
+([#2195](https://github.com/haskell/haskell-language-server/pull/2195)) by @cdsmith
+- enable completions of local imports
+([#2190](https://github.com/haskell/haskell-language-server/pull/2190)) by @alexnaspo
+- Drop ghc-api-compat from dependency closure
+([#2128](https://github.com/haskell/haskell-language-server/pull/2128)) by @fendor
+- Reimplement shake (continued)
+([#2060](https://github.com/haskell/haskell-language-server/pull/2060)) by @pepeiborra
+
+## 1.4.0
+
+After a month of vacation, a new HLS release has arrived:
+
+- Support for ghc 8.10.6 and 8.10.7
+- The ormolu formatter plugin works with ghc 9.0.1
+- *Call hierarchy plugin has been improved* thanks to @July541:
+  - Add call from type signature
+  - Add call from a function pattern
+  - Go to typeclass instance directly
+- As usual, @isovector has been busy improving the wingman plugin:
+  - New "intro and destruct" code action
+  - Streaming tactic solutions: when Wingman times out, it can still pick the best solution it found
+  - Let-bindings in metatactics: lets you bind variables in tactic metaprograms
+  - Several bug fixes
+- We have new docs thanks to @michaelpj
+- Now you can ask the executable for included plugins with: `haskell-language-server --list-plugins`
+- There are several bug fixes and features you can find in the merged pull requests list
+
+### DEPRECATION NOTICE
+
+- *After* this release we will drop support for ghc versions 8.6.4, 8.10.3 and 8.10.4
+  - The advice is to upgrade ghc to the latest minor version: 8.6.5 or 8.10.7
+  - Take a look at [this issue](https://github.com/haskell/haskell-language-server/issues/2168) for more details
+
+### Pull requests merged for 1.4.0
+
+- Prepare 1.4.0
+([#2182](https://github.com/haskell/haskell-language-server/pull/2182)) by @jneira
+- Update flake to fix nix builds
+([#2188](https://github.com/haskell/haskell-language-server/pull/2188)) by @jneira +- Completions for project identifiers +([#2187](https://github.com/haskell/haskell-language-server/pull/2187)) by @pepeiborra +- Wingman: Don't clobber where clauses +([#2184](https://github.com/haskell/haskell-language-server/pull/2184)) by @isovector +- Add rerun workflow +([#2181](https://github.com/haskell/haskell-language-server/pull/2181)) by @jneira +- Bump up shake-bench version +([#2178](https://github.com/haskell/haskell-language-server/pull/2178)) by @jneira +- Fix hackage release +([#2177](https://github.com/haskell/haskell-language-server/pull/2177)) by @jneira +- Use maxBound of uinteger not Int. +([#2169](https://github.com/haskell/haskell-language-server/pull/2169)) by @pranaysashank +- enable the PR gitpod badge and drop the label +([#2167](https://github.com/haskell/haskell-language-server/pull/2167)) by @pepeiborra +- Plugin in config files +([#2166](https://github.com/haskell/haskell-language-server/pull/2166)) by @jneira +- Complete contributing guide +([#2165](https://github.com/haskell/haskell-language-server/pull/2165)) by @jneira +- Wingman: Add "New Unification Variable" helper +([#2164](https://github.com/haskell/haskell-language-server/pull/2164)) by @isovector +- Semiautomatic hackage releases +([#2163](https://github.com/haskell/haskell-language-server/pull/2163)) by @jneira +- Improve incoming call for typeclass and type family instance +([#2162](https://github.com/haskell/haskell-language-server/pull/2162)) by @July541 +- Add a Gitpod descriptor +([#2161](https://github.com/haskell/haskell-language-server/pull/2161)) by @pepeiborra +- Wingman: Let-bindings in metatactics +([#2160](https://github.com/haskell/haskell-language-server/pull/2160)) by @isovector +- Update nix flake +([#2159](https://github.com/haskell/haskell-language-server/pull/2159)) by @lf- +- Add ghc-8.10.7 to release build +([#2158](https://github.com/haskell/haskell-language-server/pull/2158)) by @jneira +- Reduce duplication in pragma tests +([#2157](https://github.com/haskell/haskell-language-server/pull/2157)) by @nini-faroux +- Remove ghc-api source snapshot +([#2156](https://github.com/haskell/haskell-language-server/pull/2156)) by @pepeiborra +- Create a citation +([#2155](https://github.com/haskell/haskell-language-server/pull/2155)) by @ndmitchell +- Disable window job for ghc-8.10.2 +([#2154](https://github.com/haskell/haskell-language-server/pull/2154)) by @jneira +- Auto complete definitions within imports +([#2152](https://github.com/haskell/haskell-language-server/pull/2152)) by @alexnaspo +- Filter code actions based on prefix, not equality +([#2146](https://github.com/haskell/haskell-language-server/pull/2146)) by @michaelpj +- perform a GC before find resolution +([#2144](https://github.com/haskell/haskell-language-server/pull/2144)) by @pepeiborra +- case sensitive language pragmas fix 
+([#2142](https://github.com/haskell/haskell-language-server/pull/2142)) by @alexnaspo +- Add ghc-8.10.7 support +([#2141](https://github.com/haskell/haskell-language-server/pull/2141)) by @jneira +- List all available plugins +([#2139](https://github.com/haskell/haskell-language-server/pull/2139)) by @July541 +- update LTS for GHC 8.10.6 +([#2138](https://github.com/haskell/haskell-language-server/pull/2138)) by @peterbecich +- fix GitHub Actions badges +([#2135](https://github.com/haskell/haskell-language-server/pull/2135)) by @peterbecich +- Move pragmas completion to pragmas plugin +([#2134](https://github.com/haskell/haskell-language-server/pull/2134)) by @alexnaspo +- Update ghc-9.0.1 support +([#2131](https://github.com/haskell/haskell-language-server/pull/2131)) by @jneira +- Support call hierarchy on pattern matching +([#2129](https://github.com/haskell/haskell-language-server/pull/2129)) by @July541 +- GHCIDE_BUILD_PROFILING env var +([#2125](https://github.com/haskell/haskell-language-server/pull/2125)) by @pepeiborra +- [ghcide] support -d cli switch +([#2124](https://github.com/haskell/haskell-language-server/pull/2124)) by @pepeiborra +- don't crash when an unused operator import ends in `.` +([#2123](https://github.com/haskell/haskell-language-server/pull/2123)) by @tscholak +- [benchmarks] Fix edit and "after edit" experiments +([#2122](https://github.com/haskell/haskell-language-server/pull/2122)) by @pepeiborra +- Add fix for correct placement of import (#2100) +([#2116](https://github.com/haskell/haskell-language-server/pull/2116)) by @nini-faroux +- Support for ghc-8.10.6 +([#2109](https://github.com/haskell/haskell-language-server/pull/2109)) by @jneira +- New rename plugin implementation +([#2108](https://github.com/haskell/haskell-language-server/pull/2108)) by @OliverMadine +- [ghcide-bench] Support extra args in examples +([#2107](https://github.com/haskell/haskell-language-server/pull/2107)) by @pepeiborra +- Fix filepath identity in cradle dependencies when using reactive change tracking +([#2106](https://github.com/haskell/haskell-language-server/pull/2106)) by @pepeiborra +- [ghcide-bench] preserve threading details in eventlogs +([#2105](https://github.com/haskell/haskell-language-server/pull/2105)) by @pepeiborra +- [ghcide-bench] fix edit experiment +([#2104](https://github.com/haskell/haskell-language-server/pull/2104)) by @pepeiborra +([#2102](https://github.com/haskell/haskell-language-server/pull/2102)) by @isovector +- reduce allow-newer entries for shake-bench +([#2101](https://github.com/haskell/haskell-language-server/pull/2101)) by @pepeiborra +- Wingman: Don't count it as using a term if you only destruct it +([#2099](https://github.com/haskell/haskell-language-server/pull/2099)) by @isovector +- Clean cabal project +([#2097](https://github.com/haskell/haskell-language-server/pull/2097)) by @jneira +- Wingman: New AbstractLSP interface +([#2094](https://github.com/haskell/haskell-language-server/pull/2094)) by @isovector +- Add 
badge with github release
+([#2093](https://github.com/haskell/haskell-language-server/pull/2093)) by @jneira
+- Add a bit more prose and some links to the README
+([#2090](https://github.com/haskell/haskell-language-server/pull/2090)) by @michaelpj
+- Enable tests for ormolu plugin
+([#2086](https://github.com/haskell/haskell-language-server/pull/2086)) by @felixonmars
+- Allow ormolu 0.2 and fix compatibility with GHC 9
+([#2084](https://github.com/haskell/haskell-language-server/pull/2084)) by @felixonmars
+- Add initial sphinx doc site for RTD
+([#2083](https://github.com/haskell/haskell-language-server/pull/2083)) by @michaelpj
+- Amend fix for correct placement of file header pragmas (#1958)
+([#2078](https://github.com/haskell/haskell-language-server/pull/2078)) by @nini-faroux
+- Wingman: "Intro and destruct" code action
+([#2077](https://github.com/haskell/haskell-language-server/pull/2077)) by @isovector
+- Support call hierarchy on type signature & add plugin to generic config & docs
+([#2072](https://github.com/haskell/haskell-language-server/pull/2072)) by @July541
+- Update nix flake
+([#2065](https://github.com/haskell/haskell-language-server/pull/2065)) by @berberman
+- Include sponsorship section
+([#2063](https://github.com/haskell/haskell-language-server/pull/2063)) by @jneira
+- Add more communication channels
+([#2062](https://github.com/haskell/haskell-language-server/pull/2062)) by @jneira
+- Don't suggest disabling type errors
+([#2061](https://github.com/haskell/haskell-language-server/pull/2061)) by @anka-213
+- Build with lsp 1.2.0.1
+([#2059](https://github.com/haskell/haskell-language-server/pull/2059)) by @pepeiborra
+- Remove HIE_CACHE from circleci cache key
+([#2050](https://github.com/haskell/haskell-language-server/pull/2050)) by @jneira
+- [#1958] Fix placement of language pragmas
+([#2043](https://github.com/haskell/haskell-language-server/pull/2043)) by @nini-faroux
+- [#2005] Fix Formatting When Brittany Returns Warnings
+([#2036](https://github.com/haskell/haskell-language-server/pull/2036)) by @prikhi
+
+## 1.3.0
+
+The July 2021 release of HLS has arrived! This release includes binaries for GHC 9.0.1
+and some new interesting features. Here is a brief summary of the changes:
+
+- Binaries for GHC 9.0.1 are added by @anka-213.
+- Call hierarchy plugin is added, contributed by @July541.
+  ![hierarchy](https://user-images.githubusercontent.com/12473268/127550041-094151a6-be7b-484a-bb82-c61f326ca503.gif)
+- Now completions work with definitions from non-imported modules, thanks to @pepeiborra.
+  ![completion](https://user-images.githubusercontent.com/12473268/127543694-718ae043-38f2-4fb0-be71-317f5f93b443.gif)
+- Eval plugin:
+  - The plugin supports GHC 9.0.1, thanks to @berberman.
+  - `:info` command is added by @akrmn.
+  - The plugin uses the same default language extensions as GHCi, thanks to @fmehta's patch.
+- Wingman, with most changes owing to @isovector:
+  - Wingman no longer changes the fixity of function definitions.
+  - Wingman now gives unique names to the holes it generates.
+  - Wingman's ability to reason about polymorphic and GADT types is significantly improved.
+ - Wingman no longer suggests homomorphic destructs when the codomain is larger than the domain. + - "Complete case constructors" action supports empty lambda cases. + - Wingman now gives a warning if it ran out of gas during "attempt to fill hole". + - Metaprogramming for Wingman has been improved with symbolic-name support and the `pointwise` combinator. + - An option to enable/disable Wingman's proof state styling is added. + - Hole fit suggestions are now disabled for performance reasons when using Wingman. +- Hovering on a name displays the package where the name is defined, contributed by @berberman. + ![hover](https://user-images.githubusercontent.com/12473268/127550516-acc1f1b4-bad7-44fd-99a0-a174ce9ac909.gif) + +### Pull requests merged for 1.3.0 + +- Wingman: Properly destruct forall-quantified types +([#2049](https://github.com/haskell/haskell-language-server/pull/2049)) by @isovector +- Remove .stack-work from circleci cache +([#2044](https://github.com/haskell/haskell-language-server/pull/2044)) by @jneira +- Completions from non-imported modules +([#2040](https://github.com/haskell/haskell-language-server/pull/2040)) by @pepeiborra +- Wingman: Low gas warning +([#2038](https://github.com/haskell/haskell-language-server/pull/2038)) by @isovector +- Enable dynamic linking in stack builds +([#2031](https://github.com/haskell/haskell-language-server/pull/2031)) by @pepeiborra +- Fix nix flake +([#2030](https://github.com/haskell/haskell-language-server/pull/2030)) by @Avi-D-coder +- Tie plugins' pluginModifyDynflags to their enabled state +([#2029](https://github.com/haskell/haskell-language-server/pull/2029)) by @isovector +- Add benchmarks for hole fits +([#2027](https://github.com/haskell/haskell-language-server/pull/2027)) by @pepeiborra +- fix a typo +([#2024](https://github.com/haskell/haskell-language-server/pull/2024)) by @cdsmith +- Upgrade to refinery-0.4.0.0 +([#2021](https://github.com/haskell/haskell-language-server/pull/2021)) by @isovector +- Use implicit-hie-cradle-0.3.0.5 +([#2020](https://github.com/haskell/haskell-language-server/pull/2020)) by @jneira +- Disable hls tests for win and ghc-9.0.1 +([#2018](https://github.com/haskell/haskell-language-server/pull/2018)) by @jneira +- Use operational master commit to fix build for ghc-9.0.1 +([#2017](https://github.com/haskell/haskell-language-server/pull/2017)) by @jneira +- Fix Wingman dependency on extra +([#2007](https://github.com/haskell/haskell-language-server/pull/2007)) by @pepeiborra +- Add GHC 9.2 support for hie-compat +([#2003](https://github.com/haskell/haskell-language-server/pull/2003)) by @fendor +- Enable tests for ghc 9 and promote `ghcVersion` check +([#2001](https://github.com/haskell/haskell-language-server/pull/2001)) by @jneira +- Allow HLS plugins to declare cli commands +([#1999](https://github.com/haskell/haskell-language-server/pull/1999)) by @pepeiborra +- Remove >= from cabal-version +([#1998](https://github.com/haskell/haskell-language-server/pull/1998)) by @felixonmars +- Eval plugin: support ghc 9.0.1 
+([#1997](https://github.com/haskell/haskell-language-server/pull/1997)) by @berberman +- Maximize sharing of NormalizedFilePath values in getLocatedImports +([#1996](https://github.com/haskell/haskell-language-server/pull/1996)) by @pepeiborra +- nix: add support for ghc 9.0.1 +([#1995](https://github.com/haskell/haskell-language-server/pull/1995)) by @berberman +- Warn GHC 9 Compatibility to LSP Client +([#1992](https://github.com/haskell/haskell-language-server/pull/1992)) by @konn +- Update nix to GHC 8.10.5 +([#1991](https://github.com/haskell/haskell-language-server/pull/1991)) by @berberman +- Initialize ExportsMap using hiedb exports +([#1989](https://github.com/haskell/haskell-language-server/pull/1989)) by @pepeiborra +- Wingman: add emacs example config to Readme +([#1988](https://github.com/haskell/haskell-language-server/pull/1988)) by @stuebinm +- relax megaparsec constraint in hls-tactics-plugin +([#1986](https://github.com/haskell/haskell-language-server/pull/1986)) by @pepeiborra +- follow change in lsp-types +([#1985](https://github.com/haskell/haskell-language-server/pull/1985)) by @pepeiborra +- Don't suggest import an unnecessary data constructor. +([#1984](https://github.com/haskell/haskell-language-server/pull/1984)) by @peterwicksstringfield +- Enable hyphenation embedding +([#1979](https://github.com/haskell/haskell-language-server/pull/1979)) by @isovector +- Fix nix.yaml +([#1974](https://github.com/haskell/haskell-language-server/pull/1974)) by @isovector +- Add windows to ghcup artifacts and generate sha256 sums +([#1970](https://github.com/haskell/haskell-language-server/pull/1970)) by @jneira +- Wingman: Ensure homomorphic destruct covers all constructors in the domain +([#1968](https://github.com/haskell/haskell-language-server/pull/1968)) by @isovector +- Wingman: Add the correct file offset to metaprogram parse errors +([#1967](https://github.com/haskell/haskell-language-server/pull/1967)) by @isovector +- Wingman: Config option to suppress proofstate styling +([#1966](https://github.com/haskell/haskell-language-server/pull/1966)) by @isovector +- Wingman: Don't wildify vars when running beginMetaprogram +([#1963](https://github.com/haskell/haskell-language-server/pull/1963)) by @isovector +- Wingman: Don't suggest empty case lenses for case exprs with no data cons +([#1962](https://github.com/haskell/haskell-language-server/pull/1962)) by @isovector +- Wingman: Don't introduce too many variables +([#1961](https://github.com/haskell/haskell-language-server/pull/1961)) by @isovector +- Wingman: Code lens for empty lambda case +([#1956](https://github.com/haskell/haskell-language-server/pull/1956)) by @isovector +- Call hierarchy support +([#1955](https://github.com/haskell/haskell-language-server/pull/1955)) by @July541 +- Bugfix type signature lenses / code actions for pattern synonyms. 
+([#1952](https://github.com/haskell/haskell-language-server/pull/1952)) by @peterwicksstringfield +- Add :info command in Eval plugin +([#1948](https://github.com/haskell/haskell-language-server/pull/1948)) by @akrmn +- avoid holding onto the hie bytestring when indexing +([#1947](https://github.com/haskell/haskell-language-server/pull/1947)) by @pepeiborra +- Wingman: Make getCurrentDefinitions return polymorphic types +([#1945](https://github.com/haskell/haskell-language-server/pull/1945)) by @isovector +- Wingman: Tactical support for deep recursion +([#1944](https://github.com/haskell/haskell-language-server/pull/1944)) by @isovector +- Properly scope GADT equality evidence in the judgment +([#1942](https://github.com/haskell/haskell-language-server/pull/1942)) by @isovector +- Add ghc-9.0.1 to the build release script +([#1940](https://github.com/haskell/haskell-language-server/pull/1940)) by @anka-213 +- Cata tactic should generalize let and ensure unifiability +([#1938](https://github.com/haskell/haskell-language-server/pull/1938)) by @isovector +- Include chocolatey hls package +([#1936](https://github.com/haskell/haskell-language-server/pull/1936)) by @jneira +- Mention ghcup and warning about updating artifacts +([#1935](https://github.com/haskell/haskell-language-server/pull/1935)) by @jneira +- Remove ghc-8.8.2 +([#1934](https://github.com/haskell/haskell-language-server/pull/1934)) by @jneira +- Workaround for GHC 8.10.5 on macOS +([#1931](https://github.com/haskell/haskell-language-server/pull/1931)) by @konn +- Add manual upload instructions +([#1930](https://github.com/haskell/haskell-language-server/pull/1930)) by @jneira +- Perform name lookup directly in TacticsM +([#1924](https://github.com/haskell/haskell-language-server/pull/1924)) by @isovector +- Include testdata in hls-refine-imports-plugin.cabal (backport #1922) +([#1923](https://github.com/haskell/haskell-language-server/pull/1923)) by @mergify[bot] +- Include testdata in hls-refine-imports-plugin.cabal +([#1922](https://github.com/haskell/haskell-language-server/pull/1922)) by @felixonmars +- Add pointwise command to the metaprogram parser +([#1921](https://github.com/haskell/haskell-language-server/pull/1921)) by @isovector +- Allow symbol identifiers in tactics +([#1920](https://github.com/haskell/haskell-language-server/pull/1920)) by @isovector +- Fall back to hiedb for invalid srcspan paths +([#1918](https://github.com/haskell/haskell-language-server/pull/1918)) by @pepeiborra +- Disable hole fit suggestions when running Wingman +([#1873](https://github.com/haskell/haskell-language-server/pull/1873)) by @isovector +- Wingman: maintain user-defined fixity for definitions +([#1697](https://github.com/haskell/haskell-language-server/pull/1697)) by @isovector +- Display package names of external libraries on hover +([#1626](https://github.com/haskell/haskell-language-server/pull/1626)) by @berberman +- Make the eval plugin use the same default language extensions as ghci. 
+([#1596](https://github.com/haskell/haskell-language-server/pull/1596)) by @fmehta
+
+## 1.2.0
+
+We have finally released a new version of Haskell Language Server!
+Thanks to all contributors: many bugs have been fixed, and many features have landed.
+Here is a summary of the changes:
+
+- Basic support for GHC 9.0.1 is added.
+  It does not support all plugins yet, but core GHCIDE features will work. For detailed information about which plugins work, please refer to [this list](https://github.com/haskell/haskell-language-server/issues/297#issuecomment-855522891).
+- Support for GHC 8.10.5 is added.
+  Note that the macOS version is unfortunately not included in this release because of [a GHC issue with the `network` package](https://gitlab.haskell.org/ghc/ghc/-/issues/19968).
+- The HLS wrapper and the GHCIDE session loader now use the same logic as implicit-hie.
+  This fixes [a build issue](https://github.com/haskell/haskell-language-server/issues/1782) for stack projects with an implicit `hie.yaml` (see the sketch after this list).
+- The Wingman plugin has added numerous features and fixed many bugs:
+  - It now supports tactic metaprogramming!
+    For the list of commands, see [this document](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-tactics-plugin/COMMANDS.md#wingman-metaprogram-command-reference).
+    ![https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-tactics-plugin/COMMANDS.md#wingman-metaprogram-command-reference](https://user-images.githubusercontent.com/307223/118190278-bdf24f80-b3f7-11eb-8838-b08a2582d7f1.gif)
+  - "Refine hole" and "Split all function arguments" code actions are now publicly available.
+  - "Empty case split" code lens is added.
+  - The name generator is fixed [to avoid dangerous summon rituals](https://github.com/haskell/haskell-language-server/pull/1760).
+  - Many bugs related to type families and GADTs are fixed.
+- We support [nix flakes](https://nixos.wiki/wiki/Flakes), an upcoming way to manage dependencies in nix.
+- Every plugin (other than example plugins) now lives in its own package.
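+
+To make the session-loading change above concrete, here is a minimal, hypothetical `hie.yaml` describing an explicit stack cradle for hie-bios; with the unified implicit-hie logic, a stack project that previously needed a file like this can usually rely on the implicit cradle instead. The package and component names below are placeholders, not taken from this repository.
+
+```yaml
+# Hypothetical explicit cradle for a stack project ("my-package" is a placeholder).
+# With the implicit-hie based session loader, omitting this file lets HLS
+# derive an equivalent implicit cradle automatically.
+cradle:
+  stack:
+    - path: "./src"
+      component: "my-package:lib"
+    - path: "./test"
+      component: "my-package:test:my-package-test"
+```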
+ +### Pull requests merged for 1.2.0 + +- Cleanup stack build output in circleci +([#1905](https://github.com/haskell/haskell-language-server/pull/1905)) by @jhrcek +- Remove FeatureSet +([#1902](https://github.com/haskell/haskell-language-server/pull/1902)) by @isovector +- Correct a typo in ConfigUtils.hs +([#1900](https://github.com/haskell/haskell-language-server/pull/1900)) by @felixonmars +- Add GHC 8.10.5 support +([#1899](https://github.com/haskell/haskell-language-server/pull/1899)) by @Ailrun +- Fix getCurrentDirectory calls in ghcide +([#1897](https://github.com/haskell/haskell-language-server/pull/1897)) by @pepeiborra +- Wingman: FIx evidence when using GADT constructors +([#1889](https://github.com/haskell/haskell-language-server/pull/1889)) by @isovector +- [explicit-imports] Take in a predicate to filter modules +([#1888](https://github.com/haskell/haskell-language-server/pull/1888)) by @pepeiborra +- Fix unification pertaining to evidence +([#1885](https://github.com/haskell/haskell-language-server/pull/1885)) by @isovector +- Let Wingman peek through type families +([#1881](https://github.com/haskell/haskell-language-server/pull/1881)) by @isovector +- Use file watches for all workspace files +([#1880](https://github.com/haskell/haskell-language-server/pull/1880)) by @pepeiborra +- Update IRC details in README.md +([#1877](https://github.com/haskell/haskell-language-server/pull/1877)) by @fendor +- Fix nix build for #1858 +([#1870](https://github.com/haskell/haskell-language-server/pull/1870)) by @berberman +- Wingman metaprogram command documentation +([#1867](https://github.com/haskell/haskell-language-server/pull/1867)) by @isovector +- Catamorphism and collapse tactics +([#1865](https://github.com/haskell/haskell-language-server/pull/1865)) by @isovector +- Fix condition of nix build job +([#1864](https://github.com/haskell/haskell-language-server/pull/1864)) by @berberman +- Technology preview: Keep track of changes to minimize rebuilds +([#1862](https://github.com/haskell/haskell-language-server/pull/1862)) by @pepeiborra +- Trace more Shake evaluation details +([#1861](https://github.com/haskell/haskell-language-server/pull/1861)) by @pepeiborra +- No need to delete the same key twice +([#1860](https://github.com/haskell/haskell-language-server/pull/1860)) by @pepeiborra +- Use cabal-install if nix is failing in CI +([#1859](https://github.com/haskell/haskell-language-server/pull/1859)) by @berberman +- Use last apply-refact and several stack.yaml updates +([#1858](https://github.com/haskell/haskell-language-server/pull/1858)) by @jneira +- Split ghcide actions into different descriptors +([#1857](https://github.com/haskell/haskell-language-server/pull/1857)) by @berberman +- Allow module-local and imported functions in Wingman metaprograms +([#1856](https://github.com/haskell/haskell-language-server/pull/1856)) by @isovector +- Update mergify.yml +([#1853](https://github.com/haskell/haskell-language-server/pull/1853)) by @Ailrun +- Fix flake compat 
+([#1852](https://github.com/haskell/haskell-language-server/pull/1852)) by @berberman +- Fix record layout +([#1851](https://github.com/haskell/haskell-language-server/pull/1851)) by @isovector +- Avoid package-qualified import in Fourmolu plugin +([#1848](https://github.com/haskell/haskell-language-server/pull/1848)) by @georgefst +- Skip cachix jobs if token is unset +([#1845](https://github.com/haskell/haskell-language-server/pull/1845)) by @berberman +- Refine should either do intros or split, not both +([#1842](https://github.com/haskell/haskell-language-server/pull/1842)) by @isovector +- Add hspec upper bound +([#1837](https://github.com/haskell/haskell-language-server/pull/1837)) by @jneira +- Extract last 2 plugins and clean up others +([#1836](https://github.com/haskell/haskell-language-server/pull/1836)) by @Ailrun +- Extract pragmas plugin +([#1833](https://github.com/haskell/haskell-language-server/pull/1833)) by @Ailrun +- Extract floskell plugin as a standalone plugin +([#1829](https://github.com/haskell/haskell-language-server/pull/1829)) by @Ailrun +- nix: refactor with flakes +([#1827](https://github.com/haskell/haskell-language-server/pull/1827)) by @berberman +- Bump up hls-splice-plugin to 1.0.0.2 (backport #1825) +([#1826](https://github.com/haskell/haskell-language-server/pull/1826)) by @mergify[bot] +- Bump up hls-splice-plugin to 1.0.0.2 +([#1825](https://github.com/haskell/haskell-language-server/pull/1825)) by @jneira +- Apply formats again +([#1824](https://github.com/haskell/haskell-language-server/pull/1824)) by @Ailrun +- Extract fourmolu plugin into a standalone package +([#1823](https://github.com/haskell/haskell-language-server/pull/1823)) by @gustavoavena +- Ignore filemode in diff +([#1819](https://github.com/haskell/haskell-language-server/pull/1819)) by @Ailrun +- ghc-api cleanups cleanup +([#1816](https://github.com/haskell/haskell-language-server/pull/1816)) by @pepeiborra +- Add a hook for modifying the dynflags from a plugin +([#1814](https://github.com/haskell/haskell-language-server/pull/1814)) by @isovector +- Prepare ghcide release v1.3.0.0 +([#1811](https://github.com/haskell/haskell-language-server/pull/1811)) by @pepeiborra +- Remove hls-ghc-x.y from install script and wrapper +([#1805](https://github.com/haskell/haskell-language-server/pull/1805)) by @berberman +- Fix unwanted import refinement +([#1801](https://github.com/haskell/haskell-language-server/pull/1801)) by @rayshih +- Canonicalize hiedb path before comparing +([#1800](https://github.com/haskell/haskell-language-server/pull/1800)) by @pepeiborra +- Pin nix-pre-commit-hooks (backport #1780) +([#1798](https://github.com/haskell/haskell-language-server/pull/1798)) by @mergify[bot] +- Add upper bound to hlint (backport #1795) +([#1797](https://github.com/haskell/haskell-language-server/pull/1797)) by @mergify[bot] +- Add bounds for base in hls-stylish-haskell-plugin (backport #1794) +([#1796](https://github.com/haskell/haskell-language-server/pull/1796)) by 
@mergify[bot] +- Add upper bound to hlint +([#1795](https://github.com/haskell/haskell-language-server/pull/1795)) by @jneira +- Add bounds for base in hls-stylish-haskell-plugin +([#1794](https://github.com/haskell/haskell-language-server/pull/1794)) by @berberman +- Add bounds for base in hls-test-utils (backport #1791) +([#1793](https://github.com/haskell/haskell-language-server/pull/1793)) by @mergify[bot] +- Replace faulty signature test +([#1792](https://github.com/haskell/haskell-language-server/pull/1792)) by @kderme +- Add bounds for base in hls-test-utils +([#1791](https://github.com/haskell/haskell-language-server/pull/1791)) by @berberman +- Fix backport conflict of refine-import plugin +([#1790](https://github.com/haskell/haskell-language-server/pull/1790)) by @Ailrun +- Fix progress counting +([#1789](https://github.com/haskell/haskell-language-server/pull/1789)) by @pepeiborra +- Loosen dependency bounds (backport #1787) +([#1788](https://github.com/haskell/haskell-language-server/pull/1788)) by @mergify[bot] +- Loosen dependency bounds +([#1787](https://github.com/haskell/haskell-language-server/pull/1787)) by @berberman +- clean up ghc-api pragmas +([#1785](https://github.com/haskell/haskell-language-server/pull/1785)) by @pepeiborra +- Progress reporting improvements +([#1784](https://github.com/haskell/haskell-language-server/pull/1784)) by @pepeiborra +- Unify session loading using implicit-hie +([#1783](https://github.com/haskell/haskell-language-server/pull/1783)) by @fendor +- Pin nix-pre-commit-hooks +([#1780](https://github.com/haskell/haskell-language-server/pull/1780)) by @Ailrun +- Replace the unsafe getmodtime with safe posix calls +([#1778](https://github.com/haskell/haskell-language-server/pull/1778)) by @pepeiborra +- Tactic metaprogramming +([#1776](https://github.com/haskell/haskell-language-server/pull/1776)) by @isovector +- Fix wrong extend import while type constuctor and data constructor have the same name +([#1775](https://github.com/haskell/haskell-language-server/pull/1775)) by @July541 +- Add codetriage badge +([#1772](https://github.com/haskell/haskell-language-server/pull/1772)) by @jneira +- Wingman: configurable auto search depth +([#1771](https://github.com/haskell/haskell-language-server/pull/1771)) by @isovector +- Prevent accidental Cthulhu summons +([#1760](https://github.com/haskell/haskell-language-server/pull/1760)) by @isovector +- Delay the Shake session setup until the Initialized handler +([#1754](https://github.com/haskell/haskell-language-server/pull/1754)) by @pepeiborra +- Wrap the Shake functions with newtypes +([#1753](https://github.com/haskell/haskell-language-server/pull/1753)) by @ndmitchell +- Fix reduction depth +([#1751](https://github.com/haskell/haskell-language-server/pull/1751)) by @pepeiborra +- Add hls-graph abstracting over shake +([#1748](https://github.com/haskell/haskell-language-server/pull/1748)) by @ndmitchell +- Explicitly import liftIO if you need it, rather than getting it from Shake 
+([#1747](https://github.com/haskell/haskell-language-server/pull/1747)) by @ndmitchell +- Tease apart the custom SYB from ExactPrint +([#1746](https://github.com/haskell/haskell-language-server/pull/1746)) by @isovector +- Remove unnecessary Shake dependencies +([#1745](https://github.com/haskell/haskell-language-server/pull/1745)) by @ndmitchell +- Delete an unused import +([#1744](https://github.com/haskell/haskell-language-server/pull/1744)) by @ndmitchell +- Improve vscode extension schema generation +([#1742](https://github.com/haskell/haskell-language-server/pull/1742)) by @berberman +- Fix class method completion +([#1741](https://github.com/haskell/haskell-language-server/pull/1741)) by @July541 +- Add heralds to Wingman's use of runAction +([#1740](https://github.com/haskell/haskell-language-server/pull/1740)) by @isovector +- Wingman: case split on punned record fields +([#1739](https://github.com/haskell/haskell-language-server/pull/1739)) by @isovector +- Wingman feature release +([#1735](https://github.com/haskell/haskell-language-server/pull/1735)) by @isovector +- Add haskell-language-server Homebrew installation instructions +([#1734](https://github.com/haskell/haskell-language-server/pull/1734)) by @kret +- Add a "Split using NamedFieldPuns" code action +([#1733](https://github.com/haskell/haskell-language-server/pull/1733)) by @isovector +- Insert pragmas after shebang or to existing pragma list +([#1731](https://github.com/haskell/haskell-language-server/pull/1731)) by @OliverMadine +- Add executable stanza in hls-install.cabal. +([#1730](https://github.com/haskell/haskell-language-server/pull/1730)) by @arrowd +- Add installation instructions for FreeBSD. +([#1729](https://github.com/haskell/haskell-language-server/pull/1729)) by @arrowd +- HLint: Pass options through user config +([#1724](https://github.com/haskell/haskell-language-server/pull/1724)) by @rmehri01 +- Prepare ghcide 1.2.0.2 and HLS 1.1.0 +([#1722](https://github.com/haskell/haskell-language-server/pull/1722)) by @berberman +- Wingman: Destruct on empty case +([#1721](https://github.com/haskell/haskell-language-server/pull/1721)) by @isovector +- Fix: #1690 - Infix typed holes are now filled using infix notation +([#1708](https://github.com/haskell/haskell-language-server/pull/1708)) by @OliverMadine +- Implement refine imports +([#1686](https://github.com/haskell/haskell-language-server/pull/1686)) by @rayshih +- Ghc 9.0.1 support for ghcide +([#1649](https://github.com/haskell/haskell-language-server/pull/1649)) by @anka-213 +- hie-compat: Add basic support for ghc 9.0.1 +([#1635](https://github.com/haskell/haskell-language-server/pull/1635)) by @anka-213 +- Fix remove constraint +([#1578](https://github.com/haskell/haskell-language-server/pull/1578)) by @kderme +- Limit CodeActions within passed range +([#1442](https://github.com/haskell/haskell-language-server/pull/1442)) by @aufarg + +## 1.1.0 + +Haskell Language Server 1.1.0 has finally come! 
Many thanks to all contributors -- since the last release, we have merged over 100 PRs!
+As always, there are many internal bug fixes and performance improvements in ghcide. Apart from that,
+
+- Wingman gets many enhancements, thanks to @isovector for this epic work!
+  - Wingman actions can now be bound to editor hotkeys
+  - Experimental support for "jump to next unsolved hole"
+  - Improved layout algorithm --- don't reflow instances, or break do-blocks
+  - Wingman can now deal with GADTs, rank-n types and pattern synonyms
+  - Wingman now respects user-written bindings on the left side of the equals sign
+  - Significantly more-natural synthesized code when dealing with newtypes, infix operators, records and strings
+  - Improved user experience --- less waiting, and friendly errors for when things go wrong
+- the hlint plugin not working in some cases has been fixed
+- the annoying log message "haskell-lsp:incoming message parse error" is fixed in `lsp-1.2`
+- the eval plugin now supports the `it` variable, like GHCi
+- the verbose message "No cradle found for ... Proceeding with implicit cradle" is GONE
+- the type lenses plugin now has its custom config `mode` (enum) [`always`] to control its working mode:
+  - `always`: always displays type signature lenses of global bindings
+  - `exported`: similar to `always`, but only displays for exported global bindings
+  - `diagnostics`: follows diagnostic messages produced by GHC
+- the top-level LSP options `completionSnippetsOn` and `maxNumberOfProblems` are deprecated
+- the completions plugin now has its custom config:
+  - `autoExtendOn` (boolean) [`true`]: whether to enable auto extending import lists
+  - `snippetsOn` (boolean) [`true`]: whether to enable completion snippets, taking the place of `completionSnippetsOn`
+- Wingman has its custom config:
+  - `timeout_duration` (integer) [`2`]: the timeout for Wingman actions, in seconds
+  - `features` (string) [`""`]: feature set used by Wingman (see [the README of Wingman](https://github.com/haskell/haskell-language-server/tree/master/plugins/hls-tactics-plugin#readme))
+  - `max_use_ctor_actions` (integer) [`5`]: maximum number of `Use constructor` code actions that can appear
+  - `hole_severity` (enum) [`none`]: the severity to use when showing hole diagnostics
+- LSP symbols of typeclasses and type families are more appropriate
+- the test suites of plugins have been reorganized and no longer need to be run with the `test-server` executable
+- two new packages, `hls-test-utils` and `hls-stylish-haskell-plugin`, have been extracted
+
+This version uses `lsp-1.2.0`, `hls-plugin-api-1.1.0`, and `ghcide-1.2.0.2`.
+
+### Pull requests merged for 1.1.0
+
+- Restore compat.
with haddock-library 1.8 +([#1717](https://github.com/haskell/haskell-language-server/pull/1717)) by @pepeiborra +- Don't suggest destruct actions for already-destructed terms +([#1715](https://github.com/haskell/haskell-language-server/pull/1715)) by @isovector +- Add keybindings and jump to hole to the Wingman README +([#1712](https://github.com/haskell/haskell-language-server/pull/1712)) by @isovector +- Bracketing for snippet completions +([#1709](https://github.com/haskell/haskell-language-server/pull/1709)) by @OliverMadine +- Prepare ghcide 1.2.0 +([#1707](https://github.com/haskell/haskell-language-server/pull/1707)) by @berberman +- Adjust bounds +([#1701](https://github.com/haskell/haskell-language-server/pull/1701)) by @berberman +- Update nix +([#1699](https://github.com/haskell/haskell-language-server/pull/1699)) by @berberman +- Wingman: "Destruct all" only on ADTs +([#1695](https://github.com/haskell/haskell-language-server/pull/1695)) by @isovector +- Fix ghcide and HLS enter lsp mode by default +([#1692](https://github.com/haskell/haskell-language-server/pull/1692)) by @berberman +- Decrease Wingman timeout from 3.3 minutes to 2 seconds (configurable) +([#1688](https://github.com/haskell/haskell-language-server/pull/1688)) by @isovector +- Wrap test suite of tactics plugin into tasty test tree +([#1676](https://github.com/haskell/haskell-language-server/pull/1676)) by @berberman +- Wingman: Use infix notation for operator applications +([#1675](https://github.com/haskell/haskell-language-server/pull/1675)) by @isovector +- Ignore ghcide tests by paths +([#1673](https://github.com/haskell/haskell-language-server/pull/1673)) by @jneira +- Ignore nix job steps by path +([#1672](https://github.com/haskell/haskell-language-server/pull/1672)) by @jneira +- Intelligent derivations of Semigroup and Monoid for Wingman +([#1671](https://github.com/haskell/haskell-language-server/pull/1671)) by @isovector +- optimize ambiguity import suggestions +([#1669](https://github.com/haskell/haskell-language-server/pull/1669)) by @July541 +- Replace Barrier with MVar in lsp main +([#1668](https://github.com/haskell/haskell-language-server/pull/1668)) by @berberman +- ghcide - enable ApplicativeDo everywhere +([#1667](https://github.com/haskell/haskell-language-server/pull/1667)) by @pepeiborra +- support custom Ide commands +([#1666](https://github.com/haskell/haskell-language-server/pull/1666)) by @pepeiborra +- Add bounds for Diff +([#1665](https://github.com/haskell/haskell-language-server/pull/1665)) by @berberman +- Update shake bounds of install script +([#1664](https://github.com/haskell/haskell-language-server/pull/1664)) by @berberman +- Avoid creating IsFileOfInterest keys for non workspace files +([#1661](https://github.com/haskell/haskell-language-server/pull/1661)) by @pepeiborra +- additional .gitignore entries +([#1659](https://github.com/haskell/haskell-language-server/pull/1659)) by @pepeiborra +- Skip tracing unless eventlog is enabled 
+([#1658](https://github.com/haskell/haskell-language-server/pull/1658)) by @pepeiborra +- Fix a wingman bug caused by mismanaged stale data +([#1657](https://github.com/haskell/haskell-language-server/pull/1657)) by @isovector +- Fix ignore paths +([#1656](https://github.com/haskell/haskell-language-server/pull/1656)) by @jneira +- Shut the Shake session on exit, instead of restarting it +([#1655](https://github.com/haskell/haskell-language-server/pull/1655)) by @pepeiborra +- Emit holes as diagnostics +([#1653](https://github.com/haskell/haskell-language-server/pull/1653)) by @isovector +- log exceptions before killing the server +([#1651](https://github.com/haskell/haskell-language-server/pull/1651)) by @pepeiborra +- Do not override custom commands +([#1650](https://github.com/haskell/haskell-language-server/pull/1650)) by @pepeiborra +- Fix importing type operators +([#1644](https://github.com/haskell/haskell-language-server/pull/1644)) by @berberman +- Add haskell-language-server-bin to Arch Linux section +([#1642](https://github.com/haskell/haskell-language-server/pull/1642)) by @marcin-rzeznicki +- Update ISSUE_TEMPLATE.md +([#1640](https://github.com/haskell/haskell-language-server/pull/1640)) by @Ailrun +- Civilized indexing progress reporting +([#1633](https://github.com/haskell/haskell-language-server/pull/1633)) by @pepeiborra +- Update to lsp-1.2 +([#1631](https://github.com/haskell/haskell-language-server/pull/1631)) by @wz1000 +- Avoid reordering plugins +([#1629](https://github.com/haskell/haskell-language-server/pull/1629)) by @pepeiborra +- Run plugins' test suites with server in the same process +([#1628](https://github.com/haskell/haskell-language-server/pull/1628)) by @berberman +- Remove ignored paths +([#1623](https://github.com/haskell/haskell-language-server/pull/1623)) by @jneira +- Update formatting hooks to not include Wingman +([#1622](https://github.com/haskell/haskell-language-server/pull/1622)) by @Ailrun +- Add CPP Options for Stylish Haskell & Brittany Formatters +([#1620](https://github.com/haskell/haskell-language-server/pull/1620)) by @prikhi +- Use custom config for completions plugin +([#1619](https://github.com/haskell/haskell-language-server/pull/1619)) by @berberman +- Configurable I/O handles +([#1617](https://github.com/haskell/haskell-language-server/pull/1617)) by @pepeiborra +- Add installation instructions for Arch Linux +([#1616](https://github.com/haskell/haskell-language-server/pull/1616)) by @berberman +- Properly pass argFiles into defaultMain +([#1613](https://github.com/haskell/haskell-language-server/pull/1613)) by @mpickering +- Migrate tests of plugins +([#1612](https://github.com/haskell/haskell-language-server/pull/1612)) by @berberman +- Allow for customizable Haskell views of Property types +([#1608](https://github.com/haskell/haskell-language-server/pull/1608)) by @isovector +- Extract hls-test-utils +([#1606](https://github.com/haskell/haskell-language-server/pull/1606)) by @berberman +- Add test data files 
to extra-source-files +([#1605](https://github.com/haskell/haskell-language-server/pull/1605)) by @jneira +- Extract stylish-haskell plugin into a standalone package +([#1604](https://github.com/haskell/haskell-language-server/pull/1604)) by @berberman +- Eval plugin: evaluate expressions as statements +([#1603](https://github.com/haskell/haskell-language-server/pull/1603)) by @berberman +- Bump haddock-library to 1.10.0 +([#1598](https://github.com/haskell/haskell-language-server/pull/1598)) by @berberman +- Relax ghcides upper bound on base16-bytestring +([#1595](https://github.com/haskell/haskell-language-server/pull/1595)) by @maralorn +- Use CiInterface/SkInterface for typeclass symbols +([#1592](https://github.com/haskell/haskell-language-server/pull/1592)) by @fwcd +- Avoid duplicating known targets and import paths +([#1590](https://github.com/haskell/haskell-language-server/pull/1590)) by @pepeiborra +- Add ability for plugins to handle file change notifications +([#1588](https://github.com/haskell/haskell-language-server/pull/1588)) by @pepeiborra +- Ensure eval plugin Print class doesn't rely on Prelude being in scope +([#1587](https://github.com/haskell/haskell-language-server/pull/1587)) by @akrmn +- Give a canonical ordering for destructing terms in Wingman +([#1586](https://github.com/haskell/haskell-language-server/pull/1586)) by @isovector +- Try a homomorphic destruct before a standard destruct +([#1582](https://github.com/haskell/haskell-language-server/pull/1582)) by @isovector +- Update homepage and other urls for ghcide +([#1580](https://github.com/haskell/haskell-language-server/pull/1580)) by @felixonmars +- Regularize custom config of plugins +([#1576](https://github.com/haskell/haskell-language-server/pull/1576)) by @berberman +- Cleanup the TacticProviders interface +([#1572](https://github.com/haskell/haskell-language-server/pull/1572)) by @isovector +- Add custom code action kinds for import related code actions +([#1570](https://github.com/haskell/haskell-language-server/pull/1570)) by @berberman +- bump retrie plugin version +([#1569](https://github.com/haskell/haskell-language-server/pull/1569)) by @pepeiborra +- Use ConLikes instead of DataCons +([#1568](https://github.com/haskell/haskell-language-server/pull/1568)) by @isovector +- Remove max number of problems config option +([#1567](https://github.com/haskell/haskell-language-server/pull/1567)) by @jneira +- Prepare ghcide 1.1.0 +([#1566](https://github.com/haskell/haskell-language-server/pull/1566)) by @pepeiborra +- Use string literals to synthesize the empty string +([#1564](https://github.com/haskell/haskell-language-server/pull/1564)) by @isovector +- Add wingman branding to code actions +([#1555](https://github.com/haskell/haskell-language-server/pull/1555)) by @isovector +- Use TextEdit to insert new imports +([#1554](https://github.com/haskell/haskell-language-server/pull/1554)) by @berberman +- Introduce strict versions of modifyVar to improve contention 
+([#1553](https://github.com/haskell/haskell-language-server/pull/1553)) by @pepeiborra +- Improve how wingman uses evidence +([#1549](https://github.com/haskell/haskell-language-server/pull/1549)) by @isovector +- Review early cutoff fingerprints +([#1547](https://github.com/haskell/haskell-language-server/pull/1547)) by @pepeiborra +- Improve thread contention around diagnostics +([#1546](https://github.com/haskell/haskell-language-server/pull/1546)) by @pepeiborra +- Be much more intelligent about splitting matches +([#1543](https://github.com/haskell/haskell-language-server/pull/1543)) by @isovector +- Update nixpkgs to ghc 8.10.4 +([#1538](https://github.com/haskell/haskell-language-server/pull/1538)) by @berberman +- Log a warning for every diagnostic received when doDiagnostics=False +([#1537](https://github.com/haskell/haskell-language-server/pull/1537)) by @pepeiborra +- Fix missing parens of auto extending imports +([#1526](https://github.com/haskell/haskell-language-server/pull/1526)) by @berberman +- Change Wingman module structure, address -Wall +([#1519](https://github.com/haskell/haskell-language-server/pull/1519)) by @isovector +- Pull Wingman's method hypotheses directly from in-scope dicts +([#1517](https://github.com/haskell/haskell-language-server/pull/1517)) by @isovector +- Avoid redundant work in diagnostics pass +([#1514](https://github.com/haskell/haskell-language-server/pull/1514)) by @pepeiborra +- Add an option to control progress reporting +([#1513](https://github.com/haskell/haskell-language-server/pull/1513)) by @pepeiborra +- Package ghcide code actions +([#1512](https://github.com/haskell/haskell-language-server/pull/1512)) by @berberman +- Demote implicit cradle warn to logging +([#1511](https://github.com/haskell/haskell-language-server/pull/1511)) by @jneira +- Set all plugin flags to manual +([#1510](https://github.com/haskell/haskell-language-server/pull/1510)) by @jneira +- Avoid always rerunning GetModificationTime for interface files too +([#1506](https://github.com/haskell/haskell-language-server/pull/1506)) by @pepeiborra +- Let Wingman's apply tactic run endomorphisms +([#1505](https://github.com/haskell/haskell-language-server/pull/1505)) by @isovector +- Make Wingman produce user-facing error messages +([#1502](https://github.com/haskell/haskell-language-server/pull/1502)) by @isovector +- Disable HLS benchmarks +([#1501](https://github.com/haskell/haskell-language-server/pull/1501)) by @wz1000 +- Add kind and preferred flag for all Wingman code actions +([#1499](https://github.com/haskell/haskell-language-server/pull/1499)) by @isovector +- Organize Wingman tests +([#1498](https://github.com/haskell/haskell-language-server/pull/1498)) by @isovector +- Register IDE configuration when called via the command line +([#1495](https://github.com/haskell/haskell-language-server/pull/1495)) by @wz1000 +- Haddock upper bound +([#1492](https://github.com/haskell/haskell-language-server/pull/1492)) by @jneira +- Make type lenses plugin configurable 
+([#1491](https://github.com/haskell/haskell-language-server/pull/1491)) by @berberman +- Context-aware ExactPrint grafting for HsExpr +([#1489](https://github.com/haskell/haskell-language-server/pull/1489)) by @isovector +- Drive GetModificationTime using watched file events +([#1487](https://github.com/haskell/haskell-language-server/pull/1487)) by @pepeiborra +- Faster ModSummary fingerprints +([#1485](https://github.com/haskell/haskell-language-server/pull/1485)) by @pepeiborra +- Revert all changes to hie-compat since 11b5c2e +([#1484](https://github.com/haskell/haskell-language-server/pull/1484)) by @wz1000 +- Fix non-determinism in boot-def test +([#1483](https://github.com/haskell/haskell-language-server/pull/1483)) by @wz1000 +- Hackage needs autogen-modules +([#1481](https://github.com/haskell/haskell-language-server/pull/1481)) by @jneira +- Ignore ci for some subdirectories and files +([#1480](https://github.com/haskell/haskell-language-server/pull/1480)) by @jneira +- Split plugin tests into two cabal projects +([#1479](https://github.com/haskell/haskell-language-server/pull/1479)) by @wz1000 +- Less aggressive refine tactic +([#1475](https://github.com/haskell/haskell-language-server/pull/1475)) by @isovector +- Enable hls-tactics-plugin tests in CI +([#1474](https://github.com/haskell/haskell-language-server/pull/1474)) by @isovector +- Generate a more robust top-level binding Provenance +([#1473](https://github.com/haskell/haskell-language-server/pull/1473)) by @isovector +- Add new variables to the extract when doing intros +([#1472](https://github.com/haskell/haskell-language-server/pull/1472)) by @isovector +- Bump up hlint plugin version +([#1469](https://github.com/haskell/haskell-language-server/pull/1469)) by @jneira +- Make sure split respects GADT equalities +([#1466](https://github.com/haskell/haskell-language-server/pull/1466)) by @isovector +- Add "Split all function arguments" code action +([#1464](https://github.com/haskell/haskell-language-server/pull/1464)) by @isovector +- Add "Refine hole" code action +([#1463](https://github.com/haskell/haskell-language-server/pull/1463)) by @isovector +- Implement "use constructor" code action +([#1461](https://github.com/haskell/haskell-language-server/pull/1461)) by @isovector +- Remove tactics src-dir from func-test +([#1460](https://github.com/haskell/haskell-language-server/pull/1460)) by @isovector +- Make sure to give the correct DynFlags to the recompilation checker +([#1459](https://github.com/haskell/haskell-language-server/pull/1459)) by @pepeiborra +- Don't use record notation for single-field datacons in tactics +([#1456](https://github.com/haskell/haskell-language-server/pull/1456)) by @isovector +- update IRC channel name in plugin tutorial +([#1455](https://github.com/haskell/haskell-language-server/pull/1455)) by @shapr +- Update readme and cabal for Wingman +([#1454](https://github.com/haskell/haskell-language-server/pull/1454)) by @isovector +- Remove recursion tracking from TacticState 
+([#1453](https://github.com/haskell/haskell-language-server/pull/1453)) by @isovector
+- Use runtime ghc libdir for ghc-exactprint and ghc-8.10
+([#1451](https://github.com/haskell/haskell-language-server/pull/1451)) by @jneira
+- Simplify tactics state structure
+([#1449](https://github.com/haskell/haskell-language-server/pull/1449)) by @isovector
+- Extract the qualified name from already imported module
+([#1445](https://github.com/haskell/haskell-language-server/pull/1445)) by @berberman
+- Correct megaparsec lower bound
+([#1441](https://github.com/haskell/haskell-language-server/pull/1441)) by @jneira
+- Reformat all files
+([#1439](https://github.com/haskell/haskell-language-server/pull/1439)) by @Ailrun
+- Customize the unitId used for the fake internal component
+([#1435](https://github.com/haskell/haskell-language-server/pull/1435)) by @pepeiborra
+- Minor performance optimizations
+([#1432](https://github.com/haskell/haskell-language-server/pull/1432)) by @pepeiborra
+
+## 1.0.0
+
+This is the celebratory release of Haskell Language Server 1.0.0!
+This release includes a lot of internal changes, bug fixes, leaks plugged, and performance improvements, thanks to all our contributors.
+Among others:
+
+- We added support for GHC 8.10.4 and removed support for GHC 8.10.1.
+  From now on, we will support at most three patch versions for each minor version of GHC, barring special circumstances.
+- As of hie-bios >= 0.7.3, we use [`${XDG_CACHE_HOME}`](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)`/hie-bios/...` (or a similar path, depending on the OS) as the build directory for Cabal.
+- The Eval plugin now uses GHC's parsing mechanism and has become more stable.
+- We support a code action to disambiguate the same identifiers from multiple imports.
+  ![gif](https://user-images.githubusercontent.com/21860/106387152-bfd16d80-641b-11eb-9394-c553fad5794b.gif)
+- We support a code action to hide shadowed identifiers from imports.
+  ![gif](https://user-images.githubusercontent.com/26041945/107199055-e05b8200-6a30-11eb-9198-448ab8604cc0.gif)
+- HIE DB is now integrated. This means we now support `find-references` and `workspace-symbol`. `find-definition` is also improved in projects with multiple components.
+- The Brittany and Stylish-haskell plugins now load language extensions from the GHC session.
+- The Tactic plugin supports eta-reduction and an Agda-like split tactic, and can create a function with piecewise definitions.
+ ![gif](https://user-images.githubusercontent.com/307223/107991243-0af68f00-6f8b-11eb-9ff9-3e9a7363ba5f.gif) + +### Pull requests merged for 1.0.0 + +- Correctly split non-varpats in tactics +([#1427](https://github.com/haskell/haskell-language-server/pull/1427)) by @isovector +- Move tactics tests to be standalone +([#1425](https://github.com/haskell/haskell-language-server/pull/1425)) by @isovector +- Fix the handling of default HLS config again +([#1419](https://github.com/haskell/haskell-language-server/pull/1419)) by @pepeiborra +- Patch pre-commit-hook to work with GHCIDE/hls-plugin-api codes +([#1418](https://github.com/haskell/haskell-language-server/pull/1418)) by @Ailrun +- Refactor pragmas plugin +([#1417](https://github.com/haskell/haskell-language-server/pull/1417)) by @berberman +- Add a test for #1376 +([#1414](https://github.com/haskell/haskell-language-server/pull/1414)) by @pepeiborra +- Reenable HLS example benchmark +([#1412](https://github.com/haskell/haskell-language-server/pull/1412)) by @pepeiborra +- Fix pre-commit-hook +([#1411](https://github.com/haskell/haskell-language-server/pull/1411)) by @Ailrun +- Plugin Config: Add maxCompletions to JSON instance +([#1407](https://github.com/haskell/haskell-language-server/pull/1407)) by @andys8 +- Remove custom 'trim' implementation +([#1406](https://github.com/haskell/haskell-language-server/pull/1406)) by @fendor +- Update to hie-bios 0.7.4 +([#1405](https://github.com/haskell/haskell-language-server/pull/1405)) by @fendor +- Prepare 1.0.0 release +([#1402](https://github.com/haskell/haskell-language-server/pull/1402)) by @Ailrun +- Fix install script after hlint fixes +([#1400](https://github.com/haskell/haskell-language-server/pull/1400)) by @jhrcek +- Use last with-utf8 to fix #1372 +([#1399](https://github.com/haskell/haskell-language-server/pull/1399)) by @jneira +- Implement Tactic Featuresets +([#1398](https://github.com/haskell/haskell-language-server/pull/1398)) by @isovector +- Update hie-bios +([#1397](https://github.com/haskell/haskell-language-server/pull/1397)) by @Ailrun +- Bump plugins versions +([#1392](https://github.com/haskell/haskell-language-server/pull/1392)) by @pepeiborra +- Split main Tactics module +([#1391](https://github.com/haskell/haskell-language-server/pull/1391)) by @isovector +- Prepare ghcide release 0.7.5 +([#1389](https://github.com/haskell/haskell-language-server/pull/1389)) by @pepeiborra +- Disable HLS benchmark example +([#1388](https://github.com/haskell/haskell-language-server/pull/1388)) by @pepeiborra +- Fix GenChangelog script format +([#1387](https://github.com/haskell/haskell-language-server/pull/1387)) by @Ailrun +- Tone down some logInfos to logDebug +([#1385](https://github.com/haskell/haskell-language-server/pull/1385)) by @pepeiborra +- Add a pre commit hook for code formatting +([#1384](https://github.com/haskell/haskell-language-server/pull/1384)) by @Ailrun +- remove unsafePerformIO 
+([#1383](https://github.com/haskell/haskell-language-server/pull/1383)) by @pepeiborra +- Use object code for TH+UnboxedTuples/Sums +([#1382](https://github.com/haskell/haskell-language-server/pull/1382)) by @wz1000 +- Update stack resolvers 8.10.3/8.10.4 +([#1380](https://github.com/haskell/haskell-language-server/pull/1380)) by @jneira +- Agda-style case splitting for tactics +([#1379](https://github.com/haskell/haskell-language-server/pull/1379)) by @isovector +- Configuration for initial ghc lib dir +([#1378](https://github.com/haskell/haskell-language-server/pull/1378)) by @pepeiborra +- Use lsp-1.1.1 +([#1377](https://github.com/haskell/haskell-language-server/pull/1377)) by @wz1000 +- use implicit-hie cradle from setInitialDynFlags +([#1375](https://github.com/haskell/haskell-language-server/pull/1375)) by @wz1000 +- Add test for multi-component goto def and make runLanguageServer responsible for hiedb +([#1373](https://github.com/haskell/haskell-language-server/pull/1373)) by @wz1000 +- Show window message when auto extending import lists +([#1371](https://github.com/haskell/haskell-language-server/pull/1371)) by @berberman +- Another extension that Brittany cannot parse +([#1369](https://github.com/haskell/haskell-language-server/pull/1369)) by @pepeiborra +- Clean dependency data structures and speed up GetDependencies +([#1368](https://github.com/haskell/haskell-language-server/pull/1368)) by @pepeiborra +- Catch GHC errors in listing module names +([#1367](https://github.com/haskell/haskell-language-server/pull/1367)) by @berberman +- Lose the ghc-lib flag +([#1366](https://github.com/haskell/haskell-language-server/pull/1366)) by @pepeiborra +- Make StylishHaskell plugin recognize extensions from DynFlags +([#1364](https://github.com/haskell/haskell-language-server/pull/1364)) by @Ailrun +- Pass language extensions to Brittany +([#1362](https://github.com/haskell/haskell-language-server/pull/1362)) by @pepeiborra +- Sanitize the setup of the default Ide.Config +([#1361](https://github.com/haskell/haskell-language-server/pull/1361)) by @pepeiborra +- Fix completion snippets on DuplicateRecordFields +([#1360](https://github.com/haskell/haskell-language-server/pull/1360)) by @berberman +- Index files on first open +([#1358](https://github.com/haskell/haskell-language-server/pull/1358)) by @wz1000 +- Make find-definition work better with multi-components +([#1357](https://github.com/haskell/haskell-language-server/pull/1357)) by @wz1000 +- Construct record datacons in tactics +([#1356](https://github.com/haskell/haskell-language-server/pull/1356)) by @isovector +- Don't insert parentheses for top-level tactics holes +([#1352](https://github.com/haskell/haskell-language-server/pull/1352)) by @isovector +- Simplify extracts after running tactics +([#1351](https://github.com/haskell/haskell-language-server/pull/1351)) by @isovector +- Fix code actions regression +([#1349](https://github.com/haskell/haskell-language-server/pull/1349)) by @pepeiborra +- Refactor the hypothesis type in 
hls-tactics-plugin +([#1347](https://github.com/haskell/haskell-language-server/pull/1347)) by @isovector +- Fix the Eval plugin sporadic exceptions +([#1345](https://github.com/haskell/haskell-language-server/pull/1345)) by @pepeiborra +- Eval Plugin: Proper handling of flags in `:set` +([#1343](https://github.com/haskell/haskell-language-server/pull/1343)) by @konn +- Cancel earlier queued benchmarks +([#1339](https://github.com/haskell/haskell-language-server/pull/1339)) by @pepeiborra +- Default main for ghcide +([#1338](https://github.com/haskell/haskell-language-server/pull/1338)) by @pepeiborra +- Fix duplication of code actions for adding NamedFieldPuns +([#1334](https://github.com/haskell/haskell-language-server/pull/1334)) by @berberman +- Bump explicit-imports plugin +([#1333](https://github.com/haskell/haskell-language-server/pull/1333)) by @pepeiborra +- Add support for ghc-8.10.4 and drop it for ghc-8.10.1 +([#1331](https://github.com/haskell/haskell-language-server/pull/1331)) by @jneira +- Prepare ghcide v0.7.4 release +([#1328](https://github.com/haskell/haskell-language-server/pull/1328)) by @pepeiborra +- Add a new benchmark example to characterise multi-component performance +([#1326](https://github.com/haskell/haskell-language-server/pull/1326)) by @pepeiborra +- [shake-bench] extract project dependencies action (for the HEAD binary) +([#1325](https://github.com/haskell/haskell-language-server/pull/1325)) by @pepeiborra +- [shake-bench] collect eventlogs +([#1324](https://github.com/haskell/haskell-language-server/pull/1324)) by @pepeiborra +- [benchmark] add warmups +([#1323](https://github.com/haskell/haskell-language-server/pull/1323)) by @pepeiborra +- Add code action for hiding shadowed identifiers from imports +([#1322](https://github.com/haskell/haskell-language-server/pull/1322)) by @berberman +- Parallelize benchmark CI +([#1320](https://github.com/haskell/haskell-language-server/pull/1320)) by @pepeiborra +- Fix space leak on cradle reloads +([#1316](https://github.com/haskell/haskell-language-server/pull/1316)) by @pepeiborra +- [benchmarks] speed up CI +([#1315](https://github.com/haskell/haskell-language-server/pull/1315)) by @pepeiborra +- [benchmark] check digests for input files only +([#1314](https://github.com/haskell/haskell-language-server/pull/1314)) by @pepeiborra +- Add link to hackage package to readme +([#1313](https://github.com/haskell/haskell-language-server/pull/1313)) by @expipiplus1 +- Splice Plugin: preparatory version bump for the next (not 0.9.0) HLS release +([#1312](https://github.com/haskell/haskell-language-server/pull/1312)) by @konn +- hls-splice-plugin-0.3.0.0-prepare +([#1311](https://github.com/haskell/haskell-language-server/pull/1311)) by @konn +- Trigger extending import only when the item is not in scope +([#1309](https://github.com/haskell/haskell-language-server/pull/1309)) by @berberman +- Bum up hls-eval-plugin to 0.2 +([#1305](https://github.com/haskell/haskell-language-server/pull/1305)) by @jneira +- Don't extend import 
list with child if the parent has been imported as (..)
+([#1302](https://github.com/haskell/haskell-language-server/pull/1302)) by @berberman
+- Prepare hls hlint plugin 0.2.0
+([#1296](https://github.com/haskell/haskell-language-server/pull/1296)) by @jneira
+- Import disambiguation: Corrects handling of fully-applied and one-sided sectioned operators in qualifying strategy
+([#1294](https://github.com/haskell/haskell-language-server/pull/1294)) by @konn
+- hls-splice-plugin-0.2.0.0
+([#1293](https://github.com/haskell/haskell-language-server/pull/1293)) by @konn
+- Bump haddock comments plugin to 0.1.1
+([#1292](https://github.com/haskell/haskell-language-server/pull/1292)) by @berberman
+- FindImports typo (minor)
+([#1291](https://github.com/haskell/haskell-language-server/pull/1291)) by @andys8
+- Bump up hls-plugin-api to 0.7.0
+([#1290](https://github.com/haskell/haskell-language-server/pull/1290)) by @jneira
+- Prepare ghcide v0.7.3 release
+([#1289](https://github.com/haskell/haskell-language-server/pull/1289)) by @pepeiborra
+- hls-retrie-plugin 0.1.1.0
+([#1288](https://github.com/haskell/haskell-language-server/pull/1288)) by @pepeiborra
+- Upgrade to lsp-1.0
+([#1284](https://github.com/haskell/haskell-language-server/pull/1284)) by @wz1000
+- Update IRC Name in README
+([#1275](https://github.com/haskell/haskell-language-server/pull/1275)) by @fendor
+- Restore code actions order
+([#1273](https://github.com/haskell/haskell-language-server/pull/1273)) by @pepeiborra
+- Prepare 0.9.0
+([#1271](https://github.com/haskell/haskell-language-server/pull/1271)) by @jneira
+- Reenable auto extend imports and drop snippets for infix completions
+([#1266](https://github.com/haskell/haskell-language-server/pull/1266)) by @pepeiborra
+- ghcide: Implements a CodeAction to disambiguate ambiguous symbols
+([#1264](https://github.com/haskell/haskell-language-server/pull/1264)) by @konn
+- Doctest comment parsing using module annotations in Eval Plugin
+([#1232](https://github.com/haskell/haskell-language-server/pull/1232)) by @konn
+- Apply some hlint suggestions, silence some others.
+([#1227](https://github.com/haskell/haskell-language-server/pull/1227)) by @peterwicksstringfield
+- References via `hiedb`
+([#704](https://github.com/haskell/haskell-language-server/pull/704)) by @wz1000
+- Use default config on missing configuration section
+([#459](https://github.com/haskell/haskell-language-server/pull/459)) by @aufarg
+
+## 0.9.0
+
+This release includes a lot of refactorings and bug fixes to existing features, among them the hlint and eval plugins.
+It contains a fix for a bug in ghcide involving stale diagnostics (#1204).
+
+The list of contributors continues to show healthy growth; many thanks to you all!
+ +And remember, we have a new brand logo, courtesy of @Ailrun :slightly_smiling_face: + +![haskell-language-server](https://github.com/haskell/haskell-language-server/raw/master/docs/logos/logo-256.png) + +### Pull requests merged for 0.9.0 + +- Do not error out on failed rewrite +([#1269](https://github.com/haskell/haskell-language-server/pull/1269)) by @pepeiborra +- Tighten dependency on apply-refact +([#1268](https://github.com/haskell/haskell-language-server/pull/1268)) by @hololeap +- Add the new logos +([#1267](https://github.com/haskell/haskell-language-server/pull/1267)) by @Ailrun +- Fix a bug in completions +([#1265](https://github.com/haskell/haskell-language-server/pull/1265)) by @pepeiborra +- Produce heap profiles the old fashioned way, from .hp files +([#1261](https://github.com/haskell/haskell-language-server/pull/1261)) by @pepeiborra +- Break down ghcide functionality in HLS plugins +([#1257](https://github.com/haskell/haskell-language-server/pull/1257)) by @pepeiborra +- Enforce max completions over all plugins +([#1256](https://github.com/haskell/haskell-language-server/pull/1256)) by @pepeiborra +- Reorder code actions to put remove redundant imports first +([#1255](https://github.com/haskell/haskell-language-server/pull/1255)) by @pepeiborra +- Update bench.yml to include all the relevant artifacts +([#1254](https://github.com/haskell/haskell-language-server/pull/1254)) by @pepeiborra +- Benchmarks: generate heap profiles +([#1253](https://github.com/haskell/haskell-language-server/pull/1253)) by @pepeiborra +- Add gh workflows badges +([#1251](https://github.com/haskell/haskell-language-server/pull/1251)) by @jneira +- Add dynamic linking common issue +([#1249](https://github.com/haskell/haskell-language-server/pull/1249)) by @jneira +- Add license for hls-tactics-plugin +([#1248](https://github.com/haskell/haskell-language-server/pull/1248)) by @isovector +- Use exact print to extend import lists +([#1246](https://github.com/haskell/haskell-language-server/pull/1246)) by @berberman +- Test apply-refact with TypeApplications +([#1244](https://github.com/haskell/haskell-language-server/pull/1244)) by @jneira +- Add non reversable pragma completion +([#1243](https://github.com/haskell/haskell-language-server/pull/1243)) by @Ailrun +- Delete redundant "category: Development". 
+([#1241](https://github.com/haskell/haskell-language-server/pull/1241)) by @peterwicksstringfield +- Complete the No- variants of language extensions and Strict extension +([#1238](https://github.com/haskell/haskell-language-server/pull/1238)) by @mrBliss +- Add code actions for disabling a warning in the current file +([#1235](https://github.com/haskell/haskell-language-server/pull/1235)) by @georgefst +- Change packages metadata and rename tactics subfolder +([#1234](https://github.com/haskell/haskell-language-server/pull/1234)) by @jneira +- Fix the bug that generating comments would duplicate existing comments +([#1233](https://github.com/haskell/haskell-language-server/pull/1233)) by @berberman +- Delete global hie.yaml config +([#1230](https://github.com/haskell/haskell-language-server/pull/1230)) by @jneira +- Easy hlint fixes +([#1226](https://github.com/haskell/haskell-language-server/pull/1226)) by @peterwicksstringfield +- Use the runtime ghc libdir for ghc-exactprint +([#1225](https://github.com/haskell/haskell-language-server/pull/1225)) by @jneira +- Add note in README/Tutorial regarding CPP support +([#1224](https://github.com/haskell/haskell-language-server/pull/1224)) by @tittoassini +- Test and fix for issue 1213 +([#1223](https://github.com/haskell/haskell-language-server/pull/1223)) by @tittoassini +- Add traces for HLS providers +([#1222](https://github.com/haskell/haskell-language-server/pull/1222)) by @pepeiborra +- Use exact print for suggest missing constraint code actions +([#1221](https://github.com/haskell/haskell-language-server/pull/1221)) by @pepeiborra +- Fix changelog dates +([#1220](https://github.com/haskell/haskell-language-server/pull/1220)) by @pepeiborra +- Ignore .shake folder +([#1219](https://github.com/haskell/haskell-language-server/pull/1219)) by @pepeiborra +- Limit completions to top 40 +([#1218](https://github.com/haskell/haskell-language-server/pull/1218)) by @pepeiborra +- Parenthesise type operators when extending import lists +([#1212](https://github.com/haskell/haskell-language-server/pull/1212)) by @mrBliss +- Expose shake options used +([#1209](https://github.com/haskell/haskell-language-server/pull/1209)) by @pepeiborra +- Prepare ghcide release v0.7.1 +([#1207](https://github.com/haskell/haskell-language-server/pull/1207)) by @pepeiborra +- Documentation for the Eval Plugin +([#1206](https://github.com/haskell/haskell-language-server/pull/1206)) by @tittoassini +- Stale diagnostics fix +([#1204](https://github.com/haskell/haskell-language-server/pull/1204)) by @pepeiborra +- Extract Development.IDE.GHC.ExactPrint +([#1203](https://github.com/haskell/haskell-language-server/pull/1203)) by @pepeiborra +- Fix bug in Retrie "fold/unfold in local file" commands +([#1202](https://github.com/haskell/haskell-language-server/pull/1202)) by @pepeiborra +- Minor eval plugin fixes +([#1199](https://github.com/haskell/haskell-language-server/pull/1199)) by @tittoassini +- Disable win 8.6.4 job 
+([#1198](https://github.com/haskell/haskell-language-server/pull/1198)) by @jneira +- Add custom cache layer for session loading +([#1197](https://github.com/haskell/haskell-language-server/pull/1197)) by @fendor +- Use completionSnippetsOn flag +([#1195](https://github.com/haskell/haskell-language-server/pull/1195)) by @takoeight0821 +- Remove runs dropped by #1173 +([#1194](https://github.com/haskell/haskell-language-server/pull/1194)) by @jneira +- Remove undefined exports suggestions +([#1193](https://github.com/haskell/haskell-language-server/pull/1193)) by @kderme +- Update nixpkgs to ghc 8.10.3 +([#1191](https://github.com/haskell/haskell-language-server/pull/1191)) by @pepeiborra +- Do not disable parallel GC +([#1190](https://github.com/haskell/haskell-language-server/pull/1190)) by @pepeiborra +- Switch module outline to useWtihStale +([#1189](https://github.com/haskell/haskell-language-server/pull/1189)) by @pepeiborra +- Fix sticky diagnostics +([#1188](https://github.com/haskell/haskell-language-server/pull/1188)) by @pepeiborra +- Fix class plugin cabal +([#1186](https://github.com/haskell/haskell-language-server/pull/1186)) by @Ailrun +- Update package description of haddock comments plugin +([#1185](https://github.com/haskell/haskell-language-server/pull/1185)) by @berberman +- Installation from Hackage - add README section +([#1183](https://github.com/haskell/haskell-language-server/pull/1183)) by @pepeiborra +- Preparation for Uploading Splice Plugin to Hackage +([#1182](https://github.com/haskell/haskell-language-server/pull/1182)) by @konn +- Preparation for uploading `hls-exactprint-utils` +([#1181](https://github.com/haskell/haskell-language-server/pull/1181)) by @konn +- Complete hls-hlint-plugin package metadata +([#1180](https://github.com/haskell/haskell-language-server/pull/1180)) by @jneira +- Benchmark improvements +([#1178](https://github.com/haskell/haskell-language-server/pull/1178)) by @pepeiborra +- Make adding missing constraint work in presence of 'forall' (fixes #1164) +([#1177](https://github.com/haskell/haskell-language-server/pull/1177)) by @jhrcek +- Prepare for Hackage +([#1176](https://github.com/haskell/haskell-language-server/pull/1176)) by @pepeiborra +- Test only last ghc minor version and fix windows cache +([#1173](https://github.com/haskell/haskell-language-server/pull/1173)) by @jneira +- Fix toMethodName bug of the Class plugin +([#1170](https://github.com/haskell/haskell-language-server/pull/1170)) by @Ailrun +- Quick fix for #1158 +([#1166](https://github.com/haskell/haskell-language-server/pull/1166)) by @Ailrun +- Suggest adding pragmas for parse errors too +([#1165](https://github.com/haskell/haskell-language-server/pull/1165)) by @mrBliss +- Fix wrong component name of splice plugin in hie.yaml +([#1162](https://github.com/haskell/haskell-language-server/pull/1162)) by @berberman +- Revert "Auto cancel redundant workflows (attempt #2)" +([#1156](https://github.com/haskell/haskell-language-server/pull/1156)) by 
@pepeiborra
+- Auto cancel redundant workflows (attempt #2)
+([#1154](https://github.com/haskell/haskell-language-server/pull/1154)) by @pepeiborra
+- Prepare 0.8.0 (versions)
+([#1153](https://github.com/haskell/haskell-language-server/pull/1153)) by @jneira
+- Streamline CircleCI jobs
+([#1152](https://github.com/haskell/haskell-language-server/pull/1152)) by @pepeiborra
+- Mergify: create configuration
+([#1151](https://github.com/haskell/haskell-language-server/pull/1151)) by @jneira
+- Bump haskell-lsp to 0.23
+([#1146](https://github.com/haskell/haskell-language-server/pull/1146)) by @berberman
+- Remove no longer needed git submodule update
+([#1145](https://github.com/haskell/haskell-language-server/pull/1145)) by @jhrcek
+- Enable more tests
+([#1143](https://github.com/haskell/haskell-language-server/pull/1143)) by @peterwicksstringfield
+- Update links to issues/PRs in ghcide tests.
+([#1142](https://github.com/haskell/haskell-language-server/pull/1142)) by @peterwicksstringfield
+- Fix #723 (Instance declarations in hs-boot files result in GHC errors)
+([#781](https://github.com/haskell/haskell-language-server/pull/781)) by @nitros12
+- Also suggest importing methods without parent class
+([#766](https://github.com/haskell/haskell-language-server/pull/766)) by @mrBliss
+- Delete unused utilities for controlling logging.
+([#764](https://github.com/haskell/haskell-language-server/pull/764)) by @peterwicksstringfield
+- Delete unused testdata
+([#763](https://github.com/haskell/haskell-language-server/pull/763)) by @peterwicksstringfield
+- Fix suggestAddTypeAnnotation regex
+([#760](https://github.com/haskell/haskell-language-server/pull/760)) by @kderme
+- Splice Plugin: expands TH splices and QuasiQuotes
+([#759](https://github.com/haskell/haskell-language-server/pull/759)) by @konn
+- Haddock comments plugin
+([#673](https://github.com/haskell/haskell-language-server/pull/673)) by @berberman
+- Leverage last apply-refact improvements in hlint plugin (include getParsedModuleWithComments in ghcide)
+([#635](https://github.com/haskell/haskell-language-server/pull/635)) by @jneira
+
+## 0.8.0
+
+- This version adds support for ghc-8.10.3
+- `hls-plugin-api` has been bumped to 0.6.0.0 and `ghcide` has been bumped from 0.6.0.1 to 0.7.0.0.
+- It has a brand new plugin: hls-class-plugin, which helps to write class instances
+
+![gif](https://user-images.githubusercontent.com/12473268/103059293-af071f80-4572-11eb-963a-7e76b45f28b9.gif)
+
+- The eval plugin has been revamped, adding these new features (a brief sketch of the comment syntax follows below):
+  - Tests in both plain comments and Haddock comments
+  - For Haddock comments: shows differences between latest and previous result
+  - Setup section, executed before every test
+  - Execution of a section/group of tests at a time
+  - Property testing
+  - Setup of GHC extensions
+- A new tactic to automatically generate `Arbitrary` instances has been added to the tactic plugin
+- There have been a lot of internal changes:
+  - ghcide now lives directly in this repository
+  - the test suite has been cleaned and improved (continuing the work done in 0.7.0)
+
+Thanks to all contributors and happy new year!
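+As a rough illustration of the revamped eval plugin, here is a sketch of what such comment-based tests can look like. The module and expressions are invented for this example, and the exact `$setup` and `prop>` spellings should be checked against the eval plugin documentation:
+
+```haskell
+module EvalDemo where
+
+-- $setup
+-- The setup section runs before every test; here it only brings `sort` into scope.
+-- >>> import Data.List (sort)
+
+-- A test in a plain comment: the "Evaluate" code lens runs it and writes the result underneath.
+-- >>> sort [3, 1, 2 :: Int]
+-- [1,2,3]
+
+-- Flags (for example GHC extensions) can be set inside a test with :set.
+-- >>> :set -XTupleSections
+-- >>> map (,True) "ab"
+-- [('a',True),('b',True)]
+
+-- A property test.
+-- prop> \xs -> sort (sort xs) == sort (xs :: [Int])
+```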
+
+### Pull requests merged for 0.8.0
+
+- Ci fixes
+([#783](https://github.com/haskell/haskell-language-server/pull/783)) by @pepeiborra
+- Fix extend imports regression
+([#769](https://github.com/haskell/haskell-language-server/pull/769)) by @pepeiborra
+- Cleanup format testfiles
+([#765](https://github.com/haskell/haskell-language-server/pull/765)) by @peterwicksstringfield
+- Retry a failed cradle if the cradle descriptor changes
+([#762](https://github.com/haskell/haskell-language-server/pull/762)) by @pepeiborra
+- Perform memory measurement on SIGUSR1
+([#761](https://github.com/haskell/haskell-language-server/pull/761)) by @pepeiborra
+- Add ghc-8.10.3 support after merging ghcide repo
+([#721](https://github.com/haskell/haskell-language-server/pull/721)) by @jneira
+- Merge ghcide repository (replacing the submodule)
+([#702](https://github.com/haskell/haskell-language-server/pull/702)) by @pepeiborra
+- Invert the dependency between hls-plugin-api and ghcide
+([#701](https://github.com/haskell/haskell-language-server/pull/701)) by @pepeiborra
+- Move eval plugin to hls-eval-plugin
+([#700](https://github.com/haskell/haskell-language-server/pull/700)) by @tittoassini
+- Fix and enable progress message tests.
+([#698](https://github.com/haskell/haskell-language-server/pull/698)) by @peterwicksstringfield
+- Add a known tactic for writing arbitrary instances
+([#695](https://github.com/haskell/haskell-language-server/pull/695)) by @isovector
+- Introduce generic config for plugins
+([#691](https://github.com/haskell/haskell-language-server/pull/691)) by @alanz
+- Enable get type definition tests
+([#690](https://github.com/haskell/haskell-language-server/pull/690)) by @peterwicksstringfield
+- Fix ghc version for windows 8.10.2.2 in github build workflow
+([#688](https://github.com/haskell/haskell-language-server/pull/688)) by @jneira
+- Add plugins conditionally at compile time
+([#687](https://github.com/haskell/haskell-language-server/pull/687)) by @jneira
+- Implement basic Class plugin
+([#661](https://github.com/haskell/haskell-language-server/pull/661)) by @Ailrun
+- Extended Eval Plugin
+([#438](https://github.com/haskell/haskell-language-server/pull/438)) by @tittoassini
+
+## 0.7.1
+
+- This is a minor bug fix release:
+  - It fixes an issue that accidentally removed desugarer warnings (#676).
+  - It disables auto extend import lists in completions; see #679.
+
+### Pull requests merged for 0.7.1
+
+- Disable auto extend import lists in completions. It fixes #679.
+([#685](https://github.com/haskell/haskell-language-server/pull/685)) by @pepeiborra
+- Restore kick (#676). It fixes #676.
+([#677](https://github.com/haskell/haskell-language-server/pull/677)) by @wz1000
+- README: Remove instructions to execute data target
+([#675](https://github.com/haskell/haskell-language-server/pull/675)) by @andys8
+- Add hlint tests over cpp, extensions and ignore hints
+([#674](https://github.com/haskell/haskell-language-server/pull/674)) by @jneira
+
+## 0.7.0
+
+- This version mainly contains refactorings and updates of upstream packages
+- It bumps up some formatter versions:
+  - ormolu is 0.1.4.1
+  - fourmolu is 0.3.0.0
+  - brittany is 0.13.1.0
+- It uses the latest implicit-hie-cradle-0.3.0.2, with some [bug](https://github.com/Avi-D-coder/implicit-hie/issues/29) [fixes](https://github.com/Avi-D-coder/implicit-hie/issues/30)
+- It uses the latest ghcide-0.6.0.1 with [improvements and bug fixes](https://github.com/haskell/ghcide/blob/master/CHANGELOG.md#060-2020-12-06):
+  - Do not enable every "unnecessary" warning by default
+  - Improvements to completions:
+    - record fields
+    - identifiers not in explicit import lists
+    - extend explicit import list automatically
+
+Thanks to all haskell-language-server, ghcide and other upstream package contributors (the list continues to grow healthily) for making this release possible.
+
+### Pull requests merged for 0.7.0
+
+- Miscellanous fixes: correct tactic plugin package metadata and cabal.hie.yaml/stack.hie.yaml
+([#672](https://github.com/haskell/haskell-language-server/pull/672)) by @berberman
+- Remove unnecessary pluginId setting and user Better Map functions in tactics plugin
+([#669](https://github.com/haskell/haskell-language-server/pull/669)) by @jhrcek
+- Do not suggest explicitly disabled pragmas
+([#666](https://github.com/haskell/haskell-language-server/pull/666)) by @berberman
+- fixed hie.yaml.stack
+([#664](https://github.com/haskell/haskell-language-server/pull/664)) by @tittoassini
+- Add pragmas completions
+([#662](https://github.com/haskell/haskell-language-server/pull/662)) by @gdevanla
+- Enable code completion tests
+([#657](https://github.com/haskell/haskell-language-server/pull/657)) by @peterwicksstringfield
+- Enable highlight unittests
+([#656](https://github.com/haskell/haskell-language-server/pull/656)) by @peterwicksstringfield
+- Fix document symbols unit tests.
+([#655](https://github.com/haskell/haskell-language-server/pull/655)) by @peterwicksstringfield +- Delete duplicate cabal clause for applyrefact2 +([#654](https://github.com/haskell/haskell-language-server/pull/654)) by @peterwicksstringfield +- Add extra-source-files for split plugins +([#650](https://github.com/haskell/haskell-language-server/pull/650)) by @berberman +- [nix-shell] Actually use gitignore +([#649](https://github.com/haskell/haskell-language-server/pull/649)) by @pepeiborra +- idempotent command and code cleanup +([#648](https://github.com/haskell/haskell-language-server/pull/648)) by @tittoassini +- Split the Imports and Retrie plugins +([#647](https://github.com/haskell/haskell-language-server/pull/647)) by @pepeiborra +- Simplify and Bump implicit-hie version constraints +([#645](https://github.com/haskell/haskell-language-server/pull/645)) by @Avi-D-coder +- Fix and enable disabled code action unit tests, fix fallback handler +([#643](https://github.com/haskell/haskell-language-server/pull/643)) by @peterwicksstringfield +- Add Ghcide hie.yaml instruction for Stack users +([#641](https://github.com/haskell/haskell-language-server/pull/641)) by @Sir4ur0n +- Upgrade the Nix build system +([#639](https://github.com/haskell/haskell-language-server/pull/639)) by @pepeiborra +- No longer needed to build once for Stack +([#637](https://github.com/haskell/haskell-language-server/pull/637)) by @Sir4ur0n +- Preserve the last empty comment line after eval plugin +([#631](https://github.com/haskell/haskell-language-server/pull/631)) by @expipiplus1 +- Update fourmolu to 0.3.0.0 +([#624](https://github.com/haskell/haskell-language-server/pull/624)) by @gwils +- Add hspec-discover to build-tool-depends in tactics plugin +([#623](https://github.com/haskell/haskell-language-server/pull/623)) by @gwils +- Add build to ghc-8.10.2 and windows +([#619](https://github.com/haskell/haskell-language-server/pull/619)) by @jneira +- Module Name Plugin: Treat modules starting with lowercase as Main module +([#616](https://github.com/haskell/haskell-language-server/pull/616)) by @konn +- Bump ormolu to 0.1.4.1 +([#614](https://github.com/haskell/haskell-language-server/pull/614)) by @AlistairB +- Fix fourmolu plugin inconsistent formatting +([#599](https://github.com/haskell/haskell-language-server/pull/599)) by @zweimach +- Hlint: bring over idea2Message for formatting +([#598](https://github.com/haskell/haskell-language-server/pull/598)) by @alanz +- Makes dictionary argument exclusion logic in Tactic plugin more robust +([#508](https://github.com/haskell/haskell-language-server/pull/508)) by @konn + +## 0.6.0 + +0.6.0 includes two brand new plugins! + +- [Hlint Plugin](https://github.com/haskell/haskell-language-server/pull/166): it integrates hlint diagnostics and lets you apply suggestions to fix them. 
+
+![hls-hlint-demo](https://user-images.githubusercontent.com/54035/98731058-6ff38500-239d-11eb-8176-e4f69ef76fc2.gif)
+
+- [Module Name Plugin](https://github.com/haskell/haskell-language-server/pull/480): it makes it easier to create new modules and modify them, suggesting the appropriate module name as a code lens.
+
+![module-name-demo](https://user-images.githubusercontent.com/54035/98731198-a7623180-239d-11eb-8af0-73bd32b9b0b2.gif)
+
+This release also includes many improvements and bug fixes for the tactic plugin (see pull requests authored by @isovector for more details).
+
+We have updated two essential tools used by the IDE:
+
+- `implicit-hie`: [to fix a bug](https://github.com/haskell/haskell-language-server/issues/498) present when loading cabal-based projects with executables containing `other-modules`
+
+- `ghcide`: the IDE uses [the just released version 0.5](https://github.com/haskell/ghcide/blob/master/CHANGELOG.md#050-2020-10-08) with many bug fixes and improvements, including:
+  - code action to remove *all* redundant imports
+  - improved support for Template Haskell
+  - emit desugarer warnings
+
+### Pull requests merged for 0.6.0
+
+- Fix tasty rerun
+([#570](https://github.com/haskell/haskell-language-server/pull/570)) by @jneira
+- Bump up ghcide submodule to version 0.5.0
+([#568](https://github.com/haskell/haskell-language-server/pull/568)) by @jneira
+- Refactor tactics to track hypothesis provenance
+([#557](https://github.com/haskell/haskell-language-server/pull/557)) by @isovector
+- Use bash shell to allow its idioms
+([#552](https://github.com/haskell/haskell-language-server/pull/552)) by @jneira
+- Ignore flakey tactics test
+([#546](https://github.com/haskell/haskell-language-server/pull/546)) by @isovector
+- Better scoring metric for deriving safeHead
+([#545](https://github.com/haskell/haskell-language-server/pull/545)) by @isovector
+- Discover skolems in the hypothesis, not just goal
+([#542](https://github.com/haskell/haskell-language-server/pull/542)) by @isovector
+- [retrie] Fix code action title
+([#538](https://github.com/haskell/haskell-language-server/pull/538)) by @pepeiborra
+- Tactics support for using given constraints
+([#534](https://github.com/haskell/haskell-language-server/pull/534)) by @isovector
+- Add missing tactic subpackage in default stack.yaml
+([#529](https://github.com/haskell/haskell-language-server/pull/529)) by @jneira
+- Use implicit-hie-0.1.2.0
+([#528](https://github.com/haskell/haskell-language-server/pull/528)) by @jneira
+- Wait for diagnostics in tactics tests
+([#525](https://github.com/haskell/haskell-language-server/pull/525)) by @isovector
+- Fix a bug in tactics preventing split of split
+([#520](https://github.com/haskell/haskell-language-server/pull/520)) by @isovector
+- Use infix notation for destructing and splitting infix data cons
+([#519](https://github.com/haskell/haskell-language-server/pull/519)) by @isovector
+- Retry the build three times
+([#518](https://github.com/haskell/haskell-language-server/pull/518)) by @jneira
+- Separate tactics into its own package
+([#516](https://github.com/haskell/haskell-language-server/pull/516)) by @isovector +- Add a Troubleshooting section to the README +([#507](https://github.com/haskell/haskell-language-server/pull/507)) by @michaelpj +- Add GitHub Actions CI for testing +([#504](https://github.com/haskell/haskell-language-server/pull/504)) by @bubba +- Fix stack build for ghc-8.8.3 failing on some machines +([#503](https://github.com/haskell/haskell-language-server/pull/503)) by @luntain +- Expand explanation of how to configure HLS +([#497](https://github.com/haskell/haskell-language-server/pull/497)) by @michaelpj +- Module Name Plugin +([#480](https://github.com/haskell/haskell-language-server/pull/480)) by @tittoassini +- Allow hole filling to deal with recursion +([#472](https://github.com/haskell/haskell-language-server/pull/472)) by @isovector +- Restrict editor config to Haskell file, to avoid affecting Makefiles or other tab-based formats +([#442](https://github.com/haskell/haskell-language-server/pull/442)) by @tittoassini +- Hlint plugin using ghc-lib +([#166](https://github.com/haskell/haskell-language-server/pull/166)) by @jneira + +## 0.5.1 + +0.5.1 is a minor bug fix release, mainly fixing an issue with the eval plugin +as well as upgrading the ormolu and stylish-haskell dependencies. + +### Pull requests merged for 0.5.1 + +- Minimal fix for eval regression +([#488](https://github.com/haskell/haskell-language-server/pull/488)) by @pepeiborra +- Bump stylish-haskell to 0.12.2.0 +([#482](https://github.com/haskell/haskell-language-server/pull/482)) by @maksbotan +- Improve the emacs instructions a little +([#479](https://github.com/haskell/haskell-language-server/pull/479)) by @michaelpj +- Update README: HLS is no longer in *very* early stage +([#475](https://github.com/haskell/haskell-language-server/pull/475)) by @Anrock +- Tactic plugin: Excludes Dictionary arguments in GADTs in Destruct Tactic +([#474](https://github.com/haskell/haskell-language-server/pull/474)) by @konn +- Update doom emacs install instructions in README +([#470](https://github.com/haskell/haskell-language-server/pull/470)) by @iyefrat +- Add ghc-8.10.2 to circleci +([#464](https://github.com/haskell/haskell-language-server/pull/464)) by @jneira +- Bump ormolu to 0.1.3.0 +([#422](https://github.com/haskell/haskell-language-server/pull/422)) by @AlistairB + +## 0.5.0 + +0.5.0 comes with a new tactics plugin which provides case splitting, homomorphic case splitting, and lambda introduction: + +![Case splitting](https://user-images.githubusercontent.com/307223/92657198-3d4be400-f2a9-11ea-8ad3-f541c8eea891.gif) + +It can even attempt to fully fill a hole! 
+ +![Attempt to fill in hole code action](https://user-images.githubusercontent.com/307223/94743611-82a18580-032c-11eb-9f13-8f46bc45f928.gif) + +The imports lens plugin also learnt a new code action to make all imports explicit: + +![Explicit imports code action](https://user-images.githubusercontent.com/2488460/94994815-1a53dd80-0592-11eb-8a12-ec704ae92385.gif) + +There's also plenty of bug fixes, improvements and updates to the underlying tools, including Fourmolu, implicit-hie-cradle and ghcide. [Some of the improvements from ghcide](https://github.com/haskell/ghcide/releases/tag/v0.4.0) include: + +- The entire project is typechecked on load +- Reverse dependencies of a module are typechecked upon saving +- Code completion includes local terms +- Import code actions now also suggest open imports +- Documentation on hover shows for symbols defined in the same module + +If you're eager to try all this out, haskell-language-server is now also installable via [ghcup](https://www.haskell.org/ghcup/): + +```shell +> ghcup install hls +``` + +### Pull requests merged for 0.5.0 + +- Update GHC version 8.12 to 9.0 in README +([#460](https://github.com/haskell/haskell-language-server/pull/460)) by @maralorn +- Update Fourmolu to 0.2 +([#455](https://github.com/haskell/haskell-language-server/pull/455)) by @georgefst +- Generate .gz tars of all the binaries for macOS and Linux in GitHub Actions +([#454](https://github.com/haskell/haskell-language-server/pull/454)) by @bubba +- install: create hls hardlinks instead of copies except on Windows +([#451](https://github.com/haskell/haskell-language-server/pull/451)) by @juhp +- wrapper: cd to --cwd earlier +([#448](https://github.com/haskell/haskell-language-server/pull/448)) by @ocharles +- Update README.md +([#446](https://github.com/haskell/haskell-language-server/pull/446)) by @moodmosaic +- Upate Emacs setup notes +([#440](https://github.com/haskell/haskell-language-server/pull/440)) by @gdevanla +- Use ghcide master and prepare hls-plugin-api-0.4.1.0 +([#439](https://github.com/haskell/haskell-language-server/pull/439)) by @jneira +- Add a code action to make all imports explicit +([#436](https://github.com/haskell/haskell-language-server/pull/436)) by @pepeiborra +- Add docs on how to choose a formatter +([#432](https://github.com/haskell/haskell-language-server/pull/432)) by @googleson78 +- Implement 'Attempt to fill hole' code action +([#431](https://github.com/haskell/haskell-language-server/pull/431)) by @TOTBWF +- Clarify that eval is a lens +([#428](https://github.com/haskell/haskell-language-server/pull/428)) by @Anrock +- Use implicit-hie-cradle-0.2.0.1 +([#427](https://github.com/haskell/haskell-language-server/pull/427)) by @jneira +- [retrie] Fix uris in workspace edit +([#424](https://github.com/haskell/haskell-language-server/pull/424)) by @pepeiborra +- Separate paragraphs +([#423](https://github.com/haskell/haskell-language-server/pull/423)) by @jneira +- Include .editorconfig in the contributing section +([#420](https://github.com/haskell/haskell-language-server/pull/420)) by @jneira +- Mention 
the copy of executables wit ghc version +([#419](https://github.com/haskell/haskell-language-server/pull/419)) by @jneira +- Eval plugin: proper multilined results handling and command-name abbreviations +([#413](https://github.com/haskell/haskell-language-server/pull/413)) by @konn +- Retrie - calculate imports in the command handler +([#408](https://github.com/haskell/haskell-language-server/pull/408)) by @pepeiborra +- Progress reporting for Eval plugin +([#398](https://github.com/haskell/haskell-language-server/pull/398)) by @pepeiborra +- bump ghcide submodule +([#396](https://github.com/haskell/haskell-language-server/pull/396)) by @wz1000 +- Fix cradles +([#393](https://github.com/haskell/haskell-language-server/pull/393)) by @pepeiborra +- Case splitting and lambda introduction +([#391](https://github.com/haskell/haskell-language-server/pull/391)) by @isovector +- Use stale data in explicit imports lens +([#383](https://github.com/haskell/haskell-language-server/pull/383)) by @pepeiborra +- Create hls-plugin-api and move plugins to exe +([#379](https://github.com/haskell/haskell-language-server/pull/379)) by @jneira +- Rebase on ghcide HEAD +([#378](https://github.com/haskell/haskell-language-server/pull/378)) by @pepeiborra +- README clarify how exactly to use code evaluation +([#377](https://github.com/haskell/haskell-language-server/pull/377)) by @DunetsNM +- Revise README.md +([#374](https://github.com/haskell/haskell-language-server/pull/374)) by @gihyeonsung + +## 0.4.0 + +0.4.0 introduces the import lens plugin, which can convert your import statements into qualified imports, or into an explicit import list: + +![Imports code lens](https://imgur.com/pX9kvY4.gif) + +The eval plugin has also learnt two new commands, `:type` and `:kind`: + +```haskell +{-# LANGUAGE TypeApplications #-} +foo :: Show a => a -> String +foo = show + +-- >>> :type foo @Int +-- foo @Int :: Int -> String + +-- >>> :type +v foo @Int +-- foo @Int :: Show Int => Int -> String +``` + +```haskell +-- >>> type N = 1 +-- >>> type M = 40 +-- >>> :kind N + M + 1 +-- N + M + 1 :: Nat + +-- >>> type N = 1 +-- >>> type M = 40 +-- >>> :kind N + M + 1 +-- N + M + 1 :: Nat +``` + +There is now also support for GHC 8.10.2, and a new `haskell-language-server --probe-tools` command to help debug what version of each tool HLS is using. 
+ +```shell +$ haskell-language-server --probe-tools +haskell-language-server version: 0.3.0.0 (GHC: 8.10.1) (PATH: /Users/luke/.cabal/store/ghc-8.10.1/hskll-lngg-srvr-0.3.0.0-7c6d48c3/bin/haskell-language-server) +Tool versions found on the $PATH +cabal: 3.2.0.0 +stack: 2.3.3 +ghc: 8.10.2 +``` + +### Pull requests merged for 0.4.0 + +- Bring over a [tutorial about how to add hls plugins](https://github.com/pepeiborra/hls-tutorial) +([#372](https://github.com/haskell/haskell-language-server/pull/372)) by @bubba +- Update the ghcide upstream to be in haskell/ghcide +([#370](https://github.com/haskell/haskell-language-server/pull/370)) by @alanz +- Add ISSUE_TEMPLATE for github +([#305](https://github.com/haskell/haskell-language-server/pull/305)) by @fendor +- Add use-package to the list of emacs packages +([#343](https://github.com/haskell/haskell-language-server/pull/343)) by @rgleichman +- Implements `:type [+v/+d]` in Eval Plugin +([#361](https://github.com/haskell/haskell-language-server/pull/361)) by @konn +- Bump bounds of hie-bios to 0.7.0 +([#357](https://github.com/haskell/haskell-language-server/pull/357)) by @maralorn +- Fix ImportLens plugin to work with GHC 8.10 +([#356](https://github.com/haskell/haskell-language-server/pull/356)) by @Ailrun +- Add single file rewrites and ignore unknown files +([#321](https://github.com/haskell/haskell-language-server/pull/321)) by @pepeiborra +- Do not suggest explicit import lists for qualified imports +([#354](https://github.com/haskell/haskell-language-server/pull/354)) by @expipiplus1 +- Explicit imports lens (as seen on Twitter) +([#310](https://github.com/haskell/haskell-language-server/pull/310)) by @pepeiborra +- Adds `:kind` and `:kind!` commands to Eval Plugin +([#345](https://github.com/haskell/haskell-language-server/pull/345)) by @konn +- tech(nix): update niv and remove allowbroken +([#350](https://github.com/haskell/haskell-language-server/pull/350)) by @willbush +- Update VS Code Haskell URL/repo +([#338](https://github.com/haskell/haskell-language-server/pull/338)) by @Sir4ur0n +- doc(hack): Add explanation to hack and test HLS +([#329](https://github.com/haskell/haskell-language-server/pull/329)) by @Sir4ur0n +- Apply the module pragmas for evaluation +([#322](https://github.com/haskell/haskell-language-server/pull/322)) by @pepeiborra +- Copy working stack-8.6.5.yaml to stack.yaml +([#332](https://github.com/haskell/haskell-language-server/pull/332)) by @jneira +- tech(nix): Allow broken as retrie is marked as broken +([#331](https://github.com/haskell/haskell-language-server/pull/331)) by @Sir4ur0n +- feat(git): Add install/hie.yaml to gitignore +([#328](https://github.com/haskell/haskell-language-server/pull/328)) by @Sir4ur0n +- Replace wrong occurrences of "engine" by "server" +([#319](https://github.com/haskell/haskell-language-server/pull/319)) by @tchoutri +- Simplify coc.nvim instructions +([#315](https://github.com/haskell/haskell-language-server/pull/315)) by @oblitum +- Coc config file requires a {} nesting everything 
+([#317](https://github.com/haskell/haskell-language-server/pull/317)) by @hyiltiz +- Restrict opentelemetry version for stack builds +([#312](https://github.com/haskell/haskell-language-server/pull/312)) by @jneira +- Add support for ghc-8.10.2 +([#308](https://github.com/haskell/haskell-language-server/pull/308)) by @jneira +- Return nothing if tool is not on the PATH +([#309](https://github.com/haskell/haskell-language-server/pull/309)) by @fendor +- Probe tools cli +([#306](https://github.com/haskell/haskell-language-server/pull/306)) by @fendor +- Add fourmolu plugin (attempt 2) and add Brittany for ghc-8.10.1 +([#264](https://github.com/haskell/haskell-language-server/pull/264)) by @georgefst + +## 0.3.0 + +0.3.0 comes with two new plugins, retrie and fourmolu, provides binaries for +GHC 8.8.4, and comes with a host of bug fixes. + +The retrie plugin supports RULES, functions and type synonyms which can be +accessed through contextual code actions. + +Fourmolu can be used to format your code by setting the +`haskell.formattingProvider` field in your LSP configuration to + +```json +{ + "haskell": { + "formattingProvider": "fourmolu" + } +} +``` + +The Brittany formatter is now also available on GHC 8.10.1. + +### Pull requests merged + +- Fix haddock parse error in install.hs +([#255](https://github.com/haskell/haskell-language-server/pull/255)) by @georgefst +- Ormolu flags +([#246](https://github.com/haskell/haskell-language-server/pull/246)) by @pepeiborra +- Ormolu fix +([#257](https://github.com/haskell/haskell-language-server/pull/257)) by @sureyeaah +- Remove redundant CircleCI steps +([#259](https://github.com/haskell/haskell-language-server/pull/259)) by @bubba +- Slow down Tasty by limiting it to -j1 +([#261](https://github.com/haskell/haskell-language-server/pull/261)) by @bubba +- Remove hspec-expectations +([#260](https://github.com/haskell/haskell-language-server/pull/260)) by @bubba +- Remove a redundant caching step +([#262](https://github.com/haskell/haskell-language-server/pull/262)) by @Ailrun +- add hie.yaml to coc configuration +([#267](https://github.com/haskell/haskell-language-server/pull/267)) by @sureyeaah +- Initial Retrie plugin +([#266](https://github.com/haskell/haskell-language-server/pull/266)) by @pepeiborra +- Add exe extension to win executables +([#284](https://github.com/haskell/haskell-language-server/pull/284)) by @jneira +- Use wz1000/hls-3 ghcide branch +([#275](https://github.com/haskell/haskell-language-server/pull/275)) by @alanz +- Fix rename capability being declared +([#285](https://github.com/haskell/haskell-language-server/pull/285)) by @bubba +- Add CI job for 8.8.4 +([#287](https://github.com/haskell/haskell-language-server/pull/287)) by @bubba +- Make the AGPL flag manual in cabal +([#250](https://github.com/haskell/haskell-language-server/pull/250)) by @fendor +- Bring in doc URL fix for Windows +([#289](https://github.com/haskell/haskell-language-server/pull/289)) by @bubba +- Bring in fix for libm on Linux static binaries 
+([#293](https://github.com/haskell/haskell-language-server/pull/293)) by @bubba +- Add fourmolu plugin (attempt 2) and add Brittany for ghc-8.10.1 +([#264](https://github.com/haskell/haskell-language-server/pull/264)) by @georgefst +- Trying new hls-3 branch +([#300](https://github.com/haskell/haskell-language-server/pull/300)) by @alanz + +## 0.2.2 + +This changes the configuration section from "languageServerHaskell" to "haskell" +to align it with vscode-haskell-1.0.0. Whilst the old section is still +supported for now, you should update your LSP configuration (which varies per +client) from + +```json +{ + "languageServerHaskell": { + "formattingProvider": "stylish-haskell" + } +} +``` + +to + +```json +{ + "haskell": { + "formattingProvider": "stylish-haskell" + } +} +``` + +### Pull requests merged for 0.2.2 + +- Mention docs on hover feature in README +([#209](https://github.com/haskell/haskell-language-server/pull/209)) by @georgefst +- Add static binaries for ghc-8.8.4 +([#224](https://github.com/haskell/haskell-language-server/pull/224)) by @bubba +- Rename the configuration section from languageServerHaskell => haskell +([#227](https://github.com/haskell/haskell-language-server/pull/227)) by @bubba +- Use -haddock for cabal and stack +([#214](https://github.com/haskell/haskell-language-server/pull/214)) by @jneira +- slightly better shell.nix for local development +([#235](https://github.com/haskell/haskell-language-server/pull/235)) by @pepeiborra +- Shell nix further steps +([#240](https://github.com/haskell/haskell-language-server/pull/240)) by @pepeiborra +- Add numeric-version option for wrapper and server +([#241](https://github.com/haskell/haskell-language-server/pull/241)) by @fendor +- Accept the legacy "languageServerHaskell" config name +([#243](https://github.com/haskell/haskell-language-server/pull/243)) by @bubba +- Fix for Eval plugin: Error from tests not reported +([#244](https://github.com/haskell/haskell-language-server/pull/244)) by @tittoassini +- Rename binaries before uploading +([#248](https://github.com/haskell/haskell-language-server/pull/248)) by @bubba + ## 0.2.1 This release includes a new eval plugin that allows Haddock code examples to be @@ -22,138 +4028,101 @@ the fly, so either `ghc`, `cabal` or `stack` will need to be present on your PATH depending on your project. See `docs/releases.md` for more information. If you find any issues with this, please let us know! 
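+For reference, here is a minimal sketch of what the eval plugin acts on (a hypothetical `double` function, not taken from these release notes): the plugin offers a code lens on `>>>` examples inside Haddock comments and writes the evaluation result back as a comment underneath:
+
+```haskell
+-- | Doubles its argument.
+--
+-- >>> double 21
+-- 42
+double :: Int -> Int
+double n = n * 2
+```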
-### Pull requests merged +### Pull requests merged for 0.2.1 - Bump ormolu to 0.1.2.0 -([#189](https://github.com/haskell/haskell-language-server/pull/189) by @AlistairB) +([#189](https://github.com/haskell/haskell-language-server/pull/189)) by @AlistairB - Remove dependency on Cabal -([#195](https://github.com/haskell/haskell-language-server/pull/195) by @bubba) +([#195](https://github.com/haskell/haskell-language-server/pull/195)) by @bubba - Fix extraneous extra-dep in stack-8.6.4.yaml -([#199](https://github.com/haskell/haskell-language-server/pull/199) by @bubba) +([#199](https://github.com/haskell/haskell-language-server/pull/199)) by @bubba - Fix install script stack targets -([#203](https://github.com/haskell/haskell-language-server/pull/203) by @jneira) +([#203](https://github.com/haskell/haskell-language-server/pull/203)) by @jneira - Add support for ghc-8.8.4 -([#206](https://github.com/haskell/haskell-language-server/pull/206) by @jneira) +([#206](https://github.com/haskell/haskell-language-server/pull/206)) by @jneira - Simple Eval plugin -([#191](https://github.com/haskell/haskell-language-server/pull/191) by @pepeiborra) +([#191](https://github.com/haskell/haskell-language-server/pull/191)) by @pepeiborra - Distributable binaries -([#165](https://github.com/haskell/haskell-language-server/pull/165) by @bubba) +([#165](https://github.com/haskell/haskell-language-server/pull/165)) by @bubba ## 0.2 - Use cabal-plan from Hackage -([#185](https://github.com/haskell/haskell-language-server/pull/185) by @georgefst) - +([#185](https://github.com/haskell/haskell-language-server/pull/185)) by @georgefst - Bump ghcide to wz1000 hls-2 branch -([#184](https://github.com/haskell/haskell-language-server/pull/184) by @alanz) - +([#184](https://github.com/haskell/haskell-language-server/pull/184)) by @alanz - doc(preprocessor): Document the preprocessor limitation -([#177](https://github.com/haskell/haskell-language-server/pull/177) by @Sir4ur0n) - +([#177](https://github.com/haskell/haskell-language-server/pull/177)) by @Sir4ur0n - Use shell.nix from Haskell-IDE-Engine -([#169](https://github.com/haskell/haskell-language-server/pull/169) by @fendor) - +([#169](https://github.com/haskell/haskell-language-server/pull/169)) by @fendor - Remove last occurrences of shake.yaml -([#163](https://github.com/haskell/haskell-language-server/pull/163) by @fendor) - +([#163](https://github.com/haskell/haskell-language-server/pull/163)) by @fendor - Use an unique install/stack.yaml -([#154](https://github.com/haskell/haskell-language-server/pull/154) by @jneira) - +([#154](https://github.com/haskell/haskell-language-server/pull/154)) by @jneira - Introduce golden testing -([#152](https://github.com/haskell/haskell-language-server/pull/152) by @Ailrun) - +([#152](https://github.com/haskell/haskell-language-server/pull/152)) by @Ailrun - Revert "Use bullet as separator instead of HR" 
-([#150](https://github.com/haskell/haskell-language-server/pull/150) by @alanz) - +([#150](https://github.com/haskell/haskell-language-server/pull/150)) by @alanz - feat(hie-bios): Multi-cradle, ignore directories -([#147](https://github.com/haskell/haskell-language-server/pull/147) by @Sir4ur0n) - +([#147](https://github.com/haskell/haskell-language-server/pull/147)) by @Sir4ur0n - [Plugin] stylish-haskell formatter -([#146](https://github.com/haskell/haskell-language-server/pull/146) by @Ailrun) - +([#146](https://github.com/haskell/haskell-language-server/pull/146)) by @Ailrun - Separate ghcide tests and disable them for now -([#137](https://github.com/haskell/haskell-language-server/pull/137) by @jneira) - +([#137](https://github.com/haskell/haskell-language-server/pull/137)) by @jneira - Convert private lib in common stanza -([#136](https://github.com/haskell/haskell-language-server/pull/136) by @jneira) - +([#136](https://github.com/haskell/haskell-language-server/pull/136)) by @jneira - Add zlibc to readme -([#134](https://github.com/haskell/haskell-language-server/pull/134) by @Sir4ur0n) - +([#134](https://github.com/haskell/haskell-language-server/pull/134)) by @Sir4ur0n - Complete editor integrations -([#132](https://github.com/haskell/haskell-language-server/pull/132) by @jneira) - +([#132](https://github.com/haskell/haskell-language-server/pull/132)) by @jneira - Remove inexistent component from hie.yaml.stack -([#131](https://github.com/haskell/haskell-language-server/pull/131) by @jneira) - +([#131](https://github.com/haskell/haskell-language-server/pull/131)) by @jneira - Bump to new mpickering/ghcide -([#130](https://github.com/haskell/haskell-language-server/pull/130) by @alanz) - +([#130](https://github.com/haskell/haskell-language-server/pull/130)) by @alanz - Update ghc-lib-parser version -([#129](https://github.com/haskell/haskell-language-server/pull/129) by @jneira) - +([#129](https://github.com/haskell/haskell-language-server/pull/129)) by @jneira - Remove redundant import -([#128](https://github.com/haskell/haskell-language-server/pull/128) by @bubba) - +([#128](https://github.com/haskell/haskell-language-server/pull/128)) by @bubba - Default the number of Shake threads to 0 (automatic) -([#127](https://github.com/haskell/haskell-language-server/pull/127) by @bubba) - +([#127](https://github.com/haskell/haskell-language-server/pull/127)) by @bubba - Added kakoune integration instructions -([#125](https://github.com/haskell/haskell-language-server/pull/125) by @414owen) - +([#125](https://github.com/haskell/haskell-language-server/pull/125)) by @414owen - Fix install script dev target -([#124](https://github.com/haskell/haskell-language-server/pull/124) by @jneira) - +([#124](https://github.com/haskell/haskell-language-server/pull/124)) by @jneira - Add plugin support for Rename providers 
-([#123](https://github.com/haskell/haskell-language-server/pull/123) by @pepeiborra) - +([#123](https://github.com/haskell/haskell-language-server/pull/123)) by @pepeiborra - Add jobs for stack and cabal using ghc-8.10.1 -([#120](https://github.com/haskell/haskell-language-server/pull/120) by @jneira) - +([#120](https://github.com/haskell/haskell-language-server/pull/120)) by @jneira - Add lower bound to tasty-ant-xml -([#119](https://github.com/haskell/haskell-language-server/pull/119) by @jneira) - +([#119](https://github.com/haskell/haskell-language-server/pull/119)) by @jneira - Fix build using brittany revision -([#117](https://github.com/haskell/haskell-language-server/pull/117) by @jneira) - +([#117](https://github.com/haskell/haskell-language-server/pull/117)) by @jneira - Use floskell released version 0.10.3 -([#116](https://github.com/haskell/haskell-language-server/pull/116) by @jneira) - +([#116](https://github.com/haskell/haskell-language-server/pull/116)) by @jneira - Add emacs/doom-emacs integration sub-section -([#115](https://github.com/haskell/haskell-language-server/pull/115) by @yuanw) - +([#115](https://github.com/haskell/haskell-language-server/pull/115)) by @yuanw - Port hie README partially -([#112](https://github.com/haskell/haskell-language-server/pull/112) by @jneira) - +([#112](https://github.com/haskell/haskell-language-server/pull/112)) by @jneira - Use cabal-helper-1.1, add stack-8.10.1.yaml and unify cabal.project's -([#108](https://github.com/haskell/haskell-language-server/pull/108) by @jneira) - +([#108](https://github.com/haskell/haskell-language-server/pull/108)) by @jneira - [#87] Fix completion via ghcide's `getCompletionsLSP` -([#107](https://github.com/haskell/haskell-language-server/pull/107) by @korayal) - +([#107](https://github.com/haskell/haskell-language-server/pull/107)) by @korayal - Create specific project file for ghc-8.10. 
-([#106](https://github.com/haskell/haskell-language-server/pull/106) by @jneira) - +([#106](https://github.com/haskell/haskell-language-server/pull/106)) by @jneira - Issue 5 - Move HIE Tests and convert to Tasty -([#105](https://github.com/haskell/haskell-language-server/pull/105) by @jeffwindsor) - +([#105](https://github.com/haskell/haskell-language-server/pull/105)) by @jeffwindsor - Hls update latest hie bios -([#100](https://github.com/haskell/haskell-language-server/pull/100) by @fendor) - +([#100](https://github.com/haskell/haskell-language-server/pull/100)) by @fendor - Update extra-deps to use latest fork version of shake -([#98](https://github.com/haskell/haskell-language-server/pull/98) by @fendor) - +([#98](https://github.com/haskell/haskell-language-server/pull/98)) by @fendor - Activate typechecking in non-lsp mode -([#95](https://github.com/haskell/haskell-language-server/pull/95) by @jneira) - +([#95](https://github.com/haskell/haskell-language-server/pull/95)) by @jneira - Fix haddock parsing errors -([#92](https://github.com/haskell/haskell-language-server/pull/92) by @jneira) - +([#92](https://github.com/haskell/haskell-language-server/pull/92)) by @jneira - Update for haskell-lsp 0.22 -([#89](https://github.com/haskell/haskell-language-server/pull/89) by @alanz) - +([#89](https://github.com/haskell/haskell-language-server/pull/89)) by @alanz - Get building with ghc-8.10 -([#83](https://github.com/haskell/haskell-language-server/pull/83) by @bubba) +([#83](https://github.com/haskell/haskell-language-server/pull/83)) by @bubba ## 0.1 diff --git a/FUNDING.yml b/FUNDING.yml new file mode 100644 index 0000000000..f01dc66a0f --- /dev/null +++ b/FUNDING.yml @@ -0,0 +1 @@ +open_collective: haskell-language-server diff --git a/GNUmakefile b/GNUmakefile new file mode 100644 index 0000000000..7dc357b9e8 --- /dev/null +++ b/GNUmakefile @@ -0,0 +1,159 @@ +#################################################### +# This makefile's main purpose is to build +# dynamically linked HLS executables on gitlab CI +# and produce appropriate bindists. This can also +# be executed locally on dev machines. +# +# It is not meant to be run by users. 
+# ################################################## + +UNAME := $(shell uname) +ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) + +GHC_VERSION ?= +ARTIFACT ?= unknown-platform + +HLS_VERSION := $(shell grep '^version:' haskell-language-server.cabal | awk '{ print $$2 }') +TARBALL ?= haskell-language-server-$(HLS_VERSION)-$(ARTIFACT).tar.xz + +CHMOD := chmod +CHMOD_X := $(CHMOD) 755 +INSTALL := install +INSTALL_D := $(INSTALL) -d +INSTALL_X := $(INSTALL) -vm 755 +PATCHELF := patchelf +FIND := find +SED := sed +MKDIR := mkdir +MKDIR_P := $(MKDIR) -p +TAR := tar +TAR_MK := $(TAR) caf +CABAL := cabal +STRIP := strip +ifeq ($(UNAME), Darwin) +STRIP_S := strip +else +STRIP_S := strip -s +endif +RM := rm +RM_RF := $(RM) -rf +CD := cd +CP := cp + +# by default don't run ghcup +GHCUP ?= echo +GHCUP_GC ?= $(GHCUP) gc +GHCUP_RM ?= $(GHCUP) rm + +CABAL_CACHE_BIN ?= echo + +ifeq ($(UNAME), Darwin) +DLL := *.dylib +else +DLL := *.so +endif + +INSTALL_NAME_TOOL := install_name_tool + +STORE_DIR := store/$(ARTIFACT) +BINDIST_BASE_DIR := out/bindist/$(ARTIFACT) +BINDIST_OUT_DIR := $(BINDIST_BASE_DIR)/haskell-language-server-$(HLS_VERSION) + +CABAL_BASE_ARGS ?= --store-dir=$(ROOT_DIR)/$(STORE_DIR) +CABAL_ARGS ?= --disable-tests --disable-profiling -O2 $(ADD_CABAL_ARGS) +CABAL_INSTALL_ARGS ?= --overwrite-policy=always --install-method=copy +CABAL_INSTALL := $(CABAL) $(CABAL_BASE_ARGS) v2-install +PROJECT_FILE := cabal.project + +S3_HOST ?= +S3_KEY ?= + +# set rpath relative to the current executable +# TODO: on darwin, this doesn't overwrite rpath, but just adds to it, +# so we'll have the old rpaths from the build host in there as well +define set_rpath + $(if $(filter Darwin,$(UNAME)), $(INSTALL_NAME_TOOL) -add_rpath "@executable_path/$(1)" "$(2)", $(PATCHELF) --force-rpath --set-rpath "\$$ORIGIN/$(1)" "$(2)") +endef + +define sync_from + $(CABAL_CACHE_BIN) sync-from-archive --host-name-override=$(S3_HOST) --host-port-override=443 --host-ssl-override=True --region us-west-2 --store-path="$(ROOT_DIR)/$(STORE_DIR)" --archive-uri "s3://haskell-language-server/$(S3_KEY)" +endef + +define sync_to + $(CABAL_CACHE_BIN) sync-to-archive --host-name-override=$(S3_HOST) --host-port-override=443 --host-ssl-override=True --region us-west-2 --store-path="$(ROOT_DIR)/$(STORE_DIR)" --archive-uri "s3://haskell-language-server/$(S3_KEY)" +endef + +hls: + @if test -z "$(GHCS)" ; then echo >&2 "GHCS is not set" ; false ; fi + for ghc in $(GHCS) ; do \ + $(GHCUP) install ghc `echo $$ghc` && \ + $(GHCUP_GC) -p -s -c -t && \ + $(MAKE) GHC_VERSION=`echo $$ghc` hls-ghc || exit 1 && \ + $(GHCUP_RM) `echo $$ghc` ; \ + done + +hls-ghc: + $(MKDIR_P) out/$(ARTIFACT) + $(MKDIR_P) out/plan.json + @if test -z "$(GHC_VERSION)" ; then echo >&2 "GHC_VERSION is not set" ; false ; fi + $(CABAL) $(CABAL_BASE_ARGS) configure --project-file="$(PROJECT_FILE)" -w "ghc-$(GHC_VERSION)" $(CABAL_ARGS) exe:haskell-language-server exe:haskell-language-server-wrapper + $(CABAL) $(CABAL_BASE_ARGS) build --project-file="$(PROJECT_FILE)" -w "ghc-$(GHC_VERSION)" $(CABAL_ARGS) --dependencies-only --dry-run exe:haskell-language-server exe:haskell-language-server-wrapper + $(call sync_from) + $(CP) dist-newstyle/cache/plan.json "$(ROOT_DIR)/out/plan.json/$(ARTIFACT)-ghc-$(GHC_VERSION)-plan.json" + $(CABAL_INSTALL) --project-file="$(PROJECT_FILE)" -w "ghc-$(GHC_VERSION)" $(CABAL_ARGS) $(CABAL_INSTALL_ARGS) --installdir="$(ROOT_DIR)/out/$(ARTIFACT)/$(GHC_VERSION)" exe:haskell-language-server exe:haskell-language-server-wrapper + $(call 
sync_to) + $(STRIP_S) "$(ROOT_DIR)/out/$(ARTIFACT)/$(GHC_VERSION)/haskell-language-server" + $(STRIP_S) "$(ROOT_DIR)/out/$(ARTIFACT)/$(GHC_VERSION)/haskell-language-server-wrapper" + +bindist: + @if test -z "$(GHCS)" ; then echo >&2 "GHCS is not set" ; false ; fi + for ghc in $(GHCS) ; do \ + $(GHCUP) install ghc `echo $$ghc` && \ + $(GHCUP_GC) -p -s -c -t && \ + $(MAKE) GHC_VERSION=`echo $$ghc` bindist-ghc || exit 1 && \ + $(GHCUP_RM) `echo $$ghc` ; \ + done + $(SED) -e "s/@@HLS_VERSION@@/$(HLS_VERSION)/" \ + bindist/GNUmakefile.in > "$(BINDIST_OUT_DIR)/GNUmakefile" + $(INSTALL_D) "$(BINDIST_OUT_DIR)/scripts/" + $(INSTALL_X) "bindist/relpath.sh" "$(BINDIST_OUT_DIR)/scripts/relpath.sh" + +bindist-tar: + $(CD) "$(BINDIST_BASE_DIR)" ; $(TAR_MK) "$(ROOT_DIR)/out/$(TARBALL)" "haskell-language-server-$(HLS_VERSION)" + +bindist-ghc: + if test -z "$(GHC_VERSION)" ; then echo >&2 "GHC_VERSION is not set" ; false ; fi + $(MKDIR_P) "$(BINDIST_OUT_DIR)/bin" + $(MKDIR_P) "$(BINDIST_OUT_DIR)/lib/$(GHC_VERSION)" + $(INSTALL_D) "$(BINDIST_OUT_DIR)/bin/" + $(INSTALL_X) "out/$(ARTIFACT)/$(GHC_VERSION)/haskell-language-server" "$(BINDIST_OUT_DIR)/bin/haskell-language-server-$(GHC_VERSION)" + $(call set_rpath,../lib/$(GHC_VERSION),$(BINDIST_OUT_DIR)/bin/haskell-language-server-$(GHC_VERSION)) + $(SED) \ + -e "s/@@EXE_NAME@@/haskell-language-server-$(GHC_VERSION)/" \ + -e "s/@@GHC_VERSION@@/$(GHC_VERSION)/" \ + -e "s/@@BOOT_PKGS@@/$(shell ghc-pkg-$(GHC_VERSION) --global list --simple-output)/" \ + -e "s/@@ABI_HASHES@@/$(shell for dep in `ghc-pkg-$(GHC_VERSION) --global list --simple-output` ; do printf "%s:" "$$dep" && ghc-pkg-$(GHC_VERSION) field $$dep abi --simple-output ; done | tr '\n' ' ' | xargs)/" \ + bindist/wrapper.in > "$(BINDIST_OUT_DIR)/haskell-language-server-$(GHC_VERSION).in" + $(CHMOD_X) "$(BINDIST_OUT_DIR)/haskell-language-server-$(GHC_VERSION).in" + $(INSTALL_D) "$(BINDIST_OUT_DIR)/bin/" + $(INSTALL_X) "out/$(ARTIFACT)/$(GHC_VERSION)/haskell-language-server-wrapper" "$(BINDIST_OUT_DIR)/bin/haskell-language-server-wrapper" + $(INSTALL_D) "$(ROOT_DIR)/$(BINDIST_OUT_DIR)/lib/$(GHC_VERSION)" + $(FIND) "$(STORE_DIR)/ghc-$(GHC_VERSION)" -type f -name "$(DLL)" -execdir $(INSTALL_X) "{}" "$(ROOT_DIR)/$(BINDIST_OUT_DIR)/lib/$(GHC_VERSION)/{}" \; + $(FIND) "$(ROOT_DIR)/$(BINDIST_OUT_DIR)/lib/$(GHC_VERSION)" -type f -name '$(DLL)' -execdir $(call set_rpath,,{}) \; + +version: + @echo "$(HLS_VERSION)" + +clean: + $(RM_RF) out/* + +clean-ghcs: + @if test -z "$(GHCS)" ; then echo >&2 "GHCS is not set" ; false ; fi + for ghc in $(GHCS) ; do \ + $(GHCUP) rm ghc `echo $$ghc` ; \ + done + +clean-all: clean-ghcs + $(RM_RF) out/* $(STORE_DIR) + +.PHONY: hls hls-ghc bindist bindist-ghc bindist-tar clean clean-all install-ghcs version diff --git a/GenChangelogs.hs b/GenChangelogs.hs new file mode 100755 index 0000000000..a6100e52a4 --- /dev/null +++ b/GenChangelogs.hs @@ -0,0 +1,38 @@ +#!/usr/bin/env cabal +{- cabal: +build-depends: base, bytestring, process, text, github, time >= 1.9 +-} + +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +import Control.Monad +import qualified Data.ByteString.Char8 as BS +import Data.List +import Data.Maybe +import qualified Data.Text as T +import Data.Time.Format.ISO8601 +import Data.Time.LocalTime +import GitHub +import System.Environment +import System.Process + +main = do + args <- getArgs + let (githubReq,tag) = case args of + token:tag:_ -> (github (OAuth $ BS.pack token), tag) + prs <- githubReq $ pullRequestsForR "haskell" "haskell-language-server" 
stateClosed FetchAll + lastDateStr <- last . lines <$> readProcess "git" ["show", "-s", "--format=%cI", "-1", tag] "" + lastDate <- zonedTimeToUTC <$> iso8601ParseM lastDateStr + + let prsAfterLastTag = either (error . show) + (foldMap (\pr -> [pr | inRange pr])) + prs + inRange pr + | Just mergedDate <- simplePullRequestMergedAt pr = mergedDate > lastDate + | otherwise = False + + forM_ prsAfterLastTag $ \SimplePullRequest{..} -> + putStrLn $ T.unpack $ "- " <> simplePullRequestTitle <> + "\n ([#" <> T.pack (show $ unIssueNumber simplePullRequestNumber) <> "](" <> getUrl simplePullRequestHtmlUrl <> "))" <> + " by @" <> untagName (simpleUserLogin simplePullRequestUser) diff --git a/README.md b/README.md index bcaa7b8531..808f11e670 100644 --- a/README.md +++ b/README.md @@ -1,549 +1,36 @@ # haskell-language-server +![haskell-language-server][logo] + + +[![Release][badge-github-release]][github-release] +[![Hackage][badge-hackage]][hackage] [![License Apache 2.0][badge-license]][license] [![CircleCI][badge-circleci]][circleci] +[![GitHub Testing Workflow](https://github.com/haskell/haskell-language-server/actions/workflows/test.yml/badge.svg)](https://github.com/haskell/haskell-language-server/actions/workflows/test.yml) +[![GitHub Nix Workflow](https://github.com/haskell/haskell-language-server/actions/workflows/nix.yml/badge.svg)](https://github.com/haskell/haskell-language-server/actions/workflows/nix.yml) +[![matrix][badge-matrix]][matrix] +[![codetriage][badge-codetriage]][codetriage] +[logo]: ./docs/logos/logo-256.png [badge-license]: https://img.shields.io/badge/license-Apache2-green.svg?dummy [license]: https://github.com/haskell/haskell-language-server/blob/master/LICENSE [badge-circleci]: https://img.shields.io/circleci/project/github/haskell/haskell-language-server/master.svg [circleci]: https://circleci.com/gh/haskell/haskell-language-server/ - -Integration point for [ghcide](https://github.com/digital-asset/ghcide) and [haskell-ide-engine](https://github.com/haskell/haskell-ide-engine). One IDE to rule -them all. Read the [project's -background](https://neilmitchell.blogspot.com/2020/01/one-haskell-ide-to-rule-them-all.html). - -This is *very* early stage software. 
- -- [Haskell Language Server (HLS)](#haskell-language-server) - - [Features](#features) - - [Installation](#installation) - - [Installation from source](#installation-from-source) - - [Common pre-requirements](#common-pre-requirements) - - [Linux-specific pre-requirements](#linux-specific-pre-requirements) - - [Windows-specific pre-requirements](#windows-specific-pre-requirements) - - [Download the source code](#download-the-source-code) - - [Building](#building) - - [Install via cabal](#install-via-cabal) - - [Install specific GHC Version](#install-specific-ghc-version) - - [Project Configuration](#project-configuration) - - [Editor Integration](#editor-integration) - - [VS Code](#using-haskell-language-server-with-vs-code) - - [Sublime Text](#using-haskell-language-server-with-sublime-text) - - [Vim or Neovim](#using-haskell-language-server-with-vim-or-neovim) - - [Coc](#coc) - - [LanguageClient-neovim](#languageclient-neovim) - - [vim-plug](#vim-plug) - - [Clone the LanguageClient-neovim repo](#clone-the-languageclient-neovim-repo) - - [Sample `~/.vimrc`](#sample-vimrc) - - [Atom](#using-haskell-language-server-with-atom) - - [Emacs](#using-haskell-language-server-with-emacs) - - [Doom emacs](#using-haskell-language-server-with-doom-emacs) - - [Kakoune](#using-haskell-language-server-with-kakoune) - - [Known limitations](#known-limitations) - - [Preprocessor](#preprocessor) - - [Contributing](#contributing) - - [It's time to join the project!](#its-time-to-join-the-project) - -## Features - - - Code evaluation (inspired by [Dante](https://github.com/jyp/dante#-reploid)) - - ![Eval](https://i.imgur.com/bh992sT.gif) - - - Many more (TBD) - -## Installation - -For now only installation from source is supported. - -### Installation from source - -#### Common pre-requirements - -- `stack` or `cabal` must be in your PATH - - You need stack version >= 2.1.1 or cabal >= 2.4.0.0 -- `git` must be in your PATH -- The directory where `stack`or `cabal` put the binaries must be in you PATH: - - For stack you can get it with `stack path --local-bin` - - For cabal it is by default `$HOME/.cabal/bin` in linux and `%APPDATA%\cabal\bin` in windows. - -Tip: you can quickly check if some command is in your path by running the command. -If you receive some meaningful output instead of "command not found"-like message -then it means you have the command in PATH. - -#### Linux-specific pre-requirements - -On Linux you will need install a couple of extra libraries: -- [Unicode (ICU)](http://site.icu-project.org/) -- [NCURSES](https://www.gnu.org/software/ncurses/) -- [Zlib](https://zlib.net/) - -**Debian 9/Ubuntu 18.04 or earlier**: - -```bash -sudo apt install libicu-dev libtinfo-dev libgmp-dev zlib1g-dev -``` - -**Debian 10/Ubuntu 18.10 or later**: - -```bash -sudo apt install libicu-dev libncurses-dev libgmp-dev zlib1g-dev -``` - -**Fedora**: - -```bash -sudo dnf install libicu-devel ncurses-devel zlib-devel -``` - -#### Windows-specific pre-requirements - -In order to avoid problems with long paths on Windows you can do either one of the following: - -1. Clone the `haskell-language-server` to a short path, for example the root of your logical drive (e.g. to - `C:\hls`). Even if you choose `C:\haskell-language-server` you could hit the problem. If this doesn't work or you want to use a longer path, try the second option. - -2. 
If the `Local Group Policy Editor` is available on your system, go to: `Local Computer Policy -> Computer Configuration -> Administrative Templates -> System -> Filesystem` set `Enable Win32 long paths` to `Enabled`. If you don't have the policy editor you can use regedit by using the following instructions [here](https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#enable-long-paths-in-windows-10-version-1607-and-later). You also need to configure git to allow longer paths by using unicode paths. To set this for all your git repositories use `git config --system core.longpaths true` (you probably need an administrative shell for this) or for just this one repository use `git config core.longpaths true`. - -In addition make sure `haskell-language-server.exe` is not running by closing your editor, otherwise in case of an upgrade the executable can not be installed. - -#### Download the source code - -```bash -git clone https://github.com/haskell/haskell-language-server --recurse-submodules -cd haskell-language-server -``` - -#### Building - -Note, on first invocation of the build script with stack, a GHC is being installed for execution. -The GHC used for the `install.hs` can be adjusted in `./install/stack.yaml` by using a different resolver. - -Available commands can be seen with: - -```bash -stack ./install.hs help -``` - -Remember, this will take time to download a Stackage-LTS and an appropriate GHC for build -haskell-language-server the first time. - -##### Install via cabal - -The install-script can be invoked via `cabal` instead of `stack` with the command - -```bash -cabal v2-run ./install.hs --project-file install/shake.project -``` - -or using the existing alias script - -```bash -./cabal-hls-install -``` - -Running the script with cabal on windows requires a cabal version greater or equal to `3.0.0.0`. - -For brevity, only the `stack`-based commands are presented in the following sections. - -##### Install specific GHC Version - -Install haskell-language-server for the latest available and supported GHC version (and hoogle docs): - -```bash -stack ./install.hs hls -``` - -Install haskell-language-server for a specific GHC version (and hoogle docs): - -```bash -stack ./install.hs hls-8.8.3 -stack ./install.hs data -``` - -The Haskell Language Server can also be built with `cabal v2-build` instead of `stack build`. -This has the advantage that you can decide how the GHC versions have been installed. -To see what GHC versions are available, the command `cabal-hls-install ghcs` can be used. -It will list all *supported* GHC versions that are on the path for build with their respective installation directory. -If you think, this list is incomplete, you can try to modify the PATH variable, such that the executables can be found. -Note, that the targets `hls` and `data` depend on the found GHC versions. - -An example output is: - -```bash -> ./cabal-hls-install ghcs -****************************************************************** -Found the following GHC paths: -ghc-8.6.5: /opt/bin/ghc-8.6.5 -ghc-8.8.3: /opt/bin/ghc-8.8.3 - -****************************************************************** -``` - -If your desired ghc has been found, you use it to install haskell-language-server. 
- -```bash -./cabal-hls-install hls-8.6.5 -./cabal-hls-install data -``` - -## Project Configuration - -**For a full explanation of possible configurations, refer to [hie-bios/README](https://github.com/mpickering/hie-bios/blob/master/README.md).** - -haskell-language-server has some limited support via hie-bios to detect automatically -your project configuration and set up the environment for GHC. -The plan is to improve it to handle most use cases. - -However, for now, the more reliable way is using a `hie.yaml` file in the root -of the workspace to **explicitly** describe how to setup the environment. -For that you need to know what *components* have your project and the path -associated with each one. So you will need some knowledge about -[stack](https://docs.haskellstack.org/en/stable/build_command/#components) or [cabal](https://cabal.readthedocs.io/en/latest/cabal-commands.html?#cabal-v2-build) components. - -You also can use [this utility](https://github.com/Avi-D-coder/implicit-hie -) to generate automatically `hie.yaml` files for -the most common stack and cabal configurations - -For example, to state that you want to use `stack` then the configuration file -would look like: - -```yaml -cradle: - stack: - component: "haskell-language-server:lib" -``` - -If you use `cabal` then you probably need to specify which component you want -to use. - -```yaml -cradle: - cabal: - component: "lib:haskell-language-server" -``` - -If you have a project with multiple components, you can use a cabal-multi -cradle: - -```yaml -cradle: - cabal: - - path: "./test/functional/" - component: "haskell-language-server:func-test" - - path: "./test/utils/" - component: "haskell-language-server:hls-test-utils" - - path: "./exe/Main.hs" - component: "haskell-language-server:exe:haskell-language-server" - - path: "./exe/Wrapper.hs" - component: "haskell-language-server:exe:haskell-language-server-wrapper" - - path: "./src" - component: "lib:haskell-language-server" - - path: "./ghcide/src" - component: "ghcide:lib:ghcide" - - path: "./ghcide/exe" - component: "ghcide:exe:ghcide" -``` - -Equivalently, you can use stack: - -```yaml -cradle: - stack: - - path: "./test/functional/" - component: "haskell-language-server:func-test" - - path: "./exe/Main.hs" - component: "haskell-language-server:exe:haskell-language-server" - - path: "./exe/Wrapper.hs" - component: "haskell-language-server:exe:haskell-language-server-wrapper" - - path: "./src" - component: "haskell-language-server:lib" - - path: "./ghcide/src" - component: "ghcide:lib:ghcide" - - path: "./ghcide/exe" - component: "ghcide:exe:ghcide" -``` - -Or you can explicitly state the program which should be used to collect -the options by supplying the path to the program. It is interpreted -relative to the current working directory if it is not an absolute path. - -```yaml -cradle: - bios: - program: ".hie-bios" -``` - -The complete configuration is a subset of - -```yaml -cradle: - cabal: - component: "optional component name" - stack: - component: "optional component name" - bios: - program: "program to run" - dependency-program: "optional program to run" - direct: - arguments: ["list","of","ghc","arguments"] - default: - none: - -dependencies: - - someDep -``` - -## Editor Integration - -Note to editor integrators: there is a `haskell-language-server-wrapper` executable, which is installed alongside the `haskell-language-server` executable. 
When this is invoked in the project root directory, it attempts to work out the GHC version used in the project, and then launch the matching `haskell-language-server` executable. - -All of the editor integrations assume that you have already installed `haskell-language-server` (see above) and that the installation script put the `haskell-language-server` and `haskell-language-server-wrapper` binaries in your `PATH` (usually `~/.local/bin` or `~/.cabal/bin` on Linux and macOS, `%APPDATA%\local\bin` or `%APPDATA%\cabal\bin` on Windows). - -### Using Haskell Language Server with VS Code - -Install from -[the VSCode marketplace](https://marketplace.visualstudio.com/items?itemName=alanz.vscode-hie-server), or manually from the repository [vscode-hie-server](https://github.com/alanz/vscode-hie-server). - -Choose `haskell-language-server` in the extension setting `languageServerHaskell.hieVariant`. - -### Using Haskell Language Server with Sublime Text - -- Install [LSP](https://packagecontrol.io/packages/LSP) using [Package Control](https://packagecontrol.io/) -- From Sublime Text, go to Preferences and search for LSP Settings -- Paste in these settings. Make sure to change the command path to your `haskell-language-server-wrapper` - -```json -{ - "clients": { - "haskell-language-server": { - "command": ["haskell-language-server-wrapper", "--lsp"], - "scopes": ["source.haskell"], - "syntaxes": ["Packages/Haskell/Haskell.sublime-syntax"], - "languageId": "haskell", - }, - }, -} -``` - -Now open a Haskell project with Sublime Text and enable Language Server in the project. -You should have these features available: - -1. Errors are underlined in red -2. LSP: Show Diagnostics will show a list of hints and errors -3. LSP: Format Document will prettify the file - -### Using Haskell Language Server with Vim or Neovim - -You can use [Coc](https://github.com/neoclide/coc.nvim), [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim) -or any other Vim Language server protocol client. -Coc is recommend since it is the only complete LSP implementation for Vim and Neovim and offers snippets and floating documentation out of the box. - -#### Coc - -Follow Coc's [installation instructions](https://github.com/neoclide/coc.nvim). -Then issue `:CocConfig` and add the following to your Coc config file. - -```json -"languageserver": { - "haskell": { - "command": "haskell-language-server-wrapper", - "args": ["--lsp"], - "rootPatterns": [ - "*.cabal", - "stack.yaml", - "cabal.project", - "package.yaml" - ], - "filetypes": [ - "hs", - "lhs", - "haskell" - ], - "initializationOptions": { - "languageServerHaskell": { - } - } - } -} -``` - -#### LanguageClient-neovim - -##### vim-plug - -If you use [vim-plug](https://github.com/junegunn/vim-plug), then you can do this by e.g., -including the following line in the Plug section of your `init.vim` or `~/.vimrc`: - -```text -Plug 'autozimu/LanguageClient-neovim', { - \ 'branch': 'next', - \ 'do': 'bash install.sh' - \ } -``` - -and issuing a `:PlugInstall` command within Neovim or Vim. 
- -##### Clone the LanguageClient-neovim repo - -As an alternative to using [vim-plug](https://github.com/junegunn/vim-plug) shown above, clone [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim) -into `~/.vim/pack/XXX/start/`, where `XXX` is just a name for your "plugin suite". - -##### Sample `~/.vimrc` - -```vim -set rtp+=~/.vim/pack/XXX/start/LanguageClient-neovim -let g:LanguageClient_serverCommands = { 'haskell': ['haskell-language-server-wrapper', '--lsp'] } -``` - -You'll probably want to add some mappings for common commands: - -```vim -nnoremap :call LanguageClient_contextMenu() -map lk :call LanguageClient#textDocument_hover() -map lg :call LanguageClient#textDocument_definition() -map lr :call LanguageClient#textDocument_rename() -map lf :call LanguageClient#textDocument_formatting() -map lb :call LanguageClient#textDocument_references() -map la :call LanguageClient#textDocument_codeAction() -map ls :call LanguageClient#textDocument_documentSymbol() -``` - -Use Ctrl+xCtrl+o (``) to open up the auto-complete menu, -or for asynchronous auto-completion, follow the setup instructions on -[LanguageClient](https://github.com/autozimu/LanguageClient-neovim). - -If you'd like diagnostics to be highlighted, add a highlight group for `ALEError`/`ALEWarning`/`ALEInfo`, -or customize `g:LanguageClient_diagnosticsDisplay`: - -```vim -hi link ALEError Error -hi Warning term=underline cterm=underline ctermfg=Yellow gui=undercurl guisp=Gold -hi link ALEWarning Warning -hi link ALEInfo SpellCap -``` - -If you're finding that the server isn't starting at the correct project root, -it may also be helpful to also specify root markers: - -```vim -let g:LanguageClient_rootMarkers = ['*.cabal', 'stack.yaml'] -``` - -### Using Haskell Language Server with Atom - -Install the two Atom packages [atom-ide-ui](https://atom.io/packages/atom-ide-ui) and [ide-haskell-hie](https://atom.io/packages/ide-haskell-hie), - -```bash -$ apm install language-haskell atom-ide-ui ide-haskell-hie -``` - -The plugin ide-haskell-hie is designed to work with haskell-ide-engine by default, so you will have to put the path to haskell-language-server-wrapper in the configuration option `Absolute path to hie executable`. - -### Using haskell-language-server with Emacs - -Install HLS along with the following emacs packages: - -[lsp-mode](https://github.com/emacs-lsp/lsp-mode) -[lsp-ui](https://github.com/emacs-lsp/lsp-ui) -[lsp-haskell](https://github.com/emacs-lsp/lsp-haskell) - -Make sure to follow the instructions in the README of each of these packages. - -``` emacs-lisp -(use-package lsp-haskell - :ensure t - :config - (setq lsp-haskell-process-path-hie "haskell-language-server-wrapper") - ;; Comment/uncomment this line to see interactions between lsp client/server. 
- ;;(setq lsp-log-io t) -) -``` - -### Using haskell-language-server with [doom-emacs](https://github.com/hlissner/doom-emacs/tree/develop/modules/lang/haskell#module-flags) - -Install haskell-language-server, and then enable haskell lang module with lsp flag in `.doom.d/init.el` - -``` emacs-lisp -:lang -(haskell +lsp) -``` - -in your `.doom.d/config.el` file - -``` emacs-lisp -(use-package lsp-haskell - :ensure t - :config - (setq lsp-haskell-process-path-hie "haskell-language-server-wrapper") - ;; Comment/uncomment this line to see interactions between lsp client/server. - ;;(setq lsp-log-io t) -) -``` - -then do `$HOME/.emacs.d/bin/doom refresh` - -### Using haskell-language-server with [Kakoune](https://github.com/mawww/kakoune) - -1. Grab a copy of [kak-lsp](https://github.com/ul/kak-lsp), and follow the setup instructions. -2. Point your `kak-lsp.toml` to `haskell-language-server-wrapper`. - -```toml -[language.haskell] -filetypes = ["haskell"] -roots = ["Setup.hs", "stack.yaml", "*.cabal"] -command = "haskell-language-server-wrapper" -args = ["--lsp"] -``` - -## Known limitations - -### Preprocessor -HLS is not yet able to find project preprocessors, which may result in `could not execute: ` errors. This problem is -tracked in https://github.com/haskell/haskell-language-server/issues/176 and originally comes from https://github.com/mpickering/hie-bios/issues/125 - -As a workaround, you need to ensure the preprocessor is available in the path (install globally with Stack or Cabal, provide in `shell.nix`, etc.). - -Example with `tasty-discover`: -```haskell -{-# OPTIONS_GHC -F -pgmF tasty-discover #-} -``` -This returns an error in HLS if 'tasty-discover' is not in the path: `could not execute: tasty-discover`. - - -## Contributing - -### It's time to join the project - -:heart: Haskell tooling dream is near, we need your help! :heart: - -- Join [our IRC channel](https://webchat.freenode.net/?channels=haskell-ide-engine) at `#haskell-ide-engine` on `freenode`. -- Fork this repo and hack as much as you can. -- Ask @alanz or @hvr to join the project. - -### Hacking on haskell-language-server - -Haskell-language-server can be used on itself. We provide -preset samples of `hie.yaml` for Cabal and Stack. - -Note: the `./install/` folder is not directly tied to the project so it has dedicated `./install/hie.yaml.[cbl|stack]` -templates. - -#### Using Cabal - -```shell -$ cp hie.yaml.cbl hie.yaml -$ cp install/hie.yaml.cbl install/hie.yaml -``` - -#### Using Stack - -Note: Stack project must also be built once until [this issue](https://github.com/commercialhaskell/stack/issues/5213) is fixed. 
- -```shell -$ cp hie.yaml.stack hie.yaml -$ cp install/hie.yaml.stack install/hie.yaml -$ stack build --test --no-run-tests -$ cd install -$ stack build -``` +[badge-hackage]: https://img.shields.io/hackage/v/haskell-language-server.svg?logo=haskell +[badge-github-release]:https://img.shields.io/github/v/release/haskell/haskell-language-server.svg +[hackage]: https://hackage.haskell.org/package/haskell-language-server +[badge-codetriage]: https://www.codetriage.com/haskell/haskell-language-server/badges/users.svg +[codetriage]:https://www.codetriage.com/haskell/haskell-language-server +[badge-matrix]:https://img.shields.io/badge/chat-on%20matrix-brightgreen.svg +[matrix]:https://matrix.to/#/#haskell-language-server:matrix.org +[github-release]:https://github.com/haskell/haskell-language-server/releases/latest + +The official Haskell language server (LSP) implementation. Consult the [project documentation](https://haskell-language-server.readthedocs.io/en/latest/) for more details. + +- [Features](https://haskell-language-server.readthedocs.io/en/latest/features.html) +- [Installation](https://haskell-language-server.readthedocs.io/en/latest/installation.html) +- [Supported GHC Versions](https://haskell-language-server.readthedocs.io/en/latest/support/ghc-version-support.html) +- [Configuration](https://haskell-language-server.readthedocs.io/en/latest/configuration.html) +- [Troubleshooting](https://haskell-language-server.readthedocs.io/en/latest/troubleshooting.html) +- [Contributing](https://haskell-language-server.readthedocs.io/en/latest/contributing/index.html) diff --git a/RELEASING.md b/RELEASING.md new file mode 100644 index 0000000000..74da125d86 --- /dev/null +++ b/RELEASING.md @@ -0,0 +1,57 @@ +# Releasing + +## Release checklist + +- [ ] check ghcup supports new GHC releases if any +- [ ] check all plugins still work if release includes code changes +- [ ] set the supported GHCs in workflow file `.github/generate-ci/gen_ci.hs` +- [ ] regenerate the CI via `./.github/generate-ci/generate-jobs` +- [ ] bump package versions in all `*.cabal` files (same version as hls) + - HLS uses lockstep versioning. The core packages and all plugins use the same version number, and only support exactly this version. + - Exceptions: + - `shake-bench` is an internal testing tool, not exposed to the outside world. Thus, no version bump required for releases. + - For updating cabal files, the following script can be used: + - ```sh + ./release/update_versions.sh + ``` + - It still requires manual verification and review +- [ ] generate and update changelog + - Generate a ChangeLog via `./GenChangelogs.hs ` + - `` is the git tag you want to generate the ChangeLog from. 
+ - `` is a github access key: https://github.com/settings/tokens +- [ ] update https://haskell-language-server.readthedocs.io/en/latest/support/ghc-version-support.html#current-ghc-version-support-status +- [ ] create release branch as `wip/` + - `git switch -c wip/` +- [ ] create release tag as `` + - `git tag ` +- [ ] trigger release pipeline by pushing the tag + - this creates a draft release + - `git push ` +- [ ] run `sh scripts/release/download-gh-artifacts.sh ` + - downloads artifacts to `gh-release-artifacts/haskell-language-server-/` + - also downloads FreeBSD bindist from circle CI + - adds signatures +- [ ] upload artifacts to downloads.haskell.org from `gh-release-artifacts/haskell-language-server-/` + - You require sftp access, contact wz1000, bgamari or chreekat + - `cd gh-release-artifacts/haskell-language-server-` + - `SIGNING_KEY=... ../../release/upload.sh upload` + - Your SIGNING_KEY can be obtained with `gpg --list-secret-keys --keyid-format=long` + - Afterwards, the artifacts are available at: `https://downloads.haskell.org/~hls/haskell-language-server-/` + - Run `SIGNING_KEY=... ../../release/upload.sh purge_all` to remove CDN caches +- [ ] create PR to [ghcup-metadata](https://github.com/haskell/ghcup-metadata) + - [ ] update `ghcup-vanilla-0.0.8.yaml` and `ghcup-vanilla-0.0.7.yaml` + - can use `sh scripts/release/create-yaml-snippet.sh ` to generate a snippet that can be manually inserted into the yaml files + - ~~update `hls-metadata-0.0.1.json`~~ Currently unnecessary, GHCup builds its own HLS binaries and updates that file. + - utilize `cabal run ghcup-gen -- generate-hls-ghcs -f ghcup-0.0.7.yaml --format json --stdout` in the root of ghcup-metadata repository + - Be sure to mark the correct latest version and add the 'recommended' tag to the latest release. +- [ ] get sign-off on release + - from wz1000, michealpj, maerwald and fendor +- [ ] publish release on github +- [ ] upload hackage packages + - requires credentials +- [ ] Supported tools table needs to be updated: + - https://www.haskell.org/ghcup/install/#supported-platforms + - https://github.com/haskell/ghcup-hs/blob/master/docs/install.md#supported-platforms + - https://github.com/haskell/ghcup-metadata/blob/44c6e2b5d0fcae15abeffff03e87544edf76dd7a/ghcup-gen/Main.hs#L67 +- [ ] post release on discourse and reddit +- [ ] merge release PR to master or forward port relevant changes diff --git a/bench/Main.hs b/bench/Main.hs new file mode 100644 index 0000000000..eec4380eb4 --- /dev/null +++ b/bench/Main.hs @@ -0,0 +1,281 @@ + +{- Bench history + + A Shake script to analyze the performance of HLS over the git history of the project + + Driven by a config file `bench/config.yaml` containing the list of Git references to analyze. + + Builds each one of them and executes a set of experiments using the ghcide-bench suite. 
+ + The results of the benchmarks and the analysis are recorded in the file + system with the following structure: + + bench-results + ├── + │  ├── ghc.path - path to ghc used to build the binary + │  └── haskell-language-server - binary for this version + ├─ + │ ├── results.csv - aggregated results for all the versions + │ └── + | └── + │   ├── .gcStats.log - RTS -s output + │   ├── .csv - stats for the experiment + │   ├── .svg - Graph of bytes over elapsed time + │   ├── .diff.svg - idem, including the previous version + │   ├── .log - ghcide-bench output + │   └── results.csv - results of all the experiments for the example + ├── results.csv - aggregated results of all the experiments and versions + └── .svg - graph of bytes over elapsed time, for all the included versions + + For diff graphs, the "previous version" is the preceding entry in the list of versions + in the config file. A possible improvement is to obtain this info via `git rev-list`. + + To execute the script: + + > cabal/stack bench + + To build a specific analysis, enumerate the desired file artifacts + + > stack bench --ba "bench-results/HEAD/results.csv bench-results/HEAD/edit.diff.svg" + > cabal bench --benchmark-options "bench-results/HEAD/results.csv bench-results/HEAD/edit.diff.svg" + + -} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS -Wno-orphans #-} + +import Control.Lens (preview, (^.)) +import Control.Monad.Extra +import Data.Aeson (Value (..), encode) +import Data.Aeson.Lens +import Data.Default +import Data.Foldable (find) +import qualified Data.Map.Strict as Map +import Data.Maybe +import Data.Text (unpack) +import Data.Yaml (FromJSON (..), ToJSON (toJSON), + decodeFileThrow) +import Development.Benchmark.Rules hiding (parallelism) +import Development.Shake (Action, + Change (ChangeModtimeAndDigestInput), + CmdOption (Cwd, StdinBS), + RuleResult, Rules, + ShakeOptions (shakeChange, shakeThreads), + actionBracket, addOracle, + askOracle, command, command_, + getDirectoryFiles, liftIO, need, + newCache, shakeArgsWith, + shakeOptions, versioned, want) +import Development.Shake.Classes +import Experiments.Types (Example (exampleName), + exampleToOptions) +import GHC.Exts (toList) +import GHC.Generics (Generic) +import HlsPlugins (idePlugins) +import qualified Ide.Plugin.Config as Plugin +import Ide.Types hiding (Config) +import Numeric.Natural (Natural) +import System.Console.GetOpt +import System.Directory +import System.FilePath +import System.IO.Error (tryIOError) + +configPath :: FilePath +configPath = "bench/config.yaml" + +configOpt :: OptDescr (Either String FilePath) +configOpt = Option [] ["config"] (ReqArg Right configPath) "config file" + +binaryName :: String +binaryName = "haskell-language-server" + +-- | Read the config without dependency +readConfigIO :: FilePath -> IO (Config BuildSystem) +readConfigIO = decodeFileThrow + +instance IsExample Example where getExampleName = exampleName +type instance RuleResult GetExample = Maybe Example +type instance RuleResult GetExamples = [Example] + +shakeOpts :: ShakeOptions +shakeOpts = + shakeOptions{shakeChange = ChangeModtimeAndDigestInput, shakeThreads = 0} + +main :: IO () +main = shakeArgsWith shakeOpts [configOpt] $ \configs wants -> pure $ Just $ do + let config = fromMaybe configPath $ listToMaybe configs + _configStatic <- createBuildSystem config + case wants of + [] -> want ["all"] + _ -> want wants + +hlsBuildRules :: MkBuildRules 
BuildSystem +hlsBuildRules = MkBuildRules findGhcForBuildSystem binaryName projectDepends buildHls + where + recordDepends path = + need . map (path ) =<< getDirectoryFiles path ["//*.hs"] + projectDepends = do + recordDepends "src" + recordDepends "exe" + recordDepends "plugins" + recordDepends "ghcide/session-loader" + recordDepends "ghcide/src" + recordDepends "hls-graph/src" + recordDepends "hls-plugin-api/src" + need =<< getDirectoryFiles "." ["*.cabal"] + +-------------------------------------------------------------------------------- +data Config buildSystem = Config + { experiments :: [Unescaped String], + configurations :: [ConfigurationDescriptor], + examples :: [Example], + samples :: Natural, + versions :: [GitCommit], + -- | Output folder ('foo' works, 'foo/bar' does not) + outputFolder :: String, + buildTool :: buildSystem, + profileInterval :: Maybe Double, + parallelism :: Natural + } + deriving (Generic, Show) + deriving anyclass (FromJSON) + +createBuildSystem :: FilePath -> Rules (Config BuildSystem) +createBuildSystem config = do + readConfig <- newCache $ \fp -> need [fp] >> liftIO (readConfigIO fp) + + _ <- addOracle $ \GetExperiments {} -> experiments <$> readConfig config + _ <- addOracle $ \GetVersions {} -> versions <$> readConfig config + _ <- versioned 1 $ addOracle $ \GetExamples{} -> examples <$> readConfig config + _ <- versioned 1 $ addOracle $ \(GetExample name) -> find (\e -> getExampleName e == name) . examples <$> readConfig config + _ <- addOracle $ \GetBuildSystem {} -> buildTool <$> readConfig config + _ <- addOracle $ \GetSamples{} -> samples <$> readConfig config + _ <- addOracle $ \GetConfigurations{} -> do + Config{configurations} <- readConfig config + return [ Configuration confName (encode $ disableAllPluginsBut (`elem` confPlugins)) + | ConfigurationDescriptor{..} <- configurations + ] + + configStatic <- liftIO $ readConfigIO config + let build = outputFolder configStatic + + buildRules build hlsBuildRules + benchRules build (MkBenchRules (askOracle $ GetSamples ()) benchHls warmupHls "haskell-language-server" (parallelism configStatic)) + addGetParentOracle + csvRules build + svgRules build + heapProfileRules build + phonyRules "" binaryName NoProfiling build (examples configStatic) + + whenJust (profileInterval configStatic) $ \i -> do + phonyRules "profiled-" binaryName (CheapHeapProfiling i) build (examples configStatic) + + return configStatic + +disableAllPluginsBut :: (PluginId -> Bool) -> Plugin.Config +disableAllPluginsBut pred = def {Plugin.plugins = pluginsMap} where + pluginsMap = Map.fromList + [ (plugin, def { Plugin.plcGlobalOn = globalOn}) + | PluginDescriptor{pluginId = plugin} <- plugins + , let globalOn = + -- ghcide-core is required, nothing works without it + plugin == "ghcide-core" + -- document symbols is required by the benchmark suite + || plugin == "ghcide-hover-and-symbols" + || pred plugin + ] + IdePlugins plugins = idePlugins mempty + +newtype GetSamples = GetSamples () deriving newtype (Binary, Eq, Hashable, NFData, Show) +type instance RuleResult GetSamples = Natural + +-------------------------------------------------------------------------------- + +buildHls :: BuildSystem -> ProjectRoot -> OutputFolder -> Action () +buildHls Cabal root out = actionBracket + (do + projectLocalExists <- liftIO $ doesFileExist projectLocal + when projectLocalExists $ liftIO $ do + void $ tryIOError $ removeFile (projectLocal <.> "restore-after-benchmark") + renameFile projectLocal (projectLocal <.> "restore-after-benchmark") + 
liftIO $ writeFile projectLocal $ unlines + ["package haskell-language-server" + ," ghc-options: -eventlog -rtsopts" + ] + return projectLocalExists) + (\projectLocalExists -> do + removeFile projectLocal + when projectLocalExists $ + renameFile (projectLocal <.> "restore-after-benchmark") projectLocal + ) $ \_ -> command_ [Cwd root] "cabal" + ["install" + ,"haskell-language-server:exe:haskell-language-server" + ,"--installdir=" ++ out + ,"--install-method=copy" + ,"--overwrite-policy=always" + ] + where + projectLocal = root "cabal.project.local" + +buildHls Stack root out = + command_ [Cwd root] "stack" + ["--local-bin-path=" <> out + ,"build" + ,"haskell-language-server:haskell-language-server" + ,"--copy-bins" + ,"--ghc-options=-rtsopts" + ,"--ghc-options=-eventlog" + ] + +benchHls + :: Natural -> BuildSystem -> [CmdOption] -> BenchProject Example -> Action () +benchHls samples buildSystem args BenchProject{..} = do + command_ ([StdinBS configuration] ++ args) "ghcide-bench" $ + [ "--timeout=300", + "--no-clean", + "-v", + "--samples=" <> show samples, + "--csv=" <> outcsv, + "--ghcide=" <> exePath, + "--select", + unescaped (unescapeExperiment experiment), + "--lsp-config" + ] ++ + exampleToOptions example exeExtraArgs ++ + [ "--stack" | Stack == buildSystem + ] + +warmupHls :: BuildSystem -> FilePath -> [CmdOption] -> Example -> Action () +warmupHls buildSystem exePath args example = do + command args "ghcide-bench" $ + [ "--no-clean", + "-v", + "--samples=1", + "--ghcide=" <> exePath, + "--select=hover" + ] ++ + exampleToOptions example [] ++ + [ "--stack" | Stack == buildSystem + ] + +-------------------------------------------------------------------------------- +data ConfigurationDescriptor = ConfigurationDescriptor + { confName :: String + , confPlugins :: [PluginId] + } + deriving Show + +instance FromJSON ConfigurationDescriptor where + parseJSON (String s) = pure $ ConfigurationDescriptor (unpack s) [PluginId s] + parseJSON o@Object{} = do + let keymap = o ^. _Object + matchKey = preview _String . 
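+          -- aeson 1/2 compatibility: 'toJSON' turns the object key into a JSON
+          -- String and 'preview _String' extracts the Text, so the same code works
+          -- with aeson-1 Text keys and aeson-2 'Key's.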
toJSON + case toList keymap of + -- excuse the aeson 2.0 compatibility hack + [(matchKey -> Just name, Array values)] -> do + pluginIds <- traverse parseJSON values + pure $ ConfigurationDescriptor (unpack name) (map PluginId $ toList pluginIds) + other -> fail $ "Expected object with name and array of plugin ids: " <> show other + parseJSON _ = fail "Expected plugin id or object with name and array of plugin ids" diff --git a/bench/MultiLayerModules.sh b/bench/MultiLayerModules.sh new file mode 100755 index 0000000000..38d85ce9ed --- /dev/null +++ b/bench/MultiLayerModules.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# Generate $DEPTH layers of modules with $WIDTH modules on each layer +# Every module on layer N imports all the modules on layer N-1 +# MultiLayerModules.hs imports all the modules from the last layer +DEPTH=15 +WIDTH=40 +cat >hie.yaml << EOF +cradle: + direct: + arguments: +EOF +for i in $(seq -w 1 $WIDTH); do + echo "module DummyLevel0M$i where" > DummyLevel0M$i.hs; + echo " - DummyLevel0M$i.hs" >> hie.yaml; +done +for l in $(seq 1 $DEPTH); do + for i in $(seq -w 1 $WIDTH); do + echo "module DummyLevel${l}M$i where" > DummyLevel${l}M$i.hs; + echo " - DummyLevel${l}M$i.hs" >> hie.yaml; + for j in $(seq -w 1 $WIDTH); do + echo "import DummyLevel$((l-1))M$j" >> DummyLevel${l}M$i.hs; + done + done +done +case "$1" in + '--th') + echo "{-# LANGUAGE TemplateHaskell #-}" > MultiLayerModules.hs + ;; +esac +echo "module MultiLayerModules where" >> MultiLayerModules.hs + echo " - MultiLayerModules.hs" >> hie.yaml; +for j in $(seq -w 1 $WIDTH); do + echo "import DummyLevel${DEPTH}M$j" >> MultiLayerModules.hs; +done diff --git a/bench/README.md b/bench/README.md new file mode 100644 index 0000000000..1dc1e6a3d4 --- /dev/null +++ b/bench/README.md @@ -0,0 +1,70 @@ + +# Benchmarks + +This folder contains a Shake script to simplify the performance analysis of HLS. +It drives the `ghcide-bench` benchmark suite over a set of commits and experiments. +To run it, use `cabal bench`. +To configure it, edit `bench/config.yaml`. +By default it compares HEAD with "origin/master" + +# Examples and experiments + +The benchmark suites runs a set of experiments (hover, completion, edit, etc.) +over all the defined examples (currently Cabal and lsp-types). Examples are defined +in `bench/config.yaml` whereas experiments are coded in `ghcide-bench/src/Experiments.hs`. + +# Phony targets + +The Shake script supports a number of phony targets that allow running a subset of the benchmarks: + +* all +: runs all the examples, unprofiled + +* profiled-all +: runs all the examples with heap profiling, assuming `profilingInterval` is defined + +* Cabal-3.0.0.0 +: runs the Cabal example, unprofiled + +* profiled-Cabal-3.0.0.0 +: runs the Cabal example, with heap profiling + +* all-binaries +: build all the HLS binaries for each of the versions under analysis + +* etc + +`--help` lists all the phony targets. 
Invoke it with: + + cabal bench --benchmark-options="--help" + +``` +Targets: + - bench-results/binaries/*/commitid + - bench-results/binaries/HEAD/ghcide + - bench-results/binaries/HEAD/ghc.path + - bench-results/binaries/*/ghcide + - bench-results/binaries/*/ghc.path + - bench-results/binaries/*/*.warmup + - bench-results/*/*/*/*.csv + - bench-results/*/*/*/*.gcStats.log + - bench-results/*/*/*/*.output.log + - bench-results/*/*/*/*.eventlog + - bench-results/*/*/*/*.hp + - bench-results/*/*/*/results.csv + - bench-results/*/*/results.csv + - bench-results/*/results.csv + - bench-results/*/*/*/resultDiff.csv + - bench-results/*/*/resultDiff.csv + - bench-results/*/resultDiff.csv + - bench-results/*/*/*/*.svg + - bench-results/*/*/*/*.diff.svg + - bench-results/*/*/*.svg + - bench-results/*/*/*/*.heap.svg + - Cabal-3.0.0.0 + - lsp-types-1.0.0.1 + - all + - profiled-Cabal-3.0.0.0 + - profiled-lsp-types-1.0.0.1 + - profiled-all + ``` diff --git a/bench/config.yaml b/bench/config.yaml new file mode 100644 index 0000000000..18211f4f24 --- /dev/null +++ b/bench/config.yaml @@ -0,0 +1,229 @@ +# The number of samples to run per experiment. +# At least 100 is recommended in order to observe space leaks +samples: 50 + +buildTool: cabal + +# Output folder for the experiments +outputFolder: bench-results + +# Heap profile interval in seconds (+RTS -i) +# Comment out to disable heap profiling +profileInterval: 1 + +# Number of concurrent benchmark and warmup runs +parallelism: 1 + +# Example project used to run the experiments +# Can either be a Hackage package (name,version) +# or a local project (path) with a valid `hie.yaml` file +examples: + # Medium-sized project without TH + - name: cabal + package: Cabal + version: 3.10.2.1 + modules: + - src/Distribution/Simple.hs + - src/Distribution/Types/ComponentLocalBuildInfo.hs + extra-args: [] # extra HLS command line args + # Small-sized project with TH + - name: lsp-types + package: lsp-types + version: 2.1.1.0 + modules: + - src/Language/LSP/Protocol/Types/SemanticTokens.hs + - generated/Language/LSP/Protocol/Internal/Types/NotebookDocumentChangeEvent.hs + + - name: MultiLayerModules + path: bench/MultiLayerModules.sh + script: True + script-args: ["--th"] + modules: + - MultiLayerModules.hs + - DummyLevel0M01.hs + - DummyLevel1M01.hs + - name: MultiLayerModulesNoTH + path: bench/MultiLayerModules.sh + script: True + script-args: [] + modules: + - MultiLayerModules.hs + - DummyLevel0M01.hs + - DummyLevel1M01.hs + + - name: DummyLevel0M01 + path: bench/MultiLayerModules.sh + script: True + script-args: ["--th"] + modules: + - DummyLevel0M01.hs + - name: DummyLevel0M01NoTH + path: bench/MultiLayerModules.sh + script: True + script-args: [] + modules: + - DummyLevel0M01.hs + + - name: DummyLevel1M01 + path: bench/MultiLayerModules.sh + script: True + script-args: ["--th"] + modules: + - DummyLevel1M01.hs + - name: DummyLevel1M01NoTH + path: bench/MultiLayerModules.sh + script: True + script-args: [] + modules: + - DummyLevel1M01.hs + + # Small but heavily multi-component example + # Disabled as it is far to slow. 
hie-bios >0.7.2 should help + # - name: HLS + # path: bench/example/HLS + # modules: + # - hls-plugin-api/src/Ide/Plugin/Config.hs + # - ghcide/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs + # - ghcide/bench/hist/Main.hs + # - ghcide/bench/lib/Experiments/Types.hs + # - ghcide/test/exe/Main.hs + # - exe/Plugins.hs + +# The set of experiments to execute +experiments: + - "edit-header" + - "edit" + - "hover" + - "semanticTokens" + - "hover after edit" + # - "hover after cradle edit" + - "getDefinition" + - "getDefinition after edit" + - "completions" + - "completions after edit" + - "code actions" + - "code actions after edit" + - "code actions after cradle edit" + - "documentSymbols after edit" + - "hole fit suggestions" + - "eval execute single-line code lens" + - "eval execute multi-line code lens" + +# An ordered list of versions to analyze +versions: +# A version can be defined briefly: +# - +# - +# - + +# Or in extended form, where all the fields are optional: +# - : +# git: +# include: true # whether to include in comparison graphs +# parent: # version to compare with in .diff graphs + + +# - 1.8.0.0 +- upstream: origin/master +# - HEAD~1 +- HEAD + +# A list of plugin configurations to analyze +# WARNING: Currently bench versions later than e4234a3a5e347db249fccefb8e3fb36f89e8eafb +# will be unable to send plugin configurations to earlier HLS versions. This causes +# all plugins in those versions to always be enabled. +# In addition bench proactively disables all plugins it knows about besides the +# ones in the following list. However because it can only disable plugins it +# knows about, any plugins that are in old versions but were removed from HLS +# before the current bench will not be disabled. +configurations: +# A configuration contains one or more plugins: +# - ConfigurationName: +# - plugin1 +# - plugin2 +# +# There is short-hand notation for defining singleton configurations. 
+# Simply give the plugin name top level: +# - plugin1 +# +# Some plugins are implicitly included since they are required by the benchmark driver: +# The implicitly included plugins are: +# - ghcide-core +# - ghcide-hover-and-symbols + +# Uncomment below sections if needed +# - None: [] +# - Core: +# - callHierarchy +# - codeRange +# - eval +# - ghcide-code-actions-bindings +# - ghcide-code-actions-fill-holes +# - ghcide-code-actions-imports-exports +# - ghcide-code-actions-type-signatures +# - ghcide-completions +# - ghcide-type-lenses +# - pragmas +# - Ghcide: +# - ghcide-completions +# - ghcide-type-lenses +# - Refactor: +# - ghcide-code-actions-bindings +# - ghcide-code-actions-fill-holes +# - ghcide-code-actions-imports-exports +# - ghcide-code-actions-type-signatures +- All: + - alternateNumberFormat + - callHierarchy + - changeTypeSignature + - class + - codeRange + - eval + - explicitFixity + - floskell + - fourmolu + - gadt + - ghcide-code-actions-bindings + - ghcide-code-actions-fill-holes + - ghcide-code-actions-imports-exports + - ghcide-code-actions-type-signatures + - ghcide-completions + - ghcide-type-lenses + - hlint + - importLens + - moduleName + - ormolu + - pragmas + - qualifyImportedNames + - rename + - stylish-haskell + - semanticTokens +# - alternateNumberFormat +# - callHierarchy +# - changeTypeSignature +# - class +# - codeRange +# - eval +# - explicitFixity +# # - floskell +# # - fourmolu +# - gadt +# - ghcide-code-actions-bindings +# - ghcide-code-actions-fill-holes +# - ghcide-code-actions-imports-exports +# - ghcide-code-actions-type-signatures +# - ghcide-completions +# # - ghcide-core # implicitly included in all configurations +# # - ghcide-hover-and-symbols # implicitly included in all configurations +# - ghcide-type-lenses +# - hlint +# - importLens +# - moduleName +# # - ormolu +# - pragmas +# - qualifyImportedNames +# - rename +# - retrie +# - splice +# - stan +# # - stylish-haskell diff --git a/bindist/GNUmakefile.in b/bindist/GNUmakefile.in new file mode 100644 index 0000000000..aec8480fcf --- /dev/null +++ b/bindist/GNUmakefile.in @@ -0,0 +1,38 @@ +DESTDIR ?= +PREFIX ?= /usr/local +LIBDIR ?= $(PREFIX)/lib +BINDIR ?= $(PREFIX)/bin + +HLS_VERSION := @@HLS_VERSION@@ + +FIND := find +INSTALL := install +INSTALL_D := $(INSTALL) -d +INSTALL_X := $(INSTALL) -vm 755 +SED := sed +CHMOD := chmod +CHMOD_X := $(CHMOD) 755 +LN := ln +LN_S := $(LN) -sf + +install: + $(INSTALL_D) "$(DESTDIR)$(BINDIR)" + $(INSTALL_D) "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/lib" + $(INSTALL_D) "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/bin" + $(FIND) lib -mindepth 2 -type f -exec sh -c '$(INSTALL_D) "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/`dirname $$1`" && $(INSTALL_X) "$$1" "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/$$1"' sh '{}' \; + for b in $(wildcard bin/*) ; do \ + $(INSTALL_D) "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/" && \ + $(INSTALL_X) "$$b" "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/$$b" ; \ + done + for h in $(wildcard haskell-language-server-*.in) ; do \ + $(SED) -e "s#@@EXE_DIR@@#$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/bin/#" \ + "$$h" > "$(DESTDIR)$(BINDIR)/$${h%.in}" && \ + $(CHMOD_X) "$(DESTDIR)$(BINDIR)/$${h%.in}" ; \ + done + $(LN_S) "`scripts/relpath.sh "$(DESTDIR)$(BINDIR)" "$(DESTDIR)$(LIBDIR)/haskell-language-server-$(HLS_VERSION)/bin/haskell-language-server-wrapper"`" \ + "$(DESTDIR)$(BINDIR)/haskell-language-server-wrapper" + +version: + @echo 
"$(HLS_VERSION)" + +.PHONY: install diff --git a/bindist/relpath.sh b/bindist/relpath.sh new file mode 100755 index 0000000000..3385155e97 --- /dev/null +++ b/bindist/relpath.sh @@ -0,0 +1,105 @@ +#!/bin/sh + +# POSIX shell implementation of `realpath --relative-to=$1 $2. +# This is an adaptation of the implementation from +# . + +# returns relative path to $2=$target from $1=$source +## NOTE : path are compared in text only. They don’t have to exist +## and they WONT be normalized/escaped +## Result in "$return_value"# both $1 and $2 are absolute paths beginning with / + +# @FUNCTION: die +# @USAGE: [msg] +# @DESCRIPTION: +# Exits the shell script with status code 2 +# and prints the given message in red to STDERR, if any. +die() { + (>&2 echo "$1") + exit 2 +} + +# @FUNCTION: posix_realpath +# @USAGE: +# @DESCRIPTION: +# Portably gets the realpath and prints it to stdout. +# This was initially inspired by +# https://gist.github.com/tvlooy/cbfbdb111a4ebad8b93e +# and +# https://stackoverflow.com/a/246128 +# +# If the file does not exist, just prints it appended to the current directory. +# @STDOUT: realpath of the given file +posix_realpath() { + [ -z "$1" ] && die "Internal error: no argument given to posix_realpath" + current_loop=0 + max_loops=50 + mysource=$1 + + while [ -h "${mysource}" ]; do + current_loop=$((current_loop+1)) + mydir="$( cd -P "$( dirname "${mysource}" )" > /dev/null 2>&1 && pwd )" + mysource="$(readlink "${mysource}")" + [ "${mysource%${mysource#?}}"x != '/x' ] && mysource="${mydir}/${mysource}" + + if [ ${current_loop} -gt ${max_loops} ] ; then + (>&2 echo "${1}: Too many levels of symbolic links") + exit 1 + fi + done + mydir="$( cd -P "$( dirname "${mysource}" )" > /dev/null 2>&1 && pwd )" + + # TODO: better distinguish between "does not exist" and "permission denied" + if [ -z "${mydir}" ] ; then + (>&2 echo "${1}: Permission denied") + echo "$(pwd)/$1" + else + echo "${mydir%/}/$(basename "${mysource}")" + fi + + unset current_loop max_loops mysource mydir +} + + +src="$(posix_realpath $1)" || exit 1 +target="$(posix_realpath $2)" || exit 1 + +common_part="$src" +result="" + +while test "${target#$common_part}" = "${target}" ; do + #echo "common_part is now : \"$common_part\"" + #echo "result is now : \"$result\"" + #echo "target#common_part : \"${target#$common_part}\"" + # no match, means that candidate common part is not correct + # go up one level (reduce common part) + common_part="$(dirname "$common_part")" + # and record that we went back + if test -z "$result" ; then + result=".." 
+ else + result="../$result" + fi +done + +#echo "common_part is : \"$common_part\"" + +if test "$common_part" = "/" ; then + # special case for root (no common path) + result="$result/" +fi + +# since we now have identified the common part, +# compute the non-common part +forward_part="${target#$common_part}" +#echo "forward_part = \"$forward_part\"" + +if test -n "$result" && test -n "$forward_part" ; then + #echo "(simple concat)" + result="$result$forward_part" +elif test -n "$forward_part" ; then + #echo "(concat with slash removal)" + result="$(printf "%s" "$forward_part" | cut -c 1-)" +fi + +printf "%s" "$result" diff --git a/bindist/wrapper.in b/bindist/wrapper.in new file mode 100644 index 0000000000..bb2affcf42 --- /dev/null +++ b/bindist/wrapper.in @@ -0,0 +1,186 @@ +#!/bin/sh + +exedir="@@EXE_DIR@@" +executablename="@@EXE_NAME@@" +GHC_VERSION="@@GHC_VERSION@@" + +# This space separated list contains the names and versions of the boot libraries used to compile hls. +BOOT_PKGS="@@BOOT_PKGS@@" +# This space separated list contains the ABI hashes of the pkgs in BOOT_PKGS at compiletime. +ABI_HASHES="@@ABI_HASHES@@" + +debug_msg() { + if [ -n "$HLS_WRAPPER_DEBUG" ] ; then + (>&2 printf "\\033[0;34m%s\\033[0m\\n" "$1") + fi +} + +err_msg() { + if [ -n "$HLS_WRAPPER_DEBUG" ] ; then + (>&2 printf "\\033[0;31m%s\\033[0m\\n" "$1") + elif [ -n "$HLS_WRAPPER_VERBOSE" ] ; then + (>&2 printf "\\033[0;31m%s\\033[0m\\n" "$1") + fi +} + +instruction_msg() { + (>&2 printf "\\033[0;35m%s\\033[0m\\n" "$1") +} + +err_exit() { + msg="Couldn't find a working/matching GHC installation. Consider installing ghc-${GHC_VERSION} via ghcup or build HLS from source." + # adjust Content-Length when changing json + json="{\"jsonrpc\":\"2.0\", \"method\":\"window/showMessage\", \"params\": {\"type\": 1, \"message\": \"${msg}\"}}" + printf "%s\r\n" "Content-Length: 203" + printf "%s\r\n" + printf "%s" "${json}" + unset msg json +} + +err_ghc_pkg() { + err_msg "Could not find a ghc-pkg binary (found: $1)!" +} + +err_abi() { + err_msg "GHC ABIs don't match!" + err_msg "" + err_msg "Expected: ${ABI_HASHES}" + err_msg "Got: $1" +} + +err_ver() { + err_msg "GHC versions don't match!" + err_msg "" + err_msg "Expected: ${GHC_VERSION}" + err_msg "Got: $1" +} + +# Check the version of GHC and the ABI. +check_ghc() { + { [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ] ;} && debug_msg "internal error: not enough arguments to check_ghc: 1:$1,2:$2,3:$3" && return 4 + + check_ghc_libdir=$1 + check_ghc_bin=$2 + GHC_PKG=$3 + check_ghc_ver="$("${check_ghc_bin}" --numeric-version 2>/dev/null)" + + # check version + if [ "${check_ghc_ver}" = "${GHC_VERSION}" ] ; then + # check for all packages listed in BOOT_PKGS that they are present with the same ABI hash as at hls-compiletime to prevent linking issues. 
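+    # Use the inferred ghc-pkg if it can be run; otherwise fall back to the
+    # versioned ghc-pkg-${GHC_VERSION}.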
+ if "${GHC_PKG}" --version >/dev/null ; then + : + elif "${GHC_PKG}-${GHC_VERSION}" --version >/dev/null ; then + GHC_PKG=${GHC_PKG}-${GHC_VERSION} + else + err_ghc_pkg "${GHC_PKG}" + unset GHC_LIBDIR + return 1 + fi + PKGCONF="${check_ghc_libdir}/package.conf.d" + MY_ABI_HASHES="$(for dep in ${BOOT_PKGS} ; do printf "%s:" "${dep}" && "${GHC_PKG}" --global --global-package-db "$PKGCONF" field "${dep}" abi --simple-output ; done | tr '\n' ' ' | xargs)" + if [ "${ABI_HASHES}" != "${MY_ABI_HASHES}" ] ; then + err_abi "${MY_ABI_HASHES}" + return 3 + fi + unset PKGCONF + else + err_ver "${check_ghc_ver}" + unset GHC_LIBDIR + return 2 + fi + + unset check_ghc_libdir check_ghc_bindir GHC_PKG check_ghc_ver +} + +# Infer ghc-pkg from the given ghc path. Doesn't check for existence of any +# components. +infer_ghc_pkg() { + infer_ghc_path=$1 + infer_ghc_bin=${infer_ghc_path##**/} + infer_ghc_ver_suffix=${infer_ghc_bin#ghc} + path_prefix="$(dirname "${infer_ghc_path}")" + + if [ "${path_prefix}" = "." ] ; then + echo "ghc-pkg${infer_ghc_ver_suffix}" + elif [ "${path_prefix}" = "/" ] ; then + echo "${path_prefix}ghc-pkg${infer_ghc_ver_suffix}" + else + echo "${path_prefix}/ghc-pkg${infer_ghc_ver_suffix}" + fi + unset infer_ghc_path infer_ghc_bin infer_ghc_ver_suffix path_prefix +} + +# try GHC_LIBDIR from the environment (e.g. user set it, or haskell-language-server-wrapper) +if [ -n "${GHC_LIBDIR}" ] && + [ -n "${GHC_BIN}" ] && + { debug_msg "Trying method: GHC_LIBDIR and GHC_BIN from env" ; HLS_WRAPPER_VERBOSE=1 ; check_ghc "${GHC_LIBDIR}" "${GHC_BIN}" "$(infer_ghc_pkg "${GHC_BIN}")" || { err_exit ; exit 1 ; } ; } +then + : +# try GHC_BIN from the environment (e.g. user set it) +elif [ -n "${GHC_BIN}" ] && + GHC_LIBDIR="$("${GHC_BIN}" --print-libdir)" && + { debug_msg "Trying method: GHC_BIN from env" ; HLS_WRAPPER_VERBOSE=1 ; check_ghc "${GHC_LIBDIR}" "${GHC_BIN}" "$(infer_ghc_pkg "${GHC_BIN}")" || { err_exit ; exit 2 ; } ; } +then + : +# try ghcup +elif command -v ghcup >/dev/null && + GHC_BIN="$(ghcup whereis ghc "${GHC_VERSION}")" && + GHC_LIBDIR="$("${GHC_BIN}" --print-libdir)" && + { debug_msg "Trying method: ghcup" ; check_ghc "${GHC_LIBDIR}" "${GHC_BIN}" "$(infer_ghc_pkg "${GHC_BIN}")" ; } +then + : +# try ghc-${GHC_VERSION} +elif command -v ghc-${GHC_VERSION} >/dev/null && + GHC_LIBDIR="$("ghc-${GHC_VERSION}" --print-libdir)" && + { debug_msg "Trying method: ghc-${GHC_VERSION} in PATH" ; check_ghc "${GHC_LIBDIR}" "ghc-${GHC_VERSION}" "$(infer_ghc_pkg "ghc-${GHC_VERSION}")" ; } +then + : +# try ghc +elif command -v ghc >/dev/null && + GHC_LIBDIR="$(ghc --print-libdir)" && + { debug_msg "Trying method: ghc in PATH" ; check_ghc "${GHC_LIBDIR}" "ghc" "$(infer_ghc_pkg "ghc")" ; } +then + : +# try stack +elif command -v stack >/dev/null && + GHC_BIN="$(cd "$(mktemp -d)" && stack --no-system-ghc --no-install-ghc --resolver "ghc-${GHC_VERSION}" exec sh -- -c 'command -v ghc')" && + GHC_LIBDIR="$("${GHC_BIN}" --print-libdir)" && + { debug_msg "Trying method: stack" ; check_ghc "${GHC_LIBDIR}" "${GHC_BIN}" "$(infer_ghc_pkg "${GHC_BIN}")" ; } +then + : +else + HLS_WRAPPER_VERBOSE=1 + err_msg "All methods exhausted!" + err_exit + err_msg "exiting..." 
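+  # None of the GHC discovery strategies above worked.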
+ exit 42 +fi + +debug_msg "Found GHC libdir at: ${GHC_LIBDIR}" + +case "$(uname -s)" in + "Darwin"|"darwin") + if [ -n "$DYLD_LIBRARY_PATH" ] ; then + DYLD_LIBRARY_PATH="$(for i in "${GHC_LIBDIR}"/* ; do [ -d "$i" ] && printf "%s" "$i:" ; done)$DYLD_LIBRARY_PATH" + debug_msg "Exporting DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}" + export DYLD_LIBRARY_PATH + else + DYLD_LIBRARY_PATH="$(for i in "${GHC_LIBDIR}"/* ; do [ -d "$i" ] && printf "%s" "$i:" ; done | sed 's/:$//')" + debug_msg "Exporting DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}" + export DYLD_LIBRARY_PATH + fi + ;; + *) + if [ -n "$LD_LIBRARY_PATH" ] ; then + LD_LIBRARY_PATH="$(for i in "${GHC_LIBDIR}"/* ; do [ -d "$i" ] && printf "%s" "$i:" ; done)$LD_LIBRARY_PATH" + debug_msg "Exporting LD_LIBRARY_PATH=${LD_LIBRARY_PATH}" + export LD_LIBRARY_PATH + else + LD_LIBRARY_PATH="$(for i in "${GHC_LIBDIR}"/* ; do [ -d "$i" ] && printf "%s" "$i:" ; done | sed 's/:$//')" + debug_msg "Exporting LD_LIBRARY_PATH=${LD_LIBRARY_PATH}" + export LD_LIBRARY_PATH + fi + ;; +esac + +exec "${exedir}/${executablename}" ${1+"$@"} diff --git a/cabal-hls-install b/cabal-hls-install deleted file mode 100755 index eebdee21ed..0000000000 --- a/cabal-hls-install +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -cabal v2-run ./install.hs --project-file install/shake.project $@ \ No newline at end of file diff --git a/cabal-hls-install.cmd b/cabal-hls-install.cmd deleted file mode 100755 index 866f987b87..0000000000 --- a/cabal-hls-install.cmd +++ /dev/null @@ -1 +0,0 @@ -@cabal v2-run .\install.hs --project-file=install\shake.project %* \ No newline at end of file diff --git a/cabal.project b/cabal.project index 0c4da5e3aa..8d8bd080af 100644 --- a/cabal.project +++ b/cabal.project @@ -1,15 +1,58 @@ packages: ./ - ghcide + ./shake-bench + ./hls-graph + ./ghcide + ./hls-plugin-api + ./hls-test-utils -tests: true -documentation: true -package haskell-language-server - test-show-details: direct -package ghcide - test-show-details: direct +index-state: 2025-08-08T12:31:54Z + +tests: True +test-show-details: direct + +benchmarks: True write-ghc-environment-files: never -index-state: 2020-07-16T17:24:10Z +-- Many of our tests only work single-threaded, and the only way to +-- ensure tasty runs everything purely single-threaded is to pass +-- this at the top-level +test-options: -j1 + +-- Make sure dependencies are build with haddock so we get +-- haddock shown on hover +package * + ghc-options: -haddock + +constraints: + -- C++ is hard to distribute, especially on older GHCs + -- See https://github.com/haskell/haskell-language-server/issues/3822 + text -simdutf, + ghc-check -ghc-check-use-package-abis, + ghc-lib-parser-ex -auto, + -- This is only present in some versions, and it's on by default since + -- 0.14.5.0, but there are some versions we allow that need this + -- setting + stylish-haskell +ghc-lib, + -- Centos 7 comes with an old gcc version that doesn't know about + -- the flag '-fopen-simd', which blocked the release 2.2.0.0. + -- We want to be able to benefit from the performance optimisations + -- in the future, thus: TODO: remove this flag. + bitvec -simd, + + +-- Some of the formatters need the latest Cabal-syntax version, +-- but 'cabal-install-parsers-0.6.2' only has Cabal-syntax (>=3.12.0.0 && <3.13). +-- So, we relax the upper bounds here. +-- fourmolu-0.18.0 and ormolu-0.8 depend on Cabal-syntax == 3.14.*, while +-- cabal-add depends on cabal-install-parsers. 
+allow-newer: + cabal-install-parsers:Cabal-syntax, + +if impl(ghc >= 9.11) + benchmarks: False + allow-newer: + cabal-install-parsers:base, + cabal-install-parsers:time, diff --git a/default.nix b/default.nix new file mode 100644 index 0000000000..d87fd23886 --- /dev/null +++ b/default.nix @@ -0,0 +1,11 @@ +# This file is the compt layer of flakes: https://github.com/edolstra/flake-compat +# See flake.nix for details +(import ( + let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + in fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; } +) { + src = ./.; +}).defaultNix diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000000..e35d8850c9 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ +_build diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000000..bb113155fa --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,13 @@ +# Minimal makefile for Sphinx documentation + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +.PHONY: Makefile + +html: Makefile + @$(SPHINXBUILD) -n -W "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_static/.gitkeep b/docs/_static/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/_static/theme_overrides.css b/docs/_static/theme_overrides.css new file mode 100644 index 0000000000..44a0fd7699 --- /dev/null +++ b/docs/_static/theme_overrides.css @@ -0,0 +1,6 @@ +/* Fix table wrapping https://github.com/readthedocs/sphinx_rtd_theme/issues/117 */ +@media screen and (min-width: 768px) { + .wy-table-responsive table td, .wy-table-responsive table th { + white-space: normal !important; + } +} diff --git a/docs/_templates/.gitkeep b/docs/_templates/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000000..2b51c24567 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,94 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +import re +import sys + +# -- Project information ----------------------------------------------------- + +project = 'haskell-language-server' + +# We want to take some of the metadata from the Cabal file, especially the version. +# (otherwise it's very easy to forget to update it!) 
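+# Scan the top-level haskell-language-server.cabal line by line and pick out the
+# version, copyright and author fields with the regexes below.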
+release = None +copyright = None +author = None +versionPattern = re.compile("^version:\s*([\d.]+)") +copyrightPattern = re.compile("^copyright:\s*(.+)") +authorPattern = re.compile("^author:\s*(.+)") +for i, line in enumerate(open('../haskell-language-server.cabal')): + versionMatch = re.search(versionPattern, line) + if versionMatch: + release = versionMatch.group(1) + copyrightMatch = re.search(copyrightPattern, line) + if copyrightMatch: + copyright = copyrightMatch.group(1) + authorMatch = re.search(authorPattern, line) + if authorMatch: + author = authorMatch.group(1) + +if not release: + print("Couldn't find version") + sys.exit() +if not copyright: + print("Couldn't find copyright") + sys.exit() +if not author: + print("Couldn't find author") + sys.exit() + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'myst_parser', + 'sphinx_rtd_theme', + 'sphinx.ext.autosectionlabel' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' +html_logo = "logos/logo-64.png" +html_favicon = "logos/logo.svg" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Enable linking to an anchor of a relative page +# See https://github.com/executablebooks/MyST-Parser/issues/443 +myst_heading_anchors = 3 + +# -- Custom Document processing ---------------------------------------------- + +def setup(app): + app.add_css_file("theme_overrides.css") diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 0000000000..9da816c09e --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,510 @@ +# Configuration + +## Configuring `haskell-language-server` + +Language servers like `haskell-language-server` expose most of their configuration via the client (i.e. the editor). +That means that the way in which you configure the settings will depend on the client. + +Most clients (editors) already have an opinion about how settings should be configured! +For example, in VS Code you use the graphical Settings tab or `settings.json`, whereas in Emacs you use customization variables. +In the [editor configuration section](#configuring-your-editor) we give some pointers for popular editors, but you should consult the documentation for your specific editor if you have trouble. + +However, we can say some high-level things about the kinds of configuration `haskell-language-server` uses, and how to use them. +This can sound a bit confusing, but ultimately the client should present you with these options in a user-friendly way that makes sense for that editor. 
+
+### Generic server options
+
+The LSP protocol is designed to support many useful server configuration options generically.
+These are sent to the server by the client, and can be controlled without reference to a specific language.
+
+For example, there are protocol methods for highlighting matching identifiers throughout a document.
+This is a capability that any server can implement, so the client can decide generically whether to ask the server to do it or not.
+So your editor can provide a setting to turn this on or off globally, for any language server you might use.
+
+Settings like this are typically provided by the generic LSP client support for your editor, for example in Emacs by [lsp-mode](https://github.com/emacs-lsp/lsp-mode).
+
+### Generic editor options
+
+Your editor may provide some settings that affect how the information from the language server is used.
+For example, whether popups are shown, or whether code lenses appear by default.
+
+Settings like this are typically provided by the generic LSP client support for your editor, for example in Emacs by [lsp-mode](https://github.com/emacs-lsp/lsp-mode).
+
+### Language-specific server options
+
+A specific language server can also have its own configuration options.
+These are still sent to the server by the client, but they can only be controlled by a specific client that knows about those options.
+
+For example, `haskell-language-server` allows you to choose the formatting provider which will be used for formatting Haskell source.
+This option obviously would not make sense for language servers for other languages, or even for other Haskell language servers (which need not even support formatting).
+
+Here is a list of the additional settings currently supported by `haskell-language-server`, along with their setting key (you may not need to know this) and default:
+
+- Formatting provider (`haskell.formattingProvider`, default `ormolu`): what formatter to use; one of `floskell`, `ormolu`, `fourmolu`, or `stylish-haskell`.
+- Cabal formatting provider (`haskell.cabalFormattingProvider`, default `cabal-gild`): what formatter to use for cabal files; one of `cabal-gild` or `cabal-fmt`.
+- Max completions (`haskell.maxCompletions`, default 40): maximum number of completions sent to the LSP client.
+- Check project (`haskell.checkProject`, default true): whether to typecheck the entire project on initial load. Since it is activated by default, it can lead to poor performance in large projects.
+- Check parents (`haskell.checkParents`, default `CheckOnSave`): when to typecheck reverse dependencies of a file; one of `NeverCheck`, `CheckOnSave` (means dependent/parent modules will only be checked when you save), or `AlwaysCheck` (means re-typechecking them on every change).
+- Session loading preference (`haskell.sessionLoading`, default `singleComponent`): how to load sessions; one of `singleComponent` (means always loading only a single component when a new component is discovered) or `multipleComponents` (means always preferring loading multiple components in the cradle at once). `multipleComponents` might not always be possible, if the tool doesn't support loading multiple components. The cradle can decide how to handle these situations, and whether to honour the preference at all.
+
+#### Generic plugin configuration
+
+Plugins have a generic config to control their behaviour. The schema of such config is:
+
+- `haskell.plugin.${pluginName}.globalOn`: usually with default true. Whether the plugin is enabled at runtime or not. This is the option to use if you want to disable a plugin completely.
+  - Actual plugin names are: `ghcide-code-actions-fill-holes`, `ghcide-completions`, `ghcide-hover-and-symbols`, `ghcide-type-lenses`, `ghcide-code-actions-type-signatures`, `ghcide-code-actions-bindings`, `ghcide-code-actions-imports-exports`, `eval`, `moduleName`, `pragmas`, `importLens`, `class`, `hlint`, `retrie`, `rename`, `splice`, `stan`.
+  - So to disable the import lens with an explicit list of module definitions you could set `haskell.plugin.importLens.globalOn: false`
+- `haskell.plugin.${pluginName}.${lspCapability}On`: usually with default true. Whether a concrete plugin capability is enabled.
+  - Capabilities are the different ways an LSP server can interact with the editor. The currently available capabilities of the server are: `callHierarchy`, `codeActions`, `codeLens`, `diagnostics`, `hover`, `symbols`, `completion`, `rename`.
+  - Note that plugins usually don't provide all capabilities, but only some of them or even just one.
+  - So to disable code change suggestions from the `hlint` plugin (but not its diagnostics) you could set `haskell.plugin.hlint.codeActionsOn: false`
+- Plugin specific configuration:
+  - `eval`:
+    - `haskell.plugin.eval.config.diff`, default true: When reloading haddock test results in changes, mark it with WAS/NOW.
+    - `haskell.plugin.eval.config.exception`, default false: When the command results in an exception, mark it with `*** Exception:`.
+  - `rename`:
+    - `haskell.plugin.rename.config.crossModule`, default false: Enables renaming across modules (experimental).
+  - `ghcide-completions`:
+    - `haskell.plugin.ghcide-completions.config.snippetsOn`, default true: Inserts snippets when using code completions.
+    - `haskell.plugin.ghcide-completions.config.autoExtendOn`, default true: Extends the import list automatically when completing an out-of-scope identifier.
+  - `ghcide-type-lenses`:
+    - `haskell.plugin.ghcide-type-lenses.config.mode`, default `always`: Control how type lenses are shown. One of `always`, `exported`, `diagnostics`.
+  - `hlint`:
+    - `haskell.plugin.hlint.config.flags`, default empty: List of flags used by hlint.
+  - `ormolu`:
+    - `haskell.plugin.ormolu.config.external`, default `false`: Use an external `ormolu` executable rather than the one packaged with HLS.
+  - `fourmolu`:
+    - `haskell.plugin.fourmolu.config.external`, default `false`: Use an external `fourmolu` executable rather than the one packaged with HLS.
+This configuration reference may become outdated at any time, but you can query the `haskell-language-server` executable for the configuration that is actually in effect:
+- `haskell-language-server generate-default-config`: prints the JSON configuration with all default values. It can be used as a template for your own modifications.
+- `haskell-language-server vscode-extension-schema`: prints the JSON schema used to set up the Haskell VS Code extension. It is also useful for seeing what range of values an option can take, along with a description of each.
+
+Settings like this are typically provided by the language-specific LSP client support for your editor, for example in Emacs by `lsp-haskell`.
+
+### Client options
+
+A particular client might also have some options of its own, for example to control how the server executable is started.
+
+Settings like this are typically provided by the language-specific LSP client support for your editor, for example in Emacs by `lsp-haskell`.
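+
+For reference, these options are decoded by the server into the `Config` and
+`PluginConfig` types from `Ide.Plugin.Config` (`hls-plugin-api`); the benchmark
+harness added in this PR builds such a value programmatically (see
+`disableAllPluginsBut` in `bench/Main.hs`) and feeds the encoded JSON to
+`ghcide-bench` via `--lsp-config`. Below is a minimal sketch of the same idea;
+treat the exact module and field names as taken from `bench/Main.hs` in this PR
+rather than as a stable API:
+
+```haskell
+{-# LANGUAGE OverloadedStrings #-}
+
+import           Data.Aeson                 (encode)
+import qualified Data.ByteString.Lazy.Char8 as LBS
+import           Data.Default               (def)
+import qualified Data.Map                   as Map
+import qualified Ide.Plugin.Config          as Plugin
+
+-- Disable the hlint plugin, leaving every other plugin at its defaults.
+-- This mirrors setting `haskell.plugin.hlint.globalOn: false` from the client.
+disableHlint :: Plugin.Config
+disableHlint =
+  def { Plugin.plugins = Map.fromList [("hlint", def { Plugin.plcGlobalOn = False })] }
+
+main :: IO ()
+main = LBS.putStrLn (encode disableHlint)
+```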
+ +## Configuring your project build + +`haskell-language-server` has to compile your project in order to give you diagnostics, which means that it needs to know how to do so. +This is handled under the hood by the [hie-bios](https://github.com/mpickering/hie-bios) application. +In turn, `hie-bios` needs some configuration to identify all files, GHC options, etc., needed to compile a project. + +There are several ways to provide this configuration to `hie-bios`, detailed below. + +### Implicit configuration +If no `hie.yaml` file is present, `haskell-language-server` automatically detects your `hie-bios` configuration using [implicit-hie](https://github.com/Avi-D-coder/implicit-hie). +**For most cases, this works just fine, and is the recommended way.** + +### Explicit, generated configuration +Maybe using the implicit configuration does not suit you. +E.g., it does not work, or you prefer to have explicit configuration in your project. +In that case, you can automatically generate a `hie.yaml` file, using [implicit-hie](https://github.com/Avi-D-coder/implicit-hie): + +```shell +gen-hie > hie.yaml # In the root directory of your project +``` + +### Explicit, manual configuration +Maybe using the generated `hie.yaml` file does not suit you. +E.g., it still does not work, or you want to fine-tune the configuration. + +In that case, refer to the [hie-bios explicit configuration documentation](https://github.com/haskell/hie-bios#explicit-configuration). +Keep in mind that you can start from the `hie.yaml` file generated by `implicit-hie` (see previous section) and modify it to your liking. + +#### Examples of explicit `hie.yaml` configurations + +##### Basic Stack +```yaml +cradle: + stack: +``` + +##### Basic Cabal +```yaml +cradle: + cabal: +``` + +##### Single Stack component + +```yaml +cradle: + stack: + component: "haskell-language-server:lib" +``` + +##### Single Cabal component + +```yaml +cradle: + cabal: + component: "lib:haskell-language-server" +``` + +##### Multiple Stack components + +```yaml +cradle: + stack: + - path: "./test/functional/" + component: "haskell-language-server:func-test" + - path: "./exe/Main.hs" + component: "haskell-language-server:exe:haskell-language-server" + - path: "./exe/Wrapper.hs" + component: "haskell-language-server:exe:haskell-language-server-wrapper" + - path: "./src" + component: "haskell-language-server:lib" + - path: "./ghcide/src" + component: "ghcide:lib:ghcide" + - path: "./ghcide/exe" + component: "ghcide:exe:ghcide" +``` + +##### Multiple Cabal components + +```yaml +cradle: + cabal: + - path: "./test/functional/" + component: "haskell-language-server:func-test" + - path: "./test/utils/" + component: "haskell-language-server:hls-test-utils" + - path: "./exe/Main.hs" + component: "haskell-language-server:exe:haskell-language-server" + - path: "./exe/Wrapper.hs" + component: "haskell-language-server:exe:haskell-language-server-wrapper" + - path: "./src" + component: "lib:haskell-language-server" + - path: "./ghcide/src" + component: "ghcide:lib:ghcide" + - path: "./ghcide/exe" + component: "ghcide:exe:ghcide" +``` + +##### Custom program +You can explicitly state the program which should be used to collect +the options by supplying the path to the program. It is interpreted +relative to the current working directory if it is not an absolute path. 
+ +```yaml +cradle: + bios: + program: ".hie-bios" +``` + +The complete configuration is a subset of + +```yaml +cradle: + cabal: + component: "optional component name" + stack: + component: "optional component name" + bios: + program: "program to run" + dependency-program: "optional program to run" + direct: + arguments: ["list","of","ghc","arguments"] + default: + none: + +dependencies: + - someDep +``` + +### How to show local documentation on hover + +Haskell Language Server can display Haddock documentation on hover and completions if the project and +its dependencies have been built with the `-haddock` GHC flag. + +- For cabal: + + - Add to your global config file (e.g. `~/.cabal/config`): + + ```yaml + program-default-options + ghc-options: -haddock + ``` + + - Or, for a single project, run `cabal configure --ghc-options=-haddock` + +- For stack, add to global `$STACK_ROOT\config.yaml`, or project's `stack.yaml`: + + ```yaml + ghc-options: + '$everything': -haddock + ``` + + Note that this flag will cause compilation errors if a dependency contains invalid Haddock markup, + until GHC 9.0 which [will report warnings](https://gitlab.haskell.org/ghc/ghc/-/merge_requests/2377) + instead. + + +## Configuring your editor + +Most editors provide a Haskell-specific extension that provides support for launching `haskell-language-server` and talking to it, as well as [exposing configuration options](#configuring-haskell-language-server). + +Editors typically assume that you have already installed `haskell-language-server` (see above) and that the installation script put the `haskell-language-server` and `haskell-language-server-wrapper` binaries in your `PATH` (usually `~/.local/bin` or `~/.cabal/bin` on Linux and macOS, `%APPDATA%\local\bin` or `%APPDATA%\cabal\bin` on Windows). +The exception is VS Code, which can automatically install the binaries if they are not installed already. + +### VS Code + +Install from +[the VSCode marketplace](https://marketplace.visualstudio.com/items?itemName=haskell.haskell), or manually from the repository [vscode-haskell](https://github.com/haskell/vscode-haskell). +The `haskell-language-server` and `haskell-language-server-wrapper` binaries will be automatically downloaded on an ad-hoc basis, but if you have them already installed on your PATH then it will just use them instead. + +Configuration is done via the "Haskell" section of "Settings". + +### Sublime Text + +Install [LSP](https://packagecontrol.io/packages/LSP) using [Package Control](https://packagecontrol.io/). + +Open `Preferences > Package Settings > LSP > Settings` and add the following "haskell-language-server" client configuration to the "clients" key: + +```json +{ + "clients": { + "haskell-language-server": { + "enabled": true, + "command": ["haskell-language-server-wrapper", "--lsp"], + "selector": "source.haskell" + } + } +} + +``` + +See [the Sublime Text LSP documentation](https://lsp.sublimetext.io) for information on configuring the client. In particular, you can add a "settings" key to the "haskell-language-server" setting to configure specific HLS plugins as described elsewhere in these docs. + +### [Neovim](https://neovim.io) + +Neovim provides a [native LSP implementation with a Lua framework](https://neovim.io/doc/user/lsp). 
+Plugins that streamline the setup of `haskell-language-server` using Neovim's built-in LSP framework include: + +* [haskell-tools.nvim](https://github.com/MrcJkb/haskell-tools.nvim): A plugin with a focus on Haskell tooling, including `haskell-language-server`. +* [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig): A collection of quickstart configs for various LSP servers. + - Includes a basic [`hls` configuration](https://github.com/neovim/nvim-lspconfig/blob/master/doc/server_configurations.md#hls). + +Neovim is also compatible with the [Vim plugins](#vim). + +### [Vim](https://www.vim.org) + +You can use [Coc](https://github.com/neoclide/coc.nvim), [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim) +or any other Vim Language server protocol client. +Coc is recommend since it is the only complete LSP implementation for Vim and offers snippets and floating documentation out of the box. + +#### Coc + +Follow Coc's [installation instructions](https://github.com/neoclide/coc.nvim). +Then issue `:CocConfig` and add the following to your Coc config file. + +##### Minimal Example + +```json +{ + "languageserver": { + "haskell": { + "command": "haskell-language-server-wrapper", + "args": ["--lsp"], + "rootPatterns": ["*.cabal", "stack.yaml", "cabal.project", "package.yaml", "hie.yaml"], + "filetypes": ["haskell", "lhaskell"] + } + } +} +``` + +##### Example with Settings + +```json +{ + "languageserver": { + "haskell": { + "command": "haskell-language-server-wrapper", + "args": ["--lsp"], + "rootPatterns": [ "*.cabal", "stack.yaml", "cabal.project", "package.yaml", "hie.yaml" ], + "filetypes": ["haskell", "lhaskell"], + "settings": { + "haskell": { + "checkParents": "CheckOnSave", + "checkProject": true, + "maxCompletions": 40, + "formattingProvider": "ormolu", + "plugin": { + "stan": { "globalOn": true } + } + } + } + } + } +} +``` + +#### LanguageClient-neovim + +##### vim-plug + +If you use [vim-plug](https://github.com/junegunn/vim-plug), then you can do this by e.g., +including the following line in the Plug section of your `init.vim` or `~/.vimrc`: + +```text +Plug 'autozimu/LanguageClient-neovim', { + \ 'branch': 'next', + \ 'do': 'bash install.sh' + \ } +``` + +and issuing a `:PlugInstall` command within Neovim or Vim. + +##### Clone the LanguageClient-neovim repo + +As an alternative to using [vim-plug](https://github.com/junegunn/vim-plug) shown above, clone [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim) +into `~/.vim/pack/XXX/start/`, where `XXX` is just a name for your "plugin suite". 
+ +##### Configuration and sample `~/.vimrc` sections + +```vim +set rtp+=~/.vim/pack/XXX/start/LanguageClient-neovim +let g:LanguageClient_serverCommands = { 'haskell': ['haskell-language-server-wrapper', '--lsp'] } +``` + +You'll probably want to add some mappings for common commands: + +```vim +nnoremap :call LanguageClient_contextMenu() +map lk :call LanguageClient#textDocument_hover() +map lg :call LanguageClient#textDocument_definition() +map lr :call LanguageClient#textDocument_rename() +map lf :call LanguageClient#textDocument_formatting() +map lb :call LanguageClient#textDocument_references() +map la :call LanguageClient#textDocument_codeAction() +map ls :call LanguageClient#textDocument_documentSymbol() +``` + +Use Ctrl+xCtrl+o (``) to open up the auto-complete menu, +or for asynchronous auto-completion, follow the setup instructions on +[LanguageClient](https://github.com/autozimu/LanguageClient-neovim). + +If you'd like diagnostics to be highlighted, add a highlight group for `ALEError`/`ALEWarning`/`ALEInfo`, +or customize `g:LanguageClient_diagnosticsDisplay`: + +```vim +hi link ALEError Error +hi Warning term=underline cterm=underline ctermfg=Yellow gui=undercurl guisp=Gold +hi link ALEWarning Warning +hi link ALEInfo SpellCap +``` + +If you're finding that the server isn't starting at the correct project root, +it may also be helpful to also specify root markers: + +```vim +let g:LanguageClient_rootMarkers = ['*.cabal', 'stack.yaml'] +``` + +Further configuration can be done by pointing the `g:LanguageClient_settingsPath` [option](https://github.com/autozimu/LanguageClient-neovim/blob/0e5c9546bfddbaa2b01e5056389c25aefc8bf989/doc/LanguageClient.txt#L221) +variable to the file in which you want to keep your LSP settings. + +### Atom + +Install the two Atom packages [atom-ide-ui](https://atom.io/packages/atom-ide-ui) and [haskell](https://atom.io/packages/haskell), + +```bash +$ apm install language-haskell atom-ide-ui haskell +``` + +### [Emacs](https://www.gnu.org/software/emacs/) + +Emacs support can be provided by different combinations of packages: + +- [eglot](https://github.com/joaotavora/eglot) (built-in from Emacs 29 onwards) + +or + +- [lsp-mode](https://github.com/emacs-lsp/lsp-mode), + [lsp-ui](https://github.com/emacs-lsp/lsp-ui) and + [lsp-haskell](https://github.com/emacs-lsp/lsp-haskell) + +You can install these manually if you are using plain Emacs; instructions for some specific flavours +are included below. + +Make sure to check the READMEs of each of these packages, which explain how to configure the +various parts of the Emacs integration. +In particular, `lsp-haskell` provides customization options for the `haskell-language-server`-specific parts, +such as the path to the server executable. + +#### [use-package](https://github.com/jwiegley/use-package) [eglot](https://github.com/joaotavora/eglot) + +If you are using vanilla emacs with `use-package`, put the following into your `~/.emacs`. +This will install `eglot` and enable it by default in `haskell-mode`. +To configure `haskell-language-server` we use the `eglot-workspace-configuration` variable. +With `M-x eglot-show-workspace-configuration` you can see the JSON that `eglot` will send to `haskell-language-server`. +See for more information. 
+As an example, the setting below will disable the `stan` plugin. + +```emacs-lisp +(use-package eglot + :ensure t + :config + (add-hook 'haskell-mode-hook 'eglot-ensure) + :config + (setq-default eglot-workspace-configuration + '((haskell + (plugin + (stan + (globalOn . :json-false)))))) ;; disable stan + :custom + (eglot-autoshutdown t) ;; shutdown language server after closing last file + (eglot-confirm-server-initiated-edits nil) ;; allow edits without confirmation + ) +``` + +#### [doom-emacs](https://github.com/hlissner/doom-emacs/tree/develop/modules/lang/haskell#module-flags) + +Manual installation of packages is not required. +Enable the lsp module and the haskell lang module with lsp flag in `.doom.d/init.el`: + +``` emacs-lisp +:tools +lsp +;; ... +:lang +(haskell +lsp) +``` + +then do `$HOME/.emacs.d/bin/doom sync` + +#### [Spacemacs](https://github.com/syl20bnr/spacemacs) + +Manual installation of packages is not required. +Enable the `haskell` layer and the `lsp` layer in your Spacemacs config file: + +```emacs-lisp +dotspacemacs-configuration-layers + '( + haskell + lsp + ;; ... + ) +``` + +### [Kakoune](https://github.com/mawww/kakoune) + +1. Grab a copy of [kak-lsp](https://github.com/ul/kak-lsp), and follow the setup instructions. +2. Point your `kak-lsp.toml` to `haskell-language-server-wrapper`. + +```toml +[language.haskell] +filetypes = ["haskell"] +roots = ["Setup.hs", "stack.yaml", "*.cabal"] +command = "haskell-language-server-wrapper" +args = ["--lsp"] +``` + +### [Helix](https://github.com/helix-editor/helix) + +Once `haskell-language-server-wrapper` is installed in your system, it will be used automatically by the editor. +For more details please refer to the [helix guide on installing language servers](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers) diff --git a/docs/contributing/contributing.md b/docs/contributing/contributing.md new file mode 100644 index 0000000000..08ad21f12e --- /dev/null +++ b/docs/contributing/contributing.md @@ -0,0 +1,254 @@ +# Contributing guidelines + +The Haskell tooling dream is near, we need your help! + +## How to contact the Haskell Language Server (HLS) team + +- Join the [haskell-language-server channel](https://matrix.to/#/#haskell-language-server:matrix.org) on [matrix](https://matrix.org/) (primary communication channel). +- Join [our IRC channel](https://web.libera.chat/?channels=#haskell-language-server) at `#haskell-language-server` on [`libera`](https://libera.chat/) (secondary communication channel - all messages in this IRC channel are automatically bridged to the Matrix channel). +- Visit [the project GitHub repo](https://github.com/haskell/haskell-language-server) to view the source code, or open issues or pull requests. + +## Building + +Clone the repository: +```shell +$ git clone https://github.com/haskell/haskell-language-server +``` + +The project can then be built with both `cabal build` and `stack build`. 
+
+### Building with Cabal
+
+```shell
+# If you have not run `cabal update` in a while
+$ cabal update
+# Then
+$ cabal build
+```
+
+### Building with Stack
+
+```shell
+$ stack build
+```
+
+### Building with Nix
+
+The instructions below show how to set up a Cachix binary cache and open a Nix shell for local development.
+
+```shell
+$ cachix use haskell-language-server
+$ nix-shell
+$ cabal update
+$ cabal build
+```
+
+#### Flakes support
+
+If you are using Nix 2.4 style commands (enabled by `experimental-features = nix-command`),
+you can use `nix develop` instead of `nix-shell` to enter the development shell. To enter the shell with specific GHC versions:
+
+* `nix develop` - default GHC version,
+* `nix develop .#shell-ghc90` - GHC 9.0.1 (substitute GHC version as appropriate).
+
+If you are looking for a Nix expression to create `haskell-language-server` binaries, see https://github.com/haskell/haskell-language-server/issues/122
+
+## Testing
+
+The tests make use of the [Tasty](https://github.com/feuerbach/tasty) test framework.
+
+There are two test suites in the main `haskell-language-server` package: functional tests and wrapper tests.
+Some of the wrapper tests expect `stack` to be present on the system, or else they fail.
+Other project packages, like the core library or plugins, can have their own test suite.
+
+### Testing with Cabal
+
+Running all the tests
+
+```bash
+$ cabal test
+```
+
+Running just the functional tests
+
+```bash
+$ cabal test func-test
+```
+
+Running just the wrapper tests
+
+```bash
+$ cabal test wrapper-test
+```
+
+Running just the tests for a specific plugin
+
+```bash
+$ cabal test hls-<plugin-name>-plugin-tests
+# E.g.
+$ cabal test hls-refactor-plugin-tests
+```
+
+Running a subset of tests
+
+Tasty supports providing [patterns](https://github.com/feuerbach/tasty#patterns) as command line arguments to select the specific tests to run.
+
+```bash
+$ cabal test func-test --test-option "-p hlint"
+```
+
+The above recompiles everything every time you use a different test option, though.
+An alternative, which only recompiles when tests (or dependencies) change, is to pass the `TASTY_PATTERN` environment variable:
+
+```bash
+$ TASTY_PATTERN='hlint' cabal test func-test
+```
+
+## Using HLS on HLS code
+
+Refer to the [HLS project configuration guidelines](../configuration.md#configuring-your-project-build) as they also apply to the HLS project itself.
+
+Note: HLS implicitly detects the HLS codebase as a Stack project (since there is a `stack.yaml` file).
+If you want HLS to use Cabal, create this `hie.yaml` file at the root of the project:
+
+```yaml
+cradle:
+  cabal:
+```
+
+## Manually testing your hacked HLS
+If you want to test HLS while hacking on it (you can even test it on the HLS codebase itself, see the previous section), you need to:
+
+1. (Once) Find the path to the hacked HLS you built
+2. (Once) Configure your editor to use it
+3. (Every time you change the HLS code) Rebuild HLS
+4. (Every time you change the HLS code) Restart the LSP workspace
+
+### Find the path to your HLS build
+Note that unless you change the GHC version or the HLS version between builds, the path should remain the same, which is why you only need to set it once.
+
+#### Using Cabal
+Run:
+```shell
+$ cabal build exe:haskell-language-server && cabal list-bin exe:haskell-language-server
+[..]
+/haskell-language-server
+```
+
+#### Using Stack
+Run:
+```shell
+$ echo $(pwd)/$(stack path --dist-dir)/build/haskell-language-server/haskell-language-server
+[..]
+/haskell-language-server
+```
+
+### Configuring your editor to use your HLS build
+
+#### Configuring VS Code
+When using VS Code, you can set up each project to use a specific HLS executable:
+
+- If it doesn't already exist in your project directory, create a directory called `.vscode`.
+- In the `.vscode` directory, create a file called `settings.json` with the contents below.
+```json
+{
+  "haskell.serverExecutablePath": "/path/to/your/hacked/haskell-language-server"
+}
+```
+
+#### Configuring Emacs
+There are several ways to configure the HLS server path:
+- Run `M-x customize-group`, choose `lsp-haskell`, and set the `Lsp Haskell Server Path` option
+- Evaluate `(setq lsp-haskell-server-path "/path/to/your/hacked/haskell-language-server")`
+- Create a file `.dir-locals.el` with the following content:
+```lisp
+((haskell-mode . ((lsp-haskell-server-path . "/path/to/your/hacked/haskell-language-server"))))
+```
+
+### Rebuild HLS
+- With Stack: `stack build haskell-language-server:exe:haskell-language-server`
+- With Cabal: `cabal build exe:haskell-language-server`
+
+### Restart the LSP workspace
+
+- With VS Code: Press `Ctrl + Shift + p` and type `Haskell: Restart Haskell LSP Server`
+- With Emacs: `M-x lsp-workspace-restart`
+
+## Style guidelines
+
+The project includes a [`.editorconfig`](https://editorconfig.org) [file](https://github.com/haskell/haskell-language-server/blob/master/.editorconfig) with the basic editor settings used by the project.
+However, most editors will need some action to honour those settings automatically.
+For example, VS Code needs a specific [extension](https://marketplace.visualstudio.com/items?itemName=EditorConfig.EditorConfig) to be installed.
+Please try to follow those basic settings to keep the codebase as uniform as possible.
+
+### Formatter pre-commit hook
+
+We are using [pre-commit](https://pre-commit.com/) to configure the git pre-commit hook for formatting. Although it is possible to format code manually, we recommend using the pre-commit hook, as our CI checks whether the hook was applied.
+
+If you are using Nix or Gitpod, the pre-commit hook is automatically installed. Otherwise, follow the instructions on
+[https://pre-commit.com/](https://pre-commit.com/) to install the `pre-commit` tool. Then run the following command:
+
+```sh
+pre-commit install
+```
+
+#### Why are some components excluded from automatic formatting?
+
+- `test/testdata` and `test/data` are excluded because they contain test data for the formatting plugins and must not be reformatted.
+
+## Plugin tutorial
+
+See the [tutorial on writing a plugin in HLS](./plugin-tutorial.md).
+
+## Measuring, benchmarking and tracing
+
+### Benchmarks
+
+If you are touching performance-sensitive code, take the time to run a differential benchmark between `HEAD` and `origin/master` (see [bench/README](https://github.com/haskell/haskell-language-server/blob/master/bench/README.md)).
+
+Run the benchmarks with `cabal bench`. The runtime is about 25 minutes and the results will be stored in the `bench-results` folder. To interpret the results, see the comments in the [bench/Main.hs](https://github.com/haskell/haskell-language-server/blob/master/bench/Main.hs) module.
+ +### Tracing + +HLS records [eventlog traces](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/runtime_control.html#rts-eventlog) via [opentelemetry](https://hackage.haskell.org/package/opentelemetry). To generate the traces, build with `-eventlog` and run with `+RTS -l`. To visualize the traces, install [Tracy](https://github.com/wolfpld/tracy) and use [eventlog-to-tracy](https://hackage.haskell.org/package/opentelemetry-extra) to open the generated eventlog. + +## Adding support for a new editor + +Adding support for new editors is fairly easy if the editor already has good support for generic LSP-based extensions. +In that case, there will likely be an editor-specific support system (e.g., `lsp-mode` for Emacs). +The support system will typically provide instructions for how to add support for new languages. + +In some cases you may need to write a small bit of additional client support, or expose a way for the user to set the server's [configuration options](../configuration.md#configuring-haskell-language-server) and for them to configure how the server is started. + +## Building the documentation + +The documentation is built with [Sphinx](https://www.sphinx-doc.org/en/master/) and [ReadTheDocs](https://docs.readthedocs.io/en/stable/index.html), the documentation of both is helpful. + +You need to install some Python prerequisites. You can either `pip install -r docs/requirements.txt`, or simply enter a `nix-shell`. + +Then to build and preview the documentation: + +``` +cd docs +make html +firefox _build/html/index.html +``` + +Alternatively, you can build the documentation as a Nix derivation from the Flake with `nix build .#docs`. + +The documentation is also built and previewed on every PR, so you can check them from the PR status. + +## Working on code actions + +To make HLS easier to maintain, please follow these design guidelines when adding or modifying code actions: + +1. Prefer `ghc-exactprint` to manual text parsing. +2. Prefer `ghc-exactprint` to manual code generation. +3. Code generating actions should not try to format the generated code. Assume that the user is also leveraging HLS for automated code formatting. +4. Put new code actions in their own plugin unless they are very closely aligned with an existing code action. + +## Sponsorship + +If you want to contribute financially, you can do so via [open-collective](https://opencollective.com/haskell-language-server). In the past, the funding was used to sponsor [summer student projects](https://mpickering.github.io/ide/posts/2021-07-22-summer-of-hls.html). diff --git a/docs/contributing/imports.gif b/docs/contributing/imports.gif new file mode 100644 index 0000000000..fc6ae585e5 Binary files /dev/null and b/docs/contributing/imports.gif differ diff --git a/docs/contributing/index.rst b/docs/contributing/index.rst new file mode 100644 index 0000000000..c6c500c630 --- /dev/null +++ b/docs/contributing/index.rst @@ -0,0 +1,8 @@ +Contributing +============ + +.. 
toctree:: + :maxdepth: 2 + + contributing + plugin-tutorial diff --git a/docs/contributing/plugin-tutorial.lhs b/docs/contributing/plugin-tutorial.lhs new file mode 120000 index 0000000000..e1837100c2 --- /dev/null +++ b/docs/contributing/plugin-tutorial.lhs @@ -0,0 +1 @@ +plugin-tutorial.md \ No newline at end of file diff --git a/docs/contributing/plugin-tutorial.md b/docs/contributing/plugin-tutorial.md new file mode 100644 index 0000000000..d9ca59c0ad --- /dev/null +++ b/docs/contributing/plugin-tutorial.md @@ -0,0 +1,418 @@ +# Let’s write a Haskell Language Server plugin + +Originally written by Pepe Iborra, maintained by the Haskell community. + +Haskell Language Server (HLS) is a Language Server Protocol (LSP) server for the Haskell programming language. It builds on several previous efforts to create a Haskell IDE. +You can find many more details on the history and architecture on the [IDE 2020](https://mpickering.github.io/ide/index.html) community page. +In this article we are going to cover the creation of an HLS plugin from scratch: a code lens to display explicit import lists. +Along the way we will learn about HLS, its plugin model, and the relationship with [ghcide](https://github.com/haskell/haskell-language-server/tree/master/ghcide) and LSP. + +## Introduction + +Writing plugins for HLS is a joy. Personally, I enjoy the ability to tap into the gigantic bag of goodies that is GHC, as well as the IDE integration thanks to LSP. + +In the last couple of months, I have written various HLS plugins, including: + +1. Suggest imports for variables not in scope, +2. Remove redundant imports, +3. Evaluate code in comments (à la [doctest](https://docs.python.org/3/library/doctest.html)), +4. Integrate the [retrie](https://github.com/facebookincubator/retrie) refactoring library. + +These plugins are small but meaningful steps towards a more polished IDE experience. +While writing them, I didn't have to worry about performance, UI, or distribution; another tool (usually GHC) always did the heavy lifting. + +The plugins also make these tools much more accessible to all users of HLS. + +## Preamble + +This tutorial is a literate Haskell file that can be compiled. +As such, we list the imports, extensions etc... necessary for compilation. + +Please just skip over this `import` section, if you are only interested in the tutorial! 
+ +```haskell +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveAnyClass #-} + +import Ide.Types +import Ide.Logger +import Ide.Plugin.Error + +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service hiding (Log) +import Development.IDE.Core.Shake hiding (Log) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Core +import Development.IDE.GHC.Error +import Development.IDE.Types.HscEnvEq +import Development.IDE.Core.PluginUtils + +import qualified Language.LSP.Server as LSP +import Language.LSP.Protocol.Types as JL +import Language.LSP.Protocol.Message + +import Data.Aeson as Aeson +import Data.Map (Map) +import Data.IORef +import Data.Maybe (fromMaybe, catMaybes) +import qualified Data.Map as Map +import qualified Data.HashMap.Strict as HashMap +import qualified Data.Text as T +import Control.Monad (forM) +import Control.Monad.IO.Class (liftIO) +import Control.Monad.Trans.Class +import GHC.Generics (Generic) +``` + +## Plugins in the HLS codebase + +The HLS codebase includes several plugins (found in `./plugins`). For example: + +- The `ormolu`, `fourmolu`, `floskell` and `stylish-haskell` plugins used to format code +- The `eval` plugin, a code lens provider to evaluate code in comments +- The `retrie` plugin, a code action provider to execute retrie commands + +I recommend looking at the existing plugins for inspiration and reference. A few conventions shared by all plugins are: + +- Plugins are in the `./plugins` folder +- Plugins implement their code under the `Ide.Plugin.*` namespace +- Folders containing the plugin follow the `hls-pluginname-plugin` naming convention +- Plugins are "linked" in `src/HlsPlugins.hs#idePlugins`. New plugin descriptors + must be added there. + + ```haskell ignore + -- Defined in src/HlsPlugins.**hs** + + idePlugins = pluginDescToIdePlugins allPlugins + where + allPlugins = + [ GhcIde.descriptor "ghcide" + , Pragmas.descriptor "pragmas" + , Floskell.descriptor "floskell" + , Fourmolu.descriptor "fourmolu" + , Ormolu.descriptor "ormolu" + , StylishHaskell.descriptor "stylish-haskell" + , Retrie.descriptor "retrie" + , Eval.descriptor "eval" + , NewPlugin.descriptor "new-plugin" -- Add new plugins here. + ] + ``` + +To add a new plugin, extend the list of `allPlugins` and rebuild. + +## The goal of the plugin we will write + +Here is a visual statement of what we want to accomplish: + + ![Imports code lens](imports.gif) + +And here is the gist of the algorithm: + +1. Request the type checking artifacts from the `ghcide` subsystem +2. Extract the actual import lists from the type-checked AST +3. Ask GHC to produce the minimal import lists for this AST +4. For every import statement without an explicit import list: + - Determine the minimal import list + - Produce a code lens to display it and a command to apply it + +## Setup + +To get started, fetch the HLS repository and build it by following the [installation instructions](https://haskell-language-server.readthedocs.io/en/latest/contributing/contributing.html#building). 
+ +If you run into any issues trying to build the binaries, you can get in touch with the HLS team using one of the [contact channels](https://haskell-language-server.readthedocs.io/en/latest/contributing/contributing.html#how-to-contact-the-haskell-ide-team) or [open an issue](https://github.com/haskell/haskell-language-server/issues) in the HLS repository. + +Once the build is done, you can find the location of the HLS binary with `cabal list-bin exe:haskell-language-server` and point your LSP client to it. +This way you can simply test your changes by reloading your editor after rebuilding the binary. + +> **Note:** In VSCode, edit the "Haskell Server Executable Path" setting. +> +> **Note:** In Emacs, edit the `lsp-haskell-server-path` variable. + +![Settings](settings-vscode.png) + +[Manually test your hacked HLS](https://haskell-language-server.readthedocs.io/en/latest/contributing/contributing.html#manually-testing-your-hacked-hls) to ensure you use the HLS package you just built. + +## Digression about the Language Server Protocol + +There are two main types of communication in the Language Server Protocol: + +- A **request-response interaction** type where one party sends a message that requires a response from the other party. +- A **notification** is a one-way interaction where one party sends a message without expecting any response. + +> **Note**: The LSP client and server can both send requests or notifications to the other party. + +## Anatomy of a plugin + +HLS plugins are values of the `PluginDescriptor` datatype, which is defined in `hls-plugin-api/src/Ide/Types.hs` as: + +```haskell ignore +data PluginDescriptor (ideState :: Type) = + PluginDescriptor { pluginId :: !PluginId + , pluginCommands :: ![PluginCommand ideState] + , pluginHandlers :: PluginHandlers ideState + , pluginNotificationHandlers :: PluginNotificationHandlers ideState +-- , [...] -- Other fields omitted for brevity. + } +``` + +### Request-response interaction + +The `pluginHandlers` handle LSP client requests and provide responses to the client. They must fulfill these requests as quickly as possible. + +- Example: When you want to format a file, the client sends the [`textDocument/formatting`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_formatting) request to the server. The server formats the file and responds with the formatted content. + +### Notification + +The `pluginNotificationHandlers` handle notifications sent by the client to the server that are not explicitly triggered by a user. + +- Example: Whenever you modify a Haskell file, the client sends a notification informing HLS about the changes to the file. + +The `pluginCommands` are special types of user-initiated notifications sent to +the server. These actions can be long-running and involve multiple modules. + +## The explicit imports plugin + +To achieve our plugin goals, we need to define: + +- a command handler (`importLensCommand`), +- a code lens request handler (`lensProvider`). + +These will be assembled in the `descriptor` function of the plugin, which contains all the information wrapped in the `PluginDescriptor` datatype mentioned above. 
+ +Using the convenience `defaultPluginDescriptor` function, we can bootstrap the plugin with the required parts: + +```haskell +-- plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs + +data Log + +-- | The "main" function of a plugin. +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "A plugin for generating the minimal imports") + { pluginCommands = [importLensCommand], -- The plugin provides a command handler + pluginHandlers = mconcat -- The plugin provides request handlers + [ mkPluginHandler SMethod_TextDocumentCodeLens provider + ] + } +``` + +We'll start with the command, since it's the simplest of the two. + +### The command handler + +In short, LSP commands work like this: + +- The LSP server (HLS) initially sends a command descriptor to the client, in this case as part of a code lens. +- When the user clicks on the code lens, the client asks HLS to execute the command with the given descriptor. The server then handles and executes the command; this latter part is implemented by the `commandFunc` field of our `PluginCommand` value. + +> **Note**: Check the [LSP spec](https://microsoft.github.io/language-server-protocol/specification) for a deeper understanding of how commands work. + +The command handler will be called `importLensCommand` and have the `PluginCommand` type, a type defined in `Ide.Types` as: + +```haskell ignore +-- hls-plugin-api/src/Ide/Types.hs + +data PluginCommand ideState = forall a. (FromJSON a) => + PluginCommand { commandId :: CommandId + , commandDesc :: T.Text + , commandFunc :: CommandFunction ideState a + } +``` + +Let's start by creating an unfinished command handler. We'll give it an ID and a description for now: + +```haskell +-- | The command handler. +importLensCommand :: PluginCommand IdeState +importLensCommand = + PluginCommand + { commandId = importCommandId + , commandDesc = "Explicit import command" + , commandFunc = runImportCommand + } + +importCommandId :: CommandId +importCommandId = "ImportLensCommand" +``` + +```haskell ignore +-- | Not implemented yet. +runImportCommand = undefined +``` + +The most important (and still `undefined`) field is `commandFunc :: CommandFunction`, a type synonym from `LSP.Types`: + +```haskell ignore +-- hls-plugin-api/src/Ide/Types.hs + +type CommandFunction ideState a + = ideState + -> a + -> LspM Config (Either ResponseError Value) +``` + +`CommandFunction` takes an `ideState` and a JSON-encodable argument. `LspM` is a monad transformer with access to IO, and having access to a language context environment `Config`. The action evaluates to an `Either` value. `Left` indicates failure with a `ResponseError`, `Right` indicates sucess with a `Value`. + +Our handler will ignore the state argument and only use the `WorkspaceEdit` argument. + +```haskell +-- | The type of the parameters accepted by our command +newtype ImportCommandParams = ImportCommandParams WorkspaceEdit + deriving (Generic) + deriving anyclass (FromJSON, ToJSON) + +-- | The actual command handler +runImportCommand :: CommandFunction IdeState ImportCommandParams +runImportCommand _ _ (ImportCommandParams edit) = do + -- This command simply triggers a workspace edit! 
+ _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + return $ InR JL.Null +``` + +`runImportCommand` [sends a request](https://hackage.haskell.org/package/lsp/docs/Language-LSP-Server.html#v:sendRequest) to the client using the method `SWorkspaceApplyEdit` and the parameters `ApplyWorkspaceEditParams Nothing edit`, providing a response handler that does nothing. It then returns `Right Null`, which is an empty `Aeson.Value` wrapped in `Right`. + +### The code lens provider + +The code lens provider implements all the steps of the algorithm described earlier: + +> 1. Request the type checking artifacts. +> 2. Extract the actual import lists from the type-checked AST. +> 3. Ask GHC to produce the minimal import lists for this AST. +> 4. For each import statement lacking an explicit list, determine its minimal import list and generate a code lens displaying this list along with a command to insert it. + +The provider takes the usual `LspFuncs` and `IdeState` arguments, as well as a `CodeLensParams` value containing a file URI. It returns an IO action that produces either an error or a list of code lenses for that file. + +```haskell +provider :: PluginMethodHandler IdeState Method_TextDocumentCodeLens +provider state -- ghcide state, used to retrieve typechecking artifacts + pId -- Plugin ID + CodeLensParams{_textDocument = TextDocumentIdentifier{_uri}} = do + -- VSCode uses URIs instead of file paths + -- haskell-lsp provides conversion functions + nfp <- getNormalizedFilePathE _uri + -- Get the typechecking artifacts from the module + tmr <- runActionE "importLens" state $ useE TypeCheck nfp + -- We also need a GHC session with all the dependencies + hsc <- runActionE "importLens" state $ useE GhcSessionDeps nfp + -- Use the GHC API to extract the "minimal" imports + (imports, mbMinImports) <- liftIO $ extractMinimalImports hsc tmr + + case mbMinImports of + Just minImports -> do + let minImportsMap = + Map.fromList [ (realSrcLocToPosition loc, i) + | L l i <- minImports + , let RealSrcLoc loc _ = srcSpanStart (locA l) + ] + lenses <- forM imports $ \imp -> + -- for every import, maybe generate a code lens + liftIO (generateLens pId _uri minImportsMap imp) + return $ InL (catMaybes lenses) + _ -> + return $ InL [] +``` + +Note the simplicity of retrieving the type checking artifacts for the module, as well as a fully set up GHC session, via the `ghcide` rules. + +The function `extractMinimalImports` extracts the import statements from the AST and generates the minimal import lists, implementing steps 2 and 3 of the algorithm. 
+ +The details of the GHC API are not relevant to this tutorial, but the code is terse and easy to read: + +```haskell +extractMinimalImports + :: HscEnvEq + -> TcModuleResult + -> IO ([LImportDecl GhcRn], Maybe [LImportDecl GhcRn]) +extractMinimalImports hsc TcModuleResult{..} = do + -- Extract the original imports and the typechecking environment + let tcEnv = tmrTypechecked + (_, imports, _, _) = tmrRenamed + ParsedModule{ pm_parsed_source = L loc _} = tmrParsed + span = fromMaybe (error "expected real") $ realSpan loc + + -- GHC is secretly full of mutable state + gblElts <- readIORef (tcg_used_gres tcEnv) + + let usage = findImportUsage imports gblElts + (_, minimalImports) <- + -- getMinimalImports computes the minimal explicit import lists + initTcWithGbl (hscEnv hsc) tcEnv span $ getMinimalImports usage + return (imports, minimalImports) +``` + +The function `generateLens` implements step 4 of the algorithm, producing a code lens for an import statement that lacks an import list. The code lens includes an `ImportCommandParams` value containing a workspace edit that rewrites the import statement, as our command provider expects. + +```haskell +-- | Given an import declaration, generate a code lens unless it has an explicit import list +generateLens :: PluginId + -> Uri + -> Map Position (ImportDecl GhcRn) + -> LImportDecl GhcRn + -> IO (Maybe CodeLens) +generateLens pId uri minImports (L src imp) + -- Explicit import list case + | ImportDecl{ideclImportList = Just _} <- imp + = return Nothing + -- No explicit import list + | RealSrcSpan l _ <- locA src + , let position = realSrcLocToPosition $ realSrcSpanStart l + , Just explicit <- Map.lookup position minImports + , L _ mn <- ideclName imp + -- (Almost) no one wants to see an explicit import list for Prelude + , mn /= moduleName pRELUDE + = do + -- The title of the command is just the minimal explicit import decl + let title = T.pack $ printWithoutUniques explicit + -- The range of the code lens is the span of the original import decl + _range :: Range = realSrcSpanToRange l + -- The code lens has no extra data + _xdata = Nothing + -- An edit that replaces the whole declaration with the explicit one + edit = WorkspaceEdit (Just editsMap) Nothing Nothing + editsMap = Map.fromList [(uri, [importEdit])] + importEdit = TextEdit _range title + -- The command argument is simply the edit + _arguments = Just [toJSON $ ImportCommandParams edit] + _data_ = Nothing + -- Create the command + _command = Just $ mkLspCommand pId importCommandId title _arguments + -- Create and return the code lens + return $ Just CodeLens{..} + | otherwise + = return Nothing +``` + +## Wrapping up + +There's only one Haskell code change left to do at this point: "link" the plugin in the `HlsPlugins` HLS module. + +Integrating the plugin into HLS itself requires changes to several configuration files. + +A good approach is to search for the ID of an existing plugin (e.g., `hls-class-plugin`): + +- `./haskell-language-server.cabal`: Add a conditional block with the plugin package dependency. +- `./.github/workflows/test.yml`: Add a block to run the plugin's test suite. +- `./.github/workflows/hackage.yml`: Add the plugin to the component list for releasing the plugin package to Hackage. +- `./*.nix`: Add the plugin to Nix builds. + +This plugin tutorial re-implements parts of the [`hls-explicit-imports-plugin`] which is part of HLS. +The plugin code additionally contains advanced concepts, such as `Rules`. 
+ +I hope this has given you a taste of how easy and joyful it is to write plugins for HLS. If you are looking for contribution ideas, here are some good ones listed in the HLS [issue tracker](https://github.com/haskell/haskell-language-server/issues). + +
+ Placeholder Main, unused + +```haskell +main :: IO () +main = putStrLn "Just here to silence the error!" +``` +
diff --git a/docs/contributing/settings-vscode.png b/docs/contributing/settings-vscode.png new file mode 100644 index 0000000000..a7ffeaac20 Binary files /dev/null and b/docs/contributing/settings-vscode.png differ diff --git a/docs/features.md b/docs/features.md new file mode 100644 index 0000000000..1eab0054b4 --- /dev/null +++ b/docs/features.md @@ -0,0 +1,455 @@ +# Features + +This table gives a summary of the features that HLS supports. +Many of these are standard LSP features, but a lot of special features are provided as [code actions](#code-actions) and [code lenses](#code-lenses). + +| Feature | [LSP method](./what-is-hls.md#lsp-terminology) | +| --------------------------------------------------- | ------------------------------------------------------------------------------------------------- | +| [Diagnostics](#diagnostics) | `textDocument/publishDiagnostics` | +| [Hovers](#hovers) | `textDocument/hover` | +| [Jump to definition](#jump-to-definition) | `textDocument/definition` | +| [Jump to type definition](#jump-to-type-definition) | `textDocument/typeDefinition` | +| [Find references](#find-references) | `textDocument/references` | +| [Completions](#completions) | `textDocument/completion` | +| [Formatting](#formatting) | `textDocument/formatting`, `textDocument/rangeFormatting` | +| [Document symbols](#document-symbols) | `textDocument/documentSymbol` | +| [Workspace symbols](#workspace-symbols) | `workspace/symbol` | +| [Call hierarchy](#call-hierarchy) | `textDocument/prepareCallHierarchy`, `callHierarchy/incomingCalls`, `callHierarchy/outgoingCalls` | +| [Highlight references](#highlight-references) | `textDocument/documentHighlight` | +| [Code actions](#code-actions) | `textDocument/codeAction` | +| [Code lenses](#code-lenses) | `textDocument/codeLens` | +| [Selection range](#selection-range) | `textDocument/selectionRange` | +| [Rename](#rename) | `textDocument/rename` | +| [Semantic tokens](#semantic-tokens) | `textDocument/semanticTokens/full` | + +The individual sections below also identify which [HLS plugin](./what-is-hls.md#hls-plugins) is responsible for providing the given functionality, which is useful if you want to raise an issue report or contribute! +Additionally, not all plugins are supported on all versions of GHC, see the [plugin support page](./support/plugin-support.md) for details. + +## Diagnostics + +### GHC compiler errors and warnings + +Provided by: `ghcide` + +Provides errors and warnings from GHC as diagnostics. + +### Hlint hints + +Provided by: `hls-hlint-plugin` + +Provides hlint hints as diagnostics. + +### Stan hints + +Provided by: `hls-stan-plugin` + +Provides Stan hints as diagnostics. + +### Cabal parse errors and warnings + +Provided by: `hls-cabal-plugin` + +Provides errors and warnings from Cabal as diagnostics + +## Hovers + +Provided by: `ghcide` + +Type information and documentation on hover, [including from local definitions](./configuration.md#how-to-show-local-documentation-on-hover). + +### Show fixity + +Provided by: `hls-explicit-fixity-plugin` + +Provides fixity information. + +## Jump to definition + +Provided by: `ghcide` + +Jump to the definition of a name. + +Known limitations: + +- Only works for [local definitions](https://github.com/haskell/haskell-language-server/issues/708). + +## Jump to type definition + +Provided by: `ghcide` + +Known limitations: + +- Only works for [local definitions](https://github.com/haskell/haskell-language-server/issues/708). 
+ +## Jump to implementation + +Provided by: `ghcide` + +Jump to the implementation instance of a type class method. + +Known limitations: + +- Only works for [local definitions](https://github.com/haskell/haskell-language-server/issues/708). + +## Jump to note definition + +Provided by: `hls-notes-plugin` + +Jump to the definition of a [GHC-style note](https://gitlab.haskell.org/ghc/ghc/-/wikis/commentary/coding-style#2-using-notes). + +## Find references + +Provided by: `ghcide` + +Find references to a name within the project. + +## Completions + +### Code completions + +Provided by: `ghcide` + +- Completion of names from qualified imports. +- Completion of names from non-imported modules. + +### Pragma completions + +Provided by: `hls-pragmas-plugin` + +Completions for language pragmas. + +## Formatting + +Format your code with various Haskell code formatters. +The default Haskell code formatter is `ormolu`, and the Haskell formatter can be configured via the `formattingProvider` option. + +| Formatter | Provided by | +| --------------- | ---------------------------- | +| Floskell | `hls-floskell-plugin` | +| Fourmolu | `hls-fourmolu-plugin` | +| Ormolu | `hls-ormolu-plugin` | +| Stylish Haskell | `hls-stylish-haskell-plugin` | + +--- + +Format your cabal files with a cabal code formatter. +The default cabal code formatter is `cabal-gild`, which needs to be available on the `$PATH`, +or the location needs to be explicitly provided. +To change the cabal formatter, edit the `cabalFormattingProvider` option. + +| Formatter | Provided by | +|-----------------|------------------------------| +| cabal-fmt | `hls-cabal-fmt-plugin` | +| cabal-gild | `hls-cabal-gild-plugin` | + +## Document symbols + +Provided by: `ghcide` + +Provides listing of the symbols defined in a module, used to power outline displays. + +## Workspace symbols + +Provided by: `ghcide` + +Provides listing of the symbols defined in the project, used to power searches. + +## Call hierarchy + +Provided by: `hls-call-hierarchy-plugin` + +Shows ingoing and outgoing calls for a function. + +![Call Hierarchy in VSCode](../plugins/hls-call-hierarchy-plugin/call-hierarchy-in-vscode.gif) + +## Highlight references + +Provided by: `ghcide` + +Highlights references to a name in a document. + +## Code actions + +### Insert missing pragmas + +Provided by: `hls-pragma-plugin` + +Code action kind: `quickfix` + +Inserts missing pragmas needed by GHC. + +### Apply Hlint fixes + +Provided by: `hls-hlint-plugin` + +Code action kind: `quickfix` + +Applies hints, either individually or for the whole file. +Uses [apply-refact](https://github.com/mpickering/apply-refact). + +![Hlint Demo](https://user-images.githubusercontent.com/54035/110860028-8f9fa900-82bc-11eb-9fe5-6483d8bb95e6.gif) + +Known limitations: + +- May have strange behaviour in files with CPP, since `apply-refact` does not support CPP. +- The `hlint` executable by default turns on many extensions when parsing a file because it is not certain about the exact extensions that apply to the file (they may come from project files). This differs from HLS which uses only the extensions the file needs to parse the file. Hence it is possible for the `hlint` executable to report a parse error on a file, but the `hlint` plugin to work just fine on the same file. This does mean that the turning on/off of extensions in the hlint config may be ignored by the `hlint` plugin. 
+- Hlint restrictions do not work (yet). This [PR](https://github.com/ndmitchell/hlint/pull/1340) should enable that functionality, but this requires a newer version of hlint to be used in HLS. + +### Make import lists fully explicit + +Provided by: `hls-explicit-imports-plugin` + +Code action kind: `quickfix.literals.style` + +Make import lists fully explicit (same as the code lens). + +### Refine import + +Provided by: `hls-explicit-imports-plugin` + +Code action kind: `quickfix.import.refine` + +Refines imports to more specific modules when names are re-exported (same as the code lens). + +### Qualify imported names + +Provided by: `hls-qualify-imported-names-plugin` + +Code action kind: `quickfix` + +Rewrites imported names to be qualified. + +![Qualify Imported Names Demo](../plugins/hls-qualify-imported-names-plugin/qualify-imported-names-demo.gif) + +For usage see the [readme](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-qualify-imported-names-plugin/README.md). + +### Add missing class methods + +Provided by: `hls-class-plugin` + +Code action kind: `quickfix` + +Adds placeholders for missing class methods in a class instance definition. + +### Unfold definition + +Provided by: `hls-retrie-plugin` + +Code action kind: `refactor.extract` + +Extracts a definition from the code. + +### Fold definition + +Provided by: `hls-retrie-plugin` + +Code action kind: `refactor.inline` + +Inlines a definition from the code. + +![Retrie Demo](https://i.imgur.com/Ev7B87k.gif) + +### Insert contents of Template Haskell splice + +Provided by: `hls-splice-plugin` + +Code action kind: `refactor.rewrite` + +Evaluates a Template Haskell splice and inserts the resulting code in its place. + +### Convert numbers to alternative formats + +Provided by: `hls-alternate-number-format-plugin` + +Code action kind: `quickfix.literals.style` + +Converts numeric literals to different formats. + +![Alternate Number Format Demo](../plugins/hls-alternate-number-format-plugin/HLSAll.gif) + +### Change Type Signature + +Provided by: `hls-change-type-signature-plugin` + +Code action kind: `quickfix` + +Change/Update a type signature to match implementation. + +Status: Until GHC 9.4, the implementation is ad-hoc and relies on GHC error messages to create a new signature. Not all GHC error messages are supported. + +Known Limitations: + +- Not all GHC error messages are supported +- Top-level and Function-local bindings with the same names can cause issues, such as incorrect signature changes or no code actions available. + +![Change Type Signature Demo](../plugins/hls-change-type-signature-plugin/change1.gif) + +![Change Type Signature Demo](../plugins/hls-change-type-signature-plugin/change2.gif) + +[Link to Docs](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-change-type-signature-plugin/README.md) + +### Add argument to function + +Provided by: `hls-refactor-plugin` + +Code action kind: `quickfix` + +Add an undefined variable as an argument to the top-level binding. + +### Convert to GADT syntax + +Provided by: `hls-gadt-plugin` + +Code action kind: `refactor.rewrite` + +Convert a datatype to GADT syntax. 
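+
+As a rough illustration (a hand-written sketch, not an excerpt from the plugin's documentation or tests), the action turns a declaration like the commented-out one below into the equivalent GADT-style declaration:
+
+```haskell
+{-# LANGUAGE GADTs #-}
+
+-- Before the code action (ordinary ADT syntax):
+-- data Shape = Circle Double | Rect Double Double
+
+-- After the code action (the same type in GADT syntax):
+data Shape where
+  Circle :: Double -> Shape
+  Rect   :: Double -> Double -> Shape
+```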
+ +![GADT Demo](../plugins/hls-gadt-plugin/gadt.gif) + +[Link to Docs](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-gadt-plugin/README.md) + +### Expand record wildcard + +Provided by: `hls-explicit-record-fields-plugin` + +Code action kind: `refactor.rewrite` + +Expand record wildcards, explicitly listing all record fields as field puns. + +![Explicit Wildcard Demo](../plugins/hls-explicit-record-fields-plugin/wildcard.gif) + +### Unknown SPDX License suggestion + +Provided by: `hls-cabal-plugin` + +Code action kind: `quickfix` + +Correct common misspelling of SPDX Licenses such as `BSD-3-Clause`. + +### Add dependency to `cabal` file + +Provided by: `hls-cabal-plugin` + +Code action kind: `quickfix` + +Add a missing package dependency to your `.cabal` file. + +## Code lenses + +### Add type signature + +Provided by: `ghcide` + +Shows the type signature for bindings without type signatures, and adds it with a click. + +### Evaluation code snippets in comments + +Provided by: `hls-eval-plugin` + +Evaluates code blocks in comments with a click. A code action is also provided. [Tutorial](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-eval-plugin/README.md). + +![Eval Demo](../plugins/hls-eval-plugin/demo.gif) + +Known limitations: + +- Standard input is shared with HLS, so e.g. [`getLine` breaks the connection to server](https://github.com/haskell/haskell-language-server/issues/2913). +- Standard (error) output [is not captured](https://github.com/haskell/haskell-language-server/issues/1977). +- While similar to [doctest](https://hackage.haskell.org/package/doctest), some of its features are unsupported, + see [Differences with doctest](https://github.com/haskell/haskell-language-server/blob/master/plugins/hls-eval-plugin/README.md#differences-with-doctest). + +### Make import lists fully explicit code lens + +Provided by: `hls-explicit-imports-plugin` + +Shows fully explicit import lists and rewrites them with a click (same as the code action). + +![Imports code lens Demo](https://imgur.com/pX9kvY4.gif) + +### Refine import code lens + +Provided by: `hls-explicit-imports-plugin` + +Shows refined imports and applies them with a click (same as the code action). + +### Fix module names + +Provided by: `hls-module-name-plugin` + +Shows module name matching file path, and applies it with a click. + +![Module Name Demo](https://user-images.githubusercontent.com/54035/110860755-78ad8680-82bd-11eb-9845-9ea4b1cc1f76.gif) + +## Selection range + +Provided by: `hls-code-range-plugin` + +Provides haskell specific +[shrink/expand selection](https://code.visualstudio.com/docs/editor/codebasics#_shrinkexpand-selection) +support. + +![Selection range demo](https://user-images.githubusercontent.com/16440269/177240833-7dc8fe39-b446-477e-b5b1-7fc303608d4f.gif) + +## Folding range + +Provided by: `hls-code-range-plugin` + +Provides haskell specific +[Folding](https://code.visualstudio.com/docs/editor/codebasics#_folding) +support. + +![Folding range demo](https://user-images.githubusercontent.com/54478821/184468510-7c0d5182-c684-48ef-9b39-3866dc2309df.gif) + +## Rename + +Provided by: `hls-rename-plugin` + +Provides renaming of symbols within a module. 
Experimental cross-module renaming can be enabled via the configuration. + +![Rename Demo](https://user-images.githubusercontent.com/30090176/133072143-d7d03ec7-3db1-474e-ad5e-6f40d75ff7ab.gif) + +Known limitations: + +- Cross-module renaming requires all components to be indexed, which sometimes causes [partial renames in multi-component projects](https://github.com/haskell/haskell-language-server/issues/2193). + +To eagerly load all components, you need to + +- set `haskell.sessionLoading` to `multipleComponents`, +- set `hie.yaml` to load all components (currently only cabal supports this), + ```yaml + cradle: + cabal: + component: all + ``` +- and enable tests and benchmarks in `cabal.project` with `tests: True` and `benchmarks: True`. + +## Semantic tokens + +Provided by: `hls-semantic-tokens-plugin` + +Provides semantic tokens for each token in the source code to support semantic highlighting. + +## Rewrite to overloaded record syntax + +Provided by: `hls-overloaded-record-dot-plugin` + +Code action kind: `refactor.rewrite` + +Rewrites record selectors to use overloaded dot syntax + +![Explicit Wildcard Demo](../plugins/hls-overloaded-record-dot-plugin/example.gif) + +## Missing features + +The following features are supported by the LSP specification but not implemented in HLS. +Contributions welcome! + +| Feature | Status | [LSP method](./what-is-hls.md#lsp-terminology) | +| ---------------------- | ----------------- | ---------------------------------------------- | +| Signature help | Unimplemented | `textDocument/signatureHelp` | +| Jump to declaration | Unclear if useful | `textDocument/declaration` | +| Jump to implementation | Unclear if useful | `textDocument/implementation` | +| Linked editing | Unimplemented | `textDocument/linkedEditingRange` | +| Document links | Unimplemented | `textDocument/documentLink` | +| Document color | Unclear if useful | `textDocument/documentColor` | +| Color presentation | Unclear if useful | `textDocument/colorPresentation` | +| Monikers | Unclear if useful | `textDocument/moniker` | diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000000..e3e8fab81c --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,15 @@ +haskell-language-server +======================= + +Official Haskell Language Server implementation. :ref:`Read more`. + +.. toctree:: + :maxdepth: 2 + + what-is-hls + features + installation + support/index + configuration + troubleshooting + contributing/index diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 0000000000..4a1147ade5 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,165 @@ +# Installation + +## Prerequisites + +- For standalone `.hs`/`.lhs` files, [ghc](https://www.haskell.org/ghc/) must be installed and on the `PATH`. The easiest way to install it is with [ghcup](https://www.haskell.org/ghcup/) or [chocolatey](https://community.chocolatey.org/packages/ghc) on Windows. +- For Cabal based projects, both ghc and [cabal-install](https://www.haskell.org/cabal/) must be installed and on the `PATH`. It can also be installed with [ghcup](https://www.haskell.org/ghcup/) or [chocolatey](https://community.chocolatey.org/packages/cabal) on Windows. 
+
+## ghcup
+
+If you are using [`ghcup`](https://www.haskell.org/ghcup/) to manage your installations, you can install `haskell-language-server` with
+
+```bash
+ghcup install hls
+```
+
+You can check whether HLS is available for your platform on the [`ghcup` website](https://www.haskell.org/ghcup/).
+
+You can also install HLS from source without checking out the code manually:
+
+```bash
+ghcup compile hls -v $HLS_VERSION --ghc $GHC_VERSION
+```
+
+More information about `ghcup compile hls` is available in the [`ghcup` documentation](https://www.haskell.org/ghcup/).
+
+## Installation from source
+
+Direct installation from source, while possible via `cabal install exe:haskell-language-server`, is not recommended for most people.
+Said command builds the `haskell-language-server` binary and installs it in the default `cabal` binaries folder,
+but the binary will only work with projects that use the same GHC version that built it.
+
+### Common pre-requirements
+
+- `cabal` must be in your `PATH`
+  - You need `cabal` >= 2.4.0.0
+- `git` must be in your `PATH`
+- The directory where `cabal` puts the binaries must be in your `PATH`:
+  - For `cabal` it is by default `$HOME/.cabal/bin` on Linux and `%APPDATA%\cabal\bin` on Windows.
+
+Tip: you can quickly check whether a command is in your `PATH` by running it.
+If you get meaningful output instead of a "command not found"-like message,
+then the command is in your `PATH`.
+
+### Linux-specific pre-requirements
+
+On Linux you will need to install a couple of extra libraries:
+
+- [Unicode (ICU)](http://site.icu-project.org/)
+- [NCURSES](https://www.gnu.org/software/ncurses/)
+- [Zlib](https://zlib.net/)
+
+**Debian 9/Ubuntu 18.04 or earlier**:
+
+```bash
+sudo apt install libicu-dev libtinfo-dev libgmp-dev zlib1g-dev
+```
+
+**Debian 10/Ubuntu 18.10 or later**:
+
+```bash
+sudo apt install libicu-dev libncurses-dev libgmp-dev zlib1g-dev
+```
+
+**Fedora**:
+
+```bash
+sudo dnf install libicu-devel ncurses-devel zlib-devel
+```
+
+### Windows-specific pre-requirements
+
+To avoid problems with long paths on Windows you can do either one of the following:
+
+1. Clone `haskell-language-server` to a short path, for example the root of your logical drive (e.g. to
+   `C:\hls`). Even if you choose `C:\haskell-language-server` you could hit the problem. If this doesn't work or you want to use a longer path, try the second option.
+
+2. If the `Local Group Policy Editor` is available on your system, go to `Local Computer Policy -> Computer Configuration -> Administrative Templates -> System -> Filesystem` and set `Enable Win32 long paths` to `Enabled`. If you don't have the policy editor, you can use regedit by following the instructions [here](https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#enable-long-paths-in-windows-10-version-1607-and-later). You also need to configure git to allow longer (unicode) paths. To set this for all your git repositories use `git config --system core.longpaths true` (you probably need an administrative shell for this), or for just this one repository use `git config core.longpaths true`.
+
+In addition, make sure `haskell-language-server.exe` is not running by closing your editor, otherwise the executable cannot be replaced during an upgrade.
+
+### Download the source code
+
+```bash
+git clone https://github.com/haskell/haskell-language-server
+cd haskell-language-server
+```
+
+## chocolatey
+
+If you are using [`chocolatey`](https://chocolatey.org/) to manage your installations on Windows, [you can install `haskell-language-server`](https://community.chocolatey.org/packages/haskell-language-server) with
+
+```bash
+choco install haskell-language-server
+```
+
+## Visual Studio Code
+
+If you are using Visual Studio Code, the [Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) will automatically download and install `haskell-language-server` for you.
+
+If you need to find the binaries, please consult the [documentation](https://github.com/haskell/vscode-haskell#downloaded-binaries) for the extension.
+
+## Pre-built binaries
+
+There are pre-built binaries available from the [releases page](https://github.com/haskell/haskell-language-server/releases) for Linux, Windows and macOS.
+To install, download the `haskell-language-server-wrapper` executable for your platform as well as any `haskell-language-server` executables for the GHC versions you plan on working with, and either put them on your `PATH` or point your client to them.
+
+## Arch Linux
+
+The preferred method of installation for development purposes is to use the [haskell-language-server-static](https://aur.archlinux.org/packages/haskell-language-server-static) package from AUR.
+This package contains pre-built binaries for each supported GHC version and `haskell-language-server-wrapper` for automatic GHC version selection.
+It is updated regularly, requires no additional dependencies, and is independent of other Haskell packages you may have on your system, including GHC.
+
+See the [ArchWiki](https://wiki.archlinux.org/index.php/Haskell) for details of the Haskell infrastructure on Arch Linux.
+
+## Fedora
+
+Binary packages for Fedora are available from [this Copr repo](https://copr.fedorainfracloud.org/coprs/petersen/haskell-language-server),
+built against the official Fedora ghc package.
+
+## FreeBSD
+
+HLS is available for installation via the [devel/hs-haskell-language-server](https://www.freshports.org/devel/hs-haskell-language-server)
+port or from official binary packages. Use
+
+```bash
+pkg install hs-haskell-language-server
+```
+
+to install it. HLS installed this way targets the same GHC version that the [lang/ghc](https://www.freshports.org/lang/ghc)
+port produces. Use the `pkg search haskell-language` command to list HLS packages
+for other GHCs.
+
+## Gentoo
+
+Haskell Language Server is available via the Haskell overlay. Follow the instructions [here](https://github.com/gentoo-haskell/gentoo-haskell) to install the overlay, then run:
+
+```bash
+emerge -av dev-util/haskell-language-server
+```
+
+Depending on your system setup, you may need to enable the unstable flag for this package before installing, and possibly also for the dependencies. If you enabled the ~testing versions as explained in the gentoo-haskell overlay instructions, then this won't be necessary.
+
+## Installation from Hackage
+
+Direct installation from Hackage, while possible via `cabal install haskell-language-server`, is not recommended for most people.
+Said command builds the `haskell-language-server` binary and installs it in the default Cabal binaries folder, +but the binary will only work with projects that use the same GHC version that built it. + +The package can be found here on Hackage: + +## Installation via Homebrew + +Homebrew users can install `haskell-language-server` using the following command: + +```bash +brew install haskell-language-server +``` + +This formula contains HLS binaries compiled with GHC versions available via Homebrew. + +You need to provide your own GHC/Cabal/Stack as required by your project, possibly via Homebrew. + +## Installation using Nix + +You can read full instructions on how to install HLS with Nix in the [Nixpkgs manual](https://nixos.org/manual/nixpkgs/unstable/#haskell-language-server). diff --git a/docs/logos/logo-1024.png b/docs/logos/logo-1024.png new file mode 100644 index 0000000000..892a43ac82 Binary files /dev/null and b/docs/logos/logo-1024.png differ diff --git a/docs/logos/logo-128.png b/docs/logos/logo-128.png new file mode 100644 index 0000000000..2a18f42f9b Binary files /dev/null and b/docs/logos/logo-128.png differ diff --git a/docs/logos/logo-256.png b/docs/logos/logo-256.png new file mode 100644 index 0000000000..82dc2341c1 Binary files /dev/null and b/docs/logos/logo-256.png differ diff --git a/docs/logos/logo-32.png b/docs/logos/logo-32.png new file mode 100644 index 0000000000..d481989462 Binary files /dev/null and b/docs/logos/logo-32.png differ diff --git a/docs/logos/logo-512.png b/docs/logos/logo-512.png new file mode 100644 index 0000000000..c43466a11b Binary files /dev/null and b/docs/logos/logo-512.png differ diff --git a/docs/logos/logo-64.png b/docs/logos/logo-64.png new file mode 100644 index 0000000000..e8d3dcb3d8 Binary files /dev/null and b/docs/logos/logo-64.png differ diff --git a/docs/logos/logo.svg b/docs/logos/logo.svg new file mode 100644 index 0000000000..d3f2cdf77f --- /dev/null +++ b/docs/logos/logo.svg @@ -0,0 +1,14 @@ + + + + + + + + + diff --git a/docs/releases.md b/docs/releases.md deleted file mode 100644 index e058ff5855..0000000000 --- a/docs/releases.md +++ /dev/null @@ -1,72 +0,0 @@ -# Releases and distributable binaries - -Starting with 0.2.1.0 haskell-language-server provides pre-built binaries on -each [GitHub -release](https://github.com/haskell/haskell-language-server/releases). These -binaries are used by the [vscode-hie-server -extension](https://github.com/alanz/vscode-hie-server) to provide automatic -installation for users on VS Code, but they can also be installed manually -when added to the path. - -## Making a new release of haskell-language-server - -Go to the [GitHub releases -page](https://github.com/haskell/haskell-language-server/releases) for -haskell-language-server and start to create a new release. Choose or create a -tag, fill out the release notes etc., but before you create it -**make sure to check the pre-release checkbox**. This will prevent VS Code -*extension -users from attempting to install this version before the binaries are -created. - -Once the release is created the [GitHub Actions -workflow](https://github.com/haskell/haskell-language-server/actions) will be -kicked off and will start creating binaries. They will be gzipped and -uploaded to the release. 
- -It creates a `haskell-language-server-OS-GHC` binary for each platform -(Linux, macOS, Windows) and each GHC version that we currently support, as well -as a `haskell-language-server-wrapper-OS` binary for each platform. Note that -only one wrapper binary is created per platform, and it should be built with the -most recent GHC version. - -Once all these binaries are present - -## Distributable binaries -In order to compile a hls binary on one machine and have it run on another, you -need to make sure there are **no hardcoded paths or data-files**. - -### ghc libdir -One noteable thing which cannot be hardcoded is the **GHC libdir** – this is -a path to `/usr/local/lib/ghc` or something like that, which was previously -baked in at compile-time with ghc-paths. Note that with static binaries we -can no longer use this because the GHC libdir of the GitHub Actions machine -will most almost certainly not exist on the end user's machine. -Therefore, hie-bios provides `getGhcRuntimeLibDir` to obtain this path on the fly -by consulting the cradle. - -### Static binaries -We use the word "distributable" here because technically only the Linux builds -are static. They are built by passing `--enable-executable-static` to cabal. -Static binaries don't really exist on macOS, and there are issues with -proprietary code being linked in on Windows. However, the `.dylib`s linked on -macOS are all already provided by the system: - -``` -$ objdump -macho --dylibs-used haskell-language-server -haskell-language-server: - /usr/lib/libncurses.5.4.dylib (compatibility version 5.4.0, current version 5.4.0) - /usr/lib/libiconv.2.dylib (compatibility version 7.0.0, current version 7.0.0) - /usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 1281.100.1) - /usr/lib/libcharset.1.dylib (compatibility version 2.0.0, current version 2.0.0) -``` - -## The GitHub Actions workflow -It just kicks off a matrix of jobs varying across GHC versions and OSs, building -the binaries with Cabal and extracting them from the dist-newstyle directory. -The binaries are built with -O2. - -One caveat is that we need to rename the binaries from -haskell-language-server/haskell-language-server-wrapper to hls/hls-wrapper due to -path length limitations on windows. But whenever we upload them to the release, -we make sure to upload them as their full name variant. diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000..4bdb963497 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,4 @@ +Sphinx~=8.1.3 +sphinx-rtd-theme~=3.0.2 +myst-parser~=4.0.0 +docutils~=0.21.2 diff --git a/docs/support/ghc-version-support.md b/docs/support/ghc-version-support.md new file mode 100644 index 0000000000..df0bc23494 --- /dev/null +++ b/docs/support/ghc-version-support.md @@ -0,0 +1,151 @@ +# GHC version support + +## Current GHC version support status + +The current support for different GHC versions is given in the following table. + +Last supporting HLS version: +- "next": this GHC version is supported in master, and will be in the next released version of HLS. +- "latest": this GHC version is one of the actively supported versions (see below) and is supported in the latest released version of HLS. +- specific version number: this GHC version is no longer one of the actively supported versions, and the last version of HLS which supports it is listed. 
+ +Support status (see the support policy below for more details): +- "initial support": support for this GHC version is underway, but it is not ready to be released yet +- "basic support": this GHC version is currently actively supported, and all [tier 1 plugins](./plugin-support.md) work +- "full support": this GHC version is currently actively supported, and most [tier 2 plugins](./plugin-support.md) work +- "deprecated": this GHC version was supported in the past, but is now deprecated + +| GHC version | Last supporting HLS version | Support status | +| ------------ | ------------------------------------------------------------------------------------ | -------------- | +| 9.12.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.10.2 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.10.1 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.8.4 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.8.2 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated | +| 9.8.1 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | deprecated | +| 9.6.7 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.6.6 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated | +| 9.6.5 | [2.9.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.1) | deprecated | +| 9.6.4 | [2.6.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.6.0.0) | deprecated | +| 9.6.3 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated | +| 9.6.2 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 9.6.1 | [2.0.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.0) | deprecated | +| 9.4.8 | [latest](https://github.com/haskell/haskell-language-server/releases/latest) | full support | +| 9.4.7 | [2.5.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.5.0.0) | deprecated | +| 9.4.6 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 9.4.5 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 9.4.4 | [1.10.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.10.0.0) | deprecated | +| 9.4.3 | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated | +| 9.4.(1,2) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | +| 9.2.8 | [2.9.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.9.0.0) | deprecated | +| 9.2.7 | [2.0.0.1](https://github.com/haskell/haskell-language-server/releases/tag/2.0.0.1) | deprecated | +| 
9.2.(5,6) | [1.9.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.9.1.0) | deprecated | +| 9.2.(3,4) | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | +| 9.2.(1,2) | [1.7.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.7.0.0) | deprecated | +| 9.0.2 | [2.4.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.4.0.0) | deprecated | +| 9.0.1 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated | +| 8.10.7 | [2.2.0.0](https://github.com/haskell/haskell-language-server/releases/tag/2.2.0.0) | deprecated | +| 8.10.6 | [1.6.1.0](https://github.com/haskell/haskell-language-server/releases/tag/1.6.1.0) | deprecated | +| 8.10.5 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/tag/1.5.1) | deprecated | +| 8.10.(4,3,2) | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated | +| 8.10.1 | [0.9.0](https://github.com/haskell/haskell-language-server/releases/tag/0.9.0) | deprecated | +| 8.8.4 | [1.8.0](https://github.com/haskell/haskell-language-server/releases/1.8.0) | deprecated | +| 8.8.3 | [1.5.1](https://github.com/haskell/haskell-language-server/releases/1.5.1) | deprecated | +| 8.8.2 | [1.2.0](https://github.com/haskell/haskell-language-server/releases/tag/1.2.0) | deprecated | +| 8.6.5 | [1.8.0.0](https://github.com/haskell/haskell-language-server/releases/tag/1.8.0.0) | deprecated | +| 8.6.4 | [1.4.0](https://github.com/haskell/haskell-language-server/releases/tag/1.4.0) | deprecated | + +GHC versions not in the list have never been supported by HLS. +LTS stands for [Stackage](https://www.stackage.org/) Long Term Support. + +The policy for when we deprecate support for versions of GHC is given below. +The table reflects that, but we may decide to deviate from it for good reasons. + +### Using deprecated GHC versions + +Users who want to use a GHC version which is not supported by the latest HLS can still use older versions of HLS (consult the version support table above to identify the appropriate HLS version). +In the future, we may extend the existing discovery mechanisms (`haskell-language-server-wrapper`, automatic download in vscode extension) to find and download older HLS binaries in this case. + +Users of a deprecated minor version (where the major version is still supported) can try building the latest HLS from source, which will likely still work, since the GHC API tends to remain compatible across minor versions. + +### Using GHC versions not yet supported in a HLS release + +Some users may wish to use a version of GHC that is not yet supported by a released version of HLS. +In particular, this means that pre-built binaries will not be available for that GHC version. + +The easiest thing to do in this case is to build HLS from source yourself. +This can be done easily with `ghcup`, see the examples for `ghcup compile` on the [installation page](../installation.md). 
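+
+For illustration, a source build with `ghcup` looks roughly like this (the version numbers are placeholders taken from the examples elsewhere in these docs; substitute the HLS release and the GHC version you actually need):
+
+```bash
+# Compile an HLS release from source against a specific GHC (illustrative versions)
+ghcup compile hls -v 2.9.0.0 --ghc 9.6.5
+```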
+
+Generally, if a version of GHC is supported by HLS on master _or_ is a new minor version of a GHC version that is supported by HLS on master, then compiling from source is likely to work.
+Major versions of GHC which are not supported by HLS on master are extremely unlikely to work.
+
+## GHC version deprecation policy
+
+### Base policy
+
+This is the static part of the policy that can be checked by a machine.
+
+#### Major versions
+
+HLS will support major versions of GHC until they are older than _both_
+
+1. The major version of GHC used in the current Stackage LTS; and
+2. The major version of GHC recommended by GHCup
+
+For example, if
+
+1. Stackage LTS uses GHC 9.2; and
+2. GHCup recommends GHC 9.4
+
+then HLS will support back to GHC 9.2.
+
+#### Minor versions
+
+For the latest supported major GHC version we will support at least 2 minor versions.
+
+For the rest of the supported major GHC versions, we will support at least the latest minor version in Stackage LTS (so 1 minor version).
+
+### Extended policy
+
+This is the part of the policy that needs evaluation by a human, possibly followed by a discussion.
+
+#### Ecosystem factors
+
+To establish and apply the policy we take the following ecosystem factors into account:
+
+- Support status of HLS
+- The most recent [stackage](https://www.stackage.org/) LTS snapshot
+- The GHC version recommended by GHCup
+- The GHC versions used in the most popular [linux distributions](https://repology.org/project/ghc/versions)
+- The reliability of different GHC versions on the major operating systems (Linux, Windows, macOS)
+- The [Haskell Survey results](https://taylor.fausak.me/2022/11/18/haskell-survey-results/#s2q4)
+
+### Supporting a GHC version beyond normal deprecation time
+
+In cases where the base policy demands a deprecation, but ecosystem factors suggest that the version is still widely used (e.g. in the last [Haskell Survey results](https://taylor.fausak.me/2022/11/18/haskell-survey-results/#s2q4)),
+the deprecation should be suspended for the next release and the situation re-evaluated for the release after that.
+
+When we decide to keep supporting an old version, we should track it as follows:
+
+1. open a ticket on the HLS issue tracker to discuss deprecating said GHC version
+   - explain why the GHC version wasn't deprecated (context)
+   - explain the maintenance burden it causes (reason)
+   - evaluate whether it impacts the next HLS release (impact)
+2. discuss whether ecosystem factors have changed
+   - e.g. if the Haskell Survey results show that 25% or more of users are still on the GHC version in question, then dropping should be avoided
+3. if dropping is still undesired, but the maintenance burden is high, then put out a call for help and contact the Haskell Foundation (HF) for additional funding to support this GHC version
+4. if no help or funding is received within 2 releases (e.g. 3-6 months), then drop the version regardless
+
+### Why deprecate older versions of GHC?
+
+`haskell-language-server` (HLS) is tightly tied to the GHC API. This imposes a high maintenance cost:
+
+- The codebase is littered with conditional logic
+- We own auxiliary packages to support older versions of GHC
+- CI has to cover all the supported versions
+
+So we need to limit GHC support to save maintainers' and contributors' time and to reduce CI resources.
+
+At the same time we aim to support the right balance of GHC versions to minimize the impact on users.
diff --git a/docs/support/index.rst b/docs/support/index.rst new file mode 100644 index 0000000000..1005e51b24 --- /dev/null +++ b/docs/support/index.rst @@ -0,0 +1,8 @@ +GHC and Plugin Support +====================== + +.. toctree:: + :maxdepth: 2 + + ghc-version-support + plugin-support diff --git a/docs/support/plugin-support.md b/docs/support/plugin-support.md new file mode 100644 index 0000000000..4263f0d035 --- /dev/null +++ b/docs/support/plugin-support.md @@ -0,0 +1,70 @@ +# Plugin support + +## Plugin support tiers + +Plugins vary in how well-supported they are, in particular how quickly they are updated to support new GHC versions. +This is important to keep track of because we want to release new versions of HLS for new GHC versions quickly, but also to present a consistent set of features. + +For this reason we group plugins into _support tiers_. + +**Tier 1** + +A tier 1 plugin is a plugin which we believe is so essential to the functioning of HLS that we should not release HLS unless the plugin is working for all supported GHC versions. + +Tier 1 plugins must be well-supported, or else we will be blocked from releasing HLS. +If we are not able to maintain tier 1 plugins, then we have a critical maintenance problem. +Consequently, few plugins should be considered tier 1. + +**Tier 2** + +A tier 2 plugin is a plugin which is important or well-enough maintained that we usually will not release HLS unless the plugin is working for all supported GHC versions. + +Tier 2 plugins should be well-supported enough to usually make the cut for HLS releases, but we will not hold a release for one. + +Tier 2 plugins provide a decent experience for users, since they can (mostly) rely on them being present in a release. +Hence, most plugins should ideally be tier 2. + +**Tier 3** + +A tier 3 plugin is anything else. + +Tier 3 plugins are maintained on a best-effort basis, often by irregular contributors. +A plugin may have to be tier 3 despite being well-maintained if it depends on a tool (e.g. a formatter) which is not itself reliably updated for new GHC versions. + +Since we cannot make any guarantees that a tier 3 plugin will be working, they provide a bad experience for users. +Hence a tier 3 plugin should ideally have some kind of plan for getting out of tier 3, either by getting the plugin to tier 2 or by sunsetting it. +For example, a plugin to provide a formatter which has itself been abandoned has no hope of reaching tier 2, but may be gracefully sunset by only being supported for old versions of GHC, and deleted once those exit our GHC support window. 
+
+## Current plugin support tiers
+
+| Plugin                               | Tier | Unsupported GHC versions |
+| ------------------------------------ | ---- | ------------------------ |
+| ghcide core plugins                  | 1    |                          |
+| `hls-call-hierarchy-plugin`          | 1    |                          |
+| `hls-code-range-plugin`              | 1    |                          |
+| `hls-explicit-imports-plugin`        | 1    |                          |
+| `hls-pragmas-plugin`                 | 1    |                          |
+| `hls-refactor-plugin`                | 2    |                          |
+| `hls-alternate-number-format-plugin` | 2    |                          |
+| `hls-cabal-fmt-plugin`               | 2    |                          |
+| `hls-cabal-gild-plugin`              | 2    |                          |
+| `hls-class-plugin`                   | 2    |                          |
+| `hls-change-type-signature-plugin`   | 2    |                          |
+| `hls-eval-plugin`                    | 2    |                          |
+| `hls-explicit-fixity-plugin`         | 2    |                          |
+| `hls-explicit-record-fields-plugin`  | 2    |                          |
+| `hls-fourmolu-plugin`                | 2    |                          |
+| `hls-gadt-plugin`                    | 2    |                          |
+| `hls-hlint-plugin`                   | 2    |                          |
+| `hls-module-name-plugin`             | 2    |                          |
+| `hls-notes-plugin`                   | 2    |                          |
+| `hls-qualify-imported-names-plugin`  | 2    |                          |
+| `hls-ormolu-plugin`                  | 2    |                          |
+| `hls-rename-plugin`                  | 2    |                          |
+| `hls-stylish-haskell-plugin`         | 2    |                          |
+| `hls-overloaded-record-dot-plugin`   | 2    |                          |
+| `hls-semantic-tokens-plugin`         | 2    |                          |
+| `hls-floskell-plugin`                | 3    | 9.10.1, 9.12.2           |
+| `hls-stan-plugin`                    | 3    | 9.12.2                   |
+| `hls-retrie-plugin`                  | 3    | 9.10.1, 9.12.2           |
+| `hls-splice-plugin`                  | 3    | 9.10.1, 9.12.2           |
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
new file mode 100644
index 0000000000..428fbe32f2
--- /dev/null
+++ b/docs/troubleshooting.md
@@ -0,0 +1,214 @@
+# Troubleshooting
+
+## Help, I have no idea what these words mean
+
+If you are new to the project, you may find it helpful to read the [introduction](./what-is-hls.md) page, which explains some of the terminology used on this page.
+
+## Getting help
+
+### Where to ask
+
+Many of the developers are active on [IRC](https://web.libera.chat/?channels=#haskell-language-server).
+However, the most direct way to get help is to open an [issue](https://github.com/haskell/haskell-language-server/issues).
+
+If you can diagnose whether a problem is with HLS or with the client that you are using, then it is helpful to open an issue in the appropriate repository.
+But this can be tricky, and if you're not sure then you can always open one in the HLS repository and we'll help you figure out what's going on.
+
+### What to include
+
+Please try to give us as much information as you can!
+In particular, the more you can diagnose the problem, the better.
+
+## Basic diagnosis steps
+
+This section lists a few basic diagnostic steps that are almost always helpful.
+
+Sometimes these checks may be enough to work out where the problem is.
+If not, refer to the sections below about diagnosing problems with the server and client, respectively.
+That will also require you to figure out whether you are looking at an issue with the server or the client.
+This can be tricky to work out: if in doubt, open an issue and we'll help you figure it out.
+ +Typical examples of client issues: + +- The wrong server binary is being launched +- Diagnostics are being shown in the wrong place + +Typical examples of server issues: + +- The server crashes on certain files +- A code action doesn't work the way it's supposed to + +Unclear examples: + +- Hover documentation looks wrong (the client might be rendering it wrong, or the server might be sending the wrong thing) +- Missing functionality (the client might not support it, or the server might not support it) + +### Finding your `haskell-language-server` binary + +Several of the diagnostic steps require you to run the actual `haskell-language-server` binary that is installed on your computer. + +Where the binary is will depend on how you installed HLS. +Consult the [installation](./installation.md) page for help. + +As usual, if you installed HLS with the wrapper, you will want to run `haskell-language-server-wrapper` instead. + +### Getting basic information about your installation + +Running `haskell-language-server --probe-tools` will produce useful information, such as the version of HLS that you are using. +Including this in issue reports is helpful. + +### Checking that the server is running + +If the server isn't running at all when you are editing a Haskell file in your project, then that suggests that the client is having difficulty launching it. +Often this means that the client is configured to run the wrong binary, or the correct one is not available in your `PATH`. + +The easiest way to check whether the server is running is to use an OS process monitor to see if there is a `haskell-language-server` process running. + +### Checking whether the client is connecting to the server + +If the server is running, you should see some kind of indicator in your client. +In some clients (e.g. `coc`) you may need to run a command to query the client's beliefs about the server state. +If the client doesn't seem to be connected despite the server running, this may indicate a bug in the client or HLS. + +### Checking whether the project is being built correctly by HLS + +HLS needs to build the project correctly, with the correct flags, and if it does not do so then very little is likely to work. +A typical symptom of this going wrong is "incorrect" compilation errors being sent to the client. + +If this is happening, then it usually indicates a problem in the server's configuration. + +### Checking whether basic functionality is working + +If everything otherwise seems to be fine, then it is useful to check whether basic functionality is working. +Hover documentation (at least including type signatures) is usually a good one to try. + +### Identifying specific files that cause problems + +If possible, identifying specific files that cause problems is helpful. +If you want to be really helpful, minimising the example can really speed up diagnosis. + +## Diagnosing problems with the server + +### Examining the server log + +Most clients will launch `haskell-language-server` with `--logfile` to make it write a log file. +Please consult the documentation for your client to find out where this is (or how to set it). + +The log will contain all the messages that are sent to the server and its responses. +This is helpful for low-level debugging: if you expect a certain action to happen, you can look in the log to see if the corresponding messages are sent, or if there are any errors. + +To get a more verbose log, you can also pass the `--debug` argument to the server. 
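+
+For example, a hand-run invocation that writes a verbose log to a file might look like this (the log path is arbitrary, and you may need to substitute `haskell-language-server-wrapper` or whichever binary your client actually launches):
+
+```bash
+# Run the server by hand with verbose logging written to a file
+haskell-language-server --debug --logfile /tmp/hls.log
+```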
+
+### Reproducing failures on the command-line
+
+The `haskell-language-server` binary can be run from the command line.
+
+If it is run with a specific file as an argument, it will try to load that file specifically.
+If it is run without a specific file, it will try to load all the files in the project.
+
+If you are having trouble loading one or many files in the editor, then testing it this way can help make the failure more obvious and reproducible.
+
+Running HLS from the command-line directly also provides an easy way to get the logs (with or without `--debug`).
+
+### Plugin-related issues
+
+Sometimes the issue is related to one of HLS's plugins.
+One strategy for diagnosing this is to simply disable all plugins, check whether the issue is gone, and then re-enable them selectively until the issue is reproduced again.
+
+There is a configuration JSON snippet which disables all plugins [here](https://github.com/haskell/haskell-language-server/issues/2151#issuecomment-911397030).
+
+### Clearing HLS's build cache
+
+HLS builds the dependencies of your project in a separate directory to avoid clashing with your normal build tools.
+Sometimes clearing this out can help if you have persistent build problems.
+The cache directory is at `$HOME/.cache/hie-bios`.
+You may be able to identify a specific subdirectory that relates to your project, but it should always be safe to delete the whole thing; at worst it will cause HLS to redo build work the next time it opens a project.
+
+## Diagnosing problems with the client
+
+The most important thing to do is to consult the client's documentation.
+Usually this will provide some information about troubleshooting.
+
+For example:
+
+- `lsp-mode` has a [troubleshooting page](https://emacs-lsp.github.io/lsp-mode/page/troubleshooting/)
+- The VSCode Haskell extension has a [troubleshooting section](https://github.com/haskell/vscode-haskell#investigating-and-reporting-problems)
+
+Many clients provide diagnostic information about an LSP session.
+In particular, look for a way to get the status of the server, the server stderr, or a log of the messages that the client has sent to the server.
+
+## Common issues
+
+### Wrong server binary being used
+
+HLS needs to be compiled against the same version of GHC as is used in the project.
+Normally, we ship binaries for multiple versions and `haskell-language-server-wrapper` selects the correct one.
+
+If you see an error about HLS being compiled with the wrong version of GHC, then you either need to install the correct one (if you installed it yourself), or there is something going wrong with the wrapper selecting the right HLS binary to launch.
+
+### Unsupported GHC version or missing binaries
+
+HLS does not support every GHC version - there are a lot of them!
+Please see the [supported versions page](./support/ghc-version-support.md) for more information, including what to do if you need binaries for a version that is not yet supported by an HLS release.
+
+### Missing server or build tools
+
+The most common client-related problem is the client simply not finding the server executable or the tools needed to load Haskell code (`ghc`, `cabal`, or `stack`). So make sure that you have the right `PATH` and that you have configured the client to look for the right executables.
+
+Usually this will be visible in the client's log.
+
+### Compilation failures
+
+Sometimes HLS will simply fail to do anything with a file, or give nonsensical error messages.
+The most common cause of this is that HLS is using the wrong `hie-bios` cradle to decide how to build the project (e.g., trying to use `stack` instead of `cabal`).
+The server log will show which cradle is being chosen.
+
+Using an explicit `hie.yaml` to configure the cradle can resolve the problem; see the [configuration page](./configuration.md#configuring-your-project-build).
+
+### Multi Cradle: No prefixes matched
+
+The error message `Multi Cradle: No prefixes matched` usually means that implicit configuration failed.
+In that case, you must use [explicit configuration](./configuration.md#configuring-your-project-build).
+
+### Static binaries
+
+Static binaries use the GHC linker for dynamically loading dependencies when typechecking Template Haskell code, and this can run into issues when loading shared objects linked against mismatching system libraries, or into GHC linker bugs (mainly the Mach linker used on macOS, but also potentially the ELF linker).
+Dynamically linked binaries (including `ghci`) use the system linker instead of the GHC linker and avoid both issues.
+
+The easiest way to obtain a dynamically linked HLS binary is to build HLS locally. With `cabal` this can be done as follows:
+
+```bash
+cabal update && cabal install :pkg:haskell-language-server
+```
+
+Or with `stack`:
+
+```bash
+stack install haskell-language-server
+```
+
+You can also leverage `ghcup compile hls`:
+
+```bash
+ghcup compile hls -v 2.9.0.0 --ghc 9.6.5
+```
+
+### Preprocessors
+
+HLS is [not yet](https://github.com/haskell/haskell-language-server/issues/176) able to find project preprocessors, which may result in `could not execute: ` errors.
+
+As a workaround, you can ensure the preprocessor is available in `PATH` (install globally with Stack or Cabal, provide it in `shell.nix`, etc.).
+
+Example with `tasty-discover`:
+
+```haskell
+{-# OPTIONS_GHC -F -pgmF tasty-discover #-}
+```
+
+This returns an error in HLS if `tasty-discover` is not in the path: `could not execute: tasty-discover`.
+
+### Problems with multi component support using stack
+
+Due to some limitations in the interaction between HLS and `stack`, there are [issues](https://github.com/haskell/haskell-language-server/issues/366) in projects with multiple components (i.e. a main library and executables, test suites or benchmarks):
+
+- The project has to be built successfully *before* loading it with HLS to get components other than the library to work.
+- Changes in the library are not automatically propagated to other components, especially in the presence of errors in the library. So you have to restart HLS in order for those components to be loaded correctly. The usual symptom is the editor showing errors like `Could not load module ...` or `Cannot satisfy -package ...`.
diff --git a/docs/what-is-hls.md b/docs/what-is-hls.md
new file mode 100644
index 0000000000..8b46076121
--- /dev/null
+++ b/docs/what-is-hls.md
@@ -0,0 +1,63 @@
+# What is the Haskell Language Server?
+
+The Haskell Language Server (HLS) is an implementation of a server (a "language server") for the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) (LSP).
+A language server talks to a client (typically an editor), which can ask the server to perform various operations, such as reporting errors or providing code completions.
+The advantage of this system is that clients and servers can interoperate more easily so long as they all speak the LSP protocol.
+In the case of HLS, that means that it can be used with many different editors, since editor support for the LSP protocol is now widespread.
+
+## Language Server Protocol
+
+### Servers and clients
+
+HLS is responsible for actually understanding your project and answering the questions that the client asks of it, such as: what completion items could go here? are there any errors in the project? and so on.
+HLS provides [many](./features.md) (but not all) of the features that the LSP protocol supports.
+
+But HLS only provides the server part of the setup.
+In order to actually use it you also need a client (editor).
+The client is responsible for managing your interaction with the server: launching it, dispatching commands to it, and displaying or implementing responses.
+Some clients will even install the server binaries for you!
+
+Common clients include:
+- VSCode (the reference implementation for an LSP client)
+- Emacs, with the `lsp-mode`+`lsp-haskell` or `eglot` packages
+- Vim/neovim, with the built-in LSP support or `coc.vim`
+- Kate
+- ... and more every day!
+
+### LSP terminology
+
+Here are a few pieces of jargon that you may come across in the HLS docs or when discussing problems:
+
+- *Code action*: A code action is a specific action triggered by a user on a particular region of code. Examples might include "add a type signature to this function".
+- *Code lens*: A pre-rendered edit or action shown in the body of the document itself, usually triggered with a click. An example is a function's inferred type signature, which is inserted into the code when clicked.
+- *Completion item*: An item that can be inserted into the text, including its metadata.
+- *Diagnostic*: Any information about the project that is shown in the editor, including errors, warnings, and hints from tools such as hlint.
+- *Semantic highlighting*: Special syntax highlighting performed by the server.
+- *Method*: An LSP method is a function in the LSP protocol that the client can invoke to perform some action, e.g. ask for completions at a point.
+
+## Haskell Language Server
+
+### HLS and its wrapper
+
+HLS is a binary that must be compiled with the same version of GHC as the project you are using it on.
+For this reason it is usually distributed as a _collection_ of binaries, along with a `haskell-language-server-wrapper` executable that selects the correct one based on which version of GHC it thinks you are using.
+
+In general you can use `haskell-language-server-wrapper` wherever you need to run `haskell-language-server`.
+
+### HLS plugins
+
+HLS has a plugin architecture, whereby individual pieces of functionality are provided by smaller packages that just do one thing.
+Plugins can also be disabled independently to allow users to customize the behaviour of HLS to their liking.
+
+These plugins all (currently) live in the HLS repository and are developed in tandem with the core HLS functionality.
+
+See the [configuration page](./configuration.md#generic-plugin-configuration) for more on configuring plugins.
+
+### hie-bios
+
+HLS needs to know how to build your Haskell project: what flags to pass, what packages to provide, etc.
+It gets this information from the build system used by your project (typically `cabal` or `stack`).
+The tool used to do this is called [`hie-bios`](https://github.com/haskell/hie-bios).
+`hie-bios` calls the strategy it uses to get compilation flags (e.g. "ask `cabal`") a "cradle".
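+
+As a rough sketch, one common way to pin the cradle explicitly is a minimal `hie.yaml` in the project root; it is written from the shell here, but the two YAML lines are the interesting part:
+
+```bash
+# Pin hie-bios to a cabal cradle for this project (minimal example)
+cat > hie.yaml <<'EOF'
+cradle:
+  cabal:
+EOF
+```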
+ +See the [configuration page](./configuration.md#configuring-your-project-build) for more on configuring cradles. diff --git a/exe/Arguments.hs b/exe/Arguments.hs deleted file mode 100644 index 81e388d3de..0000000000 --- a/exe/Arguments.hs +++ /dev/null @@ -1,96 +0,0 @@ --- Copyright (c) 2019 The DAML Authors. All rights reserved. --- SPDX-License-Identifier: Apache-2.0 -{-# LANGUAGE CPP #-} -- To get precise GHC version -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ViewPatterns #-} -{-# OPTIONS_GHC -Wno-dodgy-imports #-} -- GHC no longer exports def in GHC 8.6 and above - -module Arguments - ( Arguments(..) - , getArguments - , haskellLanguageServerVersion - ) where - -import Data.Version -import Development.GitRev -import Options.Applicative -import Paths_haskell_language_server -import System.Environment - --- --------------------------------------------------------------------- - -data Arguments = Arguments - {argLSP :: Bool - ,argsCwd :: Maybe FilePath - ,argFiles :: [FilePath] - ,argsVersion :: Bool - ,argsShakeProfiling :: Maybe FilePath - ,argsTesting :: Bool - ,argsExamplePlugin :: Bool - -- These next two are for compatibility with existing hie clients, allowing - -- them to just change the name of the exe and still work. - , argsDebugOn :: Bool - , argsLogFile :: Maybe String - , argsThreads :: Int - , argsProjectGhcVersion :: Bool - } deriving Show - -getArguments :: String -> IO Arguments -getArguments exeName = execParser opts - where - opts = info (arguments exeName <**> helper) - ( fullDesc - <> progDesc "Used as a test bed to check your IDE Client will work" - <> header (exeName ++ " - GHC Haskell LSP server")) - -arguments :: String -> Parser Arguments -arguments exeName = Arguments - <$> switch (long "lsp" <> help "Start talking to an LSP server") - <*> optional (strOption $ long "cwd" <> metavar "DIR" - <> help "Change to this directory") - <*> many (argument str (metavar "FILES/DIRS...")) - <*> switch (long "version" - <> help ("Show " ++ exeName ++ " and GHC versions")) - <*> optional (strOption $ long "shake-profiling" <> metavar "DIR" - <> help "Dump profiling reports to this directory") - <*> switch (long "test" - <> help "Enable additional lsp messages used by the testsuite") - <*> switch (long "example" - <> help "Include the Example Plugin. For Plugin devs only") - - <*> switch - ( long "debug" - <> short 'd' - <> help "Generate debug output" - ) - <*> optional (strOption - ( long "logfile" - <> short 'l' - <> metavar "LOGFILE" - <> help "File to log to, defaults to stdout" - )) - <*> option auto - (short 'j' - <> help "Number of threads (0: automatic)" - <> metavar "NUM" - <> value 0 - <> showDefault - ) - <*> switch (long "project-ghc-version" - <> help "Work out the project GHC version and print it") - --- --------------------------------------------------------------------- - -haskellLanguageServerVersion :: IO String -haskellLanguageServerVersion = do - path <- getExecutablePath - let gitHashSection = case $(gitHash) of - x | x == "UNKNOWN" -> "" - x -> " (GIT hash: " <> x <> ")" - return $ "haskell-language-server version: " <> showVersion version - <> " (GHC: " <> VERSION_ghc - <> ") (PATH: " <> path <> ")" - <> gitHashSection - diff --git a/exe/Main.hs b/exe/Main.hs index 54e2354e8e..5684c6f898 100644 --- a/exe/Main.hs +++ b/exe/Main.hs @@ -1,231 +1,117 @@ -- Copyright (c) 2019 The DAML Authors. All rights reserved. 
-- SPDX-License-Identifier: Apache-2.0 -{-# LANGUAGE CPP #-} -- To get precise GHC version -{-# LANGUAGE TemplateHaskell #-} -{-# OPTIONS_GHC -Wno-dodgy-imports #-} -- GHC no longer exports def in GHC 8.6 and above -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE ViewPatterns #-} - module Main(main) where -import Arguments -import Control.Concurrent.Extra -import Control.Monad.Extra -import Data.Default -import qualified Data.HashSet as HashSet -import Data.List.Extra -import qualified Data.Map.Strict as Map -import Data.Maybe -import qualified Data.Text as T -import qualified Data.Text.IO as T -import Development.IDE.Core.Debouncer -import Development.IDE.Core.FileStore -import Development.IDE.Core.OfInterest -import Development.IDE.Core.RuleTypes -import Development.IDE.Core.Rules -import Development.IDE.Core.Service -import Development.IDE.Core.Shake -import Development.IDE.LSP.LanguageServer -import Development.IDE.LSP.Protocol -import Development.IDE.Plugin -import Development.IDE.Session -import Development.IDE.Types.Diagnostics -import Development.IDE.Types.Location -import Development.IDE.Types.Logger -import Development.IDE.Types.Options -import HIE.Bios.Cradle -import qualified Language.Haskell.LSP.Core as LSP -import Ide.Logger -import Ide.Plugin -import Ide.Plugin.Config -import Ide.Types (IdePlugins, ipMap) -import Language.Haskell.LSP.Messages -import Language.Haskell.LSP.Types -import qualified System.Directory.Extra as IO -import System.Exit -import System.FilePath -import System.IO -import qualified System.Log.Logger as L -import System.Time.Extra - --- --------------------------------------------------------------------- --- ghcide partialhandlers -import Development.IDE.Plugin.CodeAction as CodeAction -import Development.IDE.Plugin.Completions as Completions -import Development.IDE.LSP.HoverDefinition as HoverDefinition - - -- haskell-language-server plugins -import Ide.Plugin.Eval as Eval -import Ide.Plugin.Example as Example -import Ide.Plugin.Example2 as Example2 -import Ide.Plugin.GhcIde as GhcIde -import Ide.Plugin.Floskell as Floskell -import Ide.Plugin.Ormolu as Ormolu -import Ide.Plugin.StylishHaskell as StylishHaskell -#if AGPL -import Ide.Plugin.Brittany as Brittany -#endif -import Ide.Plugin.Pragmas as Pragmas - - --- --------------------------------------------------------------------- - - - --- | The plugins configured for use in this instance of the language --- server. --- These can be freely added or removed to tailor the available --- features of the server. 
- -idePlugins :: Bool -> IdePlugins -idePlugins includeExamples = pluginDescToIdePlugins allPlugins - where - allPlugins = if includeExamples - then basePlugins ++ examplePlugins - else basePlugins - basePlugins = - [ - -- applyRefactDescriptor "applyrefact" - -- , haddockDescriptor "haddock" - -- , hareDescriptor "hare" - -- , hsimportDescriptor "hsimport" - -- , liquidDescriptor "liquid" - -- , packageDescriptor "package" - GhcIde.descriptor "ghcide" - , Pragmas.descriptor "pragmas" - , Floskell.descriptor "floskell" - -- , genericDescriptor "generic" - -- , ghcmodDescriptor "ghcmod" - , Ormolu.descriptor "ormolu" - , StylishHaskell.descriptor "stylish-haskell" -#if AGPL - , Brittany.descriptor "brittany" -#endif - , Eval.descriptor "eval" - ] - examplePlugins = - [Example.descriptor "eg" - ,Example2.descriptor "eg2" - -- ,hfaAlignDescriptor "hfaa" - ] - -ghcIdePlugins :: T.Text -> IdePlugins -> (Plugin Config, [T.Text]) -ghcIdePlugins pid ps = (asGhcIdePlugin ps, allLspCmdIds' pid ps) - --- --------------------------------------------------------------------- +import Control.Exception (displayException) +import Control.Monad.IO.Class (liftIO) +import Data.Bifunctor (first) +import Data.Function ((&)) +import Data.Functor ((<&>)) +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified HlsPlugins as Plugins +import Ide.Arguments (Arguments (..), + GhcideArguments (..), + getArguments) +import Ide.Logger (Doc, Priority (Error, Info), + Recorder, + WithPriority (WithPriority, priority), + cfilter, cmapWithPrio, + defaultLayoutOptions, + layoutPretty, logWith, + makeDefaultStderrRecorder, + renderStrict, withFileRecorder) +import qualified Ide.Logger as Logger +import Ide.Main (defaultMain) +import qualified Ide.Main as IdeMain +import Ide.PluginUtils (pluginDescToIdePlugins) +import Ide.Types (PluginDescriptor (pluginNotificationHandlers), + defaultPluginDescriptor, + mkPluginNotificationHandler) +import Language.LSP.Protocol.Message as LSP +import Language.LSP.Server as LSP +import Prettyprinter (Pretty (pretty), vcat, vsep) + +data Log + = LogIdeMain IdeMain.Log + | LogPlugins Plugins.Log + +instance Pretty Log where + pretty log = case log of + LogIdeMain ideMainLog -> pretty ideMainLog + LogPlugins pluginsLog -> pretty pluginsLog main :: IO () main = do - -- WARNING: If you write to stdout before runLanguageServer - -- then the language server will not work - args@Arguments{..} <- getArguments "haskell-language-server" - - hlsVer <- haskellLanguageServerVersion - if argsVersion then putStrLn hlsVer - else hPutStrLn stderr hlsVer {- see WARNING above -} - - LSP.setupLogger argsLogFile ["hls", "hie-bios"] - $ if argsDebugOn then L.DEBUG else L.INFO - - -- lock to avoid overlapping output on stdout - lock <- newLock - let logger p = Logger $ \pri msg -> when (pri >= p) $ withLock lock $ - T.putStrLn $ T.pack ("[" ++ upper (show pri) ++ "] ") <> msg - - whenJust argsCwd IO.setCurrentDirectory - - dir <- IO.getCurrentDirectory - - pid <- getPid - let - idePlugins' = idePlugins argsExamplePlugin - (ps, commandIds) = ghcIdePlugins pid idePlugins' - plugins = Completions.plugin <> CodeAction.plugin <> - Plugin mempty HoverDefinition.setHandlersDefinition <> - ps - options = def { LSP.executeCommandCommands = Just commandIds - , LSP.completionTriggerCharacters = Just "." - } - - if argLSP then do - t <- offsetTime - hPutStrLn stderr "Starting (haskell-language-server)LSP server..." 
- hPutStrLn stderr $ " with arguments: " <> show args - hPutStrLn stderr $ " with plugins: " <> show (Map.keys $ ipMap idePlugins') - hPutStrLn stderr $ " in directory: " <> dir - hPutStrLn stderr "If you are seeing this in a terminal, you probably should have run ghcide WITHOUT the --lsp option!" - runLanguageServer options (pluginHandler plugins) getInitialConfig getConfigFromNotification $ \getLspId event vfs caps wProg wIndefProg -> do - t <- t - hPutStrLn stderr $ "Started LSP server in " ++ showDuration t - sessionLoader <- loadSession dir - let options = (defaultIdeOptions sessionLoader) - { optReportProgress = clientSupportsProgress caps - , optShakeProfiling = argsShakeProfiling - , optTesting = IdeTesting argsTesting - , optThreads = argsThreads - } - debouncer <- newAsyncDebouncer - initialise caps (mainRule >> pluginRules plugins) - getLspId event wProg wIndefProg hlsLogger debouncer options vfs - else do - -- GHC produces messages with UTF8 in them, so make sure the terminal doesn't error - hSetEncoding stdout utf8 - hSetEncoding stderr utf8 - - putStrLn $ "(haskell-language-server)Ghcide setup tester in " ++ dir ++ "." - putStrLn "Report bugs at https://github.com/haskell/haskell-language-server/issues" - - putStrLn $ "\nStep 1/4: Finding files to test in " ++ dir - files <- expandFiles (argFiles ++ ["." | null argFiles]) - -- LSP works with absolute file paths, so try and behave similarly - files <- nubOrd <$> mapM IO.canonicalizePath files - putStrLn $ "Found " ++ show (length files) ++ " files" - - putStrLn "\nStep 2/4: Looking for hie.yaml files that control setup" - cradles <- mapM findCradle files - let ucradles = nubOrd cradles - let n = length ucradles - putStrLn $ "Found " ++ show n ++ " cradle" ++ ['s' | n /= 1] - putStrLn "\nStep 3/4: Initializing the IDE" - vfs <- makeVFSHandle - debouncer <- newAsyncDebouncer - let dummyWithProg _ _ f = f (const (pure ())) - sessionLoader <- loadSession dir - ide <- initialise def mainRule (pure $ IdInt 0) (showEvent lock) dummyWithProg (const (const id)) (logger Info) debouncer (defaultIdeOptions sessionLoader) vfs - - putStrLn "\nStep 4/4: Type checking the files" - setFilesOfInterest ide $ HashSet.fromList $ map toNormalizedFilePath' files - results <- runAction "User TypeCheck" ide $ uses TypeCheck (map toNormalizedFilePath' files) - let (worked, failed) = partition fst $ zip (map isJust results) files - when (failed /= []) $ - putStr $ unlines $ "Files that failed:" : map ((++) " * " . snd) failed - - let files xs = let n = length xs in if n == 1 then "1 file" else show n ++ " files" - putStrLn $ "\nCompleted (" ++ files worked ++ " worked, " ++ files failed ++ " failed)" - unless (null failed) (exitWith $ ExitFailure (length failed)) - -expandFiles :: [FilePath] -> IO [FilePath] -expandFiles = concatMapM $ \x -> do - b <- IO.doesFileExist x - if b then return [x] else do - let recurse "." = True - recurse x | "." `isPrefixOf` takeFileName x = False -- skip .git etc - recurse x = takeFileName x `notElem` ["dist","dist-newstyle"] -- cabal directories - files <- filter (\x -> takeExtension x `elem` [".hs",".lhs"]) <$> IO.listFilesInside (return . recurse) x - when (null files) $ - fail $ "Couldn't find any .hs/.lhs files inside directory: " ++ x - return files - --- | Print an LSP event. 
-showEvent :: Lock -> FromServerMessage -> IO () -showEvent _ (EventFileDiagnostics _ []) = return () -showEvent lock (EventFileDiagnostics (toNormalizedFilePath' -> file) diags) = - withLock lock $ T.putStrLn $ showDiagnosticsColored $ map (file,ShowDiag,) diags -showEvent lock e = withLock lock $ print e + stderrRecorder <- makeDefaultStderrRecorder Nothing + -- plugin cli commands use stderr logger for now unless we change the args + -- parser to get logging arguments first or do more complicated things + let pluginCliRecorder = cmapWithPrio pretty stderrRecorder + args <- getArguments "haskell-language-server" (Plugins.idePlugins (cmapWithPrio LogPlugins pluginCliRecorder)) + + -- Recorder that logs to the LSP client with logMessage + (lspLogRecorder, cb1) <- + Logger.withBacklog Logger.lspClientLogRecorder + <&> first (cmapWithPrio renderDoc) + -- Recorder that logs to the LSP client with showMessage + (lspMessageRecorder, cb2) <- + Logger.withBacklog Logger.lspClientMessageRecorder + <&> first (cmapWithPrio renderDoc) + -- Recorder that logs Error severity logs to the client with showMessage and some extra text + let lspErrorMessageRecorder = lspMessageRecorder + & cfilter (\WithPriority{ priority } -> priority >= Error) + & cmapWithPrio (\msg -> vsep + ["Error condition, please check your setup and/or the [issue tracker](" <> issueTrackerUrl <> "): " + , msg + ]) + -- This plugin just installs a handler for the `initialized` notification, which then + -- picks up the LSP environment and feeds it to our recorders + let lspRecorderPlugin = (defaultPluginDescriptor "LSPRecorderCallback" "Internal plugin") + { pluginNotificationHandlers = mkPluginNotificationHandler LSP.SMethod_Initialized $ \_ _ _ _ -> do + env <- LSP.getLspEnv + liftIO $ (cb1 <> cb2) env + } + + let (minPriority, logFilePath, logStderr, logClient) = + case args of + Ghcide GhcideArguments{ argsLogLevel, argsLogFile, argsLogStderr, argsLogClient} -> + (argsLogLevel, argsLogFile, argsLogStderr, argsLogClient) + _ -> (Info, Nothing, True, False) + + -- Adapter for withFileRecorder to handle the case where we don't want to log to a file + let withLogFileRecorder action = case logFilePath of + Just p -> withFileRecorder p Nothing $ \case + Left e -> do + let exceptionMessage = pretty $ displayException e + let message = vcat [exceptionMessage, "Couldn't open log file; not logging to it."] + logWith stderrRecorder Error message + action Nothing + Right r -> action (Just r) + Nothing -> action Nothing + + withLogFileRecorder $ \logFileRecorder -> do + let + lfr = logFileRecorder + ser = if logStderr then Just stderrRecorder else Nothing + lemr = Just lspErrorMessageRecorder + llr = if logClient then Just lspLogRecorder else Nothing + recorder :: Recorder (WithPriority Log) = + [lfr, ser, lemr, llr] + & catMaybes + & mconcat + & cmapWithPrio pretty + & cfilter (\WithPriority{ priority } -> priority >= minPriority) + plugins = Plugins.idePlugins (cmapWithPrio LogPlugins recorder) + + defaultMain + (cmapWithPrio LogIdeMain recorder) + args + (plugins <> pluginDescToIdePlugins [lspRecorderPlugin]) + +renderDoc :: Doc a -> Text +renderDoc d = renderStrict $ layoutPretty defaultLayoutOptions d + +issueTrackerUrl :: Doc a +issueTrackerUrl = "https://github.com/haskell/haskell-language-server/issues" diff --git a/exe/Wrapper.hs b/exe/Wrapper.hs index 703ceedaf3..2fd885ffb3 100644 --- a/exe/Wrapper.hs +++ b/exe/Wrapper.hs @@ -1,24 +1,62 @@ -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE 
DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} -- | This module is based on the hie-wrapper.sh script in -- https://github.com/alanz/vscode-hie-server module Main where -import Arguments -import Control.Monad.Extra -import Data.Foldable -import Data.List -import Data.Void -import HIE.Bios -import HIE.Bios.Environment -import HIE.Bios.Types -import Ide.Version -import System.Directory -import System.Environment -import System.Exit -import System.FilePath -import System.IO -import System.Info -import System.Process +import Control.Monad.Extra +import Data.Default +import Data.Foldable +import Data.List +import Data.List.Extra (trimEnd) +import Data.Void +import qualified Development.IDE.Session as Session +import qualified HIE.Bios.Environment as HieBios +import HIE.Bios.Types +import Ide.Arguments +import Ide.Version +import System.Directory +import System.Environment +import System.Exit +import System.FilePath +import System.Info +import System.IO +#ifndef mingw32_HOST_OS +import qualified Data.Map.Strict as Map +import System.Posix.Process (executeFile) +#else +import System.Process +#endif +import Control.Concurrent (tryPutMVar) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.IO.Unlift (MonadUnliftIO) +import Control.Monad.Trans.Except (ExceptT, runExceptT, + throwE) +import Data.Maybe +import qualified Data.Text as T +import qualified Data.Text.IO as T +import Development.IDE.LSP.LanguageServer (Setup (..), + runLanguageServer) +import qualified Development.IDE.Main as Main +import Ide.Logger (Doc, Pretty (pretty), + Recorder, WithPriority, + cmapWithPrio, + makeDefaultStderrRecorder) +import Ide.Plugin.Config (Config) +import Ide.Types (IdePlugins (IdePlugins)) +import Language.LSP.Protocol.Message (Method (Method_Initialize), + SMethod (SMethod_Exit, SMethod_WindowShowMessageRequest), + TRequestMessage, + TResponseError) +import Language.LSP.Protocol.Types (MessageActionItem (MessageActionItem), + MessageType (MessageType_Error), + ShowMessageRequestParams (ShowMessageRequestParams), + type (|?) 
(InL)) +import Language.LSP.Server (LspM) +import qualified Language.LSP.Server as LSP -- --------------------------------------------------------------------- @@ -26,18 +64,67 @@ main :: IO () main = do -- WARNING: If you write to stdout before runLanguageServer -- then the language server will not work - Arguments{..} <- getArguments "haskell-language-server-wrapper" + args <- getArguments "haskell-language-server-wrapper" mempty + + hlsVer <- haskellLanguageServerVersion + recorder <- makeDefaultStderrRecorder Nothing + case args of + ProbeToolsMode -> do + programsOfInterest <- findProgramVersions + putStrLn hlsVer + putStrLn "Tool versions found on the $PATH" + putStrLn $ showProgramVersionOfInterest programsOfInterest + putStrLn "Tool versions in your project" + cradle <- findProjectCradle' recorder False + runExceptT (getRuntimeGhcVersion' cradle) >>= \case + Left err -> + T.hPutStrLn stderr (prettyError err NoShorten) + Right ghcVersion -> + putStrLn $ showProgramVersion "ghc" $ mkVersion ghcVersion + + VersionMode PrintVersion -> + putStrLn hlsVer + + VersionMode PrintNumericVersion -> + putStrLn haskellLanguageServerNumericVersion + + BiosMode PrintCradleType -> + print =<< findProjectCradle recorder + PrintLibDir -> do + cradle <- findProjectCradle' recorder False + (CradleSuccess libdir) <- HieBios.getRuntimeGhcLibDir cradle + putStr libdir + _ -> launchHaskellLanguageServer recorder args >>= \case + Right () -> pure () + Left err -> do + T.hPutStrLn stderr (prettyError err NoShorten) + case args of + Ghcide (GhcideArguments { argsCommand = Main.LSP }) -> + launchErrorLSP recorder (prettyError err Shorten) + + _ -> exitFailure + +launchHaskellLanguageServer :: Recorder (WithPriority (Doc ())) -> Arguments -> IO (Either WrapperSetupError ()) +launchHaskellLanguageServer recorder parsedArgs = do + case parsedArgs of + Ghcide GhcideArguments{..} -> whenJust argsCwd setCurrentDirectory + _ -> pure () d <- getCurrentDirectory - -- Get the cabal directory from the cradle - cradle <- findLocalCradle (d "a") - setCurrentDirectory $ cradleRootDir cradle + -- search for the project cradle type + cradle <- findProjectCradle recorder - when argsProjectGhcVersion $ getRuntimeGhcVersion' cradle >>= putStrLn >> exitSuccess - when argsVersion $ haskellLanguageServerVersion >>= putStrLn >> exitSuccess + -- Get the root directory from the cradle + setCurrentDirectory $ cradleRootDir cradle - whenJust argsCwd setCurrentDirectory + case parsedArgs of + Ghcide GhcideArguments{..} -> + when argsProjectGhcVersion $ do + runExceptT (getRuntimeGhcVersion' cradle) >>= \case + Right ghcVersion -> putStrLn ghcVersion >> exitSuccess + Left err -> T.putStrLn (prettyError err NoShorten) >> exitFailure + _ -> pure () progName <- getProgName hPutStrLn stderr $ "Run entered for haskell-language-server-wrapper(" ++ progName ++ ") " @@ -48,77 +135,187 @@ main = do hPutStrLn stderr $ "Arguments: " ++ show args hPutStrLn stderr $ "Cradle directory: " ++ cradleRootDir cradle hPutStrLn stderr $ "Cradle type: " ++ show (actionName (cradleOptsProg cradle)) - + programsOfInterest <- findProgramVersions + hPutStrLn stderr "" + hPutStrLn stderr "Tool versions found on the $PATH" + hPutStrLn stderr $ showProgramVersionOfInterest programsOfInterest + hPutStrLn stderr "" -- Get the ghc version -- this might fail! - hPutStrLn stderr $ "Consulting the cradle to get project GHC version..." 
- ghcVersion <- getRuntimeGhcVersion' cradle - hPutStrLn stderr $ "Project GHC version: " ++ ghcVersion + hPutStrLn stderr "Consulting the cradle to get project GHC version..." + + runExceptT $ do + ghcVersion <- getRuntimeGhcVersion' cradle + liftIO $ hPutStrLn stderr $ "Project GHC version: " ++ ghcVersion + + let + hlsBin = "haskell-language-server-" ++ ghcVersion + candidates' = [hlsBin, "haskell-language-server"] + candidates = map (++ exeExtension) candidates' - let - hlsBin = "haskell-language-server-" ++ ghcVersion - backupHlsBin = - case dropWhileEnd (/='.') ghcVersion of - [] -> "haskell-language-server" - xs -> "haskell-language-server-" ++ init xs - candidates' = [hlsBin, backupHlsBin, "haskell-language-server"] - candidates = map (++ exeExtension) candidates' + liftIO $ hPutStrLn stderr $ "haskell-language-server exe candidates: " ++ show candidates - hPutStrLn stderr $ "haskell-language-server exe candidates: " ++ show candidates + mexes <- liftIO $ traverse findExecutable candidates - mexes <- traverse findExecutable candidates + case asum mexes of + Nothing -> throwE (NoLanguageServer ghcVersion candidates) + Just e -> do + liftIO $ hPutStrLn stderr $ "Launching haskell-language-server exe at:" ++ e - case asum mexes of - Nothing -> hPutStrLn stderr $ "Cannot find any haskell-language-server exe, looked for: " ++ intercalate ", " candidates - Just e -> do - hPutStrLn stderr $ "Launching haskell-language-server exe at:" ++ e - callProcess e args +#ifdef mingw32_HOST_OS + liftIO $ callProcess e args +#else + + let Cradle { cradleOptsProg = CradleAction { runGhcCmd } } = cradle + + let cradleName = actionName (cradleOptsProg cradle) + -- we need to be compatible with NoImplicitPrelude + ghcBinary <- liftIO (fmap trim <$> runGhcCmd ["-v0", "-package-env=-", "-ignore-dot-ghci", "-e", "Control.Monad.join (Control.Monad.fmap System.IO.putStr System.Environment.getExecutablePath)"]) + >>= cradleResult cradleName + + libdir <- liftIO (HieBios.getRuntimeGhcLibDir cradle) + >>= cradleResult cradleName + + env <- Map.fromList <$> liftIO getEnvironment + let newEnv = Map.insert "GHC_BIN" ghcBinary $ Map.insert "GHC_LIBDIR" libdir env + liftIO $ executeFile e True args (Just (Map.toList newEnv)) +#endif + + + +cradleResult :: ActionName Void -> CradleLoadResult a -> ExceptT WrapperSetupError IO a +cradleResult _ (CradleSuccess ver) = pure ver +cradleResult cradleName (CradleFail error) = throwE $ FailedToObtainGhcVersion cradleName error +cradleResult cradleName CradleNone = throwE $ NoneCradleGhcVersion cradleName -- | Version of 'getRuntimeGhcVersion' that dies if we can't get it, and also -- checks to see if the tool is missing if it is one of -getRuntimeGhcVersion' :: Show a => Cradle a -> IO String +getRuntimeGhcVersion' :: Cradle Void -> ExceptT WrapperSetupError IO String getRuntimeGhcVersion' cradle = do + let cradleName = actionName (cradleOptsProg cradle) -- See if the tool is installed - case actionName (cradleOptsProg cradle) of + case cradleName of Stack -> checkToolExists "stack" Cabal -> checkToolExists "cabal" Default -> checkToolExists "ghc" Direct -> checkToolExists "ghc" _ -> pure () - ghcVersionRes <- getRuntimeGhcVersion cradle - case ghcVersionRes of - CradleSuccess ver -> do - return ver - CradleFail error -> die $ "Failed to get project GHC version:" ++ show error - CradleNone -> die "Failed get project GHC version, since we have a none cradle" + ghcVersionRes <- liftIO $ HieBios.getRuntimeGhcVersion cradle + cradleResult cradleName ghcVersionRes + where 
checkToolExists exe = do - exists <- findExecutable exe + exists <- liftIO $ findExecutable exe case exists of Just _ -> pure () - Nothing -> - die $ "Cradle requires " ++ exe ++ " but couldn't find it" ++ "\n" - ++ show cradle - --- | Find the cradle that the given File belongs to. --- --- First looks for a "hie.yaml" file in the directory of the file --- or one of its parents. If this file is found, the cradle --- is read from the config. If this config does not comply to the "hie.yaml" --- specification, an error is raised. --- --- If no "hie.yaml" can be found, the implicit config is used. --- The implicit config uses different heuristics to determine the type --- of the project that may or may not be accurate. -findLocalCradle :: FilePath -> IO (Cradle Void) -findLocalCradle fp = do - cradleConf <- findCradle fp - crdl <- case cradleConf of - Just yaml -> do - hPutStrLn stderr $ "Found \"" ++ yaml ++ "\" for \"" ++ fp ++ "\"" - loadCradle yaml - Nothing -> loadImplicitCradle fp - hPutStrLn stderr $ "Module \"" ++ fp ++ "\" is loaded by Cradle: " ++ show crdl - return crdl + Nothing -> throwE $ ToolRequirementMissing exe (actionName (cradleOptsProg cradle)) + +findProjectCradle :: Recorder (WithPriority (Doc ())) -> IO (Cradle Void) +findProjectCradle recorder = findProjectCradle' recorder True + +findProjectCradle' :: Recorder (WithPriority (Doc ())) -> Bool -> IO (Cradle Void) +findProjectCradle' recorder log = do + d <- getCurrentDirectory + + let initialFp = d "a" + hieYaml <- Session.findCradle def initialFp + + -- Some log messages + when log $ + case hieYaml of + Just yaml -> hPutStrLn stderr $ "Found \"" ++ yaml ++ "\" for \"" ++ initialFp ++ "\"" + Nothing -> hPutStrLn stderr "No 'hie.yaml' found. Try to discover the project type!" + + Session.loadCradle def (cmapWithPrio pretty recorder) hieYaml d + +trim :: String -> String +trim s = case lines s of + [] -> s + ls -> trimEnd $ last ls + +data WrapperSetupError + = FailedToObtainGhcVersion (ActionName Void) CradleError + | NoneCradleGhcVersion (ActionName Void) + | NoLanguageServer String [FilePath] + | ToolRequirementMissing String (ActionName Void) + deriving (Show) + +data Shorten = Shorten | NoShorten + +-- | Pretty error message displayable to the future. +-- Extra argument 'Shorten' can be used to shorten error message. +-- Reduces usefulness, but allows us to show the error message via LSP +-- as LSP doesn't allow any newlines and makes it really hard to read +-- the message otherwise. +prettyError :: WrapperSetupError -> Shorten -> T.Text +prettyError (FailedToObtainGhcVersion name crdlError) shorten = + "Failed to find the GHC version of this " <> T.pack (show name) <> " project." <> + case shorten of + Shorten -> + "\n" <> T.pack (fromMaybe "" . listToMaybe $ cradleErrorStderr crdlError) + NoShorten -> + "\n" <> T.pack (intercalate "\n" (cradleErrorStderr crdlError)) +prettyError (NoneCradleGhcVersion name) _ = + "Failed to get the GHC version of this " <> T.pack (show name) <> + " project because a none cradle is configured" +prettyError (NoLanguageServer ghcVersion candidates) _ = + "Failed to find a HLS version for GHC " <> T.pack ghcVersion <> + "\nExecutable names we failed to find: " <> T.pack (intercalate "," candidates) +prettyError (ToolRequirementMissing toolExe name) _ = + "Failed to find executable \"" <> T.pack toolExe <> "\" in $PATH for this " <> T.pack (show name) <> " project." 
+ +newtype ErrorLSPM c a = ErrorLSPM { unErrorLSPM :: (LspM c) a } + deriving (Functor, Applicative, Monad, MonadIO, MonadUnliftIO, LSP.MonadLsp c) + +-- | Launches a LSP that displays an error and presents the user with a request +-- to shut down the LSP. +launchErrorLSP :: Recorder (WithPriority (Doc ())) -> T.Text -> IO () +launchErrorLSP recorder errorMsg = do + cwd <- getCurrentDirectory + let defaultArguments = Main.defaultArguments (cmapWithPrio pretty recorder) cwd (IdePlugins []) + + inH <- Main.argsHandleIn defaultArguments + + outH <- Main.argsHandleOut defaultArguments + + let parseConfig cfg _ = Right cfg + onConfigChange _ = pure () + + let setup clientMsgVar = do + -- Forcefully exit + let exit = void $ tryPutMVar clientMsgVar () + + let doInitialize :: LSP.LanguageContextEnv Config -> TRequestMessage Method_Initialize -> IO (Either (TResponseError Method_Initialize) (LSP.LanguageContextEnv Config, ())) + doInitialize env _ = do + + let restartTitle = "Try to restart" + void $ LSP.runLspT env $ LSP.sendRequest SMethod_WindowShowMessageRequest (ShowMessageRequestParams MessageType_Error errorMsg (Just [MessageActionItem restartTitle])) $ \case + Right (InL (MessageActionItem title)) + | title == restartTitle -> liftIO exit + _ -> pure () + + pure (Right (env, ())) + + let asyncHandlers = mconcat + [ exitHandler exit ] + + let interpretHandler (env, _st) = LSP.Iso (LSP.runLspT env . unErrorLSPM) liftIO + pure MkSetup + { doInitialize + , staticHandlers = asyncHandlers + , interpretHandler + , onExit = [exit] + } + + runLanguageServer (cmapWithPrio pretty recorder) + (Main.argsLspOptions defaultArguments) + inH + outH + (Main.argsDefaultHlsConfig defaultArguments) + parseConfig + onConfigChange + setup +exitHandler :: IO () -> LSP.Handlers (ErrorLSPM c) +exitHandler exit = LSP.notificationHandler SMethod_Exit $ const $ liftIO exit diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000000..352483a773 --- /dev/null +++ b/flake.lock @@ -0,0 +1,78 @@ +{ + "nodes": { + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1747046372, + "narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1748437873, + "narHash": "sha256-E2640ouB7VxooUQdCiDRo/rVXnr1ykgF9A7HrwWZVSo=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "c742ae7908a82c9bf23ce27bfca92a00e9bcd541", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "c742ae7908a82c9bf23ce27bfca92a00e9bcd541", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-compat": "flake-compat", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": 
{ + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000000..1002eb87b5 --- /dev/null +++ b/flake.nix @@ -0,0 +1,127 @@ +{ + description = "haskell-language-server development flake"; + + inputs = { + # Don't use nixpkgs-unstable as aarch64-darwin is currently broken there. + # Check again, when https://github.com/NixOS/nixpkgs/pull/414242 is resolved. + nixpkgs.url = "github:NixOS/nixpkgs/c742ae7908a82c9bf23ce27bfca92a00e9bcd541"; + flake-utils.url = "github:numtide/flake-utils"; + # For default.nix + flake-compat = { + url = "github:edolstra/flake-compat"; + flake = false; + }; + }; + + outputs = + { nixpkgs, flake-utils, ... }: + flake-utils.lib.eachSystem + [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin" ] + (system: + let + pkgs = import nixpkgs { + inherit system; + config = { allowBroken = true; }; + }; + + pythonWithPackages = pkgs.python3.withPackages (ps: + [ ps.docutils + ps.myst-parser + ps.pip + ps.sphinx + ps.sphinx_rtd_theme + ]); + + docs = pkgs.stdenv.mkDerivation { + name = "hls-docs"; + src = pkgs.lib.sourceFilesBySuffices ./. + [ ".py" ".rst" ".md" ".png" ".gif" ".svg" ".cabal" ]; + buildInputs = [ pythonWithPackages ]; + buildPhase = '' + cd docs + make --makefile=${./docs/Makefile} html BUILDDIR=$out + ''; + dontInstall = true; + }; + + # Support of GenChangelogs.hs + gen-hls-changelogs = hpkgs: with pkgs; + let myGHC = hpkgs.ghcWithPackages (p: with p; [ github ]); + in pkgs.runCommand "gen-hls-changelogs" { + passAsFile = [ "text" ]; + preferLocalBuild = true; + allowSubstitutes = false; + buildInputs = [ git myGHC ]; + } '' + dest=$out/bin/gen-hls-changelogs + mkdir -p $out/bin + echo "#!${runtimeShell}" >> $dest + echo "${myGHC}/bin/runghc ${./GenChangelogs.hs}" >> $dest + chmod +x $dest + ''; + + mkDevShell = hpkgs: with pkgs; mkShell { + name = "haskell-language-server-dev-ghc${hpkgs.ghc.version}"; + # For binary Haskell tools, we use the default Nixpkgs GHC version. + # This removes a rebuild with a different GHC version. The drawback of + # this approach is that our shell may pull two GHC versions in scope. + buildInputs = [ + # Compiler toolchain + hpkgs.ghc + hpkgs.haskell-language-server + pkgs.haskellPackages.cabal-install + # Dependencies needed to build some parts of Hackage + gmp zlib ncurses + # for compatibility of curl with provided gcc + curl + # Changelog tooling + (gen-hls-changelogs hpkgs) + # For the documentation + pythonWithPackages + (pkgs.haskell.lib.justStaticExecutables (pkgs.haskell.lib.dontCheck pkgs.haskellPackages.opentelemetry-extra)) + capstone + stylish-haskell + pre-commit + ] ++ lib.optionals (!stdenv.isDarwin) + [ # tracy has a build problem on macos. 
+ tracy + ] + ++ lib.optionals stdenv.isDarwin + (with darwin.apple_sdk.frameworks; [ + Cocoa + CoreServices + ]); + + shellHook = '' + # @guibou: I'm not sure theses lines are needed + export LD_LIBRARY_PATH=${gmp}/lib:${zlib}/lib:${ncurses}/lib:${capstone}/lib + export DYLD_LIBRARY_PATH=${gmp}/lib:${zlib}/lib:${ncurses}/lib:${capstone}/lib + export PATH=$PATH:$HOME/.local/bin + + # Install pre-commit hook + pre-commit install + ''; + }; + + in { + # Developement shell with only dev tools + devShells = { + default = mkDevShell pkgs.haskellPackages; + shell-ghc96 = mkDevShell pkgs.haskell.packages.ghc96; + shell-ghc98 = mkDevShell pkgs.haskell.packages.ghc98; + shell-ghc910 = mkDevShell pkgs.haskell.packages.ghc910; + shell-ghc912 = mkDevShell pkgs.haskell.packages.ghc912; + }; + + packages = { inherit docs; }; + }); + + nixConfig = { + extra-substituters = [ + "https://haskell-language-server.cachix.org" + ]; + extra-trusted-public-keys = [ + "haskell-language-server.cachix.org-1:juFfHrwkOxqIOZShtC4YC1uT1bBcq2RSvC7OMKx0Nz8=" + ]; + }; +} diff --git a/fmt.sh b/fmt.sh new file mode 100755 index 0000000000..1884d57e57 --- /dev/null +++ b/fmt.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +set -eou pipefail +curl -sSL https://raw.github.com/ndmitchell/hlint/master/misc/run.sh | sh -s ghcide/src ghcide/exe ghcide-bench/exe shake-bench/src ghcide/test/exe --with-group=extra --hint=.hlint.yaml diff --git a/ghcide b/ghcide deleted file mode 160000 index 7e895cfa53..0000000000 --- a/ghcide +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 7e895cfa53260b41996df707baec496a8f2c75dc diff --git a/ghcide-bench/LICENSE b/ghcide-bench/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/ghcide-bench/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ghcide-bench/README.md b/ghcide-bench/README.md new file mode 100644 index 0000000000..f815635157 --- /dev/null +++ b/ghcide-bench/README.md @@ -0,0 +1,61 @@ +A benchmark suite for measuring various performance-related metrics on ghcide and HLS. 
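
For orientation, an invocation might look like the sketch below. This is illustrative only: the flag names come from the Usage text in this README and from the option parser in `src/Experiments.hs`, while the binary path, example folder, example name, module, and experiment selection are placeholders to adjust for your setup.

```bash
# Run the "hover" experiment with 50 samples against a local example package,
# writing raw measurements to results.csv (the default --csv target).
ghcide-bench \
  --ghcide /path/to/haskell-language-server \
  --example-path ./path/to/example-package \
  --example-name example \
  --example-module src/Some/Module.hs \
  --select hover \
  --samples 50
```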
+ +## Usage + +Run with `cabal ghcide bench`, point it to a `haskell-language-server` or `ghcide` binary, specify: +- the experiment to run, from the ones defined in `src/Experiments.hs`, +- the example codebase (either a local folder or a Hackage package), +- one or more module paths to run the experiment on, +- the number of samples, +- any extra command line options to pass to the binary, + +``` +Usage: ghcide-bench [(-v|--verbose) | (-q|--quiet)] [--shake-profiling PATH] + [--ot-profiling DIR] [--csv PATH] [--stack] + [--ghcide-options ARG] [-s|--select ARG] [--samples NAT] + [--ghcide PATH] [--timeout ARG] + [[--example-package-name ARG] + [--example-package-version ARG] + [(--example-module PATH)] | + --example-path ARG (--example-module PATH)] [--lsp-config] + [--no-clean] + +Available options: + --ot-profiling DIR Enable OpenTelemetry and write eventlog for each + benchmark in DIR + --stack Use stack (by default cabal is used) + --ghcide-options ARG additional options for ghcide + -s,--select ARG select which benchmarks to run + --samples NAT override sampling count + --ghcide PATH path to ghcide + --timeout ARG timeout for waiting for a ghcide response + --lsp-config Read an LSP config payload from standard input + -h,--help Show this help text +``` + +## Experiments + +Experiments are LSP sessions defined using the `lsp-test` DSL that run on one or +more modules. + +Currently the following experiments are defined: +- *edit*: makes an edit and waits for re-typechecking +- *hover*: asks for hover on an identifier +- *getDefinition*: asks for the definitions of an identifier +- *documentsymbols* +- *completions*: asks for completions on an identifier position +- *code actions*: makes an edit that breaks typechecking and asks for code actions +- *hole fit suggestions*: measures the performance of hole fits +- *X after edit*: combines the *edit* and X experiments +- *X after cradle edit*: combines the X experiments with an edit to the `hie.yaml` file + +One can define additional experiments easily, for e.g. formatting, code lenses, renames, etc. +Experiments are defined in the `src/Experiments.hs` module. + +### Positions +`ghcide-bench` will analyze the modules prior to running the experiments, +and try to identify the following designated source locations in the module: + +- *stringLiteralP*: a location that can be mutated without generating a diagnostic, +- *identifierP*: a location with an identifier that is not locally defined in the module. +- *docP*: a location containing a comment diff --git a/ghcide-bench/exe/Main.hs b/ghcide-bench/exe/Main.hs new file mode 100644 index 0000000000..e04b15f713 --- /dev/null +++ b/ghcide-bench/exe/Main.hs @@ -0,0 +1,59 @@ +{- An automated benchmark built around the simple experiment described in: + + > https://neilmitchell.blogspot.com/2020/05/fixing-space-leaks-in-ghcide.html + + As an example project, it unpacks Cabal-3.2.0.0 in the local filesystem and + loads the module 'Distribution.Simple'. The rationale for this choice is: + + - It's convenient to download with `cabal unpack Cabal-3.2.0.0` + - It has very few dependencies, and all are already needed to build ghcide + - Distribution.Simple has 235 transitive module dependencies, so non trivial + + The experiments are sequences of lsp commands scripted using lsp-test. + A more refined approach would be to record and replay real IDE interactions, + once the replay functionality is available in lsp-test. 
+ A more declarative approach would be to reuse ide-debug-driver: + + > https://github.com/digital-asset/daml/blob/master/compiler/damlc/ide-debug-driver/README.md + + The result of an experiment is a total duration in seconds after a preset + number of iterations. There is ample room for improvement: + - Statistical analysis to detect outliers and auto infer the number of iterations needed + - GC stats analysis (currently -S is printed as part of the experiment) + - Analysis of performance over the commit history of the project + + How to run: + 1. `cabal exec cabal run ghcide-bench -- -- ghcide-bench-options` + 1. `stack build ghcide:ghcide-bench && stack exec ghcide-bench -- -- ghcide-bench-options` + + Note that the package database influences the response times of certain actions, + e.g. code actions, and therefore the two methods above do not necessarily + produce the same results. + + -} + +{-# LANGUAGE ImplicitParams #-} + +import Control.Exception.Safe +import Control.Monad +import Experiments +import Options.Applicative +import System.IO + +optsP :: Parser (Config, Bool) +optsP = (,) <$> configP <*> switch (long "no-clean") + +main :: IO () +main = do + hSetBuffering stdout LineBuffering + hSetBuffering stderr LineBuffering + (config, noClean) <- execParser $ info (optsP <**> helper) fullDesc + let ?config = config + + hPrint stderr config + + output "starting test" + + SetupResult{..} <- setup + + runBenchmarks experiments `finally` unless noClean cleanUp diff --git a/ghcide-bench/src/Experiments.hs b/ghcide-bench/src/Experiments.hs new file mode 100644 index 0000000000..c53ffd0a7c --- /dev/null +++ b/ghcide-bench/src/Experiments.hs @@ -0,0 +1,879 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE ImplicitParams #-} +{-# LANGUAGE ImpredicativeTypes #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deprecations -Wno-unticked-promoted-constructors #-} + +module Experiments +( Bench(..) +, BenchRun(..) +, Config(..) +, Verbosity(..) +, CabalStack(..) +, SetupResult(..) +, Example(..) 
+, experiments +, configP +, defConfig +, output +, setup +, runBench +, exampleToOptions +) where +import Control.Applicative.Combinators (skipManyTill) +import Control.Concurrent.Async (withAsync) +import Control.Exception.Safe (IOException, handleAny, + try) +import Control.Lens (_Just, (&), (.~), (^.), + (^?)) +import Control.Lens.Extras (is) +import Control.Monad.Extra (allM, forM, forM_, forever, + unless, void, when, + whenJust, (&&^)) +import Control.Monad.IO.Class +import Data.Aeson (Value (Null), + eitherDecodeStrict', + toJSON) +import qualified Data.Aeson as A +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as BSL +import Data.Either (fromRight) +import Data.List +import Data.Maybe +import Data.Proxy +import Data.Text (Text) +import qualified Data.Text as T +import Data.Version +import Development.IDE.Plugin.Test +import Development.IDE.Test.Diagnostic +import Development.Shake (CmdOption (Cwd), cmd_) +import Experiments.Types +import Language.LSP.Protocol.Capabilities +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding (Null, + SemanticTokenAbsolute (..)) +import qualified Language.LSP.Protocol.Types as LSP +import Language.LSP.Test +import Numeric.Natural +import Options.Applicative +import System.Directory +import System.Environment.Blank (getEnv) +import System.FilePath ((<.>), ()) +import System.IO +import System.Process +import System.Time.Extra +import Text.ParserCombinators.ReadP (readP_to_S) +import Text.Printf + +charEdit :: Position -> TextDocumentContentChangeEvent +charEdit p = + TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range p p + , _rangeLength = Nothing + , _text = "a" + } + +headerEdit :: TextDocumentContentChangeEvent +headerEdit = + TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 0 0) (Position 0 0) + , _rangeLength = Nothing + , _text = "-- header comment \n" + } + +data DocumentPositions = DocumentPositions { + -- | A position that can be used to generate non null goto-def and completion responses + identifierP :: Maybe Position, + -- | A position that can be modified without generating a new diagnostic + stringLiteralP :: !Position, + -- | The document containing the above positions + doc :: !TextDocumentIdentifier +} + +allWithIdentifierPos :: MonadFail m => (DocumentPositions -> m Bool) -> [DocumentPositions] -> m Bool +allWithIdentifierPos f docs = case applicableDocs of + -- fail if there are no documents to benchmark + [] -> fail "None of the example modules have identifier positions" + docs' -> allM f docs' + where + applicableDocs = filter (isJust . identifierP) docs + +experiments :: HasConfig => [Bench] +experiments = + [ + bench "semanticTokens" $ \docs -> do + liftIO $ putStrLn "Starting semanticTokens" + r <- forM docs $ \DocumentPositions{..} -> do + changeDoc doc [charEdit stringLiteralP] + waitForProgressStart + waitForProgressDone + tks <- getSemanticTokens doc + case tks ^? 
LSP._L of + Just _ -> return True + Nothing -> return False + return $ and r, + --------------------------------------------------------------------------------------- + bench "hover" $ allWithIdentifierPos $ \DocumentPositions{..} -> + isJust <$> getHover doc (fromJust identifierP), + --------------------------------------------------------------------------------------- + bench "hover after edit" $ \docs -> do + forM_ docs $ \DocumentPositions{..} -> + changeDoc doc [charEdit stringLiteralP] + flip allWithIdentifierPos docs $ \DocumentPositions{..} -> + isJust <$> getHover doc (fromJust identifierP), + --------------------------------------------------------------------------------------- + bench + "hover after cradle edit" + (\docs -> do + hieYamlUri <- getDocUri "hie.yaml" + liftIO $ appendFile (fromJust $ uriToFilePath hieYamlUri) "##\n" + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams + [ FileEvent hieYamlUri FileChangeType_Changed ] + flip allWithIdentifierPos docs $ \DocumentPositions{..} -> isJust <$> getHover doc (fromJust identifierP) + ), + --------------------------------------------------------------------------------------- + bench "edit" $ \docs -> do + forM_ docs $ \DocumentPositions{..} -> do + changeDoc doc [charEdit stringLiteralP] + -- wait for a fresh build start + waitForProgressStart + -- wait for the build to be finished + output "edit: waitForProgressDone" + waitForProgressDone + return True, + --------------------------------------------------------------------------------------- + bench "edit-header" $ \docs -> do + forM_ docs $ \DocumentPositions{..} -> do + changeDoc doc [headerEdit] + -- wait for a fresh build start + waitForProgressStart + -- wait for the build to be finished + output "edit: waitForProgressDone" + waitForProgressDone + return True, + --------------------------------------------------------------------------------------- + bench "getDefinition" $ allWithIdentifierPos $ \DocumentPositions{..} -> + hasDefinitions <$> getDefinitions doc (fromJust identifierP), + --------------------------------------------------------------------------------------- + bench "getDefinition after edit" $ \docs -> do + forM_ docs $ \DocumentPositions{..} -> + changeDoc doc [charEdit stringLiteralP] + flip allWithIdentifierPos docs $ \DocumentPositions{..} -> + hasDefinitions <$> getDefinitions doc (fromJust identifierP), + --------------------------------------------------------------------------------------- + bench "documentSymbols" $ allM $ \DocumentPositions{..} -> do + fmap (either (not . null) (not . null)) . getDocumentSymbols $ doc, + --------------------------------------------------------------------------------------- + bench "documentSymbols after edit" $ \docs -> do + forM_ docs $ \DocumentPositions{..} -> + changeDoc doc [charEdit stringLiteralP] + flip allM docs $ \DocumentPositions{..} -> + either (not . null) (not . null) <$> getDocumentSymbols doc, + --------------------------------------------------------------------------------------- + bench "completions" $ \docs -> do + flip allWithIdentifierPos docs $ \DocumentPositions{..} -> + not . null <$> getCompletions doc (fromJust identifierP), + --------------------------------------------------------------------------------------- + bench "completions after edit" $ \docs -> do + forM_ docs $ \DocumentPositions{..} -> + changeDoc doc [charEdit stringLiteralP] + flip allWithIdentifierPos docs $ \DocumentPositions{..} -> + not . 
null <$> getCompletions doc (fromJust identifierP), + --------------------------------------------------------------------------------------- + bench + "code actions" + ( \docs -> do + unless (any (isJust . identifierP) docs) $ + error "None of the example modules is suitable for this experiment" + not . null . catMaybes <$> forM docs (\DocumentPositions{..} -> do + forM identifierP $ \p -> + getCodeActions doc (Range p p)) + ), + --------------------------------------------------------------------------------------- + bench + "code actions after edit" + ( \docs -> do + unless (any (isJust . identifierP) docs) $ + error "None of the example modules is suitable for this experiment" + forM_ docs $ \DocumentPositions{..} -> do + changeDoc doc [charEdit stringLiteralP] + waitForProgressStart + waitForProgressDone + not . null . catMaybes <$> forM docs (\DocumentPositions{..} -> do + forM identifierP $ \p -> + getCodeActions doc (Range p p)) + ), + --------------------------------------------------------------------------------------- + bench + "code actions after cradle edit" + ( \docs -> do + hieYamlUri <- getDocUri "hie.yaml" + liftIO $ appendFile (fromJust $ uriToFilePath hieYamlUri) "##\n" + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams + [ FileEvent hieYamlUri FileChangeType_Changed ] + waitForProgressStart + waitForProgressStart + waitForProgressStart -- the Session logic restarts a second time + waitForProgressDone + not . all null . catMaybes <$> forM docs (\DocumentPositions{..} -> do + forM identifierP $ \p -> + getCodeActions doc (Range p p)) + ), + --------------------------------------------------------------------------------------- + bench + "code lens" + ( \docs -> not . null <$> forM docs (\DocumentPositions{..} -> + getCodeLenses doc) + ), + --------------------------------------------------------------------------------------- + bench + "code lens after edit" + ( \docs -> do + forM_ docs $ \DocumentPositions{..} -> do + changeDoc doc [charEdit stringLiteralP] + waitForProgressStart + waitForProgressDone + not . null <$> forM docs (\DocumentPositions{..} -> do + getCodeLenses doc) + ), + --------------------------------------------------------------------------------------- + benchWithSetup + "hole fit suggestions" + ( mapM_ $ \DocumentPositions{..} -> do + let edit = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range bottom bottom + , _rangeLength = Nothing + , _text = t + } + bottom = Position maxBound 0 + t = T.unlines + ["" + ,"holef :: [Int] -> [Int]" + ,"holef = _" + ,"" + ,"holeg :: [()] -> [()]" + ,"holeg = _" + ] + changeDoc doc [edit] + ) + (\docs -> do + forM_ docs $ \DocumentPositions{..} -> + changeDoc doc [charEdit stringLiteralP] + void waitForDiagnostics + waitForProgressDone + flip allM docs $ \DocumentPositions{..} -> do + bottom <- pred . length . 
T.lines <$> documentContents doc + diags <- getCurrentDiagnostics doc + case requireDiagnostic diags (DiagnosticSeverity_Error, (fromIntegral bottom, 8), "Found hole", Just "GHC-88464", Nothing) of + Nothing -> pure True + Just _err -> pure False + ), + --------------------------------------------------------------------------------------- + benchWithSetup + "eval execute single-line code lens" + ( mapM_ $ \DocumentPositions{..} -> do + let edit = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range bottom bottom + , _rangeLength = Nothing + , _text = t + } + bottom = Position maxBound 0 + t = T.unlines + [ "" + , "-- >>> 1 + 2" + ] + changeDoc doc [edit] + ) + ( \docs -> do + not . null <$> forM docs (\DocumentPositions{..} -> do + lenses <- getCodeLenses doc + forM_ lenses $ \case + CodeLens { _command = Just cmd } -> do + executeCommand cmd + waitForProgressStart + waitForProgressDone + _ -> return () + ) + ), + --------------------------------------------------------------------------------------- + benchWithSetup + "eval execute multi-line code lens" + ( mapM_ $ \DocumentPositions{..} -> do + let edit = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range bottom bottom + , _rangeLength = Nothing + , _text = t + } + bottom = Position maxBound 0 + t = T.unlines + [ "" + , "data T = A | B | C | D" + , " deriving (Show, Eq, Ord, Bounded, Enum)" + , "" + , "{-" + , ">>> import Data.List (nub)" + , ">>> xs = ([minBound..maxBound] ++ [minBound..maxBound] :: [T])" + , ">>> nub xs" + , "-}" + ] + changeDoc doc [edit] + ) + ( \docs -> do + not . null <$> forM docs (\DocumentPositions{..} -> do + lenses <- getCodeLenses doc + forM_ lenses $ \case + CodeLens { _command = Just cmd } -> do + executeCommand cmd + waitForProgressStart + waitForProgressDone + _ -> return () + ) + ) + ] + where hasDefinitions (InL (Definition (InL _))) = True + hasDefinitions (InL (Definition (InR ls))) = not $ null ls + hasDefinitions (InR (InL ds)) = not $ null ds + hasDefinitions (InR (InR LSP.Null)) = False +--------------------------------------------------------------------------------------------- + +examplesPath :: FilePath +examplesPath = "bench/example" + +defConfig :: Config +Success defConfig = execParserPure defaultPrefs (info configP fullDesc) [] + +quiet, verbose :: Config -> Bool +verbose = (== All) . verbosity +quiet = (== Quiet) . 
verbosity + +type HasConfig = (?config :: Config) + +configP :: Parser Config +configP = + Config + <$> (flag' All (short 'v' <> long "verbose") + <|> flag' Quiet (short 'q' <> long "quiet") + <|> pure Normal + ) + <*> optional (strOption (long "shake-profiling" <> metavar "PATH")) + <*> optional (strOption (long "ot-profiling" <> metavar "DIR" <> help "Enable OpenTelemetry and write eventlog for each benchmark in DIR")) + <*> strOption (long "csv" <> metavar "PATH" <> value "results.csv" <> showDefault) + <*> flag Cabal Stack (long "stack" <> help "Use stack (by default cabal is used)") + <*> many (strOption (long "ghcide-options" <> help "additional options for ghcide")) + <*> many (strOption (short 's' <> long "select" <> help "select which benchmarks to run")) + <*> optional (option auto (long "samples" <> metavar "NAT" <> help "override sampling count")) + <*> strOption (long "ghcide" <> metavar "PATH" <> help "path to ghcide" <> value "ghcide") + <*> option auto (long "timeout" <> value 60 <> help "timeout for waiting for a ghcide response") + <*> ( Example + <$> exampleName + <*> (ExampleHackage <$> packageP) + <*> (some moduleOption <|> pure ["src/Distribution/Simple.hs"]) + <*> pure [] + <|> Example + <$> exampleName + <*> pathOrScriptP + <*> some moduleOption + <*> pure []) + <*> switch (long "lsp-config" <> help "Read an LSP config payload from standard input") + where + moduleOption = strOption (long "example-module" <> metavar "PATH") + exampleName = strOption (long "example-name" <> metavar "NAME") + + packageP = ExamplePackage + <$> strOption (long "example-package-name" <> value "Cabal") + <*> option versionP (long "example-package-version" <> value (makeVersion [3,6,0,0])) + pathOrScriptP = ExamplePath <$> strOption (long "example-path") + <|> ExampleScript <$> strOption (long "example-script") <*> many (strOption (long "example-script-args" <> help "arguments for the example generation script")) + +versionP :: ReadM Version +versionP = maybeReader $ extract . readP_to_S parseVersion + where + extract parses = listToMaybe [ res | (res,"") <- parses] + +output :: (MonadIO m, HasConfig) => String -> m () +output = if quiet ?config then (\_ -> pure ()) else liftIO . putStrLn + +--------------------------------------------------------------------------------------- + +type Experiment = [DocumentPositions] -> Session Bool + +data Bench = + Bench + { name :: !String, + enabled :: !Bool, + samples :: !Natural, + benchSetup :: [DocumentPositions] -> Session (), + experiment :: Experiment + } + +select :: HasConfig => Bench -> Bool +select Bench {name, enabled} = + enabled && (null mm || name `elem` mm) + where + mm = matches ?config + +benchWithSetup :: + String -> + ([DocumentPositions] -> Session ()) -> + Experiment -> + Bench +benchWithSetup name benchSetup experiment = Bench {..} + where + enabled = True + samples = 100 + +bench :: String -> Experiment -> Bench +bench name = benchWithSetup name (const $ pure ()) + +runBenchmarksFun :: HasConfig => FilePath -> [Bench] -> IO () +runBenchmarksFun dir allBenchmarks = do + let benchmarks = [ b{samples = fromMaybe 100 (repetitions ?config) } + | b <- allBenchmarks + , select b ] + + whenJust (otMemoryProfiling ?config) $ \eventlogDir -> + createDirectoryIfMissing True eventlogDir + + lspConfig <- if Experiments.Types.lspConfig ?config + then either error id . 
eitherDecodeStrict' <$> BS.getContents + else return mempty + + let conf = defaultConfig + { logStdErr = verbose ?config, + logMessages = verbose ?config, + logColor = False, + Language.LSP.Test.lspConfig = lspConfig, + messageTimeout = timeoutLsp ?config + } + results <- forM benchmarks $ \b@Bench{name} -> do + let p = (proc (ghcide ?config) (allArgs name dir)) + { std_in = CreatePipe, std_out = CreatePipe, std_err = CreatePipe } + run sess = withCreateProcess p $ \(Just inH) (Just outH) (Just errH) pH -> do + -- Need to continuously consume to stderr else it gets blocked + -- Can't pass NoStream either to std_err + hSetBuffering errH NoBuffering + hSetBinaryMode errH True + let errSinkThread = + forever $ hGetLine errH >>= when (verbose ?config). putStrLn + withAsync errSinkThread $ \_ -> do + runSessionWithHandles' (Just pH) inH outH conf lspTestCaps dir sess + (b,) <$> runBench run b + + -- output raw data as CSV + let headers = + [ "name" + , "success" + , "samples" + , "startup" + , "setup" + , "userT" + , "delayedT" + , "1stBuildT" + , "avgPerRespT" + , "totalT" + , "rulesBuilt" + , "rulesChanged" + , "rulesVisited" + , "rulesTotal" + , "ruleEdges" + , "ghcRebuilds" + ] + rows = + [ [ name, + show success, + show samples, + showMs startup, + showMs runSetup', + showMs userWaits, + showMs delayedWork, + showMs $ firstResponse+firstResponseDelayed, + -- Exclude first response as it has a lot of setup time included + -- Assume that number of requests = number of modules * number of samples + showMs ((userWaits - firstResponse)/((fromIntegral samples - 1)*modules)), + showMs runExperiment, + show rulesBuilt, + show rulesChanged, + show rulesVisited, + show rulesTotal, + show edgesTotal, + show rebuildsTotal + ] + | (Bench {name, samples}, BenchRun {..}) <- results, + let runSetup' = if runSetup < 0.01 then 0 else runSetup + modules = fromIntegral $ length $ exampleModules $ example ?config + ] + csv = unlines $ map (intercalate ", ") (headers : rows) + writeFile (outputCSV ?config) csv + + -- print a nice table + let pads = map (maximum . map length) (transpose (headers : rowsHuman)) + paddedHeaders = zipWith pad pads headers + outputRow = putStrLn . intercalate " | " + rowsHuman = + [ [ name, + show success, + show samples, + showDuration startup, + showDuration runSetup', + showDuration userWaits, + showDuration delayedWork, + showDuration firstResponse, + showDuration runExperiment, + show rulesBuilt, + show rulesChanged, + show rulesVisited, + show rulesTotal, + show edgesTotal, + show rebuildsTotal + ] + | (Bench {name, samples}, BenchRun {..}) <- results, + let runSetup' = if runSetup < 0.01 then 0 else runSetup + ] + outputRow paddedHeaders + outputRow $ (map . map) (const '-') paddedHeaders + forM_ rowsHuman $ \row -> outputRow $ zipWith pad pads row + where + ghcideArgs dir = + [ "--lsp", + "--test", + "--cwd", + dir + ] + allArgs name dir = + ghcideArgs dir + ++ concat + [ [ "+RTS" + , "-l" + , "-ol" ++ (dir map (\c -> if c == ' ' then '-' else c) name <.> "eventlog") + , "-RTS" + ] + | Just dir <- [otMemoryProfiling ?config] + ] + ++ ghcideOptions ?config + ++ concat + [ ["--shake-profiling", path] | Just path <- [shakeProfiling ?config] + ] + ++ ["--ot-memory-profiling" | Just _ <- [otMemoryProfiling ?config]] + lspTestCaps = + fullLatestClientCaps + & (L.window . _Just) .~ WindowClientCapabilities (Just True) Nothing Nothing + & (L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport . _Just) .~ (ClientCodeActionResolveOptions ["edit"]) + & (L.textDocument . 
_Just . L.codeAction . _Just . L.dataSupport . _Just) .~ True + +showMs :: Seconds -> String +showMs = printf "%.2f" + +data BenchRun = BenchRun + { startup :: !Seconds, + runSetup :: !Seconds, + runExperiment :: !Seconds, + userWaits :: !Seconds, + delayedWork :: !Seconds, + firstResponse :: !Seconds, + firstResponseDelayed :: !Seconds, + rulesBuilt :: !Int, + rulesChanged :: !Int, + rulesVisited :: !Int, + rulesTotal :: !Int, + edgesTotal :: !Int, + rebuildsTotal :: !Int, + success :: !Bool + } + +badRun :: BenchRun +badRun = BenchRun 0 0 0 0 0 0 0 0 0 0 0 0 0 False + +waitForProgressStart :: Session () +waitForProgressStart = void $ do + skipManyTill anyMessage $ satisfy $ \case + FromServerMess SMethod_WindowWorkDoneProgressCreate _ -> True + _ -> False + +-- | Wait for all progress to be done +-- Needs at least one progress done notification to return +waitForProgressDone :: Session () +waitForProgressDone = loop + where + loop = do + ~() <- skipManyTill anyMessage $ satisfyMaybe $ \case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams _ v)) | is _workDoneProgressEnd v -> Just () + _ -> Nothing + done <- null <$> getIncompleteProgressSessions + unless done loop + +-- | Wait for the build queue to be empty +waitForBuildQueue :: Session Seconds +waitForBuildQueue = do + let m = SMethod_CustomMethod (Proxy @"test") + waitId <- sendRequest m (toJSON WaitForShakeQueue) + (td, resp) <- duration $ skipManyTill anyMessage $ responseForId m waitId + case resp of + TResponseMessage{_result=Right Null} -> return td + -- assume a ghcide binary lacking the WaitForShakeQueue method + _ -> return 0 + +runBench :: + HasConfig => + (Session BenchRun -> IO BenchRun) -> + Bench -> + IO BenchRun +runBench runSess Bench{..} = handleAny (\e -> print e >> return badRun) + $ runSess + $ do + (startup, docs) <- duration $ do + (d, docs) <- duration $ setupDocumentContents ?config + output $ "Setting up document contents took " <> showDuration d + -- wait again, as the progress is restarted once while loading the cradle + -- make an edit, to ensure this doesn't block + let DocumentPositions{..} = head docs + changeDoc doc [charEdit stringLiteralP] + waitForProgressDone + return docs + + liftIO $ output $ "Running " <> name <> " benchmark" + (runSetup, ()) <- duration $ benchSetup docs + let loop' (Just timeForFirstResponse) !userWaits !delayedWork 0 = return $ Just (userWaits, delayedWork, timeForFirstResponse) + loop' timeForFirstResponse !userWaits !delayedWork n = do + (t, res) <- duration $ experiment docs + if not res + then return Nothing + else do + output (showDuration t) + -- Wait for the delayed actions to finish + td <- waitForBuildQueue + loop' (timeForFirstResponse <|> Just (t,td)) (userWaits+t) (delayedWork+td) (n -1) + loop = loop' Nothing + + (runExperiment, result) <- duration $ loop 0 0 samples + let success = isJust result + (userWaits, delayedWork, (firstResponse, firstResponseDelayed)) = fromMaybe (0,0,(0,0)) result + + rulesTotal <- length <$> getStoredKeys + rulesBuilt <- either (const 0) length <$> getBuildKeysBuilt + rulesChanged <- either (const 0) length <$> getBuildKeysChanged + rulesVisited <- either (const 0) length <$> getBuildKeysVisited + edgesTotal <- fromRight 0 <$> getBuildEdgesCount + rebuildsTotal <- fromRight 0 <$> getRebuildsCount + + return BenchRun {..} + +data SetupResult = SetupResult { + runBenchmarks :: [Bench] -> IO (), + -- | Path to the setup benchmark example + benchDir :: FilePath, + cleanUp :: IO () +} + +callCommandLogging :: 
HasConfig => String -> IO () +callCommandLogging cmd = do + output cmd + callCommand cmd + +simpleCabalCradleContent :: String +simpleCabalCradleContent = "cradle:\n cabal:\n" + +simpleStackCradleContent :: String +simpleStackCradleContent = "cradle:\n stack:\n" + +-- | Setup the benchmark +-- we need to create a hie.yaml file for the examples +-- or the hie.yaml file would be searched in the parent directories recursively +-- implicit-hie is error prone for the example test `lsp-types-2.1.1.0` +-- we are using the simpleCabalCradleContent for the hie.yaml file instead. +-- it works if we have cabal > 3.2. +setup :: HasConfig => IO SetupResult +setup = do + benchDir <- case exampleDetails(example ?config) of + ExamplePath examplePath -> do + let hieYamlPath = examplePath "hie.yaml" + alreadyExists <- doesFileExist hieYamlPath + unless alreadyExists $ writeFile hieYamlPath simpleCabalCradleContent + return examplePath + ExampleScript examplePath' scriptArgs -> do + let exampleDir = examplesPath exampleName (example ?config) + alreadySetup <- doesDirectoryExist exampleDir + unless alreadySetup $ do + createDirectoryIfMissing True exampleDir + examplePath <- makeAbsolute examplePath' + cmd_ (Cwd exampleDir) examplePath scriptArgs + let hieYamlPath = exampleDir "hie.yaml" + alreadyExists <- doesFileExist hieYamlPath + unless alreadyExists $ writeFile hieYamlPath simpleCabalCradleContent + + return exampleDir + ExampleHackage ExamplePackage{..} -> do + let path = examplesPath package + package = packageName <> "-" <> showVersion packageVersion + hieYamlPath = path "hie.yaml" + alreadySetup <- doesDirectoryExist path + unless alreadySetup $ + case buildTool ?config of + Cabal -> do + let cabalVerbosity = "-v" ++ show (fromEnum (verbose ?config)) + callCommandLogging $ "cabal get " <> cabalVerbosity <> " " <> package <> " -d " <> examplesPath + let hieYamlPath = path "hie.yaml" + writeFile hieYamlPath simpleCabalCradleContent + -- Need this in case there is a parent cabal.project somewhere + writeFile + (path "cabal.project") + "packages: ." + writeFile + (path "cabal.project.local") + "" + Stack -> do + let stackVerbosity = case verbosity ?config of + Quiet -> "--silent" + Normal -> "" + All -> "--verbose" + callCommandLogging $ "stack " <> stackVerbosity <> " unpack " <> package <> " --to " <> examplesPath + -- Generate the stack descriptor to match the one used to build ghcide + stack_yaml <- fromMaybe "stack.yaml" <$> getEnv "STACK_YAML" + stack_yaml_lines <- lines <$> readFile stack_yaml + writeFile (path stack_yaml) + (unlines $ + "packages: [.]" : + [ l + | l <- stack_yaml_lines + , any (`isPrefixOf` l) + ["resolver" + ,"allow-newer" + ,"compiler"] + ] + ) + writeFile hieYamlPath simpleStackCradleContent + return path + + whenJust (shakeProfiling ?config) $ createDirectoryIfMissing True + + let cleanUp = case exampleDetails (example ?config) of + ExampleHackage _ -> removeDirectoryRecursive examplesPath + ExampleScript _ _ -> removeDirectoryRecursive examplesPath + ExamplePath _ -> return () + + runBenchmarks = runBenchmarksFun benchDir + + return SetupResult{..} + +setupDocumentContents :: Config -> Session [DocumentPositions] +setupDocumentContents config = + forM (exampleModules $ example config) $ \m -> do + doc <- openDoc m "haskell" + + -- Setup the special positions used by the experiments + lastLine <- fromIntegral . length . 
T.lines <$> documentContents doc + changeDoc doc [TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position lastLine 0) (Position lastLine 0) + , _rangeLength = Nothing + , _text = T.unlines [ "_hygienic = \"hygienic\"" ] + } + ] + let + -- Points to a string in the target file, + -- convenient for hygienic edits + stringLiteralP = Position lastLine 15 + + -- Find an identifier defined in another file in this project + symbols <- getDocumentSymbols doc + let endOfImports = case symbols of + Right symbols | Just x <- findEndOfImports symbols -> x + _ -> error $ "symbols: " <> show symbols + contents <- documentContents doc + identifierP <- searchSymbol doc contents endOfImports + return $ DocumentPositions{..} + +findEndOfImports :: [DocumentSymbol] -> Maybe Position +findEndOfImports (DocumentSymbol{_kind = SymbolKind_Module, _name = "imports", _range} : _) = + Just $ Position (succ $ _line $ _end _range) 4 +findEndOfImports [DocumentSymbol{_kind = SymbolKind_File, _children = Just cc}] = + findEndOfImports cc +findEndOfImports (DocumentSymbol{_range} : _) = + Just $ _range ^. L.start +findEndOfImports _ = Nothing + +-------------------------------------------------------------------------------------------- + +pad :: Int -> String -> String +pad n [] = replicate n ' ' +pad 0 _ = error "pad" +pad n (x:xx) = x : pad (n-1) xx + +-- | Search for a position where: +-- - get definition works and returns a uri other than this file +-- - get completions returns a non empty list +searchSymbol :: TextDocumentIdentifier -> T.Text -> Position -> Session (Maybe Position) +searchSymbol doc@TextDocumentIdentifier{_uri} fileContents pos = do + -- this search is expensive, so we cache the result on disk + let cachedPath = fromJust (uriToFilePath _uri) <.> "identifierPosition" + cachedRes <- liftIO $ try @_ @IOException $ A.decode . BSL.fromStrict <$> BS.readFile cachedPath + case cachedRes of + Left _ -> do + result <- loop pos + liftIO $ BS.writeFile cachedPath $ BSL.toStrict $ A.encode result + return result + Right res -> + return res + where + loop pos + | (fromIntegral $ _line pos) >= lll = + return Nothing + | (fromIntegral $ _character pos) >= lengthOfLine (fromIntegral $ _line pos) = + loop (nextLine pos) + | otherwise = do + checks <- checkDefinitions pos &&^ checkCompletions pos + if checks + then return $ Just pos + else loop (nextIdent pos) + + nextIdent p = p{_character = _character p + 2} + nextLine p = Position (_line p + 1) 4 + + lengthOfLine n = if n >= lll then 0 else T.length (ll !! n) + ll = T.lines fileContents + lll = length ll + + checkDefinitions pos = do + defs <- getDefinitions doc pos + case defs of + (InL (Definition (InR [Location uri _]))) -> return $ uri /= _uri + _ -> return False + checkCompletions pos = + not . 
null <$> getCompletions doc pos + + +getBuildKeysBuilt :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) [T.Text]) +getBuildKeysBuilt = tryCallTestPlugin GetBuildKeysBuilt + +getBuildKeysVisited :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) [T.Text]) +getBuildKeysVisited = tryCallTestPlugin GetBuildKeysVisited + +getBuildKeysChanged :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) [T.Text]) +getBuildKeysChanged = tryCallTestPlugin GetBuildKeysChanged + +getBuildEdgesCount :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) Int) +getBuildEdgesCount = tryCallTestPlugin GetBuildEdgesCount + +getRebuildsCount :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) Int) +getRebuildsCount = tryCallTestPlugin GetRebuildsCount + +getStoredKeys :: Session [Text] +getStoredKeys = callTestPlugin GetStoredKeys + +tryCallTestPlugin :: (A.FromJSON b) => TestRequest -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) b) +tryCallTestPlugin cmd = do + let cm = SMethod_CustomMethod (Proxy @"test") + waitId <- sendRequest cm (A.toJSON cmd) + TResponseMessage{_result} <- skipManyTill anyMessage $ responseForId cm waitId + return $ case _result of + Left e -> Left e + Right json -> case A.fromJSON json of + A.Success a -> Right a + A.Error e -> error e + +callTestPlugin :: (A.FromJSON b) => TestRequest -> Session b +callTestPlugin cmd = do + res <- tryCallTestPlugin cmd + case res of + Left (TResponseError t err _) -> error $ show t <> ": " <> T.unpack err + Right a -> pure a diff --git a/ghcide-bench/src/Experiments/Types.hs b/ghcide-bench/src/Experiments/Types.hs new file mode 100644 index 0000000000..db33744912 --- /dev/null +++ b/ghcide-bench/src/Experiments/Types.hs @@ -0,0 +1,102 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +module Experiments.Types (module Experiments.Types ) where + +import Control.DeepSeq +import Data.Aeson +import Data.Binary (Binary) +import Data.Hashable (Hashable) +import Data.Maybe (fromMaybe) +import Data.Version +import GHC.Generics +import Numeric.Natural + +data CabalStack = Cabal | Stack + deriving (Eq, Show) + +data Verbosity = Quiet | Normal | All + deriving (Eq, Show) +data Config = Config + { verbosity :: !Verbosity, + -- For some reason, the Shake profile files are truncated and won't load + shakeProfiling :: !(Maybe FilePath), + otMemoryProfiling :: !(Maybe FilePath), + outputCSV :: !FilePath, + buildTool :: !CabalStack, + ghcideOptions :: ![String], + matches :: ![String], + repetitions :: Maybe Natural, + ghcide :: FilePath, + timeoutLsp :: Int, + example :: Example, + lspConfig :: Bool + } + deriving (Eq, Show) + +data ExamplePackage = ExamplePackage {packageName :: !String, packageVersion :: !Version} + deriving (Eq, Generic, Show) + deriving anyclass (Binary, Hashable, NFData) + +data Example = Example + { exampleName :: !String + , exampleDetails :: ExampleDetails + , exampleModules :: [FilePath] + , exampleExtraArgs :: [String]} + deriving (Eq, Generic, Show) + deriving anyclass (Binary, Hashable, NFData) + +data ExampleDetails + = ExamplePath FilePath -- ^ directory where the package is located + | ExampleHackage ExamplePackage -- ^ package from hackage + | ExampleScript FilePath -- ^ location of the script we are running + [String] -- ^ extra arguments for the script + deriving (Eq, Generic, Show) + deriving anyclass 
(Binary, Hashable, NFData) + +instance FromJSON Example where + parseJSON = withObject "example" $ \x -> do + exampleName <- x .: "name" + exampleModules <- x .: "modules" + exampleExtraArgs <- fromMaybe [] <$> x .:? "extra-args" + + path <- x .:? "path" + case path of + Just examplePath -> do + script <- fromMaybe False <$> x.:? "script" + args <- fromMaybe [] <$> x .:? "script-args" + let exampleDetails + | script = ExampleScript examplePath args + | otherwise = ExamplePath examplePath + return Example{..} + Nothing -> do + packageName <- x .: "package" + packageVersion <- x .: "version" + let exampleDetails = ExampleHackage ExamplePackage{..} + return Example{..} + +exampleToOptions :: Example -> [String] -> [String] +exampleToOptions Example{exampleDetails = ExampleHackage ExamplePackage{..}, ..} extraArgs = + ["--example-package-name", packageName + ,"--example-package-version", showVersion packageVersion + ,"--example-name", exampleName + ] ++ + ["--example-module=" <> m | m <- exampleModules + ] ++ + ["--ghcide-options=" <> o | o <- exampleExtraArgs ++ extraArgs] +exampleToOptions Example{exampleDetails = ExamplePath examplePath, ..} extraArgs = + ["--example-path", examplePath + ,"--example-name", exampleName + ] ++ + ["--example-module=" <> m | m <- exampleModules + ] ++ + ["--ghcide-options=" <> o | o <- exampleExtraArgs ++ extraArgs] +exampleToOptions Example{exampleDetails = ExampleScript examplePath exampleArgs, ..} extraArgs = + ["--example-script", examplePath + ,"--example-name", exampleName + ] ++ + ["--example-script-args=" <> o | o <- exampleArgs + ] ++ + ["--example-module=" <> m | m <- exampleModules + ] ++ + ["--ghcide-options=" <> o | o <- exampleExtraArgs ++ extraArgs] diff --git a/ghcide-bench/test/Main.hs b/ghcide-bench/test/Main.hs new file mode 100644 index 0000000000..a58016ab2b --- /dev/null +++ b/ghcide-bench/test/Main.hs @@ -0,0 +1,47 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE ImplicitParams #-} +{-# OPTIONS_GHC -Wno-deprecations -Wno-unticked-promoted-constructors #-} + +module Main (main) where + +import Data.List.Extra +import qualified Experiments as Bench +import Language.LSP.Test +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.Ingredients.Rerun (defaultMainWithRerun) + +main :: IO () +main = defaultMainWithRerun benchmarkTests + +benchmarkTests :: TestTree +benchmarkTests = + let ?config = Bench.defConfig + { Bench.verbosity = Bench.Quiet + , Bench.repetitions = Just 3 + , Bench.buildTool = Bench.Cabal + } in + withResource Bench.setup Bench.cleanUp $ \getResource -> testGroup "benchmark experiments" + [ testCase (Bench.name e) $ do + Bench.SetupResult{Bench.benchDir} <- getResource + res <- Bench.runBench (runInDir benchDir) e + assertBool "did not successfully complete 5 repetitions" $ Bench.success res + | e <- Bench.experiments + , Bench.name e /= "edit" -- the edit experiment does not ever fail + , Bench.name e /= "hole fit suggestions" -- is too slow! 
+ -- the cradle experiments are way too slow + , not ("cradle" `isInfixOf` Bench.name e) + ] + +runInDir :: FilePath -> Session a -> IO a +runInDir dir = runSessionWithConfig defaultConfig cmd fullLatestClientCaps dir + where + -- TODO use HLS instead of ghcide + cmd = "ghcide --lsp --test --verbose -j2 --cwd " <> dir diff --git a/ghcide-test/LICENSE b/ghcide-test/LICENSE new file mode 100644 index 0000000000..d1f5c9033f --- /dev/null +++ b/ghcide-test/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Digital Asset (Switzerland) GmbH and/or its affiliates + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ghcide-test/data/TH/THA.hs b/ghcide-test/data/TH/THA.hs new file mode 100644 index 0000000000..ec6cf8ef39 --- /dev/null +++ b/ghcide-test/data/TH/THA.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module THA where +import Language.Haskell.TH + +th_a :: DecsQ +th_a = [d| a = () |] diff --git a/ghcide-test/data/TH/THB.hs b/ghcide-test/data/TH/THB.hs new file mode 100644 index 0000000000..8d50b01eac --- /dev/null +++ b/ghcide-test/data/TH/THB.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TemplateHaskell #-} +module THB where +import THA + +$th_a diff --git a/ghcide-test/data/TH/THC.hs b/ghcide-test/data/TH/THC.hs new file mode 100644 index 0000000000..79a02ef601 --- /dev/null +++ b/ghcide-test/data/TH/THC.hs @@ -0,0 +1,5 @@ +module THC where +import THB + +c ::() +c = a diff --git a/ghcide-test/data/TH/hie.yaml b/ghcide-test/data/TH/hie.yaml new file mode 100644 index 0000000000..a65c7b79c4 --- /dev/null +++ b/ghcide-test/data/TH/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-Wmissing-signatures", "-package template-haskell", "THA", "THB", "THC"]}} diff --git a/ghcide-test/data/THCoreFile/THA.hs b/ghcide-test/data/THCoreFile/THA.hs new file mode 100644 index 0000000000..93a86c8dee --- /dev/null +++ b/ghcide-test/data/THCoreFile/THA.hs @@ -0,0 +1,23 @@ +{-# LANGUAGE TemplateHaskell #-} +module THA where +import Language.Haskell.TH +import Control.Monad (when) + +th_a :: DecsQ +th_a = do + when (show (StrictConstructor1 123 True 4567) /= "StrictConstructor1 123 True 4567") $ error "TH validation error" + when (show (StrictConstructor2 123 True 4567) /= "StrictConstructor2 123 True 4567") $ error "TH validation error" + when (show (StrictConstructor3 123 True 4567) /= "StrictConstructor3 123 True 4567") $ error "TH validation error" + when (show (classMethod 'z') /= "True") $ error "TH validation error" + when (show (classMethod 'a') /= "False") $ error "TH validation error" + [d| a = () |] + +data StrictType1 = StrictConstructor1 !Int !Bool Int deriving Show +data StrictType2 = StrictConstructor2 !Int !Bool !Int deriving Show +data StrictType3 = StrictConstructor3 !Int !Bool !Int deriving Show + +class SingleMethodClass a where + classMethod :: a -> Bool + +instance SingleMethodClass Char where + classMethod = (== 'z') diff --git a/ghcide-test/data/THCoreFile/THB.hs 
b/ghcide-test/data/THCoreFile/THB.hs new file mode 100644 index 0000000000..672248d351 --- /dev/null +++ b/ghcide-test/data/THCoreFile/THB.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE TemplateHaskell #-} +module THB where +import THA +import Control.Monad (when) + +$(do + -- Need to verify in both defining module and usage module" + when (show (StrictConstructor1 123 True 4567) /= "StrictConstructor1 123 True 4567") $ error "TH validation error" + when (show (StrictConstructor2 123 True 4567) /= "StrictConstructor2 123 True 4567") $ error "TH validation error" + when (show (StrictConstructor3 123 True 4567) /= "StrictConstructor3 123 True 4567") $ error "TH validation error" + when (show (classMethod 'z') /= "True") $ error "TH validation error" + when (show (classMethod 'a') /= "False") $ error "TH validation error" + th_a) diff --git a/ghcide-test/data/THCoreFile/THC.hs b/ghcide-test/data/THCoreFile/THC.hs new file mode 100644 index 0000000000..79a02ef601 --- /dev/null +++ b/ghcide-test/data/THCoreFile/THC.hs @@ -0,0 +1,5 @@ +module THC where +import THB + +c ::() +c = a diff --git a/ghcide-test/data/THCoreFile/hie.yaml b/ghcide-test/data/THCoreFile/hie.yaml new file mode 100644 index 0000000000..36872d3531 --- /dev/null +++ b/ghcide-test/data/THCoreFile/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-package template-haskell", "THA", "THB", "THC"]}} diff --git a/ghcide-test/data/THLoading/A.hs b/ghcide-test/data/THLoading/A.hs new file mode 100644 index 0000000000..3559bfc26c --- /dev/null +++ b/ghcide-test/data/THLoading/A.hs @@ -0,0 +1,5 @@ +module A where +import B (bar) + +foo :: () +foo = bar diff --git a/ghcide-test/data/THLoading/B.hs b/ghcide-test/data/THLoading/B.hs new file mode 100644 index 0000000000..a18753c265 --- /dev/null +++ b/ghcide-test/data/THLoading/B.hs @@ -0,0 +1,4 @@ +module B where + +bar :: () +bar = () diff --git a/ghcide-test/data/THLoading/THA.hs b/ghcide-test/data/THLoading/THA.hs new file mode 100644 index 0000000000..d74bdd697e --- /dev/null +++ b/ghcide-test/data/THLoading/THA.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TemplateHaskell #-} +module THA where +import Language.Haskell.TH +import A (foo) + +th_a :: DecsQ +th_a = [d| a = foo |] diff --git a/ghcide-test/data/THLoading/THB.hs b/ghcide-test/data/THLoading/THB.hs new file mode 100644 index 0000000000..8d50b01eac --- /dev/null +++ b/ghcide-test/data/THLoading/THB.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TemplateHaskell #-} +module THB where +import THA + +$th_a diff --git a/ghcide-test/data/THLoading/hie.yaml b/ghcide-test/data/THLoading/hie.yaml new file mode 100644 index 0000000000..5d67e9708c --- /dev/null +++ b/ghcide-test/data/THLoading/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-package template-haskell", "THA", "THB", "A", "B"]}} diff --git a/ghcide-test/data/THNewName/A.hs b/ghcide-test/data/THNewName/A.hs new file mode 100644 index 0000000000..81984d2dff --- /dev/null +++ b/ghcide-test/data/THNewName/A.hs @@ -0,0 +1,6 @@ +module A (template) where + +import Language.Haskell.TH + +template :: DecsQ +template = (\consA -> [DataD [] (mkName "A") [] Nothing [NormalC consA []] []]) <$> newName "A" diff --git a/ghcide-test/data/THNewName/B.hs b/ghcide-test/data/THNewName/B.hs new file mode 100644 index 0000000000..8f65997d60 --- /dev/null +++ b/ghcide-test/data/THNewName/B.hs @@ -0,0 +1,5 @@ +module B(A(A)) where + +import A + +template diff --git a/ghcide-test/data/THNewName/C.hs b/ghcide-test/data/THNewName/C.hs new file mode 100644 index 0000000000..89a7e1eac9 --- /dev/null +++ 
b/ghcide-test/data/THNewName/C.hs @@ -0,0 +1,4 @@ +module C where +import B + +a = A diff --git a/ghcide-test/data/THNewName/hie.yaml b/ghcide-test/data/THNewName/hie.yaml new file mode 100644 index 0000000000..8853fd51ea --- /dev/null +++ b/ghcide-test/data/THNewName/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-XTemplateHaskell","-Wmissing-signatures","A", "B", "C"]}} diff --git a/ghcide-test/data/THUnboxed/THA.hs b/ghcide-test/data/THUnboxed/THA.hs new file mode 100644 index 0000000000..be07eb4b86 --- /dev/null +++ b/ghcide-test/data/THUnboxed/THA.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE TemplateHaskell, UnboxedTuples, BangPatterns #-} +module THA where +import Language.Haskell.TH + +data Foo = Foo !Int !Char !String + deriving Show + +newtype Bar = Bar Int + deriving Show + + +f :: Int -> (# Int, Int, Foo, Bar#) +f x = (# x , x+1 , Foo x 'a' "test", Bar 1 #) + +th_a :: DecsQ +th_a = case f 1 of (# a , b, Foo _ _ _, Bar !_ #) -> [d| a = () |] diff --git a/ghcide-test/data/THUnboxed/THB.hs b/ghcide-test/data/THUnboxed/THB.hs new file mode 100644 index 0000000000..8d50b01eac --- /dev/null +++ b/ghcide-test/data/THUnboxed/THB.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TemplateHaskell #-} +module THB where +import THA + +$th_a diff --git a/ghcide-test/data/THUnboxed/THC.hs b/ghcide-test/data/THUnboxed/THC.hs new file mode 100644 index 0000000000..79a02ef601 --- /dev/null +++ b/ghcide-test/data/THUnboxed/THC.hs @@ -0,0 +1,5 @@ +module THC where +import THB + +c ::() +c = a diff --git a/ghcide-test/data/THUnboxed/hie.yaml b/ghcide-test/data/THUnboxed/hie.yaml new file mode 100644 index 0000000000..a65c7b79c4 --- /dev/null +++ b/ghcide-test/data/THUnboxed/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-Wmissing-signatures", "-package template-haskell", "THA", "THB", "THC"]}} diff --git a/ghcide-test/data/boot/A.hs b/ghcide-test/data/boot/A.hs new file mode 100644 index 0000000000..9f2c0d73ac --- /dev/null +++ b/ghcide-test/data/boot/A.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE TemplateHaskell #-} +module A where + +import B( TB(..) ) + +newtype TA = MkTA Int + deriving Eq + +f :: TB -> TA +f (MkTB x) = MkTA x diff --git a/ghcide-test/data/boot/A.hs-boot b/ghcide-test/data/boot/A.hs-boot new file mode 100644 index 0000000000..aa35288eda --- /dev/null +++ b/ghcide-test/data/boot/A.hs-boot @@ -0,0 +1,3 @@ +module A where +newtype TA = MkTA Int +instance Eq TA diff --git a/ghcide-test/data/boot/B.hs b/ghcide-test/data/boot/B.hs new file mode 100644 index 0000000000..8bf96dcbde --- /dev/null +++ b/ghcide-test/data/boot/B.hs @@ -0,0 +1,7 @@ +module B(TA(..), TB(..)) where +import {-# SOURCE #-} A( TA(..) 
) + +data TB = MkTB !Int + +g :: TA -> TB +g (MkTA x) = MkTB x diff --git a/ghcide-test/data/boot/C.hs b/ghcide-test/data/boot/C.hs new file mode 100644 index 0000000000..f90e960432 --- /dev/null +++ b/ghcide-test/data/boot/C.hs @@ -0,0 +1,8 @@ +module C where + +import B +import A hiding (MkTA(..)) + +x = MkTA +y = MkTB +z = f diff --git a/ghcide-test/data/boot/hie.yaml b/ghcide-test/data/boot/hie.yaml new file mode 100644 index 0000000000..166c61ef84 --- /dev/null +++ b/ghcide-test/data/boot/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["A.hs", "A.hs-boot", "B.hs", "C.hs"]}} diff --git a/ghcide-test/data/boot2/A.hs b/ghcide-test/data/boot2/A.hs new file mode 100644 index 0000000000..3b8b80d6ca --- /dev/null +++ b/ghcide-test/data/boot2/A.hs @@ -0,0 +1,12 @@ +module A where + +-- E source imports B +-- In interface file see source module dependencies: B {-# SOURCE #-} +import E +-- C imports B +-- In interface file see source module dependencies: B +import C + +-- Instance for B only available from B.hi not B.hi-boot, so tests we load +-- that. +main = print B diff --git a/ghcide-test/data/boot2/B.hs b/ghcide-test/data/boot2/B.hs new file mode 100644 index 0000000000..e8458aa739 --- /dev/null +++ b/ghcide-test/data/boot2/B.hs @@ -0,0 +1,8 @@ +module B where + +import D + +data B = B + +instance Show B where + show B = "B" diff --git a/ghcide-test/data/boot2/B.hs-boot b/ghcide-test/data/boot2/B.hs-boot new file mode 100644 index 0000000000..64e74c695a --- /dev/null +++ b/ghcide-test/data/boot2/B.hs-boot @@ -0,0 +1,3 @@ +module B where + +data B = B diff --git a/ghcide-test/data/boot2/C.hs b/ghcide-test/data/boot2/C.hs new file mode 100644 index 0000000000..158757ed80 --- /dev/null +++ b/ghcide-test/data/boot2/C.hs @@ -0,0 +1,3 @@ +module C where + +import B diff --git a/ghcide-test/data/boot2/D.hs b/ghcide-test/data/boot2/D.hs new file mode 100644 index 0000000000..01b53223f9 --- /dev/null +++ b/ghcide-test/data/boot2/D.hs @@ -0,0 +1,3 @@ +module D where + +import {-# SOURCE #-} B diff --git a/ghcide-test/data/boot2/E.hs b/ghcide-test/data/boot2/E.hs new file mode 100644 index 0000000000..a5f78cab2a --- /dev/null +++ b/ghcide-test/data/boot2/E.hs @@ -0,0 +1,3 @@ +module E(B(B)) where + +import {-# SOURCE #-} B diff --git a/ghcide-test/data/boot2/hie.yaml b/ghcide-test/data/boot2/hie.yaml new file mode 100644 index 0000000000..be8dca1601 --- /dev/null +++ b/ghcide-test/data/boot2/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["A.hs", "B.hs-boot", "B.hs", "C.hs", "D.hs", "E.hs"]}} diff --git a/ghcide-test/data/cabal-exe/a/a.cabal b/ghcide-test/data/cabal-exe/a/a.cabal new file mode 100644 index 0000000000..093890733b --- /dev/null +++ b/ghcide-test/data/cabal-exe/a/a.cabal @@ -0,0 +1,14 @@ +cabal-version: 2.2 + +name: a +version: 0.1.0.0 +author: Fendor +maintainer: power.walross@gmail.com +build-type: Simple + +executable a + main-is: Main.hs + hs-source-dirs: src + ghc-options: -Wall + build-depends: base + default-language: Haskell2010 diff --git a/ghcide-test/data/cabal-exe/a/src/Main.hs b/ghcide-test/data/cabal-exe/a/src/Main.hs new file mode 100644 index 0000000000..81d0cfb17a --- /dev/null +++ b/ghcide-test/data/cabal-exe/a/src/Main.hs @@ -0,0 +1,3 @@ +module Main where + +main = putStrLn "Hello, Haskell!" 
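The boot and boot2 fixtures above exercise GHC's .hs-boot support for breaking module import cycles: B source-imports A's boot interface via {-# SOURCE #-} while A imports B normally, and boot2 additionally checks that the Show B instance is picked up from the full B.hi rather than the stripped B.hi-boot. The sketch below shows the minimal shape of such a cycle; it is illustrative only and not part of the fixture set, though the names mirror the boot fixture above.

    -- A.hs-boot: declares just enough of A for B to compile against
    module A where
    newtype TA = MkTA Int

    -- B.hs: depends only on A's boot interface
    module B (TB (..)) where
    import {-# SOURCE #-} A (TA (..))
    data TB = MkTB Int

    -- A.hs: the real module imports B freely, closing the cycle
    module A where
    import B (TB (..))
    newtype TA = MkTA Int
    f :: TB -> TA
    f (MkTB x) = MkTA x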
diff --git a/ghcide-test/data/cabal-exe/cabal.project b/ghcide-test/data/cabal-exe/cabal.project new file mode 100644 index 0000000000..edcac420d9 --- /dev/null +++ b/ghcide-test/data/cabal-exe/cabal.project @@ -0,0 +1 @@ +packages: ./a \ No newline at end of file diff --git a/ghcide-test/data/cabal-exe/hie.yaml b/ghcide-test/data/cabal-exe/hie.yaml new file mode 100644 index 0000000000..5c7ab11641 --- /dev/null +++ b/ghcide-test/data/cabal-exe/hie.yaml @@ -0,0 +1,3 @@ +cradle: + cabal: + component: "exe:a" \ No newline at end of file diff --git a/ghcide-test/data/hover/Bar.hs b/ghcide-test/data/hover/Bar.hs new file mode 100644 index 0000000000..f9fde2a7cc --- /dev/null +++ b/ghcide-test/data/hover/Bar.hs @@ -0,0 +1,4 @@ +module Bar (Bar(..)) where + +-- | Bar Haddock +data Bar = Bar diff --git a/ghcide-test/data/hover/Foo.hs b/ghcide-test/data/hover/Foo.hs new file mode 100644 index 0000000000..489a6ccd6b --- /dev/null +++ b/ghcide-test/data/hover/Foo.hs @@ -0,0 +1,6 @@ +module Foo (Bar, foo) where + +import Bar + +-- | foo Haddock +foo = Bar diff --git a/ghcide-test/data/hover/GotoHover.hs b/ghcide-test/data/hover/GotoHover.hs new file mode 100644 index 0000000000..6ff3eeffed --- /dev/null +++ b/ghcide-test/data/hover/GotoHover.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE OverloadedStrings, TemplateHaskell #-} +{- HLINT ignore -} +module GotoHover ( module GotoHover) where +import Data.Text (Text, pack) +import Foo (Bar, foo) + + +data TypeConstructor = DataConstructor + { fff :: Text + , ggg :: Int } +aaa :: TypeConstructor +aaa = DataConstructor + { fff = "dfgy" + , ggg = 832 + } +bbb :: TypeConstructor +bbb = DataConstructor "mjgp" 2994 +ccc :: (Text, Int) +ccc = (fff bbb, ggg aaa) +ddd :: Num a => a -> a -> a +ddd vv ww = vv +! ww +a +! b = a - b +hhh (Just a) (><) = a >< a +iii a b = a `b` a +jjj s = pack $ s <> s +class MyClass a where + method :: a -> Int +instance MyClass Int where + method = succ +kkk :: MyClass a => Int -> a -> Int +kkk n c = n + method c + +doBind :: Maybe () +doBind = do unwrapped <- Just () + return unwrapped + +listCompBind :: [Char] +listCompBind = [ succ c | c <- "ptfx" ] + +multipleClause :: Bool -> Char +multipleClause True = 't' +multipleClause False = 'f' + +-- | Recognizable docs: kpqz +documented :: Monad m => Either Int (m a) +documented = Left 7518 + +listOfInt = [ 8391 :: Int, 6268 ] + +outer :: Bool +outer = undefined inner where + + inner :: Char + inner = undefined + +imported :: Bar +imported = foo + +aa2 :: Bool +aa2 = $(id [| True |]) + +hole :: Int +hole = _ + +hole2 :: a -> Maybe a +hole2 = _ + +-- A comment above a type defnition with a deriving clause +data Example = Example + deriving (Eq) diff --git a/ghcide-test/data/hover/GotoImplementation.hs b/ghcide-test/data/hover/GotoImplementation.hs new file mode 100644 index 0000000000..12038857c6 --- /dev/null +++ b/ghcide-test/data/hover/GotoImplementation.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE GADTs, GeneralisedNewtypeDeriving, DerivingStrategies #-} +{-# OPTIONS_GHC -Wno-missing-methods #-} +module GotoImplementation where + +data AAA = AAA +instance Num AAA where +aaa :: Num x => x +aaa = 1 +aaa1 :: AAA = aaa + +class BBB a where + bbb :: a -> a +instance BBB AAA where + bbb = const AAA +bbbb :: AAA +bbbb = bbb AAA + +ccc :: Show a => a -> String +ccc d = show d + +newtype Q k = Q k + deriving newtype (Eq, Show) +ddd :: (Show k, Eq k) => k -> String +ddd k = if Q k == Q k then show k else "" +ddd1 = ddd (Q 0) + +data GadtTest a where + GadtTest :: Int -> GadtTest Int +printUsingEvidence :: Show a => 
GadtTest a -> String +printUsingEvidence (GadtTest i) = show i diff --git a/ghcide-test/data/hover/RecordDotSyntax.hs b/ghcide-test/data/hover/RecordDotSyntax.hs new file mode 100644 index 0000000000..3680d08a3c --- /dev/null +++ b/ghcide-test/data/hover/RecordDotSyntax.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE OverloadedRecordDot, DuplicateRecordFields, NoFieldSelectors #-} + +module RecordDotSyntax ( module RecordDotSyntax) where + +import qualified Data.Maybe as M + +data MyRecord = MyRecord + { a :: String + , b :: Integer + , c :: MyChild + } deriving (Eq, Show) + +newtype MyChild = MyChild + { z :: String + } deriving (Eq, Show) + +x = MyRecord { a = "Hello", b = 12, c = MyChild { z = "there" } } +y = x.a ++ show x.b ++ x.c.z diff --git a/ghcide-test/data/hover/hie.yaml b/ghcide-test/data/hover/hie.yaml new file mode 100644 index 0000000000..de7cc991cc --- /dev/null +++ b/ghcide-test/data/hover/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["Foo", "Bar", "GotoHover", "RecordDotSyntax", "GotoImplementation"]}} diff --git a/ghcide-test/data/ignore-fatal/IgnoreFatal.hs b/ghcide-test/data/ignore-fatal/IgnoreFatal.hs new file mode 100644 index 0000000000..b73787f166 --- /dev/null +++ b/ghcide-test/data/ignore-fatal/IgnoreFatal.hs @@ -0,0 +1,8 @@ +-- "missing signature" is declared a fatal warning in the cabal file, +-- but is ignored in this module. + +{-# OPTIONS_GHC -Wno-missing-signatures #-} + +module IgnoreFatal where + +a = 'a' diff --git a/ghcide-test/data/ignore-fatal/cabal.project b/ghcide-test/data/ignore-fatal/cabal.project new file mode 100644 index 0000000000..c6bb6fb152 --- /dev/null +++ b/ghcide-test/data/ignore-fatal/cabal.project @@ -0,0 +1 @@ +packages: ignore-fatal.cabal diff --git a/ghcide-test/data/ignore-fatal/hie.yaml b/ghcide-test/data/ignore-fatal/hie.yaml new file mode 100644 index 0000000000..6ea3cebd0d --- /dev/null +++ b/ghcide-test/data/ignore-fatal/hie.yaml @@ -0,0 +1,4 @@ +cradle: + cabal: + - path: "." + component: "lib:ignore-fatal" diff --git a/ghcide-test/data/ignore-fatal/ignore-fatal.cabal b/ghcide-test/data/ignore-fatal/ignore-fatal.cabal new file mode 100644 index 0000000000..6e831e0395 --- /dev/null +++ b/ghcide-test/data/ignore-fatal/ignore-fatal.cabal @@ -0,0 +1,10 @@ +name: ignore-fatal +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: IgnoreFatal + hs-source-dirs: . + ghc-options: -Werror=missing-signatures diff --git a/ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace new file mode 100644 index 0000000000..a54ea9bc4b --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/a-1.0.0-inplace @@ -0,0 +1,18 @@ +-this-package-name +a +-working-dir +a +-fbuilding-cabal-package +-O0 +-i. 
+-this-unit-id +a-1.0.0-inplace +-hide-all-packages +-Wmissing-home-modules +-no-user-package-db +-package +base +-package +text +-XHaskell98 +A diff --git a/ghcide-test/data/multi-unit-reexport/a/A.hs b/ghcide-test/data/multi-unit-reexport/a/A.hs new file mode 100644 index 0000000000..9a7d7e33c9 --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/a/A.hs @@ -0,0 +1,3 @@ +module A(foo) where +import Data.Text +foo = () diff --git a/ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace new file mode 100644 index 0000000000..d656a2539b --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/b-1.0.0-inplace @@ -0,0 +1,21 @@ +-this-package-name +b +-working-dir +b +-fbuilding-cabal-package +-O0 +-i +-i. +-this-unit-id +b-1.0.0-inplace +-hide-all-packages +-Wmissing-home-modules +-no-user-package-db +-package-id +a-1.0.0-inplace +-reexported-module +A +-package +base +-XHaskell98 +B diff --git a/ghcide-test/data/multi-unit-reexport/b/B.hs b/ghcide-test/data/multi-unit-reexport/b/B.hs new file mode 100644 index 0000000000..2c6d4b28a2 --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/b/B.hs @@ -0,0 +1,3 @@ +module B(module B) where +import A +qux = foo diff --git a/ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace b/ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace new file mode 100644 index 0000000000..e60a95eda0 --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/c-1.0.0-inplace @@ -0,0 +1,19 @@ +-this-package-name +c +-working-dir +c +-fbuilding-cabal-package +-O0 +-i +-i. +-this-unit-id +c-1.0.0-inplace +-hide-all-packages +-Wmissing-home-modules +-no-user-package-db +-package-id +b-1.0.0-inplace +-package +base +-XHaskell98 +C diff --git a/ghcide-test/data/multi-unit-reexport/c/C.hs b/ghcide-test/data/multi-unit-reexport/c/C.hs new file mode 100644 index 0000000000..1b2d305296 --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/c/C.hs @@ -0,0 +1,4 @@ +module C(module C) where +import A +import B +cux = foo `seq` qux diff --git a/ghcide-test/data/multi-unit-reexport/cabal.project b/ghcide-test/data/multi-unit-reexport/cabal.project new file mode 100644 index 0000000000..96f52330c9 --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/cabal.project @@ -0,0 +1,2 @@ +packages: a b c +multi-repl: True diff --git a/ghcide-test/data/multi-unit-reexport/hie.yaml b/ghcide-test/data/multi-unit-reexport/hie.yaml new file mode 100644 index 0000000000..34858b5f64 --- /dev/null +++ b/ghcide-test/data/multi-unit-reexport/hie.yaml @@ -0,0 +1,6 @@ +cradle: + direct: + arguments: ["-unit" ,"@a-1.0.0-inplace" + ,"-unit" ,"@b-1.0.0-inplace" + ,"-unit" ,"@c-1.0.0-inplace" + ] diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace new file mode 100644 index 0000000000..cab2b716ff --- /dev/null +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -0,0 +1,21 @@ +-this-package-name +a +-working-dir +a +-fbuilding-cabal-package +-O0 +-i. 
+-this-unit-id +a-1.0.0-inplace +-hide-all-packages +-Wmissing-home-modules +-no-user-package-db +-package +base +-package +text +-XHaskell98 +A ++RTS +-A32M +-RTS diff --git a/ghcide-test/data/multi-unit/a/A.hs b/ghcide-test/data/multi-unit/a/A.hs new file mode 100644 index 0000000000..9a7d7e33c9 --- /dev/null +++ b/ghcide-test/data/multi-unit/a/A.hs @@ -0,0 +1,3 @@ +module A(foo) where +import Data.Text +foo = () diff --git a/ghcide-test/data/multi-unit/b-1.0.0-inplace b/ghcide-test/data/multi-unit/b-1.0.0-inplace new file mode 100644 index 0000000000..fe43e3a92d --- /dev/null +++ b/ghcide-test/data/multi-unit/b-1.0.0-inplace @@ -0,0 +1,20 @@ +-this-package-name +b +-working-dir +b +-fbuilding-cabal-package +-O0 +-i +-i. +-this-unit-id +b-1.0.0-inplace +-hide-all-packages +-Wmissing-home-modules +-no-user-package-db +-package-id +a-1.0.0-inplace +-package +base +-XHaskell98 +-XPackageImports +B diff --git a/ghcide-test/data/multi-unit/b/B.hs b/ghcide-test/data/multi-unit/b/B.hs new file mode 100644 index 0000000000..54c6b874fc --- /dev/null +++ b/ghcide-test/data/multi-unit/b/B.hs @@ -0,0 +1,3 @@ +module B(module B) where +import "a" A +qux = foo diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace new file mode 100644 index 0000000000..7421d59279 --- /dev/null +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -0,0 +1,21 @@ +-this-package-name +c +-working-dir +c +-fbuilding-cabal-package +-O0 +-i +-i. +-this-unit-id +c-1.0.0-inplace +-hide-all-packages +-Wmissing-home-modules +-no-user-package-db +-package-id +a-1.0.0-inplace +-package +base +-XHaskell98 +C ++RTS +-A32M diff --git a/ghcide-test/data/multi-unit/c/C.hs b/ghcide-test/data/multi-unit/c/C.hs new file mode 100644 index 0000000000..b75a7fc3c7 --- /dev/null +++ b/ghcide-test/data/multi-unit/c/C.hs @@ -0,0 +1,3 @@ +module C(module C) where +import A +cux = foo diff --git a/ghcide-test/data/multi-unit/cabal.project b/ghcide-test/data/multi-unit/cabal.project new file mode 100644 index 0000000000..96f52330c9 --- /dev/null +++ b/ghcide-test/data/multi-unit/cabal.project @@ -0,0 +1,2 @@ +packages: a b c +multi-repl: True diff --git a/ghcide-test/data/multi-unit/hie.yaml b/ghcide-test/data/multi-unit/hie.yaml new file mode 100644 index 0000000000..34858b5f64 --- /dev/null +++ b/ghcide-test/data/multi-unit/hie.yaml @@ -0,0 +1,6 @@ +cradle: + direct: + arguments: ["-unit" ,"@a-1.0.0-inplace" + ,"-unit" ,"@b-1.0.0-inplace" + ,"-unit" ,"@c-1.0.0-inplace" + ] diff --git a/ghcide-test/data/multi/a/A.hs b/ghcide-test/data/multi/a/A.hs new file mode 100644 index 0000000000..faf037ca84 --- /dev/null +++ b/ghcide-test/data/multi/a/A.hs @@ -0,0 +1,3 @@ +module A(foo) where +import Control.Concurrent.Async +foo = () diff --git a/ghcide-test/data/multi/a/a.cabal b/ghcide-test/data/multi/a/a.cabal new file mode 100644 index 0000000000..d95697264d --- /dev/null +++ b/ghcide-test/data/multi/a/a.cabal @@ -0,0 +1,9 @@ +name: a +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base, async >= 2.0 + exposed-modules: A + hs-source-dirs: . 
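The a-1.0.0-inplace, b-1.0.0-inplace and c-1.0.0-inplace files in the two multi-unit fixtures above are GHC response files, one per home unit. The direct cradle in each hie.yaml hands them to a single GHC session as -unit @a-1.0.0-inplace -unit @b-1.0.0-inplace -unit @c-1.0.0-inplace, the same multiple-home-units interface that Cabal's multi-repl (multi-repl: True in cabal.project) drives. In the reexport variant, unit b passes -reexported-module A so that unit c can import A without depending on unit a directly; the plain multi-unit variant also carries +RTS -A32M flags in its response files, presumably to check that stray RTS options are tolerated when the units are loaded.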
diff --git a/ghcide-test/data/multi/b/B.hs b/ghcide-test/data/multi/b/B.hs new file mode 100644 index 0000000000..2c6d4b28a2 --- /dev/null +++ b/ghcide-test/data/multi/b/B.hs @@ -0,0 +1,3 @@ +module B(module B) where +import A +qux = foo diff --git a/ghcide-test/data/multi/b/b.cabal b/ghcide-test/data/multi/b/b.cabal new file mode 100644 index 0000000000..e23f5177d8 --- /dev/null +++ b/ghcide-test/data/multi/b/b.cabal @@ -0,0 +1,9 @@ +name: b +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base, a + exposed-modules: B + hs-source-dirs: . diff --git a/ghcide-test/data/multi/c/C.hs b/ghcide-test/data/multi/c/C.hs new file mode 100644 index 0000000000..b75a7fc3c7 --- /dev/null +++ b/ghcide-test/data/multi/c/C.hs @@ -0,0 +1,3 @@ +module C(module C) where +import A +cux = foo diff --git a/ghcide-test/data/multi/c/c.cabal b/ghcide-test/data/multi/c/c.cabal new file mode 100644 index 0000000000..93ee004d94 --- /dev/null +++ b/ghcide-test/data/multi/c/c.cabal @@ -0,0 +1,9 @@ +name: c +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base, a + exposed-modules: C + hs-source-dirs: . diff --git a/ghcide-test/data/multi/cabal.project b/ghcide-test/data/multi/cabal.project new file mode 100644 index 0000000000..317a89138e --- /dev/null +++ b/ghcide-test/data/multi/cabal.project @@ -0,0 +1,3 @@ +packages: a b c + +allow-newer: base diff --git a/ghcide-test/data/multi/hie.yaml b/ghcide-test/data/multi/hie.yaml new file mode 100644 index 0000000000..c6b36d012c --- /dev/null +++ b/ghcide-test/data/multi/hie.yaml @@ -0,0 +1,8 @@ +cradle: + cabal: + - path: "./a" + component: "lib:a" + - path: "./b" + component: "lib:b" + - path: "./c" + component: "lib:c" diff --git a/ghcide-test/data/plugin-knownnat/KnownNat.hs b/ghcide-test/data/plugin-knownnat/KnownNat.hs new file mode 100644 index 0000000000..6c91f0c0a5 --- /dev/null +++ b/ghcide-test/data/plugin-knownnat/KnownNat.hs @@ -0,0 +1,10 @@ +{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-} +{-# LANGUAGE DataKinds, ScopedTypeVariables, TypeOperators #-} +module KnownNat where +import Data.Proxy +import GHC.TypeLits + +f :: forall n. KnownNat n => Proxy n -> Integer +f _ = natVal (Proxy :: Proxy n) + natVal (Proxy :: Proxy (n+2)) +foo :: Int -> Int -> Int +foo a _b = a + c diff --git a/ghcide-test/data/plugin-knownnat/cabal.project b/ghcide-test/data/plugin-knownnat/cabal.project new file mode 100644 index 0000000000..95282e93e9 --- /dev/null +++ b/ghcide-test/data/plugin-knownnat/cabal.project @@ -0,0 +1,4 @@ +packages: . + +-- Needed for ghc >= 9.0.2 and ghc-typelits-natnormalise == 0.7.6 +allow-newer: ghc-typelits-natnormalise:ghc-bignum diff --git a/ghcide-test/data/plugin-knownnat/plugin.cabal b/ghcide-test/data/plugin-knownnat/plugin.cabal new file mode 100644 index 0000000000..1439bf72a7 --- /dev/null +++ b/ghcide-test/data/plugin-knownnat/plugin.cabal @@ -0,0 +1,9 @@ +cabal-version: 1.18 +name: plugin +version: 1.0.0 +build-type: Simple + +library + build-depends: base, ghc-typelits-knownnat + exposed-modules: KnownNat + hs-source-dirs: . 
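In the plugin-knownnat fixture above, the ghc-typelits-knownnat solver plugin is what lets f typecheck: stock GHC cannot derive KnownNat (n + 2) from a KnownNat n constraint. The out-of-scope c in foo a _b = a + c looks deliberate, presumably so the test can confirm that ordinary diagnostics still appear while a type-checker plugin is active. For orientation, a minimal counter-example outside the fixture (module name made up, same extensions as KnownNat.hs but without the -fplugin pragma) that stock GHC rejects:

    {-# LANGUAGE DataKinds, ScopedTypeVariables, TypeOperators #-}
    module NoPlugin where

    import Data.Proxy
    import GHC.TypeLits

    -- Rejected without the plugin: GHC has no arithmetic solver, so it
    -- cannot deduce KnownNat (n + 2) from the KnownNat n constraint.
    g :: forall n. KnownNat n => Proxy n -> Integer
    g _ = natVal (Proxy :: Proxy (n + 2))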
diff --git a/ghcide-test/data/recomp/A.hs b/ghcide-test/data/recomp/A.hs new file mode 100644 index 0000000000..cc80fe9edd --- /dev/null +++ b/ghcide-test/data/recomp/A.hs @@ -0,0 +1,6 @@ +module A(x) where + +import B + +x :: Int +x = y diff --git a/ghcide-test/data/recomp/B.hs b/ghcide-test/data/recomp/B.hs new file mode 100644 index 0000000000..e8f35da9e9 --- /dev/null +++ b/ghcide-test/data/recomp/B.hs @@ -0,0 +1,4 @@ +module B(y) where + +y :: Int +y = undefined diff --git a/ghcide-test/data/recomp/P.hs b/ghcide-test/data/recomp/P.hs new file mode 100644 index 0000000000..0622632eea --- /dev/null +++ b/ghcide-test/data/recomp/P.hs @@ -0,0 +1,5 @@ +module P() where +import A +import B + +bar = x :: Int diff --git a/ghcide-test/data/recomp/hie.yaml b/ghcide-test/data/recomp/hie.yaml new file mode 100644 index 0000000000..bf98055e95 --- /dev/null +++ b/ghcide-test/data/recomp/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-Wmissing-signatures","B", "A", "P"]}} diff --git a/ghcide-test/data/references/Fields.hs b/ghcide-test/data/references/Fields.hs new file mode 100644 index 0000000000..1b935f31c9 --- /dev/null +++ b/ghcide-test/data/references/Fields.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE RecordWildCards #-} +module Fields where + +data Foo = MkFoo + { + barr :: String, + bazz :: String + } + +fooUse0 :: Foo -> String +fooUse0 MkFoo{barr} = "5" + +fooUse1 :: Foo -> String +fooUse1 MkFoo{..} = "6" + +fooUse2 :: String -> String -> Foo +fooUse2 bar baz = + MkFoo{..} diff --git a/ghcide-test/data/references/Main.hs b/ghcide-test/data/references/Main.hs new file mode 100644 index 0000000000..aae14355d4 --- /dev/null +++ b/ghcide-test/data/references/Main.hs @@ -0,0 +1,17 @@ +module Main where + +import References +import Fields +main :: IO () +main = return () + + + +a = 2 :: Int +b = a + 1 + +acc :: Account +acc = Savings + +fooUse3 :: String -> String -> Foo +fooUse3 bar baz = MkFoo{barr = bar, bazz = baz} diff --git a/ghcide-test/data/references/OtherModule.hs b/ghcide-test/data/references/OtherModule.hs new file mode 100644 index 0000000000..4840f46d8e --- /dev/null +++ b/ghcide-test/data/references/OtherModule.hs @@ -0,0 +1,9 @@ +module OtherModule (symbolDefinedInOtherModule, symbolDefinedInOtherOtherModule) where + +import OtherOtherModule + +symbolDefinedInOtherModule = 1 + +symbolLocalToOtherModule = 2 + +someFxn x = x + symbolLocalToOtherModule diff --git a/ghcide-test/data/references/OtherOtherModule.hs b/ghcide-test/data/references/OtherOtherModule.hs new file mode 100644 index 0000000000..d567b8cb97 --- /dev/null +++ b/ghcide-test/data/references/OtherOtherModule.hs @@ -0,0 +1,3 @@ +module OtherOtherModule where + +symbolDefinedInOtherOtherModule = "asdf" diff --git a/ghcide-test/data/references/References.hs b/ghcide-test/data/references/References.hs new file mode 100644 index 0000000000..ac76b4de40 --- /dev/null +++ b/ghcide-test/data/references/References.hs @@ -0,0 +1,25 @@ +module References where + +import OtherModule + +foo = bar + +bar = let x = bar 42 in const "hello" + +baz = do + x <- bar 23 + return $ bar 14 + +data Account = + Checking + | Savings + +bobsAccount = Checking + +bobHasChecking = case bobsAccount of + Checking -> True + Savings -> False + +x = symbolDefinedInOtherModule + +y = symbolDefinedInOtherOtherModule diff --git a/ghcide-test/data/references/hie.yaml b/ghcide-test/data/references/hie.yaml new file mode 100644 index 0000000000..9e68765ba1 --- /dev/null +++ b/ghcide-test/data/references/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: 
["Main","OtherModule","OtherOtherModule","References", "Fields"]}} diff --git a/ghcide-test/data/rootUri/dirA/Foo.hs b/ghcide-test/data/rootUri/dirA/Foo.hs new file mode 100644 index 0000000000..ea4238dcbb --- /dev/null +++ b/ghcide-test/data/rootUri/dirA/Foo.hs @@ -0,0 +1,3 @@ +module Foo () where + +foo = () diff --git a/ghcide-test/data/rootUri/dirA/foo.cabal b/ghcide-test/data/rootUri/dirA/foo.cabal new file mode 100644 index 0000000000..3cdd320ad9 --- /dev/null +++ b/ghcide-test/data/rootUri/dirA/foo.cabal @@ -0,0 +1,9 @@ +name: foo +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: Foo + hs-source-dirs: . diff --git a/ghcide-test/data/rootUri/dirB/Foo.hs b/ghcide-test/data/rootUri/dirB/Foo.hs new file mode 100644 index 0000000000..ea4238dcbb --- /dev/null +++ b/ghcide-test/data/rootUri/dirB/Foo.hs @@ -0,0 +1,3 @@ +module Foo () where + +foo = () diff --git a/ghcide-test/data/rootUri/dirB/foo.cabal b/ghcide-test/data/rootUri/dirB/foo.cabal new file mode 100644 index 0000000000..3cdd320ad9 --- /dev/null +++ b/ghcide-test/data/rootUri/dirB/foo.cabal @@ -0,0 +1,9 @@ +name: foo +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: Foo + hs-source-dirs: . diff --git a/ghcide-test/data/symlink/hie.yaml b/ghcide-test/data/symlink/hie.yaml new file mode 100644 index 0000000000..cfadaebc17 --- /dev/null +++ b/ghcide-test/data/symlink/hie.yaml @@ -0,0 +1,10 @@ + +cradle: + direct: + arguments: + - -i + - -isrc + - -iother_loc/ + - other_loc/Sym.hs + - src/Foo.hs + - -Wall diff --git a/ghcide-test/data/symlink/other_loc/.gitkeep b/ghcide-test/data/symlink/other_loc/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ghcide-test/data/symlink/some_loc/Sym.hs b/ghcide-test/data/symlink/some_loc/Sym.hs new file mode 100644 index 0000000000..1039f52bfd --- /dev/null +++ b/ghcide-test/data/symlink/some_loc/Sym.hs @@ -0,0 +1,4 @@ +module Sym where + +foo :: String +foo = "" diff --git a/ghcide-test/data/symlink/src/Foo.hs b/ghcide-test/data/symlink/src/Foo.hs new file mode 100644 index 0000000000..dbafb2181a --- /dev/null +++ b/ghcide-test/data/symlink/src/Foo.hs @@ -0,0 +1,4 @@ +module Foo where + +import Sym + diff --git a/ghcide-test/data/watched-files/reload/reload.cabal b/ghcide-test/data/watched-files/reload/reload.cabal new file mode 100644 index 0000000000..d9d5607a94 --- /dev/null +++ b/ghcide-test/data/watched-files/reload/reload.cabal @@ -0,0 +1,12 @@ +cabal-version: 2.4 +name: reload +version: 0.1.0.0 +author: Lin Jian +maintainer: me@linj.tech +build-type: Simple + +library + exposed-modules: MyLib + build-depends: base + hs-source-dirs: src + default-language: Haskell2010 diff --git a/ghcide-test/data/watched-files/reload/src/MyLib.hs b/ghcide-test/data/watched-files/reload/src/MyLib.hs new file mode 100644 index 0000000000..bbb506d001 --- /dev/null +++ b/ghcide-test/data/watched-files/reload/src/MyLib.hs @@ -0,0 +1,6 @@ +module MyLib (someFunc) where + +import Data.List.Split + +someFunc :: IO () +someFunc = putStrLn "someFunc" diff --git a/ghcide-test/data/working-dir/a/A.hs b/ghcide-test/data/working-dir/a/A.hs new file mode 100644 index 0000000000..5b4f28ba40 --- /dev/null +++ b/ghcide-test/data/working-dir/a/A.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TemplateHaskell #-} +module A(th_a) where + +import Language.Haskell.TH +import Language.Haskell.TH.Syntax +import Control.Monad.IO.Class + +th_a :: DecsQ +th_a = do + str <- makeRelativeToProject "wdtest" >>= 
liftIO . readFile + [d| a = $(lift str) |] diff --git a/ghcide-test/data/working-dir/a/B.hs b/ghcide-test/data/working-dir/a/B.hs new file mode 100644 index 0000000000..8563bb0875 --- /dev/null +++ b/ghcide-test/data/working-dir/a/B.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module B() where + +import A + +$th_a diff --git a/ghcide-test/data/working-dir/a/a.cabal b/ghcide-test/data/working-dir/a/a.cabal new file mode 100644 index 0000000000..1b92d21849 --- /dev/null +++ b/ghcide-test/data/working-dir/a/a.cabal @@ -0,0 +1,11 @@ +name: a +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 +extra-source-files: wdtest + +library + build-depends: base, template-haskell + exposed-modules: A B + ghc-options: -Wmissing-signatures + hs-source-dirs: . diff --git a/ghcide-test/data/working-dir/a/wdtest b/ghcide-test/data/working-dir/a/wdtest new file mode 100644 index 0000000000..9daeafb986 --- /dev/null +++ b/ghcide-test/data/working-dir/a/wdtest @@ -0,0 +1 @@ +test diff --git a/ghcide-test/data/working-dir/cabal.project b/ghcide-test/data/working-dir/cabal.project new file mode 100644 index 0000000000..80dfe76da5 --- /dev/null +++ b/ghcide-test/data/working-dir/cabal.project @@ -0,0 +1 @@ +packages: a diff --git a/ghcide-test/data/working-dir/hie.yaml b/ghcide-test/data/working-dir/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/ghcide-test/data/working-dir/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/ghcide-test/exe/AsyncTests.hs b/ghcide-test/exe/AsyncTests.hs new file mode 100644 index 0000000000..f341ab504b --- /dev/null +++ b/ghcide-test/exe/AsyncTests.hs @@ -0,0 +1,53 @@ +{-# LANGUAGE DataKinds #-} + +module AsyncTests (tests) where + +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Data.Aeson (toJSON) +import Data.Proxy +import qualified Data.Text as T +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +-- import Test.QuickCheck.Instances () +import Config +import Development.IDE.Plugin.Test (TestRequest (BlockSeconds), + blockCommandId) +import Test.Tasty +import Test.Tasty.HUnit + +-- | Test if ghcide asynchronously handles Commands and user Requests +tests :: TestTree +tests = testGroup "async" + [ + testWithDummyPluginEmpty "command" $ do + -- Execute a command that will block forever + let req = ExecuteCommandParams Nothing blockCommandId Nothing + void $ sendRequest SMethod_WorkspaceExecuteCommand req + -- Load a file and check for code actions. Will only work if the command is run asynchronously + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS -Wmissing-signatures #-}" + , "foo = id" + ] + void waitForDiagnostics + codeLenses <- getAndResolveCodeLenses doc + liftIO $ [ _title | CodeLens{_command = Just Command{_title}} <- codeLenses] @=? + [ "foo :: a -> a" ] + , testWithDummyPluginEmpty "request" $ do + -- Execute a custom request that will block for 1000 seconds + void $ sendRequest (SMethod_CustomMethod (Proxy @"test")) $ toJSON $ BlockSeconds 1000 + -- Load a file and check for code actions. Will only work if the request is run asynchronously + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS -Wmissing-signatures #-}" + , "foo = id" + ] + void waitForDiagnostics + codeLenses <- getAndResolveCodeLenses doc + liftIO $ [ _title | CodeLens{_command = Just Command{_title}} <- codeLenses] @=? 
+ [ "foo :: a -> a" ] + ] diff --git a/ghcide-test/exe/BootTests.hs b/ghcide-test/exe/BootTests.hs new file mode 100644 index 0000000000..06c05ba9b6 --- /dev/null +++ b/ghcide-test/exe/BootTests.hs @@ -0,0 +1,56 @@ +module BootTests (tests) where + +import Config (checkDefs, mkR, runInDir, + runWithExtraFiles) +import Control.Applicative.Combinators +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Development.IDE.GHC.Util +import Development.IDE.Test (expectNoMoreDiagnostics, + isReferenceReady) +import Development.IDE.Types.Location +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import System.FilePath (()) +import Test.Tasty +import Test.Tasty.HUnit + + +tests :: TestTree +tests = testGroup "boot" + [ testCase "boot-def-test" $ runWithExtraFiles "boot" $ \dir -> do + let cPath = dir "C.hs" + cSource <- liftIO $ readFileUtf8 cPath + -- Dirty the cache + liftIO $ runInDir dir $ do + cDoc <- createDoc cPath "haskell" cSource + -- We send a hover request then wait for either the hover response or + -- `ghcide/reference/ready` notification. + -- Once we receive one of the above, we wait for the other that we + -- haven't received yet. + -- If we don't wait for the `ready` notification it is possible + -- that the `getDefinitions` request/response in the outer ghcide + -- session will find no definitions. + let hoverParams = HoverParams cDoc (Position 4 3) Nothing + hoverRequestId <- sendRequest SMethod_TextDocumentHover hoverParams + let parseReadyMessage = isReferenceReady cPath + let parseHoverResponse = responseForId SMethod_TextDocumentHover hoverRequestId + hoverResponseOrReadyMessage <- skipManyTill anyMessage ((Left <$> parseHoverResponse) <|> (Right <$> parseReadyMessage)) + _ <- skipManyTill anyMessage $ + case hoverResponseOrReadyMessage of + Left _ -> void parseReadyMessage + Right _ -> void parseHoverResponse + closeDoc cDoc + cdoc <- createDoc cPath "haskell" cSource + locs <- getDefinitions cdoc (Position 7 4) + let floc = mkR 9 0 9 1 + checkDefs locs (pure [floc]) + , testCase "graph with boot modules" $ runWithExtraFiles "boot2" $ \dir -> do + _ <- openDoc (dir "A.hs") "haskell" + expectNoMoreDiagnostics 2 + ] diff --git a/ghcide-test/exe/CPPTests.hs b/ghcide-test/exe/CPPTests.hs new file mode 100644 index 0000000000..762e6632f1 --- /dev/null +++ b/ghcide-test/exe/CPPTests.hs @@ -0,0 +1,56 @@ +module CPPTests (tests) where + +import Control.Exception (catch) +import qualified Data.Text as T +import Development.IDE.Test (Cursor, expectDiagnostics, + expectNoMoreDiagnostics) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +-- import Test.QuickCheck.Instances () +import Config +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = + testGroup "cpp" + [ testCase "cpp-error" $ do + let content = + T.unlines + [ "{-# LANGUAGE CPP #-}", + "module Testing where", + "#ifdef FOO", + "foo = 42" + ] + -- The error locations differ depending on which C-preprocessor is used. + -- Some give the column number and others don't (hence maxBound == -1 unsigned). Assert either + -- of them. 
+ (run $ expectError content (2, maxBound)) + `catch` ( \e -> do + let _ = e :: HUnitFailure + run $ expectError content (2, 1) + ) + , testWithDummyPluginEmpty "cpp-ghcide" $ do + _ <- createDoc "A.hs" "haskell" $ T.unlines + ["{-# LANGUAGE CPP #-}" + ,"main =" + ,"#ifdef __GHCIDE__" + ," worked" + ,"#else" + ," failed" + ,"#endif" + ] + expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 2), "Variable not in scope: worked", Just "GHC-88464")])] + ] + where + expectError :: T.Text -> Cursor -> Session () + expectError content cursor = do + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs", + [(DiagnosticSeverity_Error, cursor, "error: unterminated", Nothing)] + ) + ] + expectNoMoreDiagnostics 0.5 diff --git a/ghcide-test/exe/ClientSettingsTests.hs b/ghcide-test/exe/ClientSettingsTests.hs new file mode 100644 index 0000000000..7c3c3b27f1 --- /dev/null +++ b/ghcide-test/exe/ClientSettingsTests.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE GADTs #-} +module ClientSettingsTests (tests) where + +import Config (testWithDummyPluginEmpty) +import Control.Applicative.Combinators +import Control.Monad +import Data.Aeson (toJSON) +import Data.Default +import qualified Data.Text as T +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import Test.Hls (waitForProgressDone) +import Test.Tasty + +tests :: TestTree +tests = testGroup "client settings handling" + [ testWithDummyPluginEmpty "ghcide restarts shake session on config changes" $ do + setIgnoringLogNotifications False + void $ createDoc "A.hs" "haskell" "module A where" + waitForProgressDone + setConfigSection "haskell" $ toJSON (def :: Config) + skipManyTill anyMessage restartingBuildSession + + ] + where + restartingBuildSession :: Session () + restartingBuildSession = do + FromServerMess SMethod_WindowLogMessage TNotificationMessage{_params = LogMessageParams{..}} <- loggingNotification + guard $ "Restarting build session" `T.isInfixOf` _message diff --git a/ghcide-test/exe/CodeLensTests.hs b/ghcide-test/exe/CodeLensTests.hs new file mode 100644 index 0000000000..fd821e37b6 --- /dev/null +++ b/ghcide-test/exe/CodeLensTests.hs @@ -0,0 +1,140 @@ +{-# LANGUAGE GADTs #-} + +module CodeLensTests (tests) where + +import Config +import Control.Applicative.Combinators +import Control.Lens ((^.)) +import Control.Monad (void) +import Control.Monad.IO.Class (liftIO) +import qualified Data.Aeson as A +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcVersion (..), ghcVersion) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import Test.Hls (mkRange, waitForProgressDone) +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "code lenses" + [ addSigLensesTests + ] + +data TestSpec = + TestSpec + { mName :: Maybe TestName -- ^ Optional Test Name + , input :: T.Text -- ^ Input + , expected :: Maybe T.Text -- ^ Expected Type Sig + } + +mkT :: T.Text -> T.Text -> TestSpec +mkT i e = TestSpec Nothing i (Just e) +mkT' :: TestName -> T.Text -> T.Text -> TestSpec +mkT' name i e = TestSpec (Just name) i (Just e) + +noExpected :: TestSpec -> TestSpec +noExpected t = t { expected = Nothing } + +mkTestName :: TestSpec -> String 
+mkTestName t = case mName t of + Nothing -> T.unpack $ T.replace "\n" "\\n" (input t) + Just name -> name + +addSigLensesTests :: TestTree +addSigLensesTests = + let pragmas = "{-# OPTIONS_GHC -Wmissing-signatures -Wmissing-pattern-synonym-signatures #-}" + moduleH exported = + T.unlines + [ "{-# LANGUAGE PatternSynonyms,TypeApplications,DataKinds,RankNTypes,ScopedTypeVariables,TypeOperators,GADTs,BangPatterns #-}" + , "module Sigs(" <> exported <> ") where" + , "import qualified Data.Complex as C" + , "import Data.Data (Proxy (..), type (:~:) (..), mkCharType)" + , "data T1 a where" + , " MkT1 :: (Show b) => a -> b -> T1 a" + ] + before enableGHCWarnings exported spec others = + T.unlines $ [pragmas | enableGHCWarnings] <> [moduleH exported, input spec] <> others + after' enableGHCWarnings exported spec others = + T.unlines $ [pragmas | enableGHCWarnings] <> [moduleH exported] <> maybe [] pure (expected spec) <> [input spec] <> others + createConfig mode = A.object ["plugin" A..= A.object ["ghcide-type-lenses" A..= A.object ["config" A..= A.object ["mode" A..= A.String mode]]]] + sigSession testName enableGHCWarnings waitForDiags mode exported spec others = testWithDummyPluginEmpty testName $ do + let originalCode = before enableGHCWarnings exported spec others + let expectedCode = after' enableGHCWarnings exported spec others + setConfigSection "haskell" (createConfig mode) + doc <- createDoc "Sigs.hs" "haskell" originalCode + -- Because the diagnostics mode is really relying only on diagnostics now + -- to generate the code lens we need to make sure we wait till the file + -- is parsed before asking for codelenses, otherwise we will get nothing. + if waitForDiags + then void waitForDiagnostics + else waitForProgressDone + codeLenses <- getAndResolveCodeLenses doc + if isJust $ expected spec + then do + liftIO $ length codeLenses == 1 @? "Expected 1 code lens, but got: " <> show codeLenses + executeCommand $ fromJust $ head codeLenses ^. L.command + modifiedCode <- skipManyTill anyMessage (getDocumentEdit doc) + liftIO $ expectedCode @=? modifiedCode + else liftIO $ null codeLenses @? "Expected no code lens, but got: " <> show codeLenses + cases = + [ mkT "abc = True" "abc :: Bool" + , mkT "foo a b = a + b" "foo :: Num a => a -> a -> a" + , mkT "bar a b = show $ a + b" "bar :: (Show a, Num a) => a -> a -> String" + , mkT "(!!!) a b = a > b" "(!!!) 
:: Ord a => a -> a -> Bool" + , mkT "a >>>> b = a + b" "(>>>>) :: Num a => a -> a -> a" + , mkT "a `haha` b = a b" "haha :: (t1 -> t2) -> t1 -> t2" + , mkT "pattern Some a = Just a" "pattern Some :: a -> Maybe a" + , mkT "pattern Some a <- Just a" "pattern Some :: a -> Maybe a" + , mkT "pattern Some a <- Just a\n where Some a = Just a" "pattern Some :: a -> Maybe a" + , mkT "pattern Some a <- Just !a\n where Some !a = Just a" "pattern Some :: a -> Maybe a" + , mkT "pattern Point{x, y} = (x, y)" "pattern Point :: a -> b -> (a, b)" + , mkT "pattern Point{x, y} <- (x, y)" "pattern Point :: a -> b -> (a, b)" + , mkT "pattern Point{x, y} <- (x, y)\n where Point x y = (x, y)" "pattern Point :: a -> b -> (a, b)" + , mkT "pattern MkT1' b = MkT1 42 b" "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a" + , mkT "pattern MkT1' b <- MkT1 42 b" "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a" + , mkT "pattern MkT1' b <- MkT1 42 b\n where MkT1' b = MkT1 42 b" "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a" + , mkT "qualifiedSigTest= C.realPart" "qualifiedSigTest :: C.Complex a -> a" + , mkT "head = 233" "head :: Integer" + , mkT "rank2Test (k :: forall a . a -> a) = (k 233 :: Int, k \"QAQ\")" "rank2Test :: (forall a. a -> a) -> (Int, String)" + , mkT "symbolKindTest = Proxy @\"qwq\"" "symbolKindTest :: Proxy \"qwq\"" + , mkT "promotedKindTest = Proxy @Nothing" (if ghcVersion >= GHC96 then "promotedKindTest :: Proxy Nothing" else "promotedKindTest :: Proxy 'Nothing") + , mkT "typeOperatorTest = Refl" "typeOperatorTest :: forall {k} {a :: k}. a :~: a" + , mkT "notInScopeTest = mkCharType" + (if ghcVersion < GHC910 + then "notInScopeTest :: String -> Data.Data.DataType" + else "notInScopeTest :: String -> GHC.Internal.Data.Data.DataType" + ) + + , mkT' "aVeryLongSignature" + "aVeryLongSignature a b c d e f g h i j k l m n = a && b && c && d && e && f && g && h && i && j && k && l && m && n" + "aVeryLongSignature :: Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool -> Bool" + ] + in testGroup + "add signature" + [ testGroup "signatures are correct" [sigSession (mkTestName spec) False False "always" "" spec [] | spec <- cases] + , sigSession "exported mode works" False False "exported" "xyz" (mkT "xyz = True" "xyz :: Bool") (input <$> take 3 cases) + , testGroup + "diagnostics mode works" + [ sigSession "with GHC warnings" True True "diagnostics" "" (head cases) [] + , sigSession "without GHC warnings" False False "diagnostics" "" (noExpected $ head cases) [] + ] + , testWithDummyPluginEmpty "keep stale lens" $ do + let content = T.unlines + [ "module Stale where" + , "f = _" + ] + doc <- createDoc "Stale.hs" "haskell" content + oldLens <- getCodeLenses doc + liftIO $ length oldLens @?= 1 + let edit = TextEdit (mkRange 0 4 0 5) "" -- Remove the `_` + _ <- applyEdit doc edit + newLens <- getCodeLenses doc + liftIO $ newLens @?= oldLens + ] diff --git a/ghcide-test/exe/CompletionTests.hs b/ghcide-test/exe/CompletionTests.hs new file mode 100644 index 0000000000..8c44173bd6 --- /dev/null +++ b/ghcide-test/exe/CompletionTests.hs @@ -0,0 +1,609 @@ + +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module CompletionTests (tests) where + +import Config +import Control.Lens ((^.)) +import qualified Control.Lens as Lens +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Data.Default +import Data.List.Extra 
+import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Types.Location +import Ide.Plugin.Config +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import Test.Hls (waitForTypecheck) +import qualified Test.Hls.FileSystem as FS +import Test.Hls.FileSystem (file, text) +import Test.Hls.Util +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests + = testGroup "completion" + [ + testGroup "non local" nonLocalCompletionTests + , testGroup "topLevel" topLevelCompletionTests + , testGroup "local" localCompletionTests + , testGroup "package" packageCompletionTests + , testGroup "project" projectCompletionTests + , testGroup "other" otherCompletionTests + , testGroup "doc" completionDocTests + ] + +testSessionEmpty :: TestName -> Session () -> TestTree +testSessionEmpty name = testWithDummyPlugin name (mkIdeTestFs [FS.directCradle ["A.hs"]]) + +testSessionEmptyWithCradle :: TestName -> T.Text -> Session () -> TestTree +testSessionEmptyWithCradle name cradle = testWithDummyPlugin name (mkIdeTestFs [file "hie.yaml" (text cradle)]) + +testSessionSingleFile :: TestName -> FilePath -> T.Text -> Session () -> TestTree +testSessionSingleFile testName fp txt session = + testWithDummyPlugin testName (mkIdeTestFs [FS.directCradle [T.pack fp] , file fp (text txt)]) session + +completionTest :: HasCallStack => String -> [T.Text] -> Position -> [(T.Text, CompletionItemKind, T.Text, Bool, Bool, Maybe [TextEdit])] -> TestTree +completionTest name src pos expected = testSessionSingleFile name "A.hs" (T.unlines src) $ do + docId <- openDoc "A.hs" "haskell" + _ <- waitForDiagnostics + + compls <- getAndResolveCompletions docId pos + let compls' = [ (_label, _kind, _insertText, _additionalTextEdits) | CompletionItem{..} <- compls] + let emptyToMaybe x = if T.null x then Nothing else Just x + liftIO $ sortOn (Lens.view Lens._1) (take (length expected) compls') @?= + sortOn (Lens.view Lens._1) + [ (l, Just k, emptyToMaybe t, at) | (l,k,t,_,_,at) <- expected] + forM_ (zip compls expected) $ \(CompletionItem{..}, (_,_,_,expectedSig, expectedDocs, _)) -> do + when expectedSig $ + liftIO $ assertBool ("Missing type signature: " <> T.unpack _label) (isJust _detail) + when expectedDocs $ + liftIO $ assertBool ("Missing docs: " <> T.unpack _label) (isJust _documentation) + + +topLevelCompletionTests :: [TestTree] +topLevelCompletionTests = [ + completionTest + "variable" + ["bar = xx", "-- | haddock", "xxx :: ()", "xxx = ()", "-- | haddock", "data Xxx = XxxCon"] + (Position 0 8) + [("xxx", CompletionItemKind_Function, "xxx", True, True, Nothing) + ], + completionTest + "constructor" + ["bar = xx", "-- | haddock", "xxx :: ()", "xxx = ()", "-- | haddock", "data Xxx = XxxCon"] + (Position 0 8) + [("xxx", CompletionItemKind_Function, "xxx", True, True, Nothing) + ], + completionTest + "class method" + ["bar = xx", "class Xxx a where", "-- | haddock", "xxx :: ()", "xxx = ()"] + (Position 0 8) + [("xxx", CompletionItemKind_Function, "xxx", True, True, Nothing)], + completionTest + "type" + ["bar :: Xz", "zzz = ()", "-- | haddock", "data Xzz = XzzCon"] + (Position 0 9) + [("Xzz", CompletionItemKind_Struct, "Xzz", False, True, Nothing)], + completionTest + "class" + ["bar :: Xz", "zzz = ()", "-- | haddock", "class Xzz a"] + (Position 0 9) + [("Xzz", CompletionItemKind_Interface, "Xzz", False, 
True, Nothing)], + completionTest + "records" + ["data Person = Person { _personName:: String, _personAge:: Int}", "bar = Person { _pers }" ] + (Position 1 19) + [("_personName", CompletionItemKind_Function, "_personName", False, True, Nothing), + ("_personAge", CompletionItemKind_Function, "_personAge", False, True, Nothing)], + completionTest + "recordsConstructor" + ["data XxRecord = XyRecord { x:: String, y:: Int}", "bar = Xy" ] + (Position 1 19) + [("XyRecord", CompletionItemKind_Constructor, "XyRecord", False, True, Nothing), + ("XyRecord", CompletionItemKind_Snippet, "XyRecord {x=${1:_x}, y=${2:_y}}", False, True, Nothing)] + ] + +localCompletionTests :: [TestTree] +localCompletionTests = [ + completionTest + "argument" + ["bar (Just abcdef) abcdefg = abcd"] + (Position 0 32) + [("abcdef", CompletionItemKind_Function, "abcdef", True, False, Nothing), + ("abcdefg", CompletionItemKind_Function , "abcdefg", True, False, Nothing) + ], + completionTest + "let" + ["bar = let (Just abcdef) = undefined" + ," abcdefg = let abcd = undefined in undefined" + ," in abcd" + ] + (Position 2 15) + [("abcdef", CompletionItemKind_Function, "abcdef", True, False, Nothing), + ("abcdefg", CompletionItemKind_Function , "abcdefg", True, False, Nothing) + ], + completionTest + "where" + ["bar = abcd" + ," where (Just abcdef) = undefined" + ," abcdefg = let abcd = undefined in undefined" + ] + (Position 0 10) + [("abcdef", CompletionItemKind_Function, "abcdef", True, False, Nothing), + ("abcdefg", CompletionItemKind_Function , "abcdefg", True, False, Nothing) + ], + completionTest + "do/1" + ["bar = do" + ," Just abcdef <- undefined" + ," abcd" + ," abcdefg <- undefined" + ," pure ()" + ] + (Position 2 6) + [("abcdef", CompletionItemKind_Function, "abcdef", True, False, Nothing) + ], + completionTest + "do/2" + ["bar abcde = do" + ," Just [(abcdef,_)] <- undefined" + ," abcdefg <- undefined" + ," let abcdefgh = undefined" + ," (Just [abcdefghi]) = undefined" + ," abcd" + ," where" + ," abcdefghij = undefined" + ] + (Position 5 8) + [("abcde", CompletionItemKind_Function, "abcde", True, False, Nothing) + ,("abcdefghij", CompletionItemKind_Function, "abcdefghij", True, False, Nothing) + ,("abcdef", CompletionItemKind_Function, "abcdef", True, False, Nothing) + ,("abcdefg", CompletionItemKind_Function, "abcdefg", True, False, Nothing) + ,("abcdefgh", CompletionItemKind_Function, "abcdefgh", True, False, Nothing) + ,("abcdefghi", CompletionItemKind_Function, "abcdefghi", True, False, Nothing) + ], + completionTest + "type family" + ["{-# LANGUAGE DataKinds, TypeFamilies #-}" + ,"type family Bar a" + ,"a :: Ba" + ] + (Position 2 7) + [("Bar", CompletionItemKind_Struct, "Bar", True, False, Nothing) + ], + completionTest + "class method" + [ + "class Test a where" + , " abcd :: a -> ()" + , " abcde :: a -> Int" + , "instance Test Int where" + , " abcd = abc" + ] + (Position 4 14) + [("abcd", CompletionItemKind_Function, "abcd", True, False, Nothing) + ,("abcde", CompletionItemKind_Function, "abcde", True, False, Nothing) + ], + testSessionEmpty "incomplete entries" $ do + let src a = "data Data = " <> a + doc <- createDoc "A.hs" "haskell" $ src "AAA" + void $ waitForTypecheck doc + let editA rhs = + changeDoc doc [TextDocumentContentChangeEvent . InR . 
TextDocumentContentChangeWholeDocument $ src rhs] + editA "AAAA" + void $ waitForTypecheck doc + editA "AAAAA" + void $ waitForTypecheck doc + + compls <- getCompletions doc (Position 0 15) + liftIO $ filter ("AAA" `T.isPrefixOf`) (mapMaybe _insertText compls) @?= ["AAAAA"] + pure (), + completionTest + "polymorphic record dot completion" + [ "{-# LANGUAGE OverloadedRecordDot #-}" + , "module A () where" + , "data Record = Record" + , " { field1 :: Int" + , " , field2 :: Int" + , " }" + , -- Without the following, this file doesn't trigger any diagnostics, so completionTest waits forever + "triggerDiag :: UnknownType" + , "foo record = record.f" + ] + (Position 7 21) + [("field1", CompletionItemKind_Function, "field1", True, False, Nothing) + ,("field2", CompletionItemKind_Function, "field2", True, False, Nothing) + ], + completionTest + "qualified polymorphic record dot completion" + [ "{-# LANGUAGE OverloadedRecordDot #-}" + , "module A () where" + , "data Record = Record" + , " { field1 :: Int" + , " , field2 :: Int" + , " }" + , "someValue = undefined" + , "foo = A.someValue.f" + ] + (Position 7 19) + [("field1", CompletionItemKind_Function, "field1", True, False, Nothing) + ,("field2", CompletionItemKind_Function, "field2", True, False, Nothing) + ] + ] + +nonLocalCompletionTests :: [TestTree] +nonLocalCompletionTests = + [ brokenForWinOldGhc $ completionTest + "variable" + ["module A where", "f = hea"] + (Position 1 7) + [("head", CompletionItemKind_Function, "head", True, True, Nothing)], + completionTest + "constructor" + ["{-# OPTIONS_GHC -Wall #-}", "module A where", "f = True"] + (Position 2 8) + [ ("True", CompletionItemKind_Constructor, "True", True, True, Nothing) + ], + brokenForWinGhc $ completionTest + "type" + ["{-# OPTIONS_GHC -Wall #-}", "module A () where", "f :: Boo", "f = True"] + (Position 2 8) + [ ("Bool", CompletionItemKind_Struct, "Bool", True, True, Nothing) + ], + completionTest + "qualified" + ["{-# OPTIONS_GHC -Wunused-binds #-}", "module A () where", "f = Prelude.hea"] + (Position 2 15) + [ ("head", CompletionItemKind_Function, "head", True, True, Nothing) + ], + completionTest + "duplicate import" + ["module A where", "import Data.List", "import Data.List", "f = permu"] + (Position 3 9) + [ ("permutations", CompletionItemKind_Function, "permutations", False, False, Nothing) + ], + completionTest + "dont show hidden items" + [ "{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", + "import Control.Monad hiding (join)", + "f = joi" + ] + (Position 3 6) + [], + testGroup "ordering" + [completionTest "qualified has priority" + ["module A where" + ,"import qualified Data.ByteString as BS" + ,"f = BS.read" + ] + (Position 2 10) + [("readFile", CompletionItemKind_Function, "readFile", True, True, Nothing)] + ], + -- we need this test to make sure the ghcide completions module does not return completions for language pragmas. this functionality is turned on in hls + completionTest + "do not show pragma completions" + [ "{-# LANGUAGE ", + "{module A where}", + "main = return ()" + ] + (Position 0 13) + [] + ] + where + brokenForWinGhc = knownBrokenOnWindows "Windows has strange things in scope for some reason" + brokenForWinOldGhc = + knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC96] "Windows (GHC == 9.6) has strange things in scope for some reason" + . 
knownBrokenInSpecificEnv [HostOS Windows, GhcVer GHC98] "Windows (GHC == 9.8) has strange things in scope for some reason" + +otherCompletionTests :: [TestTree] +otherCompletionTests = [ + completionTest + "keyword" + ["module A where", "f = newty"] + (Position 1 9) + [("newtype", CompletionItemKind_Keyword, "", False, False, Nothing)], + completionTest + "type context" + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "f = f", + "g :: Intege" + ] + -- At this point the module parses but does not typecheck. + -- This should be sufficient to detect that we are in a + -- type context and only show the completion to the type. + (Position 3 11) + [("Integer", CompletionItemKind_Struct, "Integer", True, True, Nothing)], + + testSessionEmpty "duplicate record fields" $ do + void $ + createDoc "B.hs" "haskell" $ + T.unlines + [ "{-# LANGUAGE DuplicateRecordFields #-}", + "module B where", + "newtype Foo = Foo { member :: () }", + "newtype Bar = Bar { member :: () }" + ] + docA <- + createDoc "A.hs" "haskell" $ + T.unlines + [ "module A where", + "import B", + "memb" + ] + _ <- waitForDiagnostics + compls <- getCompletions docA $ Position 2 4 + let compls' = [txt | CompletionItem {_insertText = Just txt, ..} <- compls, _label == "member"] + liftIO $ take 1 compls' @?= ["member"], + + testSessionEmpty "maxCompletions" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "a = Prelude." + ] + _ <- waitForDiagnostics + compls <- getCompletions doc (Position 3 13) + liftIO $ length compls @?= maxCompletions def + ] + +packageCompletionTests :: [TestTree] +packageCompletionTests = + [ testSessionEmptyWithCradle "fromList" "cradle: {direct: {arguments: [-hide-all-packages, -package, base, A]}}" $ do + + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "a = fromList" + ] + _ <- waitForDiagnostics + compls <- getCompletions doc (Position 2 12) + let compls' = + [T.drop 1 $ T.dropEnd 3 d + | CompletionItem {_documentation = Just (InR (MarkupContent MarkupKind_Markdown d)), _label} + <- compls + , _label == "fromList" + ] + liftIO $ take 3 (sort compls') @?= + map ("Defined in "<>) [ + "'Data.List.NonEmpty" + , "'GHC.Exts" + , "'GHC.IsList" + ] + + , testSessionEmptyWithCradle "Map" "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, containers, A]}}" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "a :: Map" + ] + _ <- waitForDiagnostics + compls <- getCompletions doc (Position 2 7) + let compls' = + [T.drop 1 $ T.dropEnd 3 d + | CompletionItem {_documentation = Just (InR (MarkupContent MarkupKind_Markdown d)), _label} + <- compls + , _label == "Map" + ] + liftIO $ take 3 (sort compls') @?= + map ("Defined in "<>) + [ "'Data.Map" + , "'Data.Map.Lazy" + , "'Data.Map.Strict" + ] + , testSessionEmpty "no duplicates" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "import GHC.Exts(fromList)", + "a = fromList" + ] + _ <- waitForDiagnostics + compls <- getCompletions doc (Position 3 13) + let duplicate = + filter + (\case + CompletionItem + { _insertText = Just "fromList" + , _documentation = + Just (InR (MarkupContent MarkupKind_Markdown d)) + } -> + "GHC.Exts" `T.isInfixOf` d + _ -> False + ) compls + liftIO $ length duplicate @?= 1 + + , testSessionEmpty "non-local before global" $ do + -- non local 
completions are more specific + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "import GHC.Exts(fromList)", + "a = fromList" + ] + _ <- waitForDiagnostics + compls <- getCompletions doc (Position 3 13) + let compls' = + [_insertText + | CompletionItem {_label, _insertText} <- compls + , _label == "fromList" + ] + liftIO $ take 3 compls' @?= + map Just ["fromList"] + ] + +projectCompletionTests :: [TestTree] +projectCompletionTests = + [ testSessionEmptyWithCradle "from hiedb" "cradle: {direct: {arguments: [\"-Wmissing-signatures\", \"A\", \"B\"]}}" $ do + _ <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A (anidentifier) where", + "anidentifier = ()" + ] + _ <- waitForDiagnostics + -- Note that B does not import A + doc <- createDoc "B.hs" "haskell" $ T.unlines + [ "module B where", + "b = anidenti" + ] + compls <- getCompletions doc (Position 1 10) + let compls' = + [T.drop 1 $ T.dropEnd 3 d + | CompletionItem {_documentation = Just (InR (MarkupContent MarkupKind_Markdown d)), _label} + <- compls + , _label == "anidentifier" + ] + liftIO $ compls' @?= ["Defined in 'A"], + testSessionEmptyWithCradle "auto complete project imports" "cradle: {direct: {arguments: [\"-Wmissing-signatures\", \"ALocalModule\", \"B\"]}}" $ do + _ <- createDoc "ALocalModule.hs" "haskell" $ T.unlines + [ "module ALocalModule (anidentifier) where", + "anidentifier = ()" + ] + _ <- waitForDiagnostics + -- Note that B does not import A + doc <- createDoc "B.hs" "haskell" $ T.unlines + [ "module B where", + "import ALocal" + ] + compls <- getCompletions doc (Position 1 13) + let item = head $ filter ((== "ALocalModule") . (^. L.label)) compls + liftIO $ do + item ^. L.label @?= "ALocalModule", + testSessionEmptyWithCradle "auto complete functions from qualified imports without alias" "cradle: {direct: {arguments: [\"-Wmissing-signatures\", \"A\", \"B\"]}}" $ do + _ <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A (anidentifier) where", + "anidentifier = ()" + ] + _ <- waitForDiagnostics + doc <- createDoc "B.hs" "haskell" $ T.unlines + [ "module B where", + "import qualified A", + "A." + ] + compls <- getCompletions doc (Position 2 2) + let item = head compls + liftIO $ do + item ^. L.label @?= "anidentifier", + testSessionEmptyWithCradle "auto complete functions from qualified imports with alias" + "cradle: {direct: {arguments: [\"-Wmissing-signatures\", \"A\", \"B\"]}}" $ do + _ <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A (anidentifier) where", + "anidentifier = ()" + ] + _ <- waitForDiagnostics + doc <- createDoc "B.hs" "haskell" $ T.unlines + [ "module B where", + "import qualified A as Alias", + "foo = Alias." + ] + compls <- getCompletions doc (Position 2 12) + let item = head compls + liftIO $ do + item ^. 
L.label @?= "anidentifier"
+  ]
+
+completionDocTests :: [TestTree]
+completionDocTests =
+  [ testSessionEmpty "local define" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "foo = ()"
+        , "bar = fo"
+        ]
+      let expected = "*Defined at line 2, column 1 in this module*\n"
+      test doc (Position 2 8) "foo" Nothing [expected]
+  , testSessionEmpty "local empty doc" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "foo = ()"
+        , "bar = fo"
+        ]
+      test doc (Position 2 8) "foo" Nothing ["*Defined at line 2, column 1 in this module*\n"]
+  , testSessionEmpty "local single line doc without newline" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "-- |docdoc"
+        , "foo = ()"
+        , "bar = fo"
+        ]
+      test doc (Position 3 8) "foo" Nothing ["*Defined at line 3, column 1 in this module*\n* * *\n\n\ndocdoc\n"]
+  , testSessionEmpty "local multi line doc with newline" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "-- | abcabc"
+        , "--"
+        , "foo = ()"
+        , "bar = fo"
+        ]
+      test doc (Position 4 8) "foo" Nothing ["*Defined at line 4, column 1 in this module*\n* * *\n\n\nabcabc\n"]
+  , testSessionEmpty "local multi line doc without newline" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "-- | abcabc"
+        , "--"
+        , "--def"
+        , "foo = ()"
+        , "bar = fo"
+        ]
+      test doc (Position 5 8) "foo" Nothing ["*Defined at line 5, column 1 in this module*\n* * *\n\n\nabcabc \n\ndef\n"]
+  , testSessionEmpty "extern empty doc" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "foo = od"
+        ]
+      let expected = "*Imported from 'Prelude'*\n"
+      test doc (Position 1 8) "odd" (Just $ T.length expected) [expected]
+  , testSessionEmpty "extern single line doc without '\\n'" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "foo = no"
+        ]
+      let expected = "*Imported from 'Prelude'*\n* * *\n\n\nBoolean \"not\"\n"
+      test doc (Position 1 8) "not" (Just $ T.length expected) [expected]
+  , testSessionEmpty "extern multi line doc" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "foo = i"
+        ]
+      let expected = "*Imported from 'Prelude'*\n* * *\n\n\nIdentity function. \n```haskell\nid x = x\n```\n"
+      test doc (Position 1 7) "id" (Just $ T.length expected) [expected]
+  , testSessionEmpty "extern defined doc" $ do
+      doc <- createDoc "A.hs" "haskell" $ T.unlines
+        [ "module A where"
+        , "foo = i"
+        ]
+      let expected = "*Imported from 'Prelude'*\n"
+      test doc (Position 1 7) "id" (Just $ T.length expected) [expected]
+  ]
+  where
+    test doc pos label mn expected = do
+      _ <- waitForDiagnostics
+      compls <- getCompletions doc pos
+      rcompls <- forM compls $ \item -> do
+        rsp <- request SMethod_CompletionItemResolve item
+        case rsp ^. 
L.result of
+          Left err -> liftIO $ assertFailure ("completionItem/resolve failed with: " <> show err)
+          Right x -> pure x
+      let compls' = [
+            -- We ignore doc uris since they point to a local path that is determined by the specific machine
+            case mn of
+              Nothing -> txt
+              Just n -> T.take n txt
+            | CompletionItem {_documentation = Just (InR (MarkupContent MarkupKind_Markdown txt)), ..} <- rcompls
+            , _label == label
+            ]
+      liftIO $ compls' @?= expected
diff --git a/ghcide-test/exe/Config.hs b/ghcide-test/exe/Config.hs
new file mode 100644
index 0000000000..c98023e90e
--- /dev/null
+++ b/ghcide-test/exe/Config.hs
@@ -0,0 +1,187 @@
+{-# LANGUAGE PatternSynonyms #-}
+
+module Config(
+    -- * basic config for ghcIde testing
+    mkIdeTestFs
+  , dummyPlugin
+
+    -- * runners for testing specific plugins
+  , testSessionWithPlugin
+  -- * runners for testing with dummy plugin
+  , runWithDummyPlugin
+  , testWithDummyPlugin
+  , testWithDummyPluginEmpty
+  , testWithDummyPlugin'
+  , testWithDummyPluginEmpty'
+  , testWithConfig
+  , testWithExtraFiles
+  , runWithExtraFiles
+  , runInDir
+  , run
+
+  -- * utilities for testing
+  , Expect(..)
+  , pattern R
+  , mkR
+  , checkDefs
+  , mkL
+  , withLongTimeout
+  , lspTestCaps
+  , lspTestCapsNoFileWatches
+  , testDataDir
+  ) where
+
+import Control.Exception (bracket_)
+import Control.Lens.Setter ((.~))
+import Control.Monad (unless)
+import Data.Foldable (traverse_)
+import Data.Function ((&))
+import qualified Data.Text as T
+import Development.IDE (Pretty)
+import Development.IDE.Test (canonicalizeUri)
+import Ide.Types (defaultPluginDescriptor)
+import qualified Language.LSP.Protocol.Lens as L
+import Language.LSP.Protocol.Types (Null (..))
+import System.Environment.Blank (setEnv, unsetEnv)
+import System.FilePath ((</>))
+import Test.Hls
+import qualified Test.Hls.FileSystem as FS
+
+testDataDir :: FilePath
+testDataDir = "ghcide-test" </> "data"
+
+mkIdeTestFs :: [FS.FileTree] -> FS.VirtualFileTree
+mkIdeTestFs = FS.mkVirtualFileTree testDataDir
+
+-- * Run with some injected plugin
+-- testSessionWithPlugin :: FS.VirtualFileTree -> (FilePath -> Session a) -> IO a
+testSessionWithPlugin :: Pretty b => FS.VirtualFileTree -> PluginTestDescriptor b -> (FilePath -> Session a) -> IO a
+testSessionWithPlugin fs plugin = runSessionWithTestConfig def
+  { testPluginDescriptor = plugin
+  , testDirLocation = Right fs
+  , testConfigCaps = lspTestCaps
+  , testShiftRoot = True
+  }
+
+-- * A dummy plugin for testing ghcIde
+dummyPlugin :: PluginTestDescriptor ()
+dummyPlugin = mkPluginTestDescriptor (\_ pid -> defaultPluginDescriptor pid "dummyTestPlugin") "core"
+
+runWithDummyPlugin :: FS.VirtualFileTree -> Session a -> IO a
+runWithDummyPlugin = runSessionWithServerInTmpDir def dummyPlugin
+
+testWithConfig :: String -> TestConfig () -> Session () -> TestTree
+testWithConfig name conf s = testCase name $ runSessionWithTestConfig conf $ const s
+
+runWithDummyPlugin' :: FS.VirtualFileTree -> (FilePath -> Session a) -> IO a
+runWithDummyPlugin' fs = runSessionWithTestConfig def
+  { testPluginDescriptor = dummyPlugin
+  , testDirLocation = Right fs
+  , testConfigCaps = lspTestCaps
+  , testShiftRoot = True
+  }
+
+testWithDummyPlugin :: String -> FS.VirtualFileTree -> Session () -> TestTree
+testWithDummyPlugin caseName vfs = testWithDummyPlugin' caseName vfs . const
+
+testWithDummyPlugin' :: String -> FS.VirtualFileTree -> (FilePath -> Session ()) -> TestTree
+testWithDummyPlugin' caseName vfs = testCase caseName . 
runWithDummyPlugin' vfs + +testWithDummyPluginEmpty :: String -> Session () -> TestTree +testWithDummyPluginEmpty caseName = testWithDummyPlugin caseName $ mkIdeTestFs [] + +testWithDummyPluginEmpty' :: String -> (FilePath -> Session ()) -> TestTree +testWithDummyPluginEmpty' caseName = testWithDummyPlugin' caseName $ mkIdeTestFs [] + +runWithExtraFiles :: String -> (FilePath -> Session a) -> IO a +runWithExtraFiles dirName action = do + let vfs = mkIdeTestFs [FS.copyDir dirName] + runWithDummyPlugin' vfs action + +testWithExtraFiles :: String -> String -> (FilePath -> Session ()) -> TestTree +testWithExtraFiles testName dirName action = testCase testName $ runWithExtraFiles dirName action + +runInDir :: FilePath -> Session a -> IO a +runInDir fs = runSessionWithServer def dummyPlugin fs + +run :: Session a -> IO a +run = runSessionWithTestConfig def + { testDirLocation = Right (mkIdeTestFs []) + , testPluginDescriptor = dummyPlugin } + . const + +pattern R :: UInt -> UInt -> UInt -> UInt -> Range +pattern R x y x' y' = Range (Position x y) (Position x' y') + +data Expect + = ExpectRange Range -- Both gotoDef and hover should report this range + | ExpectRanges [Range] -- definition lookup with multiple results + | ExpectLocation Location +-- | ExpectDefRange Range -- Only gotoDef should report this range + | ExpectHoverRange Range -- Only hover should report this range + | ExpectHoverText [T.Text] -- the hover message must contain these snippets + | ExpectHoverExcludeText [T.Text] -- the hover message must _not_ contain these snippets + | ExpectHoverTextRegex T.Text -- the hover message must match this pattern + | ExpectExternFail -- definition lookup in other file expected to fail + | ExpectNoDefinitions + | ExpectNoImplementations + | ExpectNoHover +-- | ExpectExtern -- TODO: as above, but expected to succeed: need some more info in here, once we have some working examples + deriving Eq + +mkR :: UInt -> UInt -> UInt -> UInt -> Expect +mkR startLine startColumn endLine endColumn = ExpectRange $ mkRange startLine startColumn endLine endColumn + +mkL :: Uri -> UInt -> UInt -> UInt -> UInt -> Expect +mkL uri startLine startColumn endLine endColumn = ExpectLocation $ Location uri $ mkRange startLine startColumn endLine endColumn + + +checkDefs :: Definition |? ([DefinitionLink] |? Null) -> Session [Expect] -> Session () +checkDefs (defToLocation -> defs) mkExpectations = traverse_ check =<< mkExpectations where + check (ExpectRange expectedRange) = do + def <- assertOneDefinitionFound defs + assertRangeCorrect def expectedRange + check (ExpectRanges ranges) = + traverse_ (assertHasRange defs) ranges + check (ExpectLocation expectedLocation) = do + def <- assertOneDefinitionFound defs + liftIO $ do + canonActualLoc <- canonicalizeLocation def + canonExpectedLoc <- canonicalizeLocation expectedLocation + canonActualLoc @?= canonExpectedLoc + check ExpectNoImplementations = do + liftIO $ assertBool "Expecting no implementations" $ null defs + check ExpectNoDefinitions = do + liftIO $ assertBool "Expecting no definitions" $ null defs + check ExpectExternFail = liftIO $ assertFailure "Expecting to fail to find in external file" + check _ = pure () -- all other expectations not relevant to getDefinition + + assertOneDefinitionFound :: [Location] -> Session Location + assertOneDefinitionFound [def] = pure def + assertOneDefinitionFound xs = liftIO . 
assertFailure $ "Expecting exactly one definition, got " <> show (length xs)
+
+  assertRangeCorrect Location{_range = foundRange} expectedRange =
+    liftIO $ expectedRange @=? foundRange
+
+  assertHasRange actualRanges expectedRange = do
+    let hasRange = any (\Location{_range=foundRange} -> foundRange == expectedRange) actualRanges
+    unless hasRange $ liftIO $ assertFailure $
+      "expected range: " <> show expectedRange <> "\nbut got ranges: " <> show defs
+
+canonicalizeLocation :: Location -> IO Location
+canonicalizeLocation (Location uri range) = Location <$> canonicalizeUri uri <*> pure range
+
+defToLocation :: Definition |? ([DefinitionLink] |? Null) -> [Location]
+defToLocation (InL (Definition (InL l))) = [l]
+defToLocation (InL (Definition (InR ls))) = ls
+defToLocation (InR (InL defLink)) = (\(DefinitionLink LocationLink{_targetUri,_targetRange}) -> Location _targetUri _targetRange) <$> defLink
+defToLocation (InR (InR Null)) = []
+
+lspTestCaps :: ClientCapabilities
+lspTestCaps = fullLatestClientCaps { _window = Just $ WindowClientCapabilities (Just True) Nothing Nothing }
+
+lspTestCapsNoFileWatches :: ClientCapabilities
+lspTestCapsNoFileWatches = lspTestCaps & L.workspace . traverse . L.didChangeWatchedFiles .~ Nothing
+
+withLongTimeout :: IO a -> IO a
+withLongTimeout = bracket_ (setEnv "LSP_TIMEOUT" "120" True) (unsetEnv "LSP_TIMEOUT")
diff --git a/ghcide-test/exe/CradleTests.hs b/ghcide-test/exe/CradleTests.hs
new file mode 100644
index 0000000000..d79b90c835
--- /dev/null
+++ b/ghcide-test/exe/CradleTests.hs
@@ -0,0 +1,248 @@
+
+{-# LANGUAGE GADTs #-}
+
+module CradleTests (tests) where
+
+import Config (checkDefs, mkL, runInDir, runWithExtraFiles, testWithDummyPluginEmpty')
+import Control.Applicative.Combinators
+import Control.Lens ((^.))
+import Control.Monad.IO.Class (liftIO)
+import qualified Data.Text as T
+import Development.IDE.GHC.Util
+import Development.IDE.Plugin.Test (WaitForIdeRuleResult (..))
+import Development.IDE.Test (expectDiagnostics, expectDiagnosticsWithTags, expectNoMoreDiagnostics, isReferenceReady, waitForAction)
+import Development.IDE.Types.Location
+import GHC.TypeLits (symbolVal)
+import qualified Language.LSP.Protocol.Lens as L
+import Language.LSP.Protocol.Message
+import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), SemanticTokenRelative (..), SemanticTokensEdit (..), mkRange)
+import Language.LSP.Test
+import System.FilePath
+import System.IO.Extra hiding (withTempDir)
+import Test.Hls.Util (EnvSpec (..), OS (..), ignoreInEnv)
+import Test.Tasty
+import Test.Tasty.HUnit
+
+
+tests :: TestTree
+tests = testGroup "cradle"
+  [testGroup "dependencies" [sessionDepsArePickedUp]
+  ,testGroup "ignore-fatal" [ignoreFatalWarning]
+  ,testGroup "loading" [loadCradleOnlyonce, retryFailedCradle]
+  ,testGroup "multi" (multiTests "multi")
+  ,testGroup "multi-unit" (multiTests "multi-unit")
+  ,testGroup "sub-directory" [simpleSubDirectoryTest]
+  ,testGroup "multi-unit-rexport" [multiRexportTest]
+  ]
+
+loadCradleOnlyonce :: TestTree
+loadCradleOnlyonce = testGroup "load cradle only once"
+  [ testWithDummyPluginEmpty' "implicit" implicit
+  , testWithDummyPluginEmpty' "direct" direct
+  ]
+  where
+    direct dir = do
+      liftIO $ writeFileUTF8 (dir </> "hie.yaml")
+        "cradle: {direct: {arguments: []}}"
+      test dir
+    implicit dir = test dir
+    test _dir = do
+      doc <- createDoc "B.hs" "haskell" "module B where\nimport Data.Foo"
+      msgs <- someTill (skipManyTill anyMessage cradleLoadedMessage) (skipManyTill anyMessage (message 
SMethod_TextDocumentPublishDiagnostics))
+      liftIO $ length msgs @?= 1
+      changeDoc doc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ "module B where\nimport Data.Maybe"]
+      msgs <- manyTill (skipManyTill anyMessage cradleLoadedMessage) (skipManyTill anyMessage (message SMethod_TextDocumentPublishDiagnostics))
+      liftIO $ length msgs @?= 0
+      _ <- createDoc "A.hs" "haskell" "module A where\nimport LoadCradleBar"
+      msgs <- manyTill (skipManyTill anyMessage cradleLoadedMessage) (skipManyTill anyMessage (message SMethod_TextDocumentPublishDiagnostics))
+      liftIO $ length msgs @?= 0
+
+retryFailedCradle :: TestTree
+retryFailedCradle = testWithDummyPluginEmpty' "retry failed" $ \dir -> do
+  -- The false cradle always fails
+  let hieContents = "cradle: {bios: {shell: \"false\"}}"
+      hiePath = dir </> "hie.yaml"
+  liftIO $ writeFile hiePath hieContents
+  let aPath = dir </> "A.hs"
+  doc <- createDoc aPath "haskell" "main = return ()"
+  WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" doc
+  liftIO $ "Test assumption failed: cradle should error out" `assertBool` not ideResultSuccess
+
+  -- Fix the cradle and typecheck again
+  let validCradle = "cradle: {bios: {shell: \"echo A.hs\"}}"
+  liftIO $ writeFileUTF8 hiePath $ T.unpack validCradle
+  sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams
+    [FileEvent (filePathToUri $ dir </> "hie.yaml") FileChangeType_Changed ]
+
+  WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" doc
+  liftIO $ "No joy after fixing the cradle" `assertBool` ideResultSuccess
+
+
+cradleLoadedMessage :: Session FromServerMessage
+cradleLoadedMessage = satisfy $ \case
+  FromServerMess (SMethod_CustomMethod p) (NotMess _) -> symbolVal p == cradleLoadedMethod
+  _ -> False
+
+cradleLoadedMethod :: String
+cradleLoadedMethod = "ghcide/cradle/loaded"
+
+ignoreFatalWarning :: TestTree
+ignoreFatalWarning = testCase "ignore-fatal-warning" $ runWithExtraFiles "ignore-fatal" $ \dir -> do
+    let srcPath = dir </> "IgnoreFatal.hs"
+    src <- liftIO $ readFileUtf8 srcPath
+    _ <- createDoc srcPath "haskell" src
+    expectNoMoreDiagnostics 5
+
+simpleSubDirectoryTest :: TestTree
+simpleSubDirectoryTest =
+  testCase "simple-subdirectory" $ runWithExtraFiles "cabal-exe" $ \dir -> do
+    let mainPath = dir </> "a/src/Main.hs"
+    mainSource <- liftIO $ readFileUtf8 mainPath
+    _mdoc <- createDoc mainPath "haskell" mainSource
+    expectDiagnosticsWithTags
+      [("a/src/Main.hs", [(DiagnosticSeverity_Warning,(2,0), "Top-level binding", Just "GHC-38417", Nothing)]) -- So that we know P has been loaded
+      ]
+    expectNoMoreDiagnostics 0.5
+
+multiTests :: FilePath -> [TestTree]
+multiTests dir =
+  [ simpleMultiTest dir
+  , simpleMultiTest2 dir
+  , simpleMultiTest3 dir
+  , simpleMultiDefTest dir
+  ]
+
+multiTestName :: FilePath -> String -> String
+multiTestName dir name = "simple-" ++ dir ++ "-" ++ name
+
+simpleMultiTest :: FilePath -> TestTree
+simpleMultiTest variant = testCase (multiTestName variant "test") $ runWithExtraFiles variant $ \dir -> do
+  let aPath = dir </> "a/A.hs"
+      bPath = dir </> "b/B.hs"
+  adoc <- openDoc aPath "haskell"
+  bdoc <- openDoc bPath "haskell"
+  WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" adoc
+  liftIO $ assertBool "A should typecheck" ideResultSuccess
+  WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" bdoc
+  liftIO $ assertBool "B should typecheck" ideResultSuccess
+  locs <- getDefinitions bdoc (Position 2 7)
+  let fooL = mkL (adoc ^. 
L.uri) 2 0 2 3
+  checkDefs locs (pure [fooL])
+  expectNoMoreDiagnostics 0.5
+
+-- Like simpleMultiTest but open the files in the other order
+simpleMultiTest2 :: FilePath -> TestTree
+simpleMultiTest2 variant = testCase (multiTestName variant "test2") $ runWithExtraFiles variant $ \dir -> do
+  let aPath = dir </> "a/A.hs"
+      bPath = dir </> "b/B.hs"
+  bdoc <- openDoc bPath "haskell"
+  WaitForIdeRuleResult {} <- waitForAction "TypeCheck" bdoc
+  TextDocumentIdentifier auri <- openDoc aPath "haskell"
+  skipManyTill anyMessage $ isReferenceReady aPath
+  locs <- getDefinitions bdoc (Position 2 7)
+  let fooL = mkL auri 2 0 2 3
+  checkDefs locs (pure [fooL])
+  expectNoMoreDiagnostics 0.5
+
+-- Now with 3 components
+simpleMultiTest3 :: FilePath -> TestTree
+simpleMultiTest3 variant =
+  testCase (multiTestName variant "test3") $ runWithExtraFiles variant $ \dir -> do
+    let aPath = dir </> "a/A.hs"
+        bPath = dir </> "b/B.hs"
+        cPath = dir </> "c/C.hs"
+    bdoc <- openDoc bPath "haskell"
+    WaitForIdeRuleResult {} <- waitForAction "TypeCheck" bdoc
+    TextDocumentIdentifier auri <- openDoc aPath "haskell"
+    skipManyTill anyMessage $ isReferenceReady aPath
+    cdoc <- openDoc cPath "haskell"
+    WaitForIdeRuleResult {} <- waitForAction "TypeCheck" cdoc
+    locs <- getDefinitions cdoc (Position 2 7)
+    let fooL = mkL auri 2 0 2 3
+    checkDefs locs (pure [fooL])
+    expectNoMoreDiagnostics 0.5
+
+-- Like simpleMultiTest but open the files in component 'a' in a separate session
+simpleMultiDefTest :: FilePath -> TestTree
+simpleMultiDefTest variant = ignoreForWindows $ testCase testName $
+  runWithExtraFiles variant $ \dir -> do
+    let aPath = dir </> "a/A.hs"
+        bPath = dir </> "b/B.hs"
+    adoc <- liftIO $ runInDir dir $ do
+      aSource <- liftIO $ readFileUtf8 aPath
+      adoc <- createDoc aPath "haskell" aSource
+      skipManyTill anyMessage $ isReferenceReady aPath
+      closeDoc adoc
+      pure adoc
+    bSource <- liftIO $ readFileUtf8 bPath
+    bdoc <- createDoc bPath "haskell" bSource
+    locs <- getDefinitions bdoc (Position 2 7)
+    let fooL = mkL (adoc ^. L.uri) 2 0 2 3
+    checkDefs locs (pure [fooL])
+    expectNoMoreDiagnostics 0.5
+  where
+    testName = multiTestName variant "def-test"
+    ignoreForWindows
+      | testName == "simple-multi-def-test" = ignoreInEnv [HostOS Windows] "Test is flaky on Windows, see #4270"
+      | otherwise = id
+
+multiRexportTest :: TestTree
+multiRexportTest =
+  testCase "multi-unit-reexport-test" $ runWithExtraFiles "multi-unit-reexport" $ \dir -> do
+    let cPath = dir </> "c/C.hs"
+    cdoc <- openDoc cPath "haskell"
+    WaitForIdeRuleResult {} <- waitForAction "TypeCheck" cdoc
+    locs <- getDefinitions cdoc (Position 3 7)
+    let aPath = dir </> "a/A.hs"
+    let fooL = mkL (filePathToUri aPath) 2 0 2 3
+    checkDefs locs (pure [fooL])
+    expectNoMoreDiagnostics 0.5
+
+sessionDepsArePickedUp :: TestTree
+sessionDepsArePickedUp = testWithDummyPluginEmpty'
+  "session-deps-are-picked-up"
+  $ \dir -> do
+    liftIO $
+      writeFileUTF8
+        (dir </> "hie.yaml")
+        "cradle: {direct: {arguments: []}}"
+    -- Open without OverloadedStrings and expect an error.
+    doc <- createDoc "Foo.hs" "haskell" fooContent
+    expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (3, 6), "Couldn't match type", Just "GHC-83865")])]
+
+    -- Update hie.yaml to enable OverloadedStrings.
+    liftIO $
+      writeFileUTF8
+        (dir </> "hie.yaml")
+        "cradle: {direct: {arguments: [-XOverloadedStrings]}}"
+    sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams
+      [FileEvent (filePathToUri $ dir </> "hie.yaml") FileChangeType_Changed ]
+    -- Send change event. 
+ let change = + TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 4 0) (Position 4 0) + , _rangeLength = Nothing + , _text = "\n" + } + changeDoc doc [change] + -- Now no errors. + expectDiagnostics [("Foo.hs", [])] + where + fooContent = + T.unlines + [ "module Foo where", + "import Data.Text", + "foo :: Text", + "foo = \"hello\"" + ] diff --git a/ghcide-test/exe/DependentFileTest.hs b/ghcide-test/exe/DependentFileTest.hs new file mode 100644 index 0000000000..1f243819e3 --- /dev/null +++ b/ghcide-test/exe/DependentFileTest.hs @@ -0,0 +1,62 @@ + +{-# LANGUAGE GADTs #-} + +module DependentFileTest (tests) where + +import Config +import qualified Data.Text as T +import Development.IDE.Test (expectDiagnostics) +import Development.IDE.Types.Location +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import Test.Hls + + +tests :: TestTree +tests = testGroup "addDependentFile" + [testGroup "file-changed" [testCase "test" $ runSessionWithTestConfig def + { testShiftRoot = True + , testDirLocation = Right (mkIdeTestFs []) + , testPluginDescriptor = dummyPlugin + } test] + ] + where + test :: FilePath -> Session () + test _ = do + -- If the file contains B then no type error + -- otherwise type error + let depFilePath = "dep-file.txt" + liftIO $ writeFile depFilePath "A" + let fooContent = T.unlines + [ "{-# LANGUAGE TemplateHaskell #-}" + , "module Foo where" + , "import Language.Haskell.TH.Syntax" + , "foo :: Int" + , "foo = 1 + $(do" + , " qAddDependentFile \"" <> T.pack depFilePath <> "\"" + , " f <- qRunIO (readFile \"" <> T.pack depFilePath <> "\")" + , " if f == \"B\" then [| 1 |] else lift f)" + ] + let bazContent = T.unlines ["module Baz where", "import Foo ()"] + _fooDoc <- createDoc "Foo.hs" "haskell" fooContent + doc <- createDoc "Baz.hs" "haskell" bazContent + expectDiagnostics + [("Foo.hs", [(DiagnosticSeverity_Error, (4,11), "Couldn't match type", Just "GHC-83865")])] + -- Now modify the dependent file + liftIO $ writeFile depFilePath "B" + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams + [FileEvent (filePathToUri depFilePath) FileChangeType_Changed ] + + -- Modifying Baz will now trigger Foo to be rebuilt as well + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 2 0) (Position 2 6) + , _rangeLength = Nothing + , _text = "f = ()" + } + changeDoc doc [change] + expectDiagnostics [("Foo.hs", [])] diff --git a/ghcide-test/exe/DiagnosticTests.hs b/ghcide-test/exe/DiagnosticTests.hs new file mode 100644 index 0000000000..52aba0b9b7 --- /dev/null +++ b/ghcide-test/exe/DiagnosticTests.hs @@ -0,0 +1,586 @@ + +{-# LANGUAGE GADTs #-} + +module DiagnosticTests (tests) where + +import Control.Applicative.Combinators +import qualified Control.Lens as Lens +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Data.List.Extra +import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcVersion (..), ghcVersion) +import Development.IDE.GHC.Util +import Development.IDE.Test (diagnostic, + expectCurrentDiagnostics, + expectDiagnostics, + expectDiagnosticsWithTags, + expectNoMoreDiagnostics, + flushMessages, waitForAction) +import Development.IDE.Types.Location +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import 
Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import System.Directory +import System.FilePath +import System.IO.Extra hiding (withTempDir) + +import Config +import Control.Lens ((^.)) +import Control.Monad.Extra (whenJust) +import Data.Default (def) +import Development.IDE.Plugin.Test (WaitForIdeRuleResult (..)) +import System.Time.Extra +import Test.Hls (TestConfig (testConfigCaps, testDirLocation, testDisableKick, testPluginDescriptor), + runSessionWithTestConfig, + waitForProgressBegin) +import Test.Hls.FileSystem (directCradle, file, text) +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "diagnostics" + [ testWithDummyPluginEmpty "fix syntax error" $ do + let content = T.unlines [ "module Testing wher" ] + doc <- createDoc "Testing.hs" "haskell" content + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "parse error", Just "GHC-58481")])] + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 0 15) (Position 0 19) + , _rangeLength = Nothing + , _text = "where" + } + changeDoc doc [change] + expectDiagnostics [("Testing.hs", [])] + , testWithDummyPluginEmpty "introduce syntax error" $ do + let content = T.unlines [ "module Testing where" ] + doc <- createDoc "Testing.hs" "haskell" content + void $ skipManyTill anyMessage (message SMethod_WindowWorkDoneProgressCreate) + waitForProgressBegin + let change = TextDocumentContentChangeEvent$ InL TextDocumentContentChangePartial + { _range = Range (Position 0 15) (Position 0 18) + , _rangeLength = Nothing + , _text = "wher" + } + changeDoc doc [change] + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "parse error", Just "GHC-58481")])] + , testWithDummyPluginEmpty "update syntax error" $ do + let content = T.unlines [ "module Testing(missing) where" ] + doc <- createDoc "Testing.hs" "haskell" content + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "Not in scope: 'missing'", Just "GHC-76037")])] + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 0 15) (Position 0 16) + , _rangeLength = Nothing + , _text = "l" + } + changeDoc doc [change] + expectDiagnostics [("Testing.hs", [(DiagnosticSeverity_Error, (0, 15), "Not in scope: 'lissing'", Just "GHC-76037")])] + , testWithDummyPluginEmpty "variable not in scope" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> Int -> Int" + , "foo a _b = a + ab" + , "bar :: Int -> Int -> Int" + , "bar _a b = cd + b" + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs" + , [ (DiagnosticSeverity_Error, (2, 15), "Variable not in scope: ab", Just "GHC-88464") + , (DiagnosticSeverity_Error, (4, 11), "Variable not in scope: cd", Just "GHC-88464") + ] + ) + ] + , testWithDummyPluginEmpty "type error" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> String -> Int" + , "foo a b = a + b" + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs" + , [(DiagnosticSeverity_Error, (2, 14), "Couldn't match type '[Char]' with 'Int'", Just "GHC-83865")] + ) + ] + , testWithDummyPluginEmpty "typed hole" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> String" + , "foo a = _ a" + ] + _ <- createDoc "Testing.hs" "haskell" content + 
expectDiagnostics + [ ( "Testing.hs" + , [(DiagnosticSeverity_Error, (2, 8), "Found hole: _ :: Int -> String", Just "GHC-88464")] + ) + ] + + , testGroup "deferral" $ + let sourceA a = T.unlines + [ "module A where" + , "a :: Int" + , "a = " <> a] + sourceB = T.unlines + [ "module B where" + , "import A ()" + , "b :: Float" + , "b = True"] + bMessage = "Couldn't match expected type 'Float' with actual type 'Bool'" + expectedDs aMessage aCode = + [ ("A.hs", [(DiagnosticSeverity_Error, (2,4), aMessage, aCode)]) + , ("B.hs", [(DiagnosticSeverity_Error, (3,4), bMessage, Just "GHC-83865")])] + deferralTest title binding msg code = testWithDummyPluginEmpty title $ do + _ <- createDoc "A.hs" "haskell" $ sourceA binding + _ <- createDoc "B.hs" "haskell" sourceB + expectDiagnostics $ expectedDs msg code + in + [ deferralTest "type error" "True" "Couldn't match expected type" (Just "GHC-83865") + , deferralTest "typed hole" "_" "Found hole" (Just "GHC-88464") + , deferralTest "out of scope var" "unbound" "Variable not in scope" (Just "GHC-88464") + ] + + , testWithDummyPluginEmpty "remove required module" $ do + let contentA = T.unlines [ "module ModuleA where" ] + docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "module ModuleB where" + , "import ModuleA" + ] + _ <- createDoc "ModuleB.hs" "haskell" contentB + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 0 0) (Position 0 20) + , _rangeLength = Nothing + , _text = "" + } + changeDoc docA [change] + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Error, (1, 0), "Could not find module", Nothing)])] + , testWithDummyPluginEmpty "add missing module" $ do + let contentB = T.unlines + [ "module ModuleB where" + , "import ModuleA ()" + ] + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Error, (1, 7), "Could not find module", Nothing)])] + let contentA = T.unlines [ "module ModuleA where" ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + expectDiagnostics [("ModuleB.hs", [])] + , testCase "add missing module (non workspace)" $ + runSessionWithTestConfig def + { testPluginDescriptor = dummyPlugin + , testConfigCaps = lspTestCapsNoFileWatches + , testDirLocation = Right (mkIdeTestFs []) + } + $ \tmpDir -> do + -- By default lsp-test sends FileWatched notifications for all files, which we don't want + -- as non workspace modules will not be watched by the LSP server. 
+        -- To work around this, we tell lsp-test that our client doesn't have the
+        -- FileWatched capability, which is enough to disable the notifications
+        let contentB = T.unlines
+              [ "module ModuleB where"
+              , "import ModuleA ()"
+              ]
+        _ <- createDoc (tmpDir </> "ModuleB.hs") "haskell" contentB
+        expectDiagnostics [(tmpDir </> "ModuleB.hs", [(DiagnosticSeverity_Error, (1, 7), "Could not find module", Nothing)])]
+        let contentA = T.unlines [ "module ModuleA where" ]
+        _ <- createDoc (tmpDir </> "ModuleA.hs") "haskell" contentA
+        expectDiagnostics [(tmpDir </> "ModuleB.hs", [])]
+  , testWithDummyPluginEmpty "cyclic module dependency" $ do
+      let contentA = T.unlines
+            [ "module ModuleA where"
+            , "import ModuleB"
+            ]
+      let contentB = T.unlines
+            [ "module ModuleB where"
+            , "import ModuleA"
+            ]
+      _ <- createDoc "ModuleA.hs" "haskell" contentA
+      _ <- createDoc "ModuleB.hs" "haskell" contentB
+      expectDiagnostics
+        [ ( "ModuleA.hs"
+          , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)]
+          )
+        , ( "ModuleB.hs"
+          , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)]
+          )
+        ]
+  , let contentA = T.unlines [ "module ModuleA where" , "import ModuleB" ]
+        contentB = T.unlines [ "module ModuleB where" , "import ModuleA" ]
+        contentC = T.unlines [ "module ModuleC where" , "import ModuleB" ]
+        contentD = T.unlines [ "module ModuleD where" , "import ModuleC" ]
+        cradle = directCradle ["ModuleA", "ModuleB", "ModuleC", "ModuleD"]
+    in testWithDummyPlugin "deeply nested cyclic module dependency"
+        (mkIdeTestFs [
+          file "ModuleA.hs" (text contentA)
+          ,file "ModuleB.hs" (text contentB)
+          ,file "ModuleC.hs" (text contentC)
+          ,cradle
+        ]) $ do
+        _ <- createDoc "ModuleD.hs" "haskell" contentD
+        expectDiagnostics
+          [ ( "ModuleB.hs" , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)])
+          , ( "ModuleA.hs" , [(DiagnosticSeverity_Error, (1, 7), "Cyclic module dependency between ModuleA, ModuleB", Nothing)])
+          ]
+  , testWithDummyPluginEmpty "cyclic module dependency with hs-boot" $ do
+      let contentA = T.unlines
+            [ "module ModuleA where"
+            , "import {-# SOURCE #-} ModuleB"
+            ]
+      let contentB = T.unlines
+            [ "{-# OPTIONS -Wmissing-signatures#-}"
+            , "module ModuleB where"
+            , "import ModuleA"
+            -- introduce an artificial diagnostic
+            , "foo = ()"
+            ]
+      let contentBboot = T.unlines
+            [ "module ModuleB where"
+            ]
+      _ <- createDoc "ModuleA.hs" "haskell" contentA
+      _ <- createDoc "ModuleB.hs" "haskell" contentB
+      _ <- createDoc "ModuleB.hs-boot" "haskell" contentBboot
+      expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])]
+  , testWithDummyPlugin "bidirectional module dependency with hs-boot"
+      (mkIdeTestFs [directCradle ["ModuleA", "ModuleB"]])
+      $ do
+      let contentA = T.unlines
+            [ "module ModuleA where"
+            , "import {-# SOURCE #-} ModuleB"
+            ]
+      let contentB = T.unlines
+            [ "{-# OPTIONS -Wmissing-signatures#-}"
+            , "module ModuleB where"
+            , "import {-# SOURCE #-} ModuleA"
+            -- introduce an artificial diagnostic
+            , "foo = ()"
+            ]
+      let contentBboot = T.unlines
+            [ "module ModuleB where"
+            ]
+      let contentAboot = T.unlines
+            [ "module ModuleA where"
+            ]
+      _ <- createDoc "ModuleA.hs" "haskell" contentA
+      _ <- createDoc "ModuleA.hs-boot" "haskell" contentAboot
+      _ <- createDoc "ModuleB.hs" "haskell" contentB
+      _ <- createDoc "ModuleB.hs-boot" "haskell" contentBboot
+      expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level 
binding", Just "GHC-38417")])] + , testWithDummyPluginEmpty "correct reference used with hs-boot" $ do + let contentB = T.unlines + [ "module ModuleB where" + , "import {-# SOURCE #-} ModuleA()" + ] + let contentA = T.unlines + [ "module ModuleA where" + , "import ModuleB()" + , "x = 5" + ] + let contentAboot = T.unlines + [ "module ModuleA where" + ] + let contentC = T.unlines + [ "{-# OPTIONS -Wmissing-signatures #-}" + , "module ModuleC where" + , "import ModuleA" + -- this reference will fail if it gets incorrectly + -- resolved to the hs-boot file + , "y = x" + ] + _ <- createDoc "ModuleB.hs" "haskell" contentB + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleA.hs-boot" "haskell" contentAboot + _ <- createDoc "ModuleC.hs" "haskell" contentC + expectDiagnostics [("ModuleC.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] + , testWithDummyPluginEmpty "redundant import" $ do + let contentA = T.unlines ["module ModuleA where"] + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnosticsWithTags + [ ( "ModuleB.hs" + , [(DiagnosticSeverity_Warning, (2, 0), "The import of 'ModuleA' is redundant", Nothing, Just DiagnosticTag_Unnecessary)] + ) + ] + , testWithDummyPluginEmpty "redundant import even without warning" $ do + let contentA = T.unlines ["module ModuleA where"] + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wno-unused-imports -Wmissing-signatures #-}" + , "module ModuleB where" + , "import ModuleA" + -- introduce an artificial warning for testing purposes + , "foo = ()" + ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnostics [("ModuleB.hs", [(DiagnosticSeverity_Warning, (3,0), "Top-level binding", Just "GHC-38417")])] + , testWithDummyPluginEmpty "package imports" $ do + let thisDataListContent = T.unlines + [ "module Data.List where" + , "x :: Integer" + , "x = 123" + ] + let mainContent = T.unlines + [ "{-# LANGUAGE PackageImports #-}" + , "module Main where" + , "import qualified \"this\" Data.List as ThisList" + , "import qualified \"base\" Data.List as BaseList" + , "useThis = ThisList.x" + , "useBase = BaseList.map" + , "wrong1 = ThisList.map" + , "wrong2 = BaseList.x" + , "main = pure ()" + ] + _ <- createDoc "Data/List.hs" "haskell" thisDataListContent + _ <- createDoc "Main.hs" "haskell" mainContent + expectDiagnostics + [ ( "Main.hs" + , [(DiagnosticSeverity_Error, (6, 9), + "Variable not in scope: ThisList.map", Just "GHC-88464") + ,(DiagnosticSeverity_Error, (7, 9), + "Variable not in scope: BaseList.x", Just "GHC-88464") + ] + ) + ] + , testWithDummyPluginEmpty "unqualified warnings" $ do + let fooContent = T.unlines + [ "{-# OPTIONS_GHC -Wredundant-constraints #-}" + , "module Foo where" + , "foo :: Ord a => a -> Int" + , "foo _a = 1" + ] + _ <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics + [ ( "Foo.hs" + -- The test is to make sure that warnings contain unqualified names + -- where appropriate. The warning should use an unqualified name 'Ord', not + -- something like 'GHC.Classes.Ord'. The choice of redundant-constraints to + -- test this is fairly arbitrary. 
+          , [(DiagnosticSeverity_Warning, (2, 7), "Redundant constraint: Ord a", Just "GHC-30606")
+            ]
+          )
+        ]
+  , testWithDummyPluginEmpty "lower-case drive" $ do
+      let aContent = T.unlines
+            [ "module A.A where"
+            , "import A.B ()"
+            ]
+          bContent = T.unlines
+            [ "{-# OPTIONS_GHC -Wall #-}"
+            , "module A.B where"
+            , "import Data.List"
+            ]
+      uriB <- getDocUri "A/B.hs"
+      Just pathB <- pure $ uriToFilePath uriB
+      uriB <- pure $
+          let (drive, suffix) = splitDrive pathB
+          in filePathToUri (joinDrive (lower drive) suffix)
+      liftIO $ createDirectoryIfMissing True (takeDirectory pathB)
+      liftIO $ writeFileUTF8 pathB $ T.unpack bContent
+      uriA <- getDocUri "A/A.hs"
+      Just pathA <- pure $ uriToFilePath uriA
+      uriA <- pure $
+          let (drive, suffix) = splitDrive pathA
+          in filePathToUri (joinDrive (lower drive) suffix)
+      let itemA = TextDocumentItem uriA "haskell" 0 aContent
+      let a = TextDocumentIdentifier uriA
+      sendNotification SMethod_TextDocumentDidOpen (DidOpenTextDocumentParams itemA)
+      TNotificationMessage{_params = PublishDiagnosticsParams fileUri _ diags} <- skipManyTill anyMessage diagnostic
+      -- Check that if we put a lower-case drive in for A.A
+      -- the diagnostics for A.B will also be lower-case.
+      liftIO $ fileUri @?= uriB
+      let msg :: T.Text = head diags ^. L.message
+      liftIO $ unless ("redundant" `T.isInfixOf` msg) $
+          assertFailure ("Expected redundant import but got " <> T.unpack msg)
+      closeDoc a
+  , testWithDummyPluginEmpty "strip file path" $ do
+      let
+          name = "Testing"
+          content = T.unlines
+            [ "module " <> name <> " where"
+            , "value :: Maybe ()"
+            , "value = [()]"
+            ]
+      _ <- createDoc (T.unpack name <> ".hs") "haskell" content
+      notification <- skipManyTill anyMessage diagnostic
+      let
+          offenders =
+            L.params .
+            L.diagnostics .
+            Lens.folded .
+            L.message .
+            Lens.filtered (T.isInfixOf ("/" <> name <> ".hs:"))
+          failure msg = liftIO $ assertFailure $ "Expected file path to be stripped but got " <> T.unpack msg
+      Lens.mapMOf_ offenders failure notification
+  , testWithDummyPlugin "-Werror in cradle is ignored"
+      (mkIdeTestFs [directCradle ["-Wall", "-Werror"]])
+      $ do
+      let fooContent = T.unlines
+            [ "module Foo where"
+            , "foo = ()"
+            ]
+      _ <- createDoc "Foo.hs" "haskell" fooContent
+      expectDiagnostics
+        [ ( "Foo.hs"
+          , [(DiagnosticSeverity_Warning, (1, 0), "Top-level binding with no type signature:", Nothing)
+            ]
+          )
+        ]
+  , testWithDummyPluginEmpty "-Werror in pragma is ignored" $ do
+      let fooContent = T.unlines
+            [ "{-# OPTIONS_GHC -Wall -Werror #-}"
+            , "module Foo() where"
+            , "foo :: Int"
+            , "foo = 1"
+            ]
+      _ <- createDoc "Foo.hs" "haskell" fooContent
+      expectDiagnostics
+        [ ( "Foo.hs"
+          , [(DiagnosticSeverity_Warning, (3, 0), "Defined but not used:", Nothing)
+            ]
+          )
+        ]
+  , testCase "typecheck-all-parents-of-interest" $ runWithExtraFiles "recomp" $ \dir -> do
+      let bPath = dir </> "B.hs"
+          pPath = dir </> "P.hs"
+          aPath = dir </> "A.hs"
+
+      bSource <- liftIO $ readFileUtf8 bPath -- y :: Int
+      pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int
+      aSource <- liftIO $ readFileUtf8 aPath -- x = y :: Int
+
+      bdoc <- createDoc bPath "haskell" bSource
+      _pdoc <- createDoc pPath "haskell" pSource
+      expectDiagnostics
+        [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")])] -- So that we know P has been loaded
+
+      -- Change y from Int to Bool, which introduces a type error in A (imported from P)
+      changeDoc bdoc [TextDocumentContentChangeEvent . InR . 
TextDocumentContentChangeWholeDocument $ + T.unlines ["module B where", "y :: Bool", "y = undefined"]] + expectDiagnostics + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")]) + ] + + -- Open A and edit to fix the type error + adoc <- createDoc aPath "haskell" aSource + changeDoc adoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ + T.unlines ["module A where", "import B", "x :: Bool", "x = y"]] + + expectDiagnostics + [ ( "P.hs", + [ (DiagnosticSeverity_Error, (4, 6), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865"), + (DiagnosticSeverity_Warning, (4, 0), "Top-level binding", Just "GHC-38417") + ] + ), + ("A.hs", []) + ] + expectNoMoreDiagnostics 1 + + , testWithDummyPluginEmpty "deduplicate missing module diagnostics" $ do + let fooContent = T.unlines [ "module Foo() where" , "import MissingModule" ] + doc <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (1,7), "Could not find module 'MissingModule'", Nothing)])] + + changeDoc doc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ "module Foo() where" ] + expectDiagnostics [] + + changeDoc doc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ T.unlines + [ "module Foo() where" , "import MissingModule" ] ] + expectDiagnostics [("Foo.hs", [(DiagnosticSeverity_Error, (1,7), "Could not find module 'MissingModule'", Nothing)])] + + , testGroup "Cancellation" + [ cancellationTestGroup "edit header" editHeader yesSession noParse noTc + , cancellationTestGroup "edit import" editImport noSession yesParse noTc + , cancellationTestGroup "edit body" editBody yesSession yesParse yesTc + ] + ] + where + editPair x y = let p = Position x y ; p' = Position x (y+2) in + (TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range p p + , _rangeLength = Nothing + , _text = "fd" + } + + ,TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range p p' + , _rangeLength = Nothing + , _text = "" + } + ) + editHeader = editPair 0 0 + editImport = editPair 2 10 + editBody = editPair 3 10 + + noParse = False + yesParse = True + + noSession = False + yesSession = True + + noTc = False + yesTc = True + +cancellationTestGroup :: TestName -> (TextDocumentContentChangeEvent, TextDocumentContentChangeEvent) -> Bool -> Bool -> Bool -> TestTree +cancellationTestGroup name edits sessionDepsOutcome parseOutcome tcOutcome = testGroup name + [ cancellationTemplate edits Nothing + , cancellationTemplate edits $ Just ("GetFileContents", True) + , cancellationTemplate edits $ Just ("GhcSession", True) + -- the outcome for GetModSummary is always True because parseModuleHeader never fails (!) 
+ , cancellationTemplate edits $ Just ("GetModSummary", True) + , cancellationTemplate edits $ Just ("GetModSummaryWithoutTimestamps", True) + -- getLocatedImports never fails + , cancellationTemplate edits $ Just ("GetLocatedImports", True) + , cancellationTemplate edits $ Just ("GhcSessionDeps", sessionDepsOutcome) + , cancellationTemplate edits $ Just ("GetParsedModule", parseOutcome) + , cancellationTemplate edits $ Just ("TypeCheck", tcOutcome) + , cancellationTemplate edits $ Just ("GetHieAst", tcOutcome) + ] + +cancellationTemplate :: (TextDocumentContentChangeEvent, TextDocumentContentChangeEvent) -> Maybe (String, Bool) -> TestTree +cancellationTemplate (edit, undoEdit) mbKey = testCase (maybe "-" fst mbKey) $ runTestNoKick $ do + doc <- createDoc "Foo.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wall #-}" + , "module Foo where" + , "import Data.List()" + , "f0 x = (x,x)" + ] + + -- for the example above we expect one warning + let missingSigDiags = [(DiagnosticSeverity_Warning, (3, 0), "Top-level binding", Just "GHC-38417") ] + typeCheck doc >> expectCurrentDiagnostics doc missingSigDiags + + -- Now we edit the document and wait for the given key (if any) + changeDoc doc [edit] + whenJust mbKey $ \(key, expectedResult) -> do + WaitForIdeRuleResult{ideResultSuccess} <- waitForAction key doc + liftIO $ ideResultSuccess @?= expectedResult + + -- The 2nd edit cancels the active session and unbreaks the file + -- wait for typecheck and check that the current diagnostics are accurate + changeDoc doc [undoEdit] + typeCheck doc >> expectCurrentDiagnostics doc missingSigDiags + + expectNoMoreDiagnostics 0.5 + where + runTestNoKick s = + runSessionWithTestConfig def + { testPluginDescriptor = dummyPlugin + , testDirLocation = Right (mkIdeTestFs []) + , testDisableKick = True + } $ const s + + typeCheck doc = do + WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" doc + liftIO $ assertBool "The file should typecheck" ideResultSuccess + -- wait for the debouncer to publish diagnostics if the rule runs + liftIO $ sleep 0.2 + -- flush messages to ensure current diagnostics state is updated + flushMessages diff --git a/ghcide-test/exe/ExceptionTests.hs b/ghcide-test/exe/ExceptionTests.hs new file mode 100644 index 0000000000..a95f91e97c --- /dev/null +++ b/ghcide-test/exe/ExceptionTests.hs @@ -0,0 +1,140 @@ + +module ExceptionTests (tests) where + +import Control.Exception (ArithException (DivideByZero), + throwIO) +import Control.Lens +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class (liftIO) +import qualified Data.Aeson as A +import Data.Default (Default (..)) +import qualified Data.Text as T +import Development.IDE.Core.Shake (IdeState (..)) +import qualified Development.IDE.LSP.Notifications as Notifications +import Development.IDE.Plugin.HLS (toResponseError) +import GHC.Base (coerce) +import Ide.Logger (Recorder, WithPriority, + cmapWithPrio) +import Ide.Plugin.Error +import Ide.Plugin.HandleRequestTypes (RejectionReason (DisabledGlobally)) +import Ide.PluginUtils (pluginDescToIdePlugins) +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import LogType (Log (..)) +import Test.Hls (TestConfig (testDisableDefaultPlugin, testPluginDescriptor), + runSessionWithTestConfig, + testCheckProject, + waitForProgressDone) +import 
Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = do + testGroup "Exceptions and PluginError" [ + testGroup "Testing that IO Exceptions are caught in..." + [ testCase "PluginHandlers" $ do + let pluginId = "plugin-handler-exception" + plugins :: Recorder (WithPriority Log) -> IdePlugins IdeState + plugins r = pluginDescToIdePlugins $ + [ (defaultPluginDescriptor pluginId "") + { pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentCodeLens $ \_ _ _-> do + _ <- liftIO $ throwIO DivideByZero + pure (InL []) + ] + }] ++ [Notifications.descriptor (cmapWithPrio LogNotifications r) "ghcide-core"] + runSessionWithTestConfig def {testPluginDescriptor = plugins, testDisableDefaultPlugin=True, testCheckProject=False + } $ const $ do + doc <- createDoc "A.hs" "haskell" "module A where" + (view L.result -> lens) <- request SMethod_TextDocumentCodeLens (CodeLensParams Nothing Nothing doc) + case lens of + Left (TResponseError {_code = InR ErrorCodes_InternalError, _message}) -> + liftIO $ assertBool "We caught an error, but it wasn't ours!" + (T.isInfixOf "divide by zero" _message && T.isInfixOf (coerce pluginId) _message) + _ -> liftIO $ assertFailure $ show lens + + , testCase "Commands" $ do + let pluginId = "command-exception" + commandId = CommandId "exception" + plugins :: Recorder (WithPriority Log) -> IdePlugins IdeState + plugins r = pluginDescToIdePlugins $ + [ (defaultPluginDescriptor pluginId "") + { pluginCommands = + [ PluginCommand commandId "Causes an exception" $ \_ _ (_::Int) -> do + _ <- liftIO $ throwIO DivideByZero + pure (InR Null) + ] + }] ++ [Notifications.descriptor (cmapWithPrio LogNotifications r) "ghcide-core"] + runSessionWithTestConfig def {testPluginDescriptor = plugins, testDisableDefaultPlugin=True, testCheckProject=False} $ const $ do + _ <- createDoc "A.hs" "haskell" "module A where" + waitForProgressDone + let cmd = mkLspCommand (coerce pluginId) commandId "" (Just [A.toJSON (1::Int)]) + execParams = ExecuteCommandParams Nothing (cmd ^. L.command) (cmd ^. L.arguments) + (view L.result -> res) <- request SMethod_WorkspaceExecuteCommand execParams + case res of + Left (TResponseError {_code = InR ErrorCodes_InternalError, _message}) -> + liftIO $ assertBool "We caught an error, but it wasn't ours!" 
+ (T.isInfixOf "divide by zero" _message && T.isInfixOf (coerce pluginId) _message) + _ -> liftIO $ assertFailure $ show res + + , testCase "Notification Handlers" $ do + let pluginId = "notification-exception" + plugins :: Recorder (WithPriority Log) -> IdePlugins IdeState + plugins r = pluginDescToIdePlugins $ + [ (defaultPluginDescriptor pluginId "") + { pluginNotificationHandlers = mconcat + [ mkPluginNotificationHandler SMethod_TextDocumentDidOpen $ \_ _ _ _ -> + liftIO $ throwIO DivideByZero + ] + , pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentCodeLens $ \_ _ _-> do + pure (InL []) + ] + }] ++ [Notifications.descriptor (cmapWithPrio LogNotifications r) "ghcide-core"] + runSessionWithTestConfig def {testPluginDescriptor = plugins, testDisableDefaultPlugin=True, testCheckProject=False} $ const $ do + doc <- createDoc "A.hs" "haskell" "module A where" + waitForProgressDone + (view L.result -> lens) <- request SMethod_TextDocumentCodeLens (CodeLensParams Nothing Nothing doc) + case lens of + Right (InL []) -> + -- We don't get error responses from notification handlers, so + -- we can only make sure that the server is still responding + pure () + _ -> liftIO $ assertFailure $ "We should have had an empty list" <> show lens] + + , testGroup "Testing PluginError order..." + [ pluginOrderTestCase "InternalError over InvalidParams" (PluginInternalError "error test") (PluginInvalidParams "error test") + , pluginOrderTestCase "InvalidParams over InvalidUserState" (PluginInvalidParams "error test") (PluginInvalidUserState "error test") + , pluginOrderTestCase "InvalidUserState over RequestRefused" (PluginInvalidUserState "error test") (PluginRequestRefused DisabledGlobally) + ] + ] + +pluginOrderTestCase :: TestName -> PluginError -> PluginError -> TestTree +pluginOrderTestCase msg err1 err2 = + testCase msg $ do + let pluginId = "error-order-test" + plugins :: Recorder (WithPriority Log) -> IdePlugins IdeState + plugins r = pluginDescToIdePlugins $ + [ (defaultPluginDescriptor pluginId "") + { pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentCodeLens $ \_ _ _-> do + throwError err1 + ,mkPluginHandler SMethod_TextDocumentCodeLens $ \_ _ _-> do + throwError err2 + ] + }] ++ [Notifications.descriptor (cmapWithPrio LogNotifications r) "ghcide-core"] + runSessionWithTestConfig def {testPluginDescriptor = plugins, testDisableDefaultPlugin=True, testCheckProject=False} $ const $ do + doc <- createDoc "A.hs" "haskell" "module A where" + waitForProgressDone + (view L.result -> lens) <- request SMethod_TextDocumentCodeLens (CodeLensParams Nothing Nothing doc) + case lens of + Left re | toResponseError (pluginId, err1) == re -> pure () + | otherwise -> liftIO $ assertFailure "We caught an error, but it wasn't ours!" 
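+          -- a 'Right' response here would mean the handlers returned a result instead of failing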
+ _ -> liftIO $ assertFailure $ show lens diff --git a/ghcide-test/exe/FindDefinitionAndHoverTests.hs b/ghcide-test/exe/FindDefinitionAndHoverTests.hs new file mode 100644 index 0000000000..e4c0958f58 --- /dev/null +++ b/ghcide-test/exe/FindDefinitionAndHoverTests.hs @@ -0,0 +1,258 @@ +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +module FindDefinitionAndHoverTests (tests) where + +import Control.Monad +import Data.Foldable +import Data.Maybe +import qualified Data.Text as T +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Test +import System.Info.Extra (isWindows) + +import Config +import Control.Category ((>>>)) +import Control.Lens ((^.)) +import Development.IDE.Test (expectDiagnostics, + standardizeQuotes) +import Test.Hls +import Test.Hls.FileSystem (copyDir) +import Text.Regex.TDFA ((=~)) + +tests :: TestTree +tests = let + tst :: (TextDocumentIdentifier -> Position -> Session a, a -> Session [Expect] -> Session ()) -> Position -> String -> Session [Expect] -> String -> TestTree + tst (get, check) pos sfp targetRange title = + testWithDummyPlugin title (mkIdeTestFs [copyDir "hover"]) $ do + doc <- openDoc sfp "haskell" + waitForProgressDone + _x <- waitForTypecheck doc + found <- get doc pos + check found targetRange + + + + checkHover :: (HasCallStack) => Maybe Hover -> Session [Expect] -> Session () + checkHover hover expectations = traverse_ check =<< expectations where + + check :: (HasCallStack) => Expect -> Session () + check expected = + case hover of + Nothing -> unless (expected == ExpectNoHover) $ liftIO $ assertFailure "no hover found" + Just Hover{_contents = (InL MarkupContent{_value = standardizeQuotes -> msg}) + ,_range = rangeInHover } -> + case expected of + ExpectRange expectedRange -> checkHoverRange expectedRange rangeInHover msg + ExpectHoverRange expectedRange -> checkHoverRange expectedRange rangeInHover msg + ExpectHoverText snippets -> liftIO $ traverse_ (`assertFoundIn` msg) snippets + ExpectHoverExcludeText snippets -> liftIO $ traverse_ (`assertNotFoundIn` msg) snippets + ExpectHoverTextRegex re -> liftIO $ assertBool ("Regex not found in " <> T.unpack msg) (msg =~ re :: Bool) + ExpectNoHover -> liftIO $ assertFailure $ "Expected no hover but got " <> show hover + _ -> pure () -- all other expectations not relevant to hover + _ -> liftIO $ assertFailure $ "test not expecting this kind of hover info" <> show hover + + extractLineColFromHoverMsg :: T.Text -> [T.Text] + extractLineColFromHoverMsg = + -- Hover messages contain multiple lines, and we are looking for the definition + -- site + T.lines + -- The line we are looking for looks like: "*Defined at /tmp/GotoHover.hs:22:3*" + -- So filter by the start of the line + >>> mapMaybe (T.stripPrefix "*Defined at") + -- There can be multiple definitions per hover message! + -- See the test "field in record definition" for example. + -- The tests check against the last line that contains the above line. + >>> last + -- [" /tmp/", "22:3*"] + >>> T.splitOn (sourceFileName <> ":") + -- "22:3*" + >>> last + -- ["22:3", ""] + >>> T.splitOn "*" + -- "22:3" + >>> head + -- ["22", "3"] + >>> T.splitOn ":" + + checkHoverRange :: Range -> Maybe Range -> T.Text -> Session () + checkHoverRange expectedRange rangeInHover msg = + let + lineCol = extractLineColFromHoverMsg msg + -- looks like hovers use 1-based numbering while definitions use 0-based + -- turns out that they are stored 1-based in RealSrcLoc by GHC itself. 
+ adjust Position{_line = l, _character = c} = + Position{_line = l + 1, _character = c + 1} + in + case map (read . T.unpack) lineCol of + [l,c] -> liftIO $ adjust (expectedRange ^. L.start) @=? Position l c + _ -> liftIO $ assertFailure $ + "expected: " <> show ("[...]" <> sourceFileName <> "::**[...]", Just expectedRange) <> + "\n but got: " <> show (msg, rangeInHover) + + assertFoundIn :: T.Text -> T.Text -> Assertion + assertFoundIn part whole = assertBool + (T.unpack $ "failed to find: `" <> part <> "` in hover message:\n" <> whole) + (part `T.isInfixOf` whole) + + assertNotFoundIn :: T.Text -> T.Text -> Assertion + assertNotFoundIn part whole = assertBool + (T.unpack $ "found unexpected: `" <> part <> "` in hover message:\n" <> whole) + (not . T.isInfixOf part $ whole) + + sourceFilePath = T.unpack sourceFileName + sourceFileName = "GotoHover.hs" + + mkFindTests tests = testGroup "get" + [ testGroup "definition" $ mapMaybe fst tests + , testGroup "hover" $ mapMaybe snd tests + , testGroup "hover compile" [checkFileCompiles sourceFilePath $ + expectDiagnostics + [ ( "GotoHover.hs", [(DiagnosticSeverity_Error, (62, 7), "Found hole: _", Just "GHC-88464")]) + , ( "GotoHover.hs", [(DiagnosticSeverity_Error, (65, 8), "Found hole: _", Just "GHC-88464")]) + ]] + , testGroup "type-definition" typeDefinitionTests + , testGroup "hover-record-dot-syntax" recordDotSyntaxTests ] + + typeDefinitionTests = [ tst (getTypeDefinitions, checkDefs) aaaL14 sourceFilePath (pure tcData) "Saturated data con" + , tst (getTypeDefinitions, checkDefs) aL20 sourceFilePath (pure [ExpectNoDefinitions]) "Polymorphic variable"] + + recordDotSyntaxTests = + [ tst (getHover, checkHover) (Position 17 24) (T.unpack "RecordDotSyntax.hs") (pure [ExpectHoverText ["x :: MyRecord"]]) "hover over parent" + , tst (getHover, checkHover) (Position 17 25) (T.unpack "RecordDotSyntax.hs") (pure [ExpectHoverText ["_ :: MyChild"]]) "hover over dot shows child" + , tst (getHover, checkHover) (Position 17 26) (T.unpack "RecordDotSyntax.hs") (pure [ExpectHoverText ["_ :: MyChild"]]) "hover over child" + ] + + test :: (HasCallStack) => (TestTree -> a) -> (TestTree -> b) -> Position -> [Expect] -> String -> (a, b) + test runDef runHover look expect = testM runDef runHover look (return expect) + + testM :: (HasCallStack) => (TestTree -> a) + -> (TestTree -> b) + -> Position + -> Session [Expect] + -> String + -> (a, b) + testM runDef runHover look expect title = + ( runDef $ tst def look sourceFilePath expect title + , runHover $ tst hover look sourceFilePath expect title ) where + def = (getDefinitions, checkDefs) + hover = (getHover , checkHover) + + -- search locations expectations on results + -- TODO: Lookup of record field should return exactly one result + fffL4 = fffR ^. 
L.start; fffR = mkRange 8 4 8 7; fff = [ExpectRanges [fffR, mkRange 7 23 9 16]] + fffL8 = Position 12 4 ; fff' = [ExpectRange fffR] + fffL14 = Position 18 7 ; + aL20 = Position 19 15 + aaaL14 = Position 18 20 ; aaa = [mkR 11 0 11 3] + dcL7 = Position 11 11 ; tcDC = [mkR 7 23 9 16] + dcL12 = Position 16 11 ; + xtcL5 = Position 9 11 ; xtc = [ExpectHoverText ["Int", "Defined in ", "GHC.Types", "ghc-prim"]] + tcL6 = Position 10 11 ; tcData = [mkR 7 0 9 16, ExpectHoverText ["TypeConstructor", "GotoHover.hs:8:1"]] + vvL16 = Position 20 12 ; vv = [mkR 20 4 20 6] + opL16 = Position 20 15 ; op = [mkR 21 2 21 4] + opL18 = Position 22 22 ; opp = [mkR 22 13 22 17] + aL18 = Position 22 20 ; apmp = [mkR 22 10 22 11] + b'L19 = Position 23 13 ; bp = [mkR 23 6 23 7] + xvL20 = Position 24 8 ; xvMsg = [ExpectHoverText ["pack", ":: String -> Text", "Data.Text", "text"]] + clL23 = Position 27 11 ; cls = [mkR 25 0 26 20, ExpectHoverText ["MyClass", "GotoHover.hs:26:1"]] + clL25 = Position 29 9 + eclL15 = Position 19 8 ; ecls = [ExpectHoverText ["Num", "Defined in ", if ghcVersion < GHC910 then "GHC.Num" else "GHC.Internal.Num", "base"]] + dnbL29 = Position 33 18 ; dnb = [ExpectHoverText [":: ()"], mkR 33 12 33 21] + dnbL30 = Position 34 23 + lcbL33 = Position 37 26 ; lcb = [ExpectHoverText [":: Char"], mkR 37 26 37 27] + lclL33 = Position 37 22 + mclL36 = Position 40 1 ; mcl = [mkR 40 0 40 14] + mclL37 = Position 41 1 + spaceL37 = Position 41 24 ; space = [ExpectNoDefinitions, ExpectHoverText [":: Char"]] + docL41 = Position 45 1 ; doc = [ExpectHoverText ["Recognizable docs: kpqz"]] + ; constr = [ExpectHoverText ["Monad m"]] + eitL40 = Position 44 28 ; kindE = [ExpectHoverText [":: Type -> Type -> Type\n"]] + intL40 = Position 44 34 ; kindI = [ExpectHoverText [":: Type\n"]] + -- TODO: Kind signature of type variables should be `Type -> Type` + tvrL40 = Position 44 37 ; kindV = [ExpectHoverText ["m"]]; kindV' = [ExpectHoverText [":: * -> *\n"]] + -- TODO: Hover of integer literal should be `7518` + intL41 = Position 45 20 ; litI = [ExpectHoverText ["_ :: Int"]]; litI' = [ExpectHoverText ["7518"]] + -- TODO: Hover info of char literal should be `'f'` + chrL36 = Position 41 24 ; litC = [ExpectHoverText ["_ :: Char"]]; litC' = [ExpectHoverText ["'f'"]] + -- TODO: Hover info of Text literal should be `"dfgy"` + txtL8 = Position 12 14 ; litT = [ExpectHoverText ["_ :: Text"]]; litT' = [ExpectHoverText ["\"dfgy\""]] + -- TODO: Hover info of List literal should be `[8391 :: Int, 6268]` + lstL43 = Position 47 12 ; litL = [ExpectHoverText ["[Int]"]]; litL' = [ExpectHoverText ["[8391 :: Int, 6268]"]] + outL45 = Position 49 3 ; outSig = [ExpectHoverText ["outer", "Bool"], mkR 50 0 50 5] + -- TODO: Hover info of local function signature should be `inner :: Bool` + innL48 = Position 52 5 ; innSig = [ExpectHoverText ["inner"], mkR 53 2 53 7]; innSig' = [ExpectHoverText ["inner", "Char"], mkR 49 2 49 7] + holeL60 = Position 62 7 ; hleInfo = [ExpectHoverText ["_ ::"]] + holeL65 = Position 65 8 ; hleInfo2 = [ExpectHoverText ["_ :: a -> Maybe a"]] + cccL17 = Position 17 16 ; docLink = [ExpectHoverTextRegex "\\*Defined in 'GHC.Types'\\* \\*\\(ghc-prim-[0-9.]+\\)\\*\n\n"] + imported = Position 56 13 ; importedSig = getDocUri "Foo.hs" >>= \foo -> return [ExpectHoverText ["foo", "Foo", "Haddock"], mkL foo 5 0 5 3] + reexported = Position 55 14 + reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion < GHC910 || not isWindows then mkL bar 3 5 3 8 else mkL bar 3 0 3 14] + thLocL57 
= Position 59 10 ; thLoc = [ExpectHoverText ["Identity"]] + cmtL68 = Position 67 0 ; lackOfdEq = [ExpectHoverExcludeText ["$dEq"]] + import310 = Position 3 10; pkgTxt = [ExpectHoverText ["Data.Text\n\ntext-"]] + in + mkFindTests + -- def hover look expect + [ -- It suggests either going to the constructor or to the field + test (broken fff') yes fffL4 fff "field in record definition" + , test yes yes fffL8 fff' "field in record construction #1102" + , test yes yes fffL14 fff' "field name used as accessor" -- https://github.com/haskell/ghcide/pull/120 in Calculate.hs + , test yes yes aaaL14 aaa "top-level name" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes dcL7 tcDC "data constructor record #1029" + , test yes yes dcL12 tcDC "data constructor plain" -- https://github.com/haskell/ghcide/pull/121 + , test yes yes tcL6 tcData "type constructor #1028" -- https://github.com/haskell/ghcide/pull/147 + , test yes yes xtcL5 xtc "type constructor external #717,1028" + , test yes yes xvL20 xvMsg "value external package #717" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes vvL16 vv "plain parameter" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes aL18 apmp "pattern match name" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes opL16 op "top-level operator #713" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes opL18 opp "parameter operator" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes b'L19 bp "name in backticks" -- https://github.com/haskell/ghcide/pull/120 + , test yes yes clL23 cls "class in instance declaration #1027" + , test yes yes clL25 cls "class in signature #1027" -- https://github.com/haskell/ghcide/pull/147 + , test yes yes eclL15 ecls "external class in signature #717,1027" + , test yes yes dnbL29 dnb "do-notation bind #1073" + , test yes yes dnbL30 dnb "do-notation lookup" + , test yes yes lcbL33 lcb "listcomp bind #1073" + , test yes yes lclL33 lcb "listcomp lookup" + , test yes yes mclL36 mcl "top-level fn 1st clause" + , test yes yes mclL37 mcl "top-level fn 2nd clause #1030" + , test yes yes spaceL37 space "top-level fn on space #1002" + , test no yes docL41 doc "documentation #1129" + , test no yes eitL40 kindE "kind of Either #1017" + , test no yes intL40 kindI "kind of Int #1017" + , test no (broken kindV') tvrL40 kindV "kind of (* -> *) type variable #1017" + , test no (broken litI') intL41 litI "literal Int in hover info #1016" + , test no (broken litC') chrL36 litC "literal Char in hover info #1016" + , test no (broken litT') txtL8 litT "literal Text in hover info #1016" + , test no (broken litL') lstL43 litL "literal List in hover info #1016" + , test yes yes cmtL68 lackOfdEq "no Core symbols #3280" + , test no yes docL41 constr "type constraint in hover info #1012" + , test no yes outL45 outSig "top-level signature #767" + , test yes (broken innSig') innL48 innSig "inner signature #767" + , test no yes holeL60 hleInfo "hole without internal name #831" + , test no yes holeL65 hleInfo2 "hole with variable" + , test no yes cccL17 docLink "Haddock html links" + , testM yes yes imported importedSig "Imported symbol" + , if isWindows then + -- Flaky on Windows: https://github.com/haskell/haskell-language-server/issues/2997 + testM no 
yes reexported reexportedSig "Imported symbol reexported" + else + testM yes yes reexported reexportedSig "Imported symbol reexported" + , test no yes thLocL57 thLoc "TH Splice Hover" + , test yes yes import310 pkgTxt "show package name and its version" + ] + where yes :: (TestTree -> Maybe TestTree) + yes = Just -- test should run and pass + no = const Nothing -- don't run this test at all + --skip = const Nothing -- unreliable, don't run + broken :: [Expect] -> TestTree -> Maybe TestTree + broken _ = yes + +checkFileCompiles :: FilePath -> Session () -> TestTree +checkFileCompiles fp diag = + testWithDummyPlugin ("hover: Does " ++ fp ++ " compile") (mkIdeTestFs [copyDir "hover"]) $ do + _ <- openDoc fp "haskell" + diag diff --git a/ghcide-test/exe/FindImplementationAndHoverTests.hs b/ghcide-test/exe/FindImplementationAndHoverTests.hs new file mode 100644 index 0000000000..221be90dd2 --- /dev/null +++ b/ghcide-test/exe/FindImplementationAndHoverTests.hs @@ -0,0 +1,228 @@ +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +module FindImplementationAndHoverTests (tests) where + +import Control.Monad +import Data.Foldable +import Data.Maybe +import Data.Text (Text) +import qualified Data.Text as T +import Language.LSP.Test +import Text.Regex.TDFA ((=~)) + +import Config +import Development.IDE.Test (standardizeQuotes) +import Test.Hls +import Test.Hls.FileSystem (copyDir) + +tests :: TestTree +tests = let + tst :: (TextDocumentIdentifier -> Position -> Session a, a -> Session [Expect] -> Session ()) -> Position -> String -> Session [Expect] -> String -> TestTree + tst (get, check) pos sfp targetRange title = + testWithDummyPlugin title (mkIdeTestFs [copyDir "hover"]) $ do + doc <- openDoc sfp "haskell" + waitForProgressDone + _x <- waitForTypecheck doc + found <- get doc pos + check found targetRange + + checkHover :: (HasCallStack) => Maybe Hover -> Session [Expect] -> Session () + checkHover hover expectations = traverse_ check =<< expectations where + + check :: (HasCallStack) => Expect -> Session () + check expected = + case hover of + Nothing -> unless (expected == ExpectNoHover) $ liftIO $ assertFailure "no hover found" + Just Hover{_contents = (InL MarkupContent{_value = standardizeQuotes -> msg}) + ,_range = _rangeInHover } -> + case expected of + ExpectRange _expectedRange -> liftIO $ assertFailure $ "ExpectRange assertion not implemented, yet." + ExpectHoverRange _expectedRange -> liftIO $ assertFailure $ "ExpectHoverRange assertion not implemented, yet." + ExpectHoverText snippets -> liftIO $ traverse_ (`assertFoundIn` msg) snippets + ExpectHoverExcludeText snippets -> liftIO $ traverse_ (`assertNotFoundIn` msg) snippets + ExpectHoverTextRegex re -> liftIO $ assertBool ("Regex not found in " <> T.unpack msg) (msg =~ re :: Bool) + ExpectNoHover -> liftIO $ assertFailure $ "Expected no hover but got " <> show hover + _ -> pure () -- all other expectations not relevant to hover + _ -> liftIO $ assertFailure $ "test not expecting this kind of hover info" <> show hover + + assertFoundIn :: T.Text -> T.Text -> Assertion + assertFoundIn part whole = assertBool + (T.unpack $ "failed to find: `" <> part <> "` in hover message:\n" <> whole) + (part `T.isInfixOf` whole) + + assertNotFoundIn :: T.Text -> T.Text -> Assertion + assertNotFoundIn part whole = assertBool + (T.unpack $ "found unexpected: `" <> part <> "` in hover message:\n" <> whole) + (not . 
T.isInfixOf part $ whole) + + sourceFilePath = T.unpack sourceFileName + sourceFileName = "GotoImplementation.hs" + + mkFindTests tests = testGroup "goto implementation" + [ testGroup "implementation" $ mapMaybe fst allTests + , testGroup "hover" $ mapMaybe snd allTests + ] + where + allTests = tests ++ recordDotSyntaxTests + + recordDotSyntaxTests = + -- We get neither new hover information nor 'Goto Implementation' locations for record-dot-syntax + [ test' "RecordDotSyntax.hs" yes yes (Position 17 6) [ExpectNoImplementations, ExpectHoverText ["_ :: [Char]"]] "hover over parent" + , test' "RecordDotSyntax.hs" yes yes (Position 17 18) [ExpectNoImplementations, ExpectHoverText ["_ :: Integer"]] "hover over dot shows child" + , test' "RecordDotSyntax.hs" yes yes (Position 17 25) [ExpectNoImplementations, ExpectHoverText ["_ :: MyChild"]] "hover over child" + , test' "RecordDotSyntax.hs" yes yes (Position 17 27) [ExpectNoImplementations, ExpectHoverText ["_ :: [Char]"]] "hover over grandchild" + ] + + test :: (HasCallStack) => (TestTree -> a) -> (TestTree -> b) -> Position -> [Expect] -> String -> (a, b) + test runImpl runHover look expect = testM runImpl runHover look (return expect) + + testM :: (HasCallStack) => (TestTree -> a) + -> (TestTree -> b) + -> Position + -> Session [Expect] + -> String + -> (a, b) + testM = testM' sourceFilePath + + test' :: (HasCallStack) => FilePath -> (TestTree -> a) -> (TestTree -> b) -> Position -> [Expect] -> String -> (a, b) + test' sourceFile runImpl runHover look expect = testM' sourceFile runImpl runHover look (return expect) + + testM' :: (HasCallStack) + => FilePath + -> (TestTree -> a) + -> (TestTree -> b) + -> Position + -> Session [Expect] + -> String + -> (a, b) + testM' sourceFile runImpl runHover look expect title = + ( runImpl $ tst impl look sourceFile expect title + , runHover $ tst hover look sourceFile expect title ) where + impl = (getImplementations, checkDefs) + hover = (getHover , checkHover) + + aaaL = Position 8 15; aaaR = mkRange 5 9 5 16; + aaa = + [ ExpectRanges [aaaR] + , ExpectHoverText (evidenceBoundByConstraint "Num" "AAA") + ] + + bbbL = Position 15 8; bbbR = mkRange 12 9 12 16; + bbb = + [ ExpectRanges [bbbR] + , ExpectHoverText (evidenceBoundByConstraint "BBB" "AAA") + ] + cccL = Position 18 11; + ccc = + [ ExpectNoImplementations + , ExpectHoverText (evidenceBySignatureOrPattern "Show" "a") + ] + dddShowR = mkRange 21 26 21 30; dddEqR = mkRange 21 22 21 24 + dddL1 = Position 23 16; + ddd1 = + [ ExpectRanges [dddEqR] + , ExpectHoverText + [ constraintEvidence "Eq" "(Q k)" + , evidenceGoal' "'forall k. Eq k => Eq (Q k)'" + , boundByInstanceOf "Eq" + , evidenceGoal "Eq" "k" + , boundByTypeSigOrPattern + ] + ] + dddL2 = Position 23 29; + ddd2 = + [ ExpectNoImplementations + , ExpectHoverText (evidenceBySignatureOrPattern "Show" "k") + ] + dddL3 = Position 24 8; + ddd3 = + [ ExpectRanges [dddEqR, dddShowR] + , ExpectHoverText + [ constraintEvidence "Show" "(Q Integer)" + , evidenceGoal' "'forall k. Show k => Show (Q k)'" + , boundByInstance + , evidenceGoal "Show" "Integer" + , usingExternalInstance + , constraintEvidence "Eq" "(Q Integer)" + , evidenceGoal' "'forall k. 
Eq k => Eq (Q k)'" + , boundByInstance + , evidenceGoal "Eq" "Integer" + , usingExternalInstance + ] + ] + gadtL = Position 29 35; + gadt = + [ ExpectNoImplementations + , ExpectHoverText + [ constraintEvidence "Show" "Int" + , evidenceGoal "Show" "a" + , boundByTypeSigOrPattern + , evidenceGoal' "'a ~ Int'" + , boundByPattern + ] + ] + in + mkFindTests + -- impl hover look expect + [ + test yes yes aaaL aaa "locally defined class instance" + , test yes yes bbbL bbb "locally defined class and instance" + , test yes yes cccL ccc "bound by type signature" + , test yes yes dddL1 ddd1 "newtype Eq evidence" + , test yes yes dddL2 ddd2 "Show evidence" + , test yes yes dddL3 ddd3 "evidence construction" + , test yes yes gadtL gadt "GADT evidence" + ] + where yes :: (TestTree -> Maybe TestTree) + yes = Just -- test should run and pass + no = const Nothing -- don't run this test at all + +-- ---------------------------------------------------------------------------- +-- Helper functions for creating hover message verification +-- ---------------------------------------------------------------------------- + +evidenceBySignatureOrPattern :: Text -> Text -> [Text] +evidenceBySignatureOrPattern tyclass varname = + [ constraintEvidence tyclass varname + , boundByTypeSigOrPattern + ] + +evidenceBoundByConstraint :: Text -> Text -> [Text] +evidenceBoundByConstraint tyclass varname = + [ constraintEvidence tyclass varname + , boundByInstanceOf tyclass + ] + +boundByTypeSigOrPattern :: Text +boundByTypeSigOrPattern = "bound by type signature or pattern" + +boundByInstance :: Text +boundByInstance = + "bound by an instance of" + +boundByInstanceOf :: Text -> Text +boundByInstanceOf tyvar = + "bound by an instance of class " <> tyvar + +boundByPattern :: Text +boundByPattern = + "bound by a pattern" + +usingExternalInstance :: Text +usingExternalInstance = + "using an external instance" + +constraintEvidence :: Text -> Text -> Text +constraintEvidence tyclass varname = "Evidence of constraint " <> quotedName tyclass varname + +-- | A goal in the evidence tree. 
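+-- e.g. @evidenceGoal "Eq" "k"@ produces @- 'Eq k'@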
+evidenceGoal :: Text -> Text -> Text +evidenceGoal tyclass varname = "- " <> quotedName tyclass varname + +evidenceGoal' :: Text -> Text +evidenceGoal' t = "- " <> t + +quotedName :: Text -> Text -> Text +quotedName tyclass varname = "'" <> tyclass <> " " <> varname <> "'" diff --git a/ghcide-test/exe/FuzzySearch.hs b/ghcide-test/exe/FuzzySearch.hs new file mode 100644 index 0000000000..1d2a5ac181 --- /dev/null +++ b/ghcide-test/exe/FuzzySearch.hs @@ -0,0 +1,52 @@ +module FuzzySearch (tests) where + +import Data.Maybe (isJust, mapMaybe) +import Data.Text (Text) +import qualified Data.Text as Text +import Prelude hiding (filter) +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck +import Text.Fuzzy.Parallel + +tests :: TestTree +tests = + testGroup + "Fuzzy search" + [ testGroup "match" + [ testCase "empty" $ + match "" "" @?= Just 0 + , testCase "camel case" $ + match "myImportantField" "myImportantField" @?= Just 262124 + , testCase "a" $ + mapMaybe (matchInput "a") ["", "a", "aa", "aaa", "A", "AA", "aA", "Aa"] + @?= [("a",3),("aa",3),("aaa",3),("aA",3),("Aa",1)] + , testCase "lowercase words" $ + mapMaybe (matchInput "abc") ["abc", "abcd", "axbc", "axbxc", "def"] + @?= [("abc", 25), ("abcd", 25), ("axbc", 7), ("axbxc", 5)] + , testCase "lower upper mix" $ + mapMaybe (matchInput "abc") ["abc", "aBc", "axbC", "axBxC", "def"] + @?= [("abc", 25), ("aBc", 25), ("axbC", 7), ("axBxC", 5)] + , testCase "prefixes" $ + mapMaybe (matchInput "alpha") (Text.inits "alphabet") + @?= [("alpha", 119), ("alphab", 119), ("alphabe", 119), ("alphabet", 119)] + , testProperty "x `isSubsequenceOf` y => match x y returns Just" + prop_matchIfSubsequence + ] + ] + where + matchInput :: Text -> Text -> Maybe (Text, Int) + matchInput needle candidate = (candidate,) <$> match needle candidate + +prop_matchIfSubsequence :: Property +prop_matchIfSubsequence = + forAll genNonEmptyText $ \haystack -> + forAll (genSubsequence haystack) $ \needle -> + isJust (match needle haystack) + where + genNonEmptyText = + Text.pack <$> listOf1 (elements $ ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']) + + genSubsequence :: Text -> Gen Text + genSubsequence = + fmap Text.pack . sublistOf . 
Text.unpack diff --git a/ghcide-test/exe/GarbageCollectionTests.hs b/ghcide-test/exe/GarbageCollectionTests.hs new file mode 100644 index 0000000000..5cc9935352 --- /dev/null +++ b/ghcide-test/exe/GarbageCollectionTests.hs @@ -0,0 +1,89 @@ +module GarbageCollectionTests (tests) where + +import Config (testWithDummyPluginEmpty') +import Control.Monad.IO.Class (liftIO) +import qualified Data.Set as Set +import qualified Data.Text as T +import Development.IDE.Test (expectCurrentDiagnostics, + getStoredKeys, waitForGC, + waitForTypecheck) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +import System.FilePath +import Test.Tasty +import Test.Tasty.HUnit +import Text.Printf (printf) + +tests :: TestTree +tests = testGroup "garbage collection" + [ testGroup "dirty keys" + [ testWithDummyPluginEmpty' "are collected" $ \dir -> do + liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" + doc <- generateGarbage "A" dir + closeDoc doc + garbage <- waitForGC + liftIO $ assertBool "no garbage was found" $ not $ null garbage + + , testWithDummyPluginEmpty' "are deleted from the state" $ \dir -> do + liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" + docA <- generateGarbage "A" dir + keys0 <- getStoredKeys + closeDoc docA + garbage <- waitForGC + liftIO $ assertBool "something is wrong with this test - no garbage found" $ not $ null garbage + keys1 <- getStoredKeys + liftIO $ assertBool "keys were not deleted from the state" (length keys1 < length keys0) + + , testWithDummyPluginEmpty' "are not regenerated unless needed" $ \dir -> do + liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A.hs, B.hs]}}" + docA <- generateGarbage "A" dir + _docB <- generateGarbage "B" dir + + -- garbage collect A keys + keysBeforeGC <- getStoredKeys + closeDoc docA + garbage <- waitForGC + liftIO $ assertBool "something is wrong with this test - no garbage found" $ not $ null garbage + keysAfterGC <- getStoredKeys + liftIO $ assertBool "something is wrong with this test - keys were not deleted from the state" + (length keysAfterGC < length keysBeforeGC) + + -- re-typecheck B and check that the keys for A have not materialized back + _docB <- generateGarbage "B" dir + keysB <- getStoredKeys + let regeneratedKeys = Set.filter (not . isExpected) $ + Set.intersection (Set.fromList garbage) (Set.fromList keysB) + liftIO $ regeneratedKeys @?= mempty + + , testWithDummyPluginEmpty' "regenerate successfully" $ \dir -> do + liftIO $ writeFile (dir "hie.yaml") "cradle: {direct: {arguments: [A]}}" + docA <- generateGarbage "A" dir + closeDoc docA + garbage <- waitForGC + liftIO $ assertBool "no garbage was found" $ not $ null garbage + let edit = T.unlines + [ "module A where" + , "a :: Bool" + , "a = ()" + ] + doc <- generateGarbage "A" dir + changeDoc doc [TextDocumentContentChangeEvent . 
InR $ TextDocumentContentChangeWholeDocument edit] + builds <- waitForTypecheck doc + liftIO $ assertBool "it still builds" builds + expectCurrentDiagnostics doc [(DiagnosticSeverity_Error, (2,4), "Couldn't match expected type", Just "GHC-83865")] + ] + ] + where + isExpected k = "GhcSessionIO" `T.isPrefixOf` k + + generateGarbage :: String -> FilePath -> Session TextDocumentIdentifier + generateGarbage modName dir = do + let fp = modName <> ".hs" + body = printf "module %s where" modName + doc <- createDoc fp "haskell" (T.pack body) + liftIO $ writeFile (dir fp) body + builds <- waitForTypecheck doc + liftIO $ assertBool "something is wrong with this test" builds + return doc diff --git a/ghcide-test/exe/HaddockTests.hs b/ghcide-test/exe/HaddockTests.hs new file mode 100644 index 0000000000..f45468d87f --- /dev/null +++ b/ghcide-test/exe/HaddockTests.hs @@ -0,0 +1,90 @@ + +module HaddockTests (tests) where + +import Development.IDE.Spans.Common +-- import Test.QuickCheck.Instances () +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests + = testGroup "haddock" + [ testCase "Num" $ checkHaddock + (unlines + [ "However, '(+)' and '(*)' are" + , "customarily expected to define a ring and have the following properties:" + , "" + , "[__Associativity of (+)__]: @(x + y) + z@ = @x + (y + z)@" + , "[__Commutativity of (+)__]: @x + y@ = @y + x@" + , "[__@fromInteger 0@ is the additive identity__]: @x + fromInteger 0@ = @x@" + ] + ) + (unlines + [ "" + , "" + , "However, `(+)` and `(*)` are" + , "customarily expected to define a ring and have the following properties: " + , "+ ****Associativity of (+)****: `(x + y) + z` = `x + (y + z)`" + , "+ ****Commutativity of (+)****: `x + y` = `y + x`" + , "+ ****`fromInteger 0` is the additive identity****: `x + fromInteger 0` = `x`" + ] + ) + , testCase "unsafePerformIO" $ checkHaddock + (unlines + [ "may require" + , "different precautions:" + , "" + , " * Use @{\\-\\# NOINLINE foo \\#-\\}@ as a pragma on any function @foo@" + , " that calls 'unsafePerformIO'. If the call is inlined," + , " the I\\/O may be performed more than once." + , "" + , " * Use the compiler flag @-fno-cse@ to prevent common sub-expression" + , " elimination being performed on the module." + , "" + ] + ) + (unlines + [ "" + , "" + , "may require" + , "different precautions: " + , "+ Use `{-# NOINLINE foo #-}` as a pragma on any function `foo` " + , " that calls `unsafePerformIO` . If the call is inlined," + , " the I/O may be performed more than once." + , "" + , "+ Use the compiler flag `-fno-cse` to prevent common sub-expression" + , " elimination being performed on the module." + , "" + ] + ) + , testCase "ordered list" $ checkHaddock + (unlines + [ "may require" + , "different precautions:" + , "" + , " 1. Use @{\\-\\# NOINLINE foo \\#-\\}@ as a pragma on any function @foo@" + , " that calls 'unsafePerformIO'. If the call is inlined," + , " the I\\/O may be performed more than once." + , "" + , " 2. Use the compiler flag @-fno-cse@ to prevent common sub-expression" + , " elimination being performed on the module." + , "" + ] + ) + (unlines + [ "" + , "" + , "may require" + , "different precautions: " + , "1. Use `{-# NOINLINE foo #-}` as a pragma on any function `foo` " + , " that calls `unsafePerformIO` . If the call is inlined," + , " the I/O may be performed more than once." + , "" + , "2. Use the compiler flag `-fno-cse` to prevent common sub-expression" + , " elimination being performed on the module." 
+ , "" + ] + ) + ] + where + checkHaddock s txt = spanDocToMarkdownForTest s @?= txt diff --git a/ghcide-test/exe/HieDbRetry.hs b/ghcide-test/exe/HieDbRetry.hs new file mode 100644 index 0000000000..3e0c41c2f9 --- /dev/null +++ b/ghcide-test/exe/HieDbRetry.hs @@ -0,0 +1,136 @@ +{-# LANGUAGE MultiWayIf #-} +module HieDbRetry (tests) where + +import Control.Concurrent.Extra (Var, modifyVar, newVar, readVar, + withVar) +import Control.Exception (ErrorCall (ErrorCall), evaluate, + throwIO, tryJust) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Data.Tuple.Extra (dupe) +import qualified Database.SQLite.Simple as SQLite +import Development.IDE.Session (retryOnException, retryOnSqliteBusy) +import qualified Development.IDE.Session as Session +import Ide.Logger (Recorder (Recorder, logger_), + WithPriority (WithPriority, payload), + cmapWithPrio) +import qualified System.Random as Random +import Test.Tasty (TestTree, testGroup) +import Test.Tasty.HUnit (assertFailure, testCase, (@?=)) + +data Log + = LogSession Session.Log + deriving Show + +makeLogger :: Var [Log] -> Recorder (WithPriority Log) +makeLogger msgsVar = + Recorder { + logger_ = \WithPriority{ payload = msg } -> liftIO $ modifyVar msgsVar (\msgs -> pure (msg : msgs, ())) + } + +rng :: Random.StdGen +rng = Random.mkStdGen 0 + +retryOnSqliteBusyForTest :: Recorder (WithPriority Log) -> Int -> IO a -> IO a +retryOnSqliteBusyForTest recorder maxRetryCount = retryOnException isErrorBusy (cmapWithPrio LogSession recorder) 1 1 maxRetryCount rng + +isErrorBusy :: SQLite.SQLError -> Maybe SQLite.SQLError +isErrorBusy e + | SQLite.SQLError { sqlError = SQLite.ErrorBusy } <- e = Just e + | otherwise = Nothing + +errorBusy :: SQLite.SQLError +errorBusy = SQLite.SQLError{ sqlError = SQLite.ErrorBusy, sqlErrorDetails = "", sqlErrorContext = "" } + +isErrorCall :: ErrorCall -> Maybe ErrorCall +isErrorCall e + | ErrorCall _ <- e = Just e + +tests :: TestTree +tests = testGroup "RetryHieDb" + [ testCase "retryOnException throws exception after max retries" $ do + logMsgsVar <- newVar [] + let logger = makeLogger logMsgsVar + let maxRetryCount = 1 + + result <- tryJust isErrorBusy (retryOnSqliteBusyForTest logger maxRetryCount (throwIO errorBusy)) + + case result of + Left exception -> do + exception @?= errorBusy + withVar logMsgsVar $ \logMsgs -> + length logMsgs @?= 2 + -- uncomment if want to compare log msgs + -- logMsgs @?= [] + Right _ -> assertFailure "Expected ErrorBusy exception" + + , testCase "retryOnException doesn't throw if given function doesn't throw" $ do + let expected = 1 :: Int + let maxRetryCount = 0 + + actual <- retryOnSqliteBusyForTest mempty maxRetryCount (pure expected) + + actual @?= expected + + , testCase "retryOnException retries the number of times it should" $ do + countVar <- newVar 0 + let maxRetryCount = 3 + let incrementThenThrow = modifyVar countVar (\count -> pure (dupe (count + 1))) >> throwIO errorBusy + + _ <- tryJust isErrorBusy (retryOnSqliteBusyForTest mempty maxRetryCount incrementThenThrow) + + withVar countVar $ \count -> + count @?= maxRetryCount + 1 + + , testCase "retryOnException doesn't retry if exception is not ErrorBusy" $ do + countVar <- newVar (0 :: Int) + let maxRetryCount = 1 + + let throwThenIncrement = do + count <- readVar countVar + if count == 0 then + evaluate (error "dummy exception") + else + modifyVar countVar (\count -> pure (dupe (count + 1))) + + + _ <- tryJust isErrorCall (retryOnSqliteBusyForTest mempty maxRetryCount throwThenIncrement) + + withVar countVar $ \count -> 
+ count @?= 0 + + , testCase "retryOnSqliteBusy retries on ErrorBusy" $ do + countVar <- newVar (0 :: Int) + + let incrementThenThrowThenIncrement = do + count <- readVar countVar + if count == 0 then + modifyVar countVar (\count -> pure (dupe (count + 1))) >> throwIO errorBusy + else + modifyVar countVar (\count -> pure (dupe (count + 1))) + + _ <- retryOnSqliteBusy mempty rng incrementThenThrowThenIncrement + + withVar countVar $ \count -> + count @?= 2 + + , testCase "retryOnException exponentially backs off" $ do + logMsgsVar <- newVar ([] :: [Log]) + + let maxDelay = 100 + let baseDelay = 1 + let maxRetryCount = 6 + let logger = makeLogger logMsgsVar + + result <- tryJust isErrorBusy (retryOnException isErrorBusy (cmapWithPrio LogSession logger) maxDelay baseDelay maxRetryCount rng (throwIO errorBusy)) + + case result of + Left _ -> do + withVar logMsgsVar $ \logMsgs -> + -- uses log messages to check backoff... + if | (LogSession (Session.LogHieDbRetriesExhausted baseDelay maximumDelay maxRetryCount _) : _) <- logMsgs -> do + baseDelay @?= 64 + maximumDelay @?= 100 + maxRetryCount @?= 0 + | otherwise -> assertFailure "Expected more than 0 log messages" + Right _ -> assertFailure "Expected ErrorBusy exception" + ] diff --git a/ghcide-test/exe/HighlightTests.hs b/ghcide-test/exe/HighlightTests.hs new file mode 100644 index 0000000000..3450404679 --- /dev/null +++ b/ghcide-test/exe/HighlightTests.hs @@ -0,0 +1,78 @@ + +module HighlightTests (tests) where + +import Config +import Control.Monad.IO.Class (liftIO) +import qualified Data.Text as T +import Development.IDE.Types.Location +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "highlight" + [ testWithDummyPluginEmpty "value" $ do + doc <- createDoc "A.hs" "haskell" source + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 3 2) + liftIO $ highlights @?= + [ DocumentHighlight (R 2 0 2 3) (Just DocumentHighlightKind_Read) + , DocumentHighlight (R 3 0 3 3) (Just DocumentHighlightKind_Write) + , DocumentHighlight (R 4 6 4 9) (Just DocumentHighlightKind_Read) + , DocumentHighlight (R 5 22 5 25) (Just DocumentHighlightKind_Read) + ] + , testWithDummyPluginEmpty "type" $ do + doc <- createDoc "A.hs" "haskell" source + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 2 8) + liftIO $ highlights @?= + [ DocumentHighlight (R 2 7 2 10) (Just DocumentHighlightKind_Read) + , DocumentHighlight (R 3 11 3 14) (Just DocumentHighlightKind_Read) + ] + , testWithDummyPluginEmpty "local" $ do + doc <- createDoc "A.hs" "haskell" source + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 6 5) + liftIO $ highlights @?= + [ DocumentHighlight (R 6 4 6 7) (Just DocumentHighlightKind_Write) + , DocumentHighlight (R 6 10 6 13) (Just DocumentHighlightKind_Read) + , DocumentHighlight (R 7 12 7 15) (Just DocumentHighlightKind_Read) + ] + , + testWithDummyPluginEmpty "record" $ do + doc <- createDoc "A.hs" "haskell" recsource + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 4 15) + liftIO $ highlights @?= + [ DocumentHighlight (R 4 8 4 10) (Just DocumentHighlightKind_Write) + , DocumentHighlight (R 4 14 4 20) (Just DocumentHighlightKind_Read) + ] + highlights <- getHighlights doc (Position 3 17) + liftIO $ highlights @?= + [ DocumentHighlight (R 3 17 3 23) (Just DocumentHighlightKind_Write) + , 
DocumentHighlight (R 4 8 4 10) (Just DocumentHighlightKind_Read) + ] + ] + where + source = T.unlines + ["{-# OPTIONS_GHC -Wunused-binds #-}" + ,"module Highlight () where" + ,"foo :: Int" + ,"foo = 3 :: Int" + ,"bar = foo" + ," where baz = let x = foo in x" + ,"baz arg = arg + x" + ," where x = arg" + ] + recsource = T.unlines + ["{-# LANGUAGE RecordWildCards #-}" + ,"{-# OPTIONS_GHC -Wunused-binds #-}" + ,"module Highlight () where" + ,"data Rec = Rec { field1 :: Int, field2 :: Char }" + ,"foo Rec{..} = field2 + field1" + ] diff --git a/ghcide-test/exe/IfaceTests.hs b/ghcide-test/exe/IfaceTests.hs new file mode 100644 index 0000000000..d7dc533550 --- /dev/null +++ b/ghcide-test/exe/IfaceTests.hs @@ -0,0 +1,162 @@ +module IfaceTests (tests) where + +import Config +import Control.Monad.IO.Class (liftIO) +import qualified Data.Text as T +import Development.IDE.GHC.Util +import Development.IDE.Test (configureCheckProject, + expectDiagnostics, + expectNoMoreDiagnostics, + getInterfaceFilesDir) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import System.Directory +import System.FilePath +import System.IO.Extra hiding (withTempDir) +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "Interface loading tests" + [ -- https://github.com/haskell/ghcide/pull/645/ + ifaceErrorTest + , ifaceErrorTest2 + , ifaceErrorTest3 + , ifaceTHTest + ] + + +-- | test that TH reevaluates across interfaces +ifaceTHTest :: TestTree +ifaceTHTest = testWithExtraFiles "iface-th-test" "TH" $ \dir -> do + let aPath = dir "THA.hs" + bPath = dir "THB.hs" + cPath = dir "THC.hs" + + aSource <- liftIO $ readFileUtf8 aPath -- [TH] a :: () + _bSource <- liftIO $ readFileUtf8 bPath -- a :: () + cSource <- liftIO $ readFileUtf8 cPath -- c = a :: () + + cdoc <- createDoc cPath "haskell" cSource + + -- Change [TH]a from () to Bool + liftIO $ writeFileUTF8 aPath (unlines $ init (lines $ T.unpack aSource) ++ ["th_a = [d| a = False|]"]) + + -- Check that the change propagates to C + changeDoc cdoc [TextDocumentContentChangeEvent . InR $ TextDocumentContentChangeWholeDocument cSource] + expectDiagnostics + [("THC.hs", [(DiagnosticSeverity_Error, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'", Just "GHC-83865")]) + ,("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")])] + closeDoc cdoc + +ifaceErrorTest :: TestTree +ifaceErrorTest = testWithExtraFiles "iface-error-test-1" "recomp" $ \dir -> do + configureCheckProject True + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + expectDiagnostics + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")])] -- So what we know P has been loaded + + -- Change y from Int to B + changeDoc bdoc [ TextDocumentContentChangeEvent . InR . 
TextDocumentContentChangeWholeDocument $ + T.unlines [ "module B where", "y :: Bool", "y = undefined"] + ] + -- save so that we can that the error propagates to A + sendNotification SMethod_TextDocumentDidSave (DidSaveTextDocumentParams bdoc Nothing) + + + -- Check that the error propagates to A + expectDiagnostics + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")])] + + -- Check that we wrote the interfaces for B when we saved + hidir <- getInterfaceFilesDir bdoc + hi_exists <- liftIO $ doesFileExist $ hidir "B.hi" + liftIO $ assertBool ("Couldn't find B.hi in " ++ hidir) hi_exists + + pdoc <- openDoc pPath "haskell" + expectDiagnostics + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")]) + ] + changeDoc pdoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ pSource <> "\nfoo = y :: Bool" ] + -- Now in P we have + -- bar = x :: Int + -- foo = y :: Bool + -- HOWEVER, in A... + -- x = y :: Int + -- This is clearly inconsistent, and the expected outcome a bit surprising: + -- - The diagnostic for A has already been received. Ghcide does not repeat diagnostics + -- - P is being typechecked with the last successful artifacts for A. + expectDiagnostics + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")]) + ,("P.hs", [(DiagnosticSeverity_Warning,(6,0), "Top-level binding", Just "GHC-38417")]) + ] + expectNoMoreDiagnostics 2 + +ifaceErrorTest2 :: TestTree +ifaceErrorTest2 = testWithExtraFiles "iface-error-test-2" "recomp" $ \dir -> do + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + pdoc <- createDoc pPath "haskell" pSource + expectDiagnostics + [("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")])] -- So that we know P has been loaded + + -- Change y from Int to B + changeDoc bdoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ + T.unlines ["module B where", "y :: Bool", "y = undefined"]] + + -- Add a new definition to P + changeDoc pdoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ pSource <> "\nfoo = y :: Bool" ] + -- Now in P we have + -- bar = x :: Int + -- foo = y :: Bool + -- HOWEVER, in A... + -- x = y :: Int + expectDiagnostics + -- As in the other test, P is being typechecked with the last successful artifacts for A + -- (ot thanks to -fdeferred-type-errors) + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")]) + ,("P.hs", [(DiagnosticSeverity_Warning, (4, 0), "Top-level binding", Just "GHC-38417")]) + ,("P.hs", [(DiagnosticSeverity_Warning, (6, 0), "Top-level binding", Just "GHC-38417")]) + ] + + expectNoMoreDiagnostics 2 + +ifaceErrorTest3 :: TestTree +ifaceErrorTest3 = testWithExtraFiles "iface-error-test-3" "recomp" $ \dir -> do + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + + -- Change y from Int to B + changeDoc bdoc [TextDocumentContentChangeEvent . InR . 
TextDocumentContentChangeWholeDocument $ T.unlines ["module B where", "y :: Bool", "y = undefined"]] + + -- P should not typecheck, as there are no last valid artifacts for A + _pdoc <- createDoc pPath "haskell" pSource + + -- In this example the interface file for A should not exist (modulo the cache folder) + -- Despite that P still type checks, as we can generate an interface file for A thanks to -fdeferred-type-errors + expectDiagnostics + [("A.hs", [(DiagnosticSeverity_Error, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'", Just "GHC-83865")]) + ,("P.hs", [(DiagnosticSeverity_Warning,(4,0), "Top-level binding", Just "GHC-38417")]) + ] + expectNoMoreDiagnostics 2 diff --git a/ghcide-test/exe/InitializeResponseTests.hs b/ghcide-test/exe/InitializeResponseTests.hs new file mode 100644 index 0000000000..f13344e368 --- /dev/null +++ b/ghcide-test/exe/InitializeResponseTests.hs @@ -0,0 +1,92 @@ + +{-# LANGUAGE DataKinds #-} + +module InitializeResponseTests (tests) where + +import Control.Monad +import Data.List.Extra +import qualified Data.Text as T +import Development.IDE.Plugin.TypeLenses (typeLensCommandId) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Test + +import Config +import Control.Lens ((^.)) +import Development.IDE.Plugin.Test (blockCommandId) +import Test.Hls + +tests :: TestTree +tests = withResource acquire release tests where + + -- these tests document and monitor the evolution of the + -- capabilities announced by the server in the initialize + -- response. Currently the server advertises almost no capabilities + -- at all, in some cases failing to announce capabilities that it + -- actually does provide! Hopefully this will change ... + tests :: IO (TResponseMessage Method_Initialize) -> TestTree + tests getInitializeResponse = + testGroup "initialize response capabilities" + [ chk " text doc sync" _textDocumentSync tds + , chk " hover" _hoverProvider (Just $ InR (HoverOptions (Just False))) + , chk " completion" _completionProvider (Just $ CompletionOptions (Just False) (Just ["."]) Nothing (Just True) Nothing) + , chk "NO signature help" _signatureHelpProvider Nothing + , chk " goto definition" _definitionProvider (Just $ InR (DefinitionOptions (Just False))) + , chk " goto type definition" _typeDefinitionProvider (Just $ InR (InL (TypeDefinitionOptions (Just False)))) + , chk " goto implementation" _implementationProvider (Just $ InR (InL (ImplementationOptions (Just False)))) + , chk " find references" _referencesProvider (Just $ InR (ReferenceOptions (Just False))) + , chk " doc highlight" _documentHighlightProvider (Just $ InR (DocumentHighlightOptions (Just False))) + , chk " doc symbol" _documentSymbolProvider (Just $ InR (DocumentSymbolOptions (Just False) Nothing)) + , chk " workspace symbol" _workspaceSymbolProvider (Just $ InR (WorkspaceSymbolOptions (Just False) (Just False))) + , chk "NO code action" _codeActionProvider Nothing + , chk " code lens" _codeLensProvider (Just $ CodeLensOptions (Just False) (Just True)) + , chk "NO doc formatting" _documentFormattingProvider Nothing + , chk "NO doc range formatting" + _documentRangeFormattingProvider Nothing + , chk "NO doc formatting on typing" + _documentOnTypeFormattingProvider Nothing + , chk "NO renaming" _renameProvider Nothing + , chk "NO doc link" _documentLinkProvider Nothing + , chk "NO color" (^. 
L.colorProvider) Nothing + , chk "NO folding range" _foldingRangeProvider Nothing + , che " execute command" _executeCommandProvider [typeLensCommandId, blockCommandId] + , chk " workspace" (^. L.workspace) (Just $ WorkspaceOptions + { _workspaceFolders = Just WorkspaceFoldersServerCapabilities + { _supported = Just True + , _changeNotifications = Just (InR True) + } + , _fileOperations = Nothing + }) + , chk "NO experimental" (^. L.experimental) Nothing + ] where + + tds = Just (InL (TextDocumentSyncOptions + { _openClose = Just True + , _change = Just TextDocumentSyncKind_Incremental + , _willSave = Nothing + , _willSaveWaitUntil = Nothing + , _save = Just (InR $ SaveOptions {_includeText = Nothing})})) + + chk :: (Eq a, Show a) => TestName -> (ServerCapabilities -> a) -> a -> TestTree + chk title getActual expected = + testCase title $ getInitializeResponse >>= \ir -> expected @=? (getActual . innerCaps) ir + + che :: TestName -> (ServerCapabilities -> Maybe ExecuteCommandOptions) -> [T.Text] -> TestTree + che title getActual expected = testCase title $ do + ir <- getInitializeResponse + ExecuteCommandOptions {_commands = commands} <- case getActual $ innerCaps ir of + Just eco -> pure eco + Nothing -> assertFailure "Was expecting Just ExecuteCommandOptions, got Nothing" + let commandNames = (!! 2) . T.splitOn ":" <$> commands + zipWithM_ (\e o -> T.isSuffixOf e o @? show (e,o)) (sort expected) (sort commandNames) + + innerCaps :: TResponseMessage Method_Initialize -> ServerCapabilities + innerCaps (TResponseMessage _ _ (Right (InitializeResult c _))) = c + innerCaps (TResponseMessage _ _ (Left _)) = error "Initialization error" + + acquire :: IO (TResponseMessage Method_Initialize) + acquire = run initializeResponse + + release :: TResponseMessage Method_Initialize -> IO () + release = mempty + diff --git a/ghcide-test/exe/LogType.hs b/ghcide-test/exe/LogType.hs new file mode 100644 index 0000000000..476ea5bc27 --- /dev/null +++ b/ghcide-test/exe/LogType.hs @@ -0,0 +1,21 @@ +module LogType (Log(..)) where + +import qualified Development.IDE.LSP.Notifications as Notifications +import qualified Development.IDE.Main as IDE +import qualified Development.IDE.Plugin.HLS.GhcIde as Ghcide + +import Ide.Logger (Pretty (pretty)) +import Language.LSP.VFS (VfsLog) + +data Log + = LogGhcIde Ghcide.Log + | LogIDEMain IDE.Log + | LogVfs VfsLog + | LogNotifications Notifications.Log + +instance Pretty Log where + pretty = \case + LogGhcIde log -> pretty log + LogIDEMain log -> pretty log + LogVfs log -> pretty log + LogNotifications log -> pretty log diff --git a/ghcide-test/exe/Main.hs b/ghcide-test/exe/Main.hs new file mode 100644 index 0000000000..c8d927072c --- /dev/null +++ b/ghcide-test/exe/Main.hs @@ -0,0 +1,106 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{- + NOTE On enforcing determinism + + The tests below use two mechanisms to enforce deterministic LSP sequences: + + 1. Progress reporting: waitForProgress(Begin|Done) + 2. Diagnostics: expectDiagnostics + + Either is fine, but diagnostics are generally more reliable. + + Mixing them both in the same test is NOT FINE as it will introduce race + conditions since multiple interleavings are possible. In other words, + the sequence of diagnostics and progress reports is not deterministic. + For example: + + < do something > + waitForProgressDone + expectDiagnostics [...] 
+
+  - When the diagnostics arrive after the progress done message, as they usually do, the test will pass
+  - When the diagnostics arrive before the progress done message, which can occasionally happen on a slow machine, the test will time out
+
+  Therefore, avoid mixing both progress reports and diagnostics in the same test
+ -}
+
+
+module Main (main) where
+
+import qualified HieDbRetry
+import Test.Tasty
+import Test.Tasty.Ingredients.Rerun
+
+import AsyncTests
+import BootTests
+import ClientSettingsTests
+import CodeLensTests
+import CompletionTests
+import CPPTests
+import CradleTests
+import DependentFileTest
+import DiagnosticTests
+import ExceptionTests
+import FindDefinitionAndHoverTests
+import FindImplementationAndHoverTests
+import GarbageCollectionTests
+import HaddockTests
+import HighlightTests
+import IfaceTests
+import InitializeResponseTests
+import LogType ()
+import NonLspCommandLine
+import OpenCloseTest
+import OutlineTests
+import PluginSimpleTests
+import PositionMappingTests
+import PreprocessorTests
+import ReferenceTests
+import ResolveTests
+import RootUriTests
+import SafeTests
+import SymlinkTests
+import THTests
+import UnitTests
+import WatchedFileTests
+
+main :: IO ()
+main = do
+  -- We mess with env vars so run single-threaded.
+  defaultMainWithRerun $ testGroup "ghcide"
+    [ OpenCloseTest.tests
+    , InitializeResponseTests.tests
+    , CompletionTests.tests
+    , CPPTests.tests
+    , DiagnosticTests.tests
+    , CodeLensTests.tests
+    , OutlineTests.tests
+    , HighlightTests.tests
+    , FindDefinitionAndHoverTests.tests
+    , FindImplementationAndHoverTests.tests
+    , PluginSimpleTests.tests
+    , PreprocessorTests.tests
+    , THTests.tests
+    , SymlinkTests.tests
+    , SafeTests.tests
+    , UnitTests.tests
+    , HaddockTests.tests
+    , PositionMappingTests.tests
+    , WatchedFileTests.tests
+    , CradleTests.tests
+    , DependentFileTest.tests
+    , NonLspCommandLine.tests
+    , IfaceTests.tests
+    , BootTests.tests
+    , RootUriTests.tests
+    , AsyncTests.tests
+    , ClientSettingsTests.tests
+    , ReferenceTests.tests
+    , ResolveTests.tests
+    , GarbageCollectionTests.tests
+    , HieDbRetry.tests
+    , ExceptionTests.tests
+    ]
diff --git a/ghcide-test/exe/NonLspCommandLine.hs b/ghcide-test/exe/NonLspCommandLine.hs
new file mode 100644
index 0000000000..b2b41071d4
--- /dev/null
+++ b/ghcide-test/exe/NonLspCommandLine.hs
@@ -0,0 +1,51 @@
+
+module NonLspCommandLine (tests) where
+
+import Control.Monad ((>=>))
+import Data.Foldable (for_)
+import Development.Shake (getDirectoryFilesIO)
+import System.Directory (copyFile, createDirectoryIfMissing)
+import System.Directory.Extra (canonicalizePath)
+import System.Environment.Blank (setEnv)
+import System.Exit (ExitCode (ExitSuccess))
+import System.FilePath (takeDirectory, (</>))
+import qualified System.IO.Extra
+import System.Process.Extra (CreateProcess (cwd), proc,
+                             readCreateProcessWithExitCode)
+import Test.Tasty
+import Test.Tasty.HUnit
+import Config (testDataDir)
+
+
+-- A test to ensure that the command line ghcide workflow stays working
+tests :: TestTree
+tests = testGroup "ghcide command line"
+  [ testCase "works" $ withTempDir $ \dir -> do
+      ghcide <- locateGhcideExecutable
+      copyTestDataFiles dir "multi"
+      let cmd = (proc ghcide ["a/A.hs"]){cwd = Just dir}
+
+      setEnv "HOME" "/homeless-shelter" False
+
+      (ec, _, _) <- readCreateProcessWithExitCode cmd ""
+
+      ec @?= ExitSuccess
+  ]
+
+locateGhcideExecutable :: IO FilePath
+locateGhcideExecutable = pure "ghcide"
+
+-- | Version of 'System.IO.Extra.withTempDir' that canonicalizes the path
+-- Which we need to do 
on macOS since the $TMPDIR can be in @/private/var@ or +-- @/var@ +withTempDir :: (FilePath -> IO a) -> IO a +withTempDir f = System.IO.Extra.withTempDir $ canonicalizePath >=> f + + +copyTestDataFiles :: FilePath -> FilePath -> IO () +copyTestDataFiles dir prefix = do + -- Copy all the test data files to the temporary workspace + testDataFiles <- getDirectoryFilesIO (testDataDir prefix) ["//*"] + for_ testDataFiles $ \f -> do + createDirectoryIfMissing True $ dir takeDirectory f + copyFile (testDataDir prefix f) (dir f) diff --git a/ghcide-test/exe/OpenCloseTest.hs b/ghcide-test/exe/OpenCloseTest.hs new file mode 100644 index 0000000000..83a85520f2 --- /dev/null +++ b/ghcide-test/exe/OpenCloseTest.hs @@ -0,0 +1,20 @@ + +module OpenCloseTest (tests) where + +import Control.Applicative.Combinators +import Control.Monad +import Language.LSP.Protocol.Message +import Language.LSP.Test +-- import Test.QuickCheck.Instances () +import Config (testWithDummyPluginEmpty) +import Test.Hls (waitForProgressBegin, + waitForProgressDone) +import Test.Tasty + +tests :: TestTree +tests = testWithDummyPluginEmpty "open close" $ do + doc <- createDoc "Testing.hs" "haskell" "" + void (skipManyTill anyMessage $ message SMethod_WindowWorkDoneProgressCreate) + waitForProgressBegin + closeDoc doc + waitForProgressDone diff --git a/ghcide-test/exe/OutlineTests.hs b/ghcide-test/exe/OutlineTests.hs new file mode 100644 index 0000000000..0d336a6bd0 --- /dev/null +++ b/ghcide-test/exe/OutlineTests.hs @@ -0,0 +1,153 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module OutlineTests (tests) where + +import Config +import Control.Monad.IO.Class (liftIO) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcVersion (..), ghcVersion) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +import Test.Hls.FileSystem (file, text) +import Test.Tasty +import Test.Tasty.HUnit + +testSymbols :: (HasCallStack) => TestName -> FilePath -> [Text] -> [DocumentSymbol] -> TestTree +testSymbols testName path content expectedSymbols = + testCase testName $ runWithDummyPlugin (mkIdeTestFs [file path (text $ T.unlines content)]) $ do + docId <- openDoc path "haskell" + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Right expectedSymbols + +testSymbolsA :: (HasCallStack) => TestName -> [Text] -> [DocumentSymbol] -> TestTree +testSymbolsA testName content expectedSymbols = + testSymbols testName "A.hs" content expectedSymbols + +tests :: TestTree +tests = + testGroup + "outline" + [ testSymbolsA + "type class:" + ["module A where", "class A a where a :: a -> Bool"] + [ moduleSymbol + "A" + (R 0 7 0 8) + [ classSymbol + "A a" + (R 1 0 1 30) + [docSymbol' "a" SymbolKind_Method (R 1 16 1 30) (R 1 16 1 17)] + ] + ], + testSymbolsA + "type class instance " + ["class A a where", "instance A () where"] + [ classSymbol "A a" (R 0 0 0 15) [], + docSymbol "A ()" SymbolKind_Interface (R 1 0 1 19) + ], + testSymbolsA "type family" ["{-# language TypeFamilies #-}", "type family A"] [docSymbolD "A" "type family" SymbolKind_Function (R 1 0 1 13)], + testSymbolsA + "type family instance " + ["{-# language TypeFamilies #-}", "type family A a", "type instance A () = ()"] + [ docSymbolD "A a" "type family" SymbolKind_Function (R 1 0 1 15), + docSymbol "A ()" SymbolKind_Interface (R 2 0 2 23) + ], + testSymbolsA "data family" ["{-# language TypeFamilies #-}", "data family 
A"] [docSymbolD "A" "data family" SymbolKind_Function (R 1 0 1 (if ghcVersion >= GHC910 then 13 else 11))], + testSymbolsA + "data family instance " + ["{-# language TypeFamilies #-}", "data family A a", "data instance A () = A ()"] + [ docSymbolD "A a" "data family" SymbolKind_Function (R 1 0 1 (if ghcVersion >= GHC910 then 15 else 11)), + docSymbol "A ()" SymbolKind_Interface (R 2 0 2 25) + ], + testSymbolsA "constant" ["a = ()"] [docSymbol "a" SymbolKind_Function (R 0 0 0 6)], + testSymbolsA "pattern" ["Just foo = Just 21"] [docSymbol "Just foo" SymbolKind_Function (R 0 0 0 18)], + testSymbolsA "pattern with type signature" ["{-# language ScopedTypeVariables #-}", "a :: () = ()"] [docSymbol "a :: ()" SymbolKind_Function (R 1 0 1 12)], + testSymbolsA "function" ["a _x = ()"] [docSymbol "a" SymbolKind_Function (R 0 0 0 9)], + testSymbolsA "type synonym" ["type A = Bool"] [docSymbol' "A" SymbolKind_TypeParameter (R 0 0 0 13) (R 0 5 0 6)], + testSymbolsA "datatype" ["data A = C"] [docSymbolWithChildren "A" SymbolKind_Struct (R 0 0 0 10) [docSymbol "C" SymbolKind_Constructor (R 0 9 0 10)]], + testSymbolsA + "record fields" + ["data A = B {", " x :: Int", " , y :: Int}"] + [ docSymbolWithChildren + "A" + SymbolKind_Struct + (R 0 0 2 13) + [ docSymbolWithChildren' + "B" + SymbolKind_Constructor + (R 0 9 2 13) + (R 0 9 0 10) + [ docSymbol "x" SymbolKind_Field (R 1 2 1 3), + docSymbol "y" SymbolKind_Field (R 2 4 2 5) + ] + ] + ], + testSymbolsA + "import" + ["import Data.Maybe ()"] + [ docSymbolWithChildren + "imports" + SymbolKind_Module + (R 0 0 0 20) + [ docSymbol "import Data.Maybe" SymbolKind_Module (R 0 0 0 20) + ] + ], + testSymbolsA + "multiple import" + ["", "import Data.Maybe ()", "", "import Control.Exception ()", ""] + [ docSymbolWithChildren + "imports" + SymbolKind_Module + (R 1 0 3 27) + [ docSymbol "import Data.Maybe" SymbolKind_Module (R 1 0 1 20), + docSymbol "import Control.Exception" SymbolKind_Module (R 3 0 3 27) + ] + ], + testSymbolsA + "foreign import" + [ "{-# language ForeignFunctionInterface #-}", + "foreign import ccall \"a\" a :: Int" + ] + [docSymbolD "a" "import" SymbolKind_Object (R 1 0 1 33)], + testSymbolsA + "foreign export" + [ "{-# language ForeignFunctionInterface #-}", + "foreign export ccall odd :: Int -> Bool" + ] + [docSymbolD "odd" "export" SymbolKind_Object (R 1 0 1 39)] + ] + where + docSymbol name kind loc = + DocumentSymbol name Nothing kind Nothing Nothing loc loc Nothing + docSymbol' name kind loc selectionLoc = + DocumentSymbol name Nothing kind Nothing Nothing loc selectionLoc Nothing + docSymbolD name detail kind loc = + DocumentSymbol name (Just detail) kind Nothing Nothing loc loc Nothing + docSymbolWithChildren name kind loc cc = + DocumentSymbol name Nothing kind Nothing Nothing loc loc (Just cc) + docSymbolWithChildren' name kind loc selectionLoc cc = + DocumentSymbol name Nothing kind Nothing Nothing loc selectionLoc (Just cc) + moduleSymbol name loc cc = + DocumentSymbol + name + Nothing + SymbolKind_File + Nothing + Nothing + (R 0 0 maxBound 0) + loc + (Just cc) + classSymbol name loc cc = + DocumentSymbol + name + (Just "class") + SymbolKind_Interface + Nothing + Nothing + loc + loc + (Just cc) diff --git a/ghcide-test/exe/PluginSimpleTests.hs b/ghcide-test/exe/PluginSimpleTests.hs new file mode 100644 index 0000000000..c160d2461c --- /dev/null +++ b/ghcide-test/exe/PluginSimpleTests.hs @@ -0,0 +1,46 @@ + +module PluginSimpleTests (tests) where + +import Config +import Control.Monad.IO.Class (liftIO) +import Development.IDE.Test 
(expectDiagnostics) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +import System.FilePath +import Test.Tasty + +tests :: TestTree +tests = + -- Build profile: -w ghc-9.4.2 -O1 + -- In order, the following will be built (use -v for more details): + -- - ghc-typelits-natnormalise-0.7.7 (lib) (requires build) + -- - ghc-typelits-knownnat-0.7.7 (lib) (requires build) + -- - plugin-1.0.0 (lib) (first run) + -- Starting ghc-typelits-natnormalise-0.7.7 (lib) + -- Building ghc-typelits-natnormalise-0.7.7 (lib) + + -- Failed to build ghc-typelits-natnormalise-0.7.7. + -- Build log ( + -- C:\cabal\logs\ghc-9.4.2\ghc-typelits-_-0.7.7-3f036a52a0d9bfc3389d1852a87da2e87c6de2e4.log + -- ): + -- Preprocessing library for ghc-typelits-natnormalise-0.7.7.. + -- Building library for ghc-typelits-natnormalise-0.7.7.. + -- [1 of 3] Compiling GHC.TypeLits.Normalise.SOP ( src\GHC\TypeLits\Normalise\SOP.hs, dist\build\GHC\TypeLits\Normalise\SOP.o ) + -- [2 of 3] Compiling GHC.TypeLits.Normalise.Unify ( src\GHC\TypeLits\Normalise\Unify.hs, dist\build\GHC\TypeLits\Normalise\Unify.o ) + -- [3 of 3] Compiling GHC.TypeLits.Normalise ( src-ghc-9.4\GHC\TypeLits\Normalise.hs, dist\build\GHC\TypeLits\Normalise.o ) + -- C:\tools\ghc-9.4.2\lib\../mingw/bin/llvm-ar.exe: error: dist\build\objs-5156\libHSghc-typelits-_-0.7.7-3f036a52a0d9bfc3389d1852a87da2e87c6de2e4.a: No such file or directory + + -- Error: cabal: Failed to build ghc-typelits-natnormalise-0.7.7 (which is + -- required by plugin-1.0.0). See the build log above for details. + testWithExtraFiles "simple plugin" "plugin-knownnat" $ \dir -> do + _ <- openDoc (dir "KnownNat.hs") "haskell" + liftIO $ writeFile (dir"hie.yaml") + "cradle: {cabal: [{path: '.', component: 'lib:plugin'}]}" + + expectDiagnostics + [ ( "KnownNat.hs", + [(DiagnosticSeverity_Error, (9, 15), "Variable not in scope: c", Just "GHC-88464")] + ) + ] diff --git a/ghcide-test/exe/PositionMappingTests.hs b/ghcide-test/exe/PositionMappingTests.hs new file mode 100644 index 0000000000..dfd9b0374b --- /dev/null +++ b/ghcide-test/exe/PositionMappingTests.hs @@ -0,0 +1,219 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} + +module PositionMappingTests (tests) where + +import qualified Data.EnumMap.Strict as EM +import qualified Data.Text as T +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.PositionMapping (PositionResult (..), + fromCurrent, + positionResultToMaybe, + toCurrent, + toCurrentPosition) +import Development.IDE.Types.Location +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.VFS (applyChange) +import Test.QuickCheck +-- import Test.QuickCheck.Instances () +import Control.Arrow (second) +import Data.Functor.Identity (runIdentity) +import Data.Text (Text) +import Development.IDE.Core.Shake (updatePositionMappingHelper) +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck + +enumMapMappingTest :: TestTree +enumMapMappingTest = testCase "enumMapMappingTest" $ do + let + mkCE :: UInt -> UInt -> UInt -> UInt -> Text -> TextDocumentContentChangeEvent + mkCE l1 c1 l2 c2 = mkChangeEvent (Range (Position l1 c1) (Position l2 c2)) + events :: [(Int32, [TextDocumentContentChangeEvent])] + events = map (second return) [(0, mkCE 0 0 0 0 ""), (1, mkCE 0 1 0 1 " "), (2, mkCE 0 2 0 2 " "), (3, 
mkCE 0 3 0 3 " "), (4, mkCE 0 4 0 4 " "), (5, mkCE 0 5 0 5 " ")] + finalMap = Prelude.foldl (\m (i, e) -> updatePositionMappingHelper i e m) mempty events + let updatePose fromPos = do + mapping <- snd <$> EM.lookup 0 finalMap + toCurrentPosition mapping fromPos + updatePose (Position 0 4) @?= Just (Position 0 9) + updatePose (Position 0 5) @?= Just (Position 0 10) + +mkChangeEvent :: Range -> Text -> TextDocumentContentChangeEvent +mkChangeEvent r t = TextDocumentContentChangeEvent $ InL + TextDocumentContentChangePartial {_range = r, _rangeLength = Nothing, _text = t} + +tests :: TestTree +tests = + testGroup "position mapping" + [ + enumMapMappingTest + , testGroup "toCurrent" + [ testCase "before" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 0) @?= PositionExact (Position 0 0) + , testCase "after, same line, same length" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 3) @?= PositionExact (Position 0 3) + , testCase "after, same line, increased length" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 0 3) @?= PositionExact (Position 0 4) + , testCase "after, same line, decreased length" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "a" + (Position 0 3) @?= PositionExact (Position 0 2) + , testCase "after, next line, no newline" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 1 3) @?= PositionExact (Position 1 3) + , testCase "after, next line, newline" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\ndef" + (Position 1 0) @?= PositionExact (Position 2 0) + , testCase "after, same line, newline" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd" + (Position 0 4) @?= PositionExact (Position 1 2) + , testCase "after, same line, newline + newline at end" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd\n" + (Position 0 4) @?= PositionExact (Position 2 1) + , testCase "after, same line, newline + newline at end" $ + toCurrent + (Range (Position 0 1) (Position 0 1)) + "abc" + (Position 0 1) @?= PositionExact (Position 0 4) + ] + , testGroup "fromCurrent" + [ testCase "before" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 0) @?= PositionExact (Position 0 0) + , testCase "after, same line, same length" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 3) @?= PositionExact (Position 0 3) + , testCase "after, same line, increased length" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 0 4) @?= PositionExact (Position 0 3) + , testCase "after, same line, decreased length" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "a" + (Position 0 2) @?= PositionExact (Position 0 3) + , testCase "after, next line, no newline" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 1 3) @?= PositionExact (Position 1 3) + , testCase "after, next line, newline" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\ndef" + (Position 2 0) @?= PositionExact (Position 1 0) + , testCase "after, same line, newline" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd" + (Position 1 2) @?= PositionExact (Position 0 4) + , testCase "after, same line, newline + newline at end" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd\n" + (Position 2 1) @?= PositionExact (Position 0 4) + , testCase "after, same line, newline + newline at end" $ + fromCurrent + (Range (Position 0 1) (Position 0 1)) + "abc" + (Position 0 
4) @?= PositionExact (Position 0 1) + ] + , adjustOption (\(QuickCheckTests i) -> QuickCheckTests (max 1000 i)) $ testGroup "properties" + [ testProperty "fromCurrent r t <=< toCurrent r t" $ do + -- Note that it is important to use suchThatMap on all values at once + -- instead of only using it on the position. Otherwise you can get + -- into situations where there is no position that can be mapped back + -- for the edit which will result in QuickCheck looping forever. + let gen = do + rope <- genRope + range <- genRange rope + PrintableText replacement <- arbitrary + oldPos <- genPosition rope + pure (range, replacement, oldPos) + forAll + (suchThatMap gen + (\(range, replacement, oldPos) -> positionResultToMaybe $ (range, replacement, oldPos,) <$> toCurrent range replacement oldPos)) $ + \(range, replacement, oldPos, newPos) -> + fromCurrent range replacement newPos === PositionExact oldPos + , testProperty "toCurrent r t <=< fromCurrent r t" $ do + let gen = do + rope <- genRope + range <- genRange rope + PrintableText replacement <- arbitrary + let newRope = runIdentity $ applyChange mempty rope $ mkChangeEvent range replacement + newPos <- genPosition newRope + pure (range, replacement, newPos) + forAll + (suchThatMap gen + (\(range, replacement, newPos) -> positionResultToMaybe $ (range, replacement, newPos,) <$> fromCurrent range replacement newPos)) $ + \(range, replacement, newPos, oldPos) -> + toCurrent range replacement oldPos === PositionExact newPos + ] + ] + +newtype PrintableText = PrintableText { getPrintableText :: T.Text } + deriving Show + +instance Arbitrary PrintableText where + arbitrary = PrintableText . T.pack . getPrintableString <$> arbitrary + +genRope :: Gen Rope +genRope = Rope.fromText . getPrintableText <$> arbitrary + +genPosition :: Rope -> Gen Position +genPosition r = do + let rows :: Int = fromIntegral $ Rope.lengthInLines r + row <- choose (0, max 0 $ rows - 1) `suchThat` inBounds @UInt + let columns = T.length (nthLine (fromIntegral row) r) + column <- choose (0, max 0 $ columns - 1) `suchThat` inBounds @UInt + pure $ Position (fromIntegral row) (fromIntegral column) + +genRange :: Rope -> Gen Range +genRange r = do + let rows :: Int = fromIntegral $ Rope.lengthInLines r + startPos@(Position startLine startColumn) <- genPosition r + let maxLineDiff = max 0 $ rows - 1 - fromIntegral startLine + endLine <- choose (fromIntegral startLine, fromIntegral startLine + maxLineDiff) `suchThat` inBounds @UInt + let columns = T.length (nthLine (fromIntegral endLine) r) + endColumn <- + if fromIntegral startLine == endLine + then choose (fromIntegral startColumn, columns) + else choose (0, max 0 $ columns - 1) + `suchThat` inBounds @UInt + pure $ Range startPos (Position (fromIntegral endLine) (fromIntegral endColumn)) + +inBounds :: forall b a . (Integral a, Integral b, Bounded b) => a -> Bool +inBounds a = let i = toInteger a in i <= toInteger (maxBound @b) && i >= toInteger (minBound @b) + +-- | Get the ith line of a rope, starting from 0. Trailing newline not included. +nthLine :: Int -> Rope -> T.Text +nthLine i r + | Rope.null r = "" + | otherwise = Rope.lines r !! 
i diff --git a/ghcide-test/exe/PreprocessorTests.hs b/ghcide-test/exe/PreprocessorTests.hs new file mode 100644 index 0000000000..24e2e80a10 --- /dev/null +++ b/ghcide-test/exe/PreprocessorTests.hs @@ -0,0 +1,27 @@ + +module PreprocessorTests (tests) where + +import qualified Data.Text as T +import Development.IDE.Test (expectDiagnostics) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +-- import Test.QuickCheck.Instances () +import Config +import Test.Tasty + +tests :: TestTree +tests = testWithDummyPluginEmpty "preprocessor" $ do + let content = + T.unlines + [ "{-# OPTIONS_GHC -F -pgmF=ghcide-test-preprocessor #-}" + , "module Testing where" + , "y = x + z" -- plugin replaces x with y, making this have only one diagnostic + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs", + [(DiagnosticSeverity_Error, (2, 8), "Variable not in scope: z", Nothing)] -- TODO: Why doesn't this work with expected code "GHC-88464"? + ) + ] diff --git a/ghcide-test/exe/Progress.hs b/ghcide-test/exe/Progress.hs new file mode 100644 index 0000000000..08ad03c78b --- /dev/null +++ b/ghcide-test/exe/Progress.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE PackageImports #-} +module Progress (tests) where + +import Control.Concurrent.STM +import Data.Foldable (for_) +import qualified Data.HashMap.Strict as Map +import Development.IDE (NormalizedFilePath) +import Development.IDE.Core.ProgressReporting +import qualified "list-t" ListT +import qualified StmContainers.Map as STM +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "Progress" + [ reportProgressTests + ] + +data InProgressModel = InProgressModel { + done, todo :: Int, + current :: Map.HashMap NormalizedFilePath Int +} + +reportProgressTests :: TestTree +reportProgressTests = testGroup "recordProgress" + [ test "addNew" addNew + , test "increase" increase + , test "decrease" decrease + , test "done" done + ] + where + p0 = pure $ InProgressModel 0 0 mempty + addNew = recordProgressModel "A" succ p0 + increase = recordProgressModel "A" succ addNew + decrease = recordProgressModel "A" succ increase + done = recordProgressModel "A" pred decrease + recordProgressModel key change state = + model state $ \st -> recordProgress st key change + model stateModelIO k = do + state <- fromModel =<< stateModelIO + _ <- k state + toModel state + test name p = testCase name $ do + InProgressModel{..} <- p + (done, todo) @?= (length (filter (==0) (Map.elems current)), Map.size current) + +fromModel :: InProgressModel -> IO InProgressState +fromModel InProgressModel{..} = do + doneVar <- newTVarIO done + todoVar <- newTVarIO todo + currentVar <- STM.newIO + atomically $ for_ (Map.toList current) $ \(k,v) -> STM.insert v k currentVar + return InProgressState{..} + +toModel :: InProgressState -> IO InProgressModel +toModel InProgressState{..} = atomically $ do + done <- readTVar doneVar + todo <- readTVar todoVar + current <- Map.fromList <$> ListT.toList (STM.listT currentVar) + return InProgressModel{..} diff --git a/ghcide-test/exe/ReferenceTests.hs b/ghcide-test/exe/ReferenceTests.hs new file mode 100644 index 0000000000..758506e54d --- /dev/null +++ b/ghcide-test/exe/ReferenceTests.hs @@ -0,0 +1,269 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} + + +module ReferenceTests (tests) where + +import Control.Applicative.Combinators +import qualified Control.Lens 
as Lens +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Data.List.Extra +import qualified Data.Set as Set +import Development.IDE.Types.Location +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import System.Directory +-- import Test.QuickCheck.Instances () +import Config +import Control.Lens ((^.)) +import qualified Data.Aeson as A +import Data.Default (def) +import Data.Tuple.Extra +import GHC.TypeLits (symbolVal) +import Ide.PluginUtils (toAbsolute) +import Ide.Types +import System.FilePath (addTrailingPathSeparator, + ()) +import Test.Hls (BrokenBehavior (..), + ExpectBroken (..), + FromServerMessage' (..), + SMethod (..), + TCustomMessage (..), + TNotificationMessage (..), + unCurrent) +import Test.Hls.FileSystem (copyDir) +import Test.Tasty +import Test.Tasty.HUnit + + +tests :: TestTree +tests = testGroup "references" + [ testGroup "can get references to FOIs" + [ referenceTest "can get references to symbols" + ("References.hs", 4, 7) + YesIncludeDeclaration + [ ("References.hs", 4, 6) + , ("References.hs", 6, 0) + , ("References.hs", 6, 14) + , ("References.hs", 9, 7) + , ("References.hs", 10, 11) + ] + + , referenceTest "can get references to data constructor" + ("References.hs", 13, 2) + YesIncludeDeclaration + [ ("References.hs", 13, 2) + , ("References.hs", 16, 14) + , ("References.hs", 19, 21) + ] + + , referenceTest "getting references works in the other module" + ("OtherModule.hs", 6, 0) + YesIncludeDeclaration + [ ("OtherModule.hs", 6, 0) + , ("OtherModule.hs", 8, 16) + ] + + , referenceTest "getting references works in the Main module" + ("Main.hs", 9, 0) + YesIncludeDeclaration + [ ("Main.hs", 9, 0) + , ("Main.hs", 10, 4) + ] + + , referenceTest "getting references to main works" + ("Main.hs", 5, 0) + YesIncludeDeclaration + [ ("Main.hs", 4, 0) + , ("Main.hs", 5, 0) + ] + + , referenceTest "can get type references" + ("Main.hs", 9, 9) + YesIncludeDeclaration + [ ("Main.hs", 9, 0) + , ("Main.hs", 9, 9) + , ("Main.hs", 10, 0) + ] + + -- TODO: references provider does not respect includeDeclaration parameter + , referenceTestExpectFail "works when we ask to exclude declarations" + ("References.hs", 4, 7) + NoExcludeDeclaration + (BrokenIdeal + [ ("References.hs", 6, 0) + , ("References.hs", 6, 14) + , ("References.hs", 9, 7) + , ("References.hs", 10, 11) + ] + ) + (BrokenCurrent + [ ("References.hs", 4, 6) + , ("References.hs", 6, 0) + , ("References.hs", 6, 14) + , ("References.hs", 9, 7) + , ("References.hs", 10, 11) + ] + ) + ] + + , testGroup "can get references to non FOIs" + [ referenceTest "references to symbol defined in a module we import" + ("References.hs", 22, 4) + YesIncludeDeclaration + [ ("References.hs", 22, 4) + , ("OtherModule.hs", 0, 20) + , ("OtherModule.hs", 4, 0) + ] + + , referenceTest "references in modules that import us to symbols we define" + ("OtherModule.hs", 4, 0) + YesIncludeDeclaration + [ ("References.hs", 22, 4) + , ("OtherModule.hs", 0, 20) + , ("OtherModule.hs", 4, 0) + ] + + , referenceTest "references to symbol defined in a module we import transitively" + ("References.hs", 24, 4) + YesIncludeDeclaration + [ ("References.hs", 24, 4) + , ("OtherModule.hs", 0, 48) + , ("OtherOtherModule.hs", 2, 0) + ] + + , referenceTest "references in modules that transitively use symbols we define" + ("OtherOtherModule.hs", 2, 0) + YesIncludeDeclaration + [ 
("References.hs", 24, 4) + , ("OtherModule.hs", 0, 48) + , ("OtherOtherModule.hs", 2, 0) + ] + + , referenceTest "type references to other modules" + ("Main.hs", 12, 10) + YesIncludeDeclaration + [ ("Main.hs", 12, 7) + , ("Main.hs", 13, 0) + , ("References.hs", 12, 5) + , ("References.hs", 16, 0) + ] + ] + -- Fields.hs does not depend on Main.hs + -- so we can only find references in Fields.hs + , testGroup "references to record fields" + [ referenceTest "references record fields in the same file" + ("Fields.hs", 5, 4) + YesIncludeDeclaration + [ ("Fields.hs", 5, 4) + , ("Fields.hs", 10, 14) + , ("Fields.hs", 13, 14) + ] + + -- Main.hs depends on Fields.hs, so we can find references + -- from Main.hs to Fields.hs + , referenceTest "references record fields cross modules" + ("Main.hs", 16, 24) + YesIncludeDeclaration + [ ("Fields.hs", 5, 4) + , ("Fields.hs", 10, 14) + , ("Fields.hs", 13, 14) + , ("Main.hs", 16, 24) + ] + ] + ] + +-- | When we ask for all references to symbol "foo", should the declaration "foo +-- = 2" be among the references returned? +data IncludeDeclaration = + YesIncludeDeclaration + | NoExcludeDeclaration + +getReferences' :: SymbolLocation -> IncludeDeclaration -> Session [Location] +getReferences' (file, l, c) includeDeclaration = do + doc <- openDoc file "haskell" + getReferences doc (Position l c) $ toBool includeDeclaration + where toBool YesIncludeDeclaration = True + toBool NoExcludeDeclaration = False + + + +referenceTestSession :: String -> FilePath -> [FilePath] -> (FilePath -> Session ()) -> TestTree +referenceTestSession name thisDoc docs' f = do + testWithDummyPlugin' name (mkIdeTestFs [copyDir "references"]) $ \fs -> do + let rootDir = addTrailingPathSeparator fs + -- needed to build whole project indexing + configureCheckProject True + -- need to get the real paths through links + docs <- mapM (liftIO . canonicalizePath . (fs )) $ delete thisDoc $ nubOrd docs' + -- Initial Index + docid <- openDoc thisDoc "haskell" + + liftIO $ putStrLn $ "docs:" <> show docs + let + -- todo wait for docs + loop :: [FilePath] -> Session () + loop [] = pure () + loop docs = do + + doc <- skipManyTill anyMessage $ referenceReady (`elem` docs) + loop (delete doc docs) + loop docs + f rootDir + closeDoc docid + +-- | Given a location, lookup the symbol and all references to it. Make sure +-- they are the ones we expect. +referenceTest :: (HasCallStack) => String -> SymbolLocation -> IncludeDeclaration -> [SymbolLocation] -> TestTree +referenceTest name loc includeDeclaration expected = + referenceTestSession name (fst3 loc) docs $ \rootDir -> do + actual <- getReferences' loc includeDeclaration + liftIO $ expectSameLocations rootDir actual expected + where + docs = map fst3 expected + +referenceTestExpectFail + :: (HasCallStack) + => String + -> SymbolLocation + -> IncludeDeclaration + -> ExpectBroken 'Ideal [SymbolLocation] + -> ExpectBroken 'Current [SymbolLocation] + -> TestTree +referenceTestExpectFail name loc includeDeclaration _ = + referenceTest name loc includeDeclaration . unCurrent + +type SymbolLocation = (FilePath, UInt, UInt) + +expectSameLocations :: (HasCallStack) => FilePath -> [Location] -> [SymbolLocation] -> Assertion +expectSameLocations rootDir actual expected = do + let actual' = + Set.map (\location -> (location ^. L.uri + , location ^. L.range . L.start . L.line . Lens.to fromIntegral + , location ^. L.range . L.start . L.character . 
Lens.to fromIntegral)) + $ Set.fromList actual + expected' <- Set.fromList <$> + (forM expected $ \(file, l, c) -> do + fp <- canonicalizePath $ toAbsolute rootDir file + return (filePathToUri fp, l, c)) + actual' @?= expected' + + +-- todo find where to put this in hls +configureCheckProject :: Bool -> Session () +configureCheckProject overrideCheckProject = setConfigSection "haskell" (A.toJSON $ def{checkProject = overrideCheckProject}) +referenceReady :: (FilePath -> Bool) -> Session FilePath +referenceReady pred = satisfyMaybe $ \case + FromServerMess (SMethod_CustomMethod p) (NotMess TNotificationMessage{_params}) + | A.Success fp <- A.fromJSON _params + , pred fp + , symbolVal p == "ghcide/reference/ready" + -> Just fp + _ -> Nothing diff --git a/ghcide-test/exe/ResolveTests.hs b/ghcide-test/exe/ResolveTests.hs new file mode 100644 index 0000000000..4fc917c56b --- /dev/null +++ b/ghcide-test/exe/ResolveTests.hs @@ -0,0 +1,199 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +module ResolveTests (tests) where + +import Config +import Control.Lens +import Data.Aeson +import qualified Data.Maybe as Maybe +import Data.Text (Text) +import qualified Data.Text as T +import GHC.Generics +import Ide.Logger +import Ide.Types (PluginDescriptor (..), PluginId, + defaultPluginDescriptor, + mkPluginHandler, + mkResolveHandler) +import qualified Language.LSP.Protocol.Lens as J +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Message (SomeMethod (..)) +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types +import Language.LSP.Test hiding (resolveCompletion) +import Test.Hls (IdeState, SMethod (..), liftIO, + mkPluginTestDescriptor, + someMethodToMethodString, + waitForAllProgressDone) +import qualified Test.Hls.FileSystem as FS +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "resolve" + [ testGroup "with and without data" resolveRequests + ] + +removeData :: JL.HasData_ s (Maybe a) => s -> s +removeData param = param & JL.data_ .~ Nothing + +simpleTestSession :: TestName -> Session () -> TestTree +simpleTestSession name act = + testCase name $ runWithResolvePlugin (mkIdeTestFs [FS.directCradle ["A.hs"]]) (const act) + +runWithResolvePlugin :: FS.VirtualFileTree -> (FilePath -> Session a) -> IO a +runWithResolvePlugin fs = + testSessionWithPlugin fs + (mkPluginTestDescriptor resolvePluginDescriptor "resolve-plugin") + +data CompletionItemResolveData = CompletionItemResolveData + { completionItemResolve_number :: Int + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +data CodeActionResolve = CodeActionResolve + { codeActionResolve_number :: Int + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +data CodeLensResolve = CodeLensResolve + { codeLensResolve_number :: Int + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +resolvePluginDescriptor :: Recorder (WithPriority Text) -> PluginId -> PluginDescriptor IdeState +resolvePluginDescriptor recorder pid = (defaultPluginDescriptor pid "Test Plugin for Resolve Requests") + { pluginHandlers = mconcat + [ mkResolveHandler LSP.SMethod_CompletionItemResolve $ \_ _ param _ CompletionItemResolveData{} -> pure param + , mkPluginHandler LSP.SMethod_TextDocumentCompletion $ \_ _ _ -> do + pure $ InL + [ defCompletionItem "test item without data" + , defCompletionItem "test item with data" + & 
J.data_ .~ Just (toJSON $ CompletionItemResolveData 100) + ] + , mkResolveHandler LSP.SMethod_CodeActionResolve $ \_ _ param _ CodeActionResolve{} -> pure param + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ \_ _ _ -> do + logWith recorder Debug "Why is the handler not called?" + pure $ InL + [ InR $ defCodeAction "test item without data" + , InR $ defCodeAction "test item with data" + & J.data_ .~ Just (toJSON $ CodeActionResolve 70) + ] + , mkResolveHandler LSP.SMethod_CodeLensResolve $ \_ _ param _ CodeLensResolve{} -> pure param + , mkPluginHandler LSP.SMethod_TextDocumentCodeLens $ \_ _ _ -> do + pure $ InL + [ defCodeLens "test item without data" + , defCodeLens "test item with data" + & J.data_ .~ Just (toJSON $ CodeLensResolve 50) + ] + ] + } + +resolveRequests :: [TestTree] +resolveRequests = + [ simpleTestSession "completion resolve" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A where" + , "data Foo = Foo { foo :: Int }" + , "bar = Foo 4" + ] + waitForAllProgressDone + items <- getCompletions doc (Position 2 7) + let resolveCompItems = filter (\i -> "test item" `T.isPrefixOf` (i ^. J.label)) items + liftIO $ assertEqual "There must be exactly two results" 2 (length resolveCompItems) + -- This must not throw an error. + _ <- traverse (resolveCompletion . removeData) resolveCompItems + pure () + , simpleTestSession "codeAction resolve" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A where" + , "data Foo = Foo { foo :: Int }" + , "bar = Foo 4" + ] + waitForAllProgressDone + -- Cant use 'getAllCodeActions', as this lsp-test function queries the diagnostic + -- locations and we don't have diagnostics in these tests. + cas <- Maybe.mapMaybe (preview _R) <$> getCodeActions doc (Range (Position 0 0) (Position 1 0)) + let resolveCas = filter (\i -> "test item" `T.isPrefixOf` (i ^. J.title)) cas + liftIO $ assertEqual "There must be exactly two results" 2 (length resolveCas) + -- This must not throw an error. + _ <- traverse (resolveCodeAction . removeData) resolveCas + pure () + , simpleTestSession "codelens resolve" $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "module A where" + , "data Foo = Foo { foo :: Int }" + , "bar = Foo 4" + ] + waitForAllProgressDone + cd <- getCodeLenses doc + let resolveCodeLenses = filter (\i -> case i ^. J.command of + Just cmd -> "test item" `T.isPrefixOf` (cmd ^. J.title) + Nothing -> False + ) cd + liftIO $ assertEqual "There must be exactly two results" 2 (length resolveCodeLenses) + -- This must not throw an error. + _ <- traverse (resolveCodeLens . 
removeData) resolveCodeLenses + pure () + ] + +defCompletionItem :: T.Text -> CompletionItem +defCompletionItem lbl = CompletionItem + { _label = lbl + , _labelDetails = Nothing + , _kind = Nothing + , _tags = Nothing + , _detail = Nothing + , _documentation = Nothing + , _deprecated = Nothing + , _preselect = Nothing + , _sortText = Nothing + , _filterText = Nothing + , _insertText = Just "insertion" + , _insertTextFormat = Nothing + , _insertTextMode = Nothing + , _textEdit = Nothing + , _textEditText = Nothing + , _additionalTextEdits = Nothing + , _commitCharacters = Nothing + , _command = Nothing + , _data_ = Nothing + } + +defCodeAction :: T.Text -> CodeAction +defCodeAction lbl = CodeAction + { _title = lbl + , _kind = Just CodeActionKind_Refactor + , _diagnostics = Nothing + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Just $ Command + { _title = lbl + , _command = lbl + , _arguments = Nothing + } + , _data_ = Nothing + } + +defCodeLens :: T.Text -> CodeLens +defCodeLens lbl = CodeLens + { _range = mkRange 0 0 1 0 + , _command = Just $ Command + { _title = lbl + , _command = lbl + , _arguments = Nothing + } + , _data_ = Nothing + } + +-- TODO: expose this from lsp-test +resolveCompletion :: CompletionItem -> Session CompletionItem +resolveCompletion item = do + rsp <- request SMethod_CompletionItemResolve item + case rsp ^. JL.result of + Left err -> liftIO $ assertFailure (someMethodToMethodString (SomeMethod SMethod_CompletionItemResolve) <> " failed with: " <> show err) + Right x -> pure x diff --git a/ghcide-test/exe/RootUriTests.hs b/ghcide-test/exe/RootUriTests.hs new file mode 100644 index 0000000000..2a9cb19ab1 --- /dev/null +++ b/ghcide-test/exe/RootUriTests.hs @@ -0,0 +1,39 @@ + +module RootUriTests (tests) where + +import Control.Monad.IO.Class (liftIO) +import Development.IDE.GHC.Util +import Development.IDE.Test (expectNoMoreDiagnostics) +import Language.LSP.Test +import System.FilePath +-- import Test.QuickCheck.Instances () +import Config +import Data.Default (def) +import Test.Hls (TestConfig (..), + runSessionWithTestConfig) +import Test.Hls.FileSystem (copyDir) +import Test.Tasty +import Test.Tasty.HUnit + + +-- | checks if we use InitializeParams.rootUri for loading session +tests :: TestTree +tests = testCase "use rootUri" . 
runTest "dirA" "dirB" $ \dir -> do + let bPath = dir "dirB/Foo.hs" + bSource <- liftIO $ readFileUtf8 bPath + _ <- createDoc "Foo.hs" "haskell" bSource + expectNoMoreDiagnostics 0.5 + where + -- similar to run' except we can configure where to start ghcide and session + runTest :: FilePath -> FilePath -> (FilePath -> Session ()) -> IO () + runTest dir1 dir2 = runSessionWithTestConfig + def + { + testPluginDescriptor = dummyPlugin + , testDirLocation = Right $ mkIdeTestFs [copyDir "rootUri"] + , testServerRoot = Just dir1 + , testClientRoot = Just dir2 + , testShiftRoot = True + } + + diff --git a/ghcide-test/exe/SafeTests.hs b/ghcide-test/exe/SafeTests.hs new file mode 100644 index 0000000000..85964ba07a --- /dev/null +++ b/ghcide-test/exe/SafeTests.hs @@ -0,0 +1,38 @@ + +module SafeTests (tests) where + +import qualified Data.Text as T +import Development.IDE.Test (expectNoMoreDiagnostics) +import Language.LSP.Test + +import Config +import Test.Tasty + +tests :: TestTree +tests = + testGroup + "SafeHaskell" + [ -- Test for https://github.com/haskell/ghcide/issues/424 + testWithDummyPluginEmpty "load" $ do + let sourceA = + T.unlines + ["{-# LANGUAGE Trustworthy #-}" + ,"module A where" + ,"import System.IO.Unsafe" + ,"import System.IO ()" + ,"trustWorthyId :: a -> a" + ,"trustWorthyId i = unsafePerformIO $ do" + ," putStrLn \"I'm safe\"" + ," return i"] + sourceB = + T.unlines + ["{-# LANGUAGE Safe #-}" + ,"module B where" + ,"import A" + ,"safeId :: a -> a" + ,"safeId = trustWorthyId" + ] + + _ <- createDoc "A.hs" "haskell" sourceA + _ <- createDoc "B.hs" "haskell" sourceB + expectNoMoreDiagnostics 1 ] diff --git a/ghcide-test/exe/SymlinkTests.hs b/ghcide-test/exe/SymlinkTests.hs new file mode 100644 index 0000000000..dda41922f0 --- /dev/null +++ b/ghcide-test/exe/SymlinkTests.hs @@ -0,0 +1,27 @@ + +module SymlinkTests (tests) where + +import Control.Monad.IO.Class (liftIO) +import Development.IDE.Test (expectDiagnosticsWithTags) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +import System.Directory +import System.FilePath + +import Config +import Test.Tasty +import Test.Tasty.HUnit + +-- | Tests for projects that use symbolic links one way or another +tests :: TestTree +tests = + testGroup "Projects using Symlinks" + [ testCase "Module is symlinked" $ runWithExtraFiles "symlink" $ \dir -> do + liftIO $ createFileLink (dir "some_loc" "Sym.hs") (dir "other_loc" "Sym.hs") + let fooPath = dir "src" "Foo.hs" + _ <- openDoc fooPath "haskell" + expectDiagnosticsWithTags [("src" "Foo.hs", [(DiagnosticSeverity_Warning, (2, 0), "The import of 'Sym' is redundant", Nothing, Just DiagnosticTag_Unnecessary)])] + pure () + ] diff --git a/ghcide-test/exe/THTests.hs b/ghcide-test/exe/THTests.hs new file mode 100644 index 0000000000..59b06431f5 --- /dev/null +++ b/ghcide-test/exe/THTests.hs @@ -0,0 +1,190 @@ + +module THTests (tests) where + +import Config +import Control.Monad.IO.Class (liftIO) +import qualified Data.Text as T +import Development.IDE.GHC.Util +import Development.IDE.Test (expectCurrentDiagnostics, + expectDiagnostics, + expectNoMoreDiagnostics) +import Language.LSP.Protocol.Types hiding (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), mkRange) +import Language.LSP.Test +import System.FilePath +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = + testGroup + "TemplateHaskell" + [ -- Test for 
https://github.com/haskell/ghcide/pull/212
+      testWithDummyPluginEmpty "load" $ do
+        let sourceA =
+              T.unlines
+                [ "{-# LANGUAGE PackageImports #-}",
+                  "{-# LANGUAGE TemplateHaskell #-}",
+                  "module A where",
+                  "import \"template-haskell\" Language.Haskell.TH",
+                  "a :: Integer",
+                  "a = $(litE $ IntegerL 3)"
+                ]
+            sourceB =
+              T.unlines
+                [ "{-# LANGUAGE PackageImports #-}",
+                  "{-# LANGUAGE TemplateHaskell #-}",
+                  "module B where",
+                  "import A",
+                  "import \"template-haskell\" Language.Haskell.TH",
+                  "b :: Integer",
+                  "b = $(litE $ IntegerL $ a) + n"
+                ]
+        _ <- createDoc "A.hs" "haskell" sourceA
+        _ <- createDoc "B.hs" "haskell" sourceB
+        expectDiagnostics [ ( "B.hs", [(DiagnosticSeverity_Error, (6, 29), "Variable not in scope: n", Just "GHC-88464")] ) ]
+    , testWithDummyPluginEmpty "newtype-closure" $ do
+        let sourceA =
+              T.unlines
+                [ "{-# LANGUAGE DeriveDataTypeable #-}"
+                ,"{-# LANGUAGE TemplateHaskell #-}"
+                ,"module A (a) where"
+                ,"import Data.Data"
+                ,"import Language.Haskell.TH"
+                ,"newtype A = A () deriving (Data)"
+                ,"a :: ExpQ"
+                ,"a = [| 0 |]"]
+        let sourceB =
+              T.unlines
+                [ "{-# LANGUAGE TemplateHaskell #-}"
+                ,"module B where"
+                ,"import A"
+                ,"b :: Int"
+                ,"b = $( a )" ]
+        _ <- createDoc "A.hs" "haskell" sourceA
+        _ <- createDoc "B.hs" "haskell" sourceB
+        return ()
+    , thReloadingTest False
+    , thLoadingTest
+    , thCoreTest
+    , thReloadingTest True
+      -- Regression test for https://github.com/haskell/haskell-language-server/issues/891
+    , thLinkingTest False
+    , thLinkingTest True
+    , testWithDummyPluginEmpty "findsTHIdentifiers" $ do
+        let sourceA =
+              T.unlines
+                [ "{-# LANGUAGE TemplateHaskell #-}"
+                , "module A (a) where"
+                , "import Language.Haskell.TH (ExpQ)"
+                , "a :: ExpQ" -- TH 2.17 requires an explicit type signature since splices are polymorphic
+                , "a = [| glorifiedID |]"
+                , "glorifiedID :: a -> a"
+                , "glorifiedID = id" ]
+        let sourceB =
+              T.unlines
+                [ "{-# OPTIONS_GHC -Wall #-}"
+                , "{-# LANGUAGE TemplateHaskell #-}"
+                , "module B where"
+                , "import A"
+                , "main = $a (putStrLn \"success!\")"]
+        _ <- createDoc "A.hs" "haskell" sourceA
+        _ <- createDoc "B.hs" "haskell" sourceB
+        expectDiagnostics [ ( "B.hs", [(DiagnosticSeverity_Warning, (4, 0), "Top-level binding with no type signature: main :: IO ()", Just "GHC-38417")] ) ]
+    , testCase "findsTHnewNameConstructor" $ runWithExtraFiles "THNewName" $ \dir -> do
+
+        -- This test defines a TH value with the meaning "data A = A" in A.hs
+        -- Loads and exports the template in B.hs
+        -- And checks whether the constructor A can be loaded in C.hs
+        -- This test does not fail when either A and B get manually loaded before C.hs,
+        -- or when we remove the seemingly unnecessary TH pragma from C.hs
+
+        let cPath = dir </> "C.hs"
+        _ <- openDoc cPath "haskell"
+        expectDiagnostics [ ( cPath, [(DiagnosticSeverity_Warning, (3, 0), "Top-level binding with no type signature: a :: A", Just "GHC-38417")] ) ]
+    ]
+
+
+-- | Test that all modules have linkables
+thLoadingTest :: TestTree
+thLoadingTest = testCase "Loading linkables" $ runWithExtraFiles "THLoading" $ \dir -> do
+  let thb = dir </> "THB.hs"
+  _ <- openDoc thb "haskell"
+  expectNoMoreDiagnostics 1
+
+thCoreTest :: TestTree
+thCoreTest = testCase "Verifying TH core files" $ runWithExtraFiles "THCoreFile" $ \dir -> do
+  let thc = dir </> "THC.hs"
+  _ <- openDoc thc "haskell"
+  expectNoMoreDiagnostics 1
+
+-- | test that TH is reevaluated on typecheck
+thReloadingTest :: Bool -> TestTree
+thReloadingTest unboxed = testCase name $ runWithExtraFiles 
dir $ \dir -> do + + let aPath = dir "THA.hs" + bPath = dir "THB.hs" + cPath = dir "THC.hs" + + aSource <- liftIO $ readFileUtf8 aPath -- th = [d|a = ()|] + bSource <- liftIO $ readFileUtf8 bPath -- $th + cSource <- liftIO $ readFileUtf8 cPath -- c = a :: () + + adoc <- createDoc aPath "haskell" aSource + bdoc <- createDoc bPath "haskell" bSource + cdoc <- createDoc cPath "haskell" cSource + + expectDiagnostics [("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")])] + + -- Change th from () to Bool + let aSource' = T.unlines $ init (T.lines aSource) ++ ["th_a = [d| a = False|]"] + changeDoc adoc [TextDocumentContentChangeEvent . InR $ TextDocumentContentChangeWholeDocument aSource'] + -- generate an artificial warning to avoid timing out if the TH change does not propagate + changeDoc cdoc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ cSource <> "\nfoo=()"] + + -- Check that the change propagates to C + expectDiagnostics + [("THC.hs", [(DiagnosticSeverity_Error, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'", Just "GHC-83865")]) + ,("THC.hs", [(DiagnosticSeverity_Warning, (6,0), "Top-level binding", Just "GHC-38417")]) + ,("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level bindin", Just "GHC-38417")]) + ] + + closeDoc adoc + closeDoc bdoc + closeDoc cdoc + where + name = "reloading-th-test" <> if unboxed then "-unboxed" else "" + dir | unboxed = "THUnboxed" + | otherwise = "TH" + +thLinkingTest :: Bool -> TestTree +thLinkingTest unboxed = testCase name $ runWithExtraFiles dir $ \dir -> do + + let aPath = dir "THA.hs" + bPath = dir "THB.hs" + + aSource <- liftIO $ readFileUtf8 aPath -- th_a = [d|a :: ()|] + bSource <- liftIO $ readFileUtf8 bPath -- $th_a + + adoc <- createDoc aPath "haskell" aSource + bdoc <- createDoc bPath "haskell" bSource + + expectDiagnostics [("THB.hs", [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")])] + + let aSource' = T.unlines $ init (init (T.lines aSource)) ++ ["th :: DecsQ", "th = [d| a = False|]"] + changeDoc adoc [TextDocumentContentChangeEvent . InR $ TextDocumentContentChangeWholeDocument aSource'] + + -- modify b too + let bSource' = T.unlines $ init (T.lines bSource) ++ ["$th"] + changeDoc bdoc [TextDocumentContentChangeEvent . 
InR $ TextDocumentContentChangeWholeDocument bSource'] + _ <- waitForDiagnostics + + expectCurrentDiagnostics bdoc [(DiagnosticSeverity_Warning, (4,1), "Top-level binding", Just "GHC-38417")] + + closeDoc adoc + closeDoc bdoc + where + name = "th-linking-test" <> if unboxed then "-unboxed" else "" + dir | unboxed = "THUnboxed" + | otherwise = "TH" diff --git a/ghcide-test/exe/UnitTests.hs b/ghcide-test/exe/UnitTests.hs new file mode 100644 index 0000000000..b2940ab27f --- /dev/null +++ b/ghcide-test/exe/UnitTests.hs @@ -0,0 +1,112 @@ + +module UnitTests (tests) where + +import Config (mkIdeTestFs) +import Control.Concurrent +import Control.Monad.IO.Class (liftIO) +import Data.IORef +import Data.IORef.Extra (atomicModifyIORef_) +import Data.List.Extra +import Data.String (IsString (fromString)) +import qualified Data.Text as T +import Development.IDE.Core.FileStore (getModTime) +import qualified Development.IDE.Plugin.HLS.GhcIde as Ghcide +import qualified Development.IDE.Types.Diagnostics as Diagnostics +import Development.IDE.Types.Location +import qualified FuzzySearch +import Ide.Logger (Recorder, WithPriority) +import Ide.PluginUtils (pluginDescToIdePlugins) +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import Network.URI +import qualified Progress +import System.IO.Extra hiding (withTempDir) +import System.Mem (performGC) +import Test.Hls (IdeState, def, + runSessionWithServerInTmpDir, + waitForProgressDone) +import Test.Tasty +import Test.Tasty.ExpectedFailure +import Test.Tasty.HUnit +import Text.Printf (printf) + +tests :: TestTree +tests = do + testGroup "Unit" + [ testCase "empty file path does NOT work with the empty String literal" $ + uriToFilePath' (fromNormalizedUri $ filePathToUri' "") @?= Just "." 
+ , testCase "empty file path works using toNormalizedFilePath'" $ + uriToFilePath' (fromNormalizedUri $ filePathToUri' (toNormalizedFilePath' "")) @?= Just "" + , testCase "empty path URI" $ do + Just URI{..} <- pure $ parseURI (T.unpack $ getUri $ fromNormalizedUri emptyPathUri) + uriScheme @?= "file:" + uriPath @?= "" + , testCase "from empty path URI" $ do + let uri = Uri "file://" + uriToFilePath' uri @?= Just "" + , testCase "showDiagnostics prints ranges 1-based (like vscode)" $ do + let diag = Diagnostics.FileDiagnostic "" Diagnostics.ShowDiag Diagnostic + { _codeDescription = Nothing + , _data_ = Nothing + , _range = Range + { _start = Position{_line = 0, _character = 1} + , _end = Position{_line = 2, _character = 3} + } + , _severity = Nothing + , _code = Nothing + , _source = Nothing + , _message = "" + , _relatedInformation = Nothing + , _tags = Nothing + } Diagnostics.NoStructuredMessage + let shown = T.unpack (Diagnostics.showDiagnostics [diag]) + let expected = "1:2-3:4" + assertBool (unwords ["expected to find range", expected, "in diagnostic", shown]) $ + expected `isInfixOf` shown + , testCase "notification handlers run in priority order" $ do + orderRef <- newIORef [] + let + plugins ::Recorder (WithPriority Ghcide.Log) -> IdePlugins IdeState + plugins recorder = pluginDescToIdePlugins $ + [ (priorityPluginDescriptor i) + { pluginNotificationHandlers = mconcat + [ mkPluginNotificationHandler SMethod_TextDocumentDidOpen $ \_ _ _ _ -> + liftIO $ atomicModifyIORef_ orderRef (i:) + ] + } + | i <- [1..20] + ] ++ Ghcide.descriptors recorder + priorityPluginDescriptor i = (defaultPluginDescriptor (fromString $ show i) ""){pluginPriority = i} + + runSessionWithServerInTmpDir def plugins (mkIdeTestFs []) $ do + _ <- createDoc "A.hs" "haskell" "module A where" + waitForProgressDone + actualOrder <- liftIO $ reverse <$> readIORef orderRef + + -- Handlers are run in priority descending order + liftIO $ actualOrder @?= [20, 19 .. 
1] + , ignoreTestBecause "The test fails sometimes showing 10000us" $ + testCase "timestamps have millisecond resolution" $ do + resolution_us <- findResolution_us 1 + let msg = printf "Timestamps do not have millisecond resolution: %dus" resolution_us + assertBool msg (resolution_us <= 1000) + , Progress.tests + , FuzzySearch.tests + ] + +findResolution_us :: Int -> IO Int +findResolution_us delay_us | delay_us >= 1000000 = error "Unable to compute timestamp resolution" +findResolution_us delay_us = withTempFile $ \f -> withTempFile $ \f' -> do + performGC + writeFile f "" + threadDelay delay_us + writeFile f' "" + t <- getModTime f + t' <- getModTime f' + if t /= t' then return delay_us else findResolution_us (delay_us * 10) diff --git a/ghcide-test/exe/WatchedFileTests.hs b/ghcide-test/exe/WatchedFileTests.hs new file mode 100644 index 0000000000..1c2ded9109 --- /dev/null +++ b/ghcide-test/exe/WatchedFileTests.hs @@ -0,0 +1,98 @@ + +{-# LANGUAGE GADTs #-} + +module WatchedFileTests (tests) where + +import Config (mkIdeTestFs, + testWithDummyPlugin', + testWithDummyPluginEmpty') +import Control.Applicative.Combinators +import Control.Monad.IO.Class (liftIO) +import qualified Data.Aeson as A +import qualified Data.Text as T +import qualified Data.Text.IO as T +import Development.IDE.Test (expectDiagnostics) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..), + SemanticTokenRelative (..), + SemanticTokensEdit (..), + mkRange) +import Language.LSP.Test +import System.Directory +import System.FilePath +import Test.Hls.FileSystem +import Test.Tasty +import Test.Tasty.HUnit + +tests :: TestTree +tests = testGroup "watched files" + [ testGroup "Subscriptions" + [ testWithDummyPluginEmpty' "workspace files" $ \sessionDir -> do + liftIO $ writeFile (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"WatchedFilesMissingModule\"]}}" + _doc <- createDoc "A.hs" "haskell" "{-#LANGUAGE NoImplicitPrelude #-}\nmodule A where\nimport WatchedFilesMissingModule" + setIgnoringRegistrationRequests False + watchedFileRegs <- getWatchedFilesSubscriptionsUntil SMethod_TextDocumentPublishDiagnostics + + -- Expect 2 subscriptions: one for all .hs files and one for the hie.yaml cradle + liftIO $ length watchedFileRegs @?= 2 + + , testWithDummyPluginEmpty' "non workspace file" $ \sessionDir -> do + tmpDir <- liftIO getTemporaryDirectory + let yaml = "cradle: {direct: {arguments: [\"-i" <> tail(init(show tmpDir)) <> "\", \"A\", \"WatchedFilesMissingModule\"]}}" + liftIO $ writeFile (sessionDir "hie.yaml") yaml + _doc <- createDoc "A.hs" "haskell" "{-# LANGUAGE NoImplicitPrelude#-}\nmodule A where\nimport WatchedFilesMissingModule" + setIgnoringRegistrationRequests False + watchedFileRegs <- getWatchedFilesSubscriptionsUntil SMethod_TextDocumentPublishDiagnostics + + -- Expect 2 subscriptions: one for all .hs files and one for the hie.yaml cradle + liftIO $ length watchedFileRegs @?= 2 + + -- TODO add a test for didChangeWorkspaceFolder + ] + , testGroup "Changes" + [ + testWithDummyPluginEmpty' "workspace files" $ \sessionDir -> do + liftIO $ writeFile (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"B\"]}}" + liftIO $ writeFile (sessionDir "B.hs") $ unlines + ["module B where" + ,"b :: Bool" + ,"b = False"] + _doc <- createDoc "A.hs" "haskell" $ T.unlines + ["module A where" + ,"import B" + ,"a :: ()" + ,"a = b" + ] + expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected 
type '()' with actual type 'Bool'", Just "GHC-83865")])] + -- modify B off editor + liftIO $ writeFile (sessionDir "B.hs") $ unlines + ["module B where" + ,"b :: Int" + ,"b = 0"] + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams + [FileEvent (filePathToUri $ sessionDir "B.hs") FileChangeType_Changed ] + expectDiagnostics [("A.hs", [(DiagnosticSeverity_Error, (3, 4), "Couldn't match expected type '()' with actual type 'Int'", Just "GHC-83865")])] + , testWithDummyPlugin' "reload HLS after .cabal file changes" (mkIdeTestFs [copyDir ("watched-files" "reload")]) $ \sessionDir -> do + let hsFile = "src" "MyLib.hs" + _ <- openDoc hsFile "haskell" + expectDiagnostics [(hsFile, [(DiagnosticSeverity_Error, (2, 7), "Could not load module \8216Data.List.Split\8217", Nothing)])] + let cabalFile = "reload.cabal" + cabalContent <- liftIO $ T.readFile cabalFile + let fix = T.replace "build-depends: base" "build-depends: base, split" + liftIO $ T.writeFile cabalFile (fix cabalContent) + sendNotification SMethod_WorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams + [ FileEvent (filePathToUri $ sessionDir cabalFile) FileChangeType_Changed ] + expectDiagnostics [(hsFile, [])] + ] + ] + +getWatchedFilesSubscriptionsUntil :: forall m. SServerMethod m -> Session [DidChangeWatchedFilesRegistrationOptions] +getWatchedFilesSubscriptionsUntil m = do + msgs <- manyTill (Just <$> message SMethod_ClientRegisterCapability <|> Nothing <$ anyMessage) (message m) + return + [ x + | Just TRequestMessage{_params = RegistrationParams regs} <- msgs + , Registration _id "workspace/didChangeWatchedFiles" (Just args) <- regs + , Just x@(DidChangeWatchedFilesRegistrationOptions _) <- [A.decode . A.encode $ args] + ] diff --git a/ghcide-test/manual/lhs/Bird.lhs b/ghcide-test/manual/lhs/Bird.lhs new file mode 100644 index 0000000000..a9ed4e2a57 --- /dev/null +++ b/ghcide-test/manual/lhs/Bird.lhs @@ -0,0 +1,19 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +\subsection{Bird-style LHS} + +> module Bird +> ( +> fly +> ) where + + + +what birds are able to do: + +> fly :: IO () +> fly = putStrLn "birds fly." + + diff --git a/ghcide-test/manual/lhs/Main.hs b/ghcide-test/manual/lhs/Main.hs new file mode 100644 index 0000000000..3559ab22b4 --- /dev/null +++ b/ghcide-test/manual/lhs/Main.hs @@ -0,0 +1,12 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Main + ( + main + ) where + +import Test (main) + + + diff --git a/ghcide-test/manual/lhs/Test.lhs b/ghcide-test/manual/lhs/Test.lhs new file mode 100644 index 0000000000..0e30d25a01 --- /dev/null +++ b/ghcide-test/manual/lhs/Test.lhs @@ -0,0 +1,36 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +\subsection{Testing LHS} + +\begin{code} +{-# LANGUAGE CPP #-} + +module Test + ( + main + ) where + + +import Bird + +\end{code} + +for this file, \emph{hlint} should be turned off. +\begin{code} +{-# ANN module ("HLint: ignore" :: String) #-} +\end{code} + +our main procedure + +\begin{code} + +main :: IO () +main = do + putStrLn "hello world." 
+ fly + +\end{code} + + diff --git a/ghcide-test/preprocessor/Main.hs b/ghcide-test/preprocessor/Main.hs new file mode 100644 index 0000000000..d7ae0e5cab --- /dev/null +++ b/ghcide-test/preprocessor/Main.hs @@ -0,0 +1,10 @@ + +module Main(main) where + +import System.Environment + +main :: IO () +main = do + _:input:output:_ <- getArgs + let f = map (\x -> if x == 'x' then 'y' else x) + writeFile output . f =<< readFile input diff --git a/ghcide/.editorconfig b/ghcide/.editorconfig new file mode 100644 index 0000000000..400e76ac0b --- /dev/null +++ b/ghcide/.editorconfig @@ -0,0 +1,13 @@ +; This file is for unifying the coding style for different editors and IDEs. +; More information at https://EditorConfig.org + +root = true + +[*] +end_of_line = LF +trim_trailing_whitespace = true +insert_final_newline = true + +[*.{hs,lhs}] +indent_style = space +indent_size = 4 diff --git a/ghcide/.gitignore b/ghcide/.gitignore new file mode 100644 index 0000000000..8370c00874 --- /dev/null +++ b/ghcide/.gitignore @@ -0,0 +1,15 @@ +dist/ +.stack-work/ +dist-newstyle/ +cabal.project.local +*~ +*.lock +/.tasty-rerun-log +.vscode +/.hlint-* +.shake/ +ghcide +ghcide-bench +ghcide-preprocessor +*.gcStats.log +tags diff --git a/ghcide/CHANGELOG.md b/ghcide/CHANGELOG.md new file mode 100644 index 0000000000..38557e9c1e --- /dev/null +++ b/ghcide/CHANGELOG.md @@ -0,0 +1,341 @@ +### 1.3.0.0 (2021-05-09) +* Replace unsafe getmodtime with unix package (#1778) - Pepe Iborra +* Progress reporting improvements (#1784) - Pepe Iborra +* Unify session loading using implicit-hie (#1783) - fendor +* Fix remove constraint (#1578) - Kostas Dermentzis +* Fix wrong extend import while type constructor and data constructor have the same name (#1775) - Lei Zhu +* Improve vscode extension schema generation (#1742) - Potato Hatsue +* Add hls-graph abstracting over shake (#1748) - Neil Mitchell +* Tease apart the custom SYB from ExactPrint (#1746) - Sandy Maguire +* fix class method completion (#1741) - Lei Zhu +* Fix: #1690 - Infix typed holes are now filled using infix notation (#1708) - Oliver Madine + +### 1.2.0.2 (2021-04-13) +* Bracketing for snippet completions (#1709) - Oliver Madine +* Don't suggest destruct actions for already-destructed terms (#1715) - Sandy Maguire + +### 1.2.0.1 (2021-04-12) +* restore compat. 
with haddock-library 1.8 (#1717) - Pepe Iborra + +### 1.2.0 (2021-04-11) +* Emit holes as diagnostics (#1653) - Sandy Maguire +* Fix ghcide and HLS enter lsp mode by default (#1692) - Potato Hatsue +* support custom Ide commands (#1666) - Pepe Iborra +* ghcide - enable ApplicativeDo everywhere (#1667) - Pepe Iborra +* Intelligent derivations of Semigroup and Monoid for Wingman (#1671) - Sandy Maguire +* Avoid creating IsFileOfInterest keys for non workspace files (#1661) - Pepe Iborra +* Fix a wingman bug caused by mismanaged stale data (#1657) - Sandy Maguire +* Skip tracing unless eventlog is enabled (#1658) - Pepe Iborra +* optimize ambiguity import suggestions (#1669) - Lei Zhu +* Replace Barrier with MVar in lsp main (#1668) - Potato Hatsue +* Add bounds for Diff (#1665) - Potato Hatsue +* log exceptions before killing the server (#1651) - Pepe Iborra +* Fix importing type operators (#1644) - Potato Hatsue +* Shut the Shake session on exit, instead of restarting it (#1655) - Pepe Iborra +* Do not override custom user commands (#1650) - Pepe Iborra +* Civilized indexing progress reporting (#1633) - Pepe Iborra +* Avoid reordering plugins (#1629) - Pepe Iborra +* Update to lsp-1.2 (#1631) - wz1000 +* Use custom config for completions plugin (#1619) - Potato Hatsue +* Configurable I/O handles (#1617) - Pepe Iborra +* Add test data files to extra-source-files (#1605) - Javier Neira +* Allow for customizable Haskell views of Property types (#1608) - Sandy Maguire +* Extract hls-test-utils (#1606) - Potato Hatsue +* Add ability for plugins to handle file change notifications (#1588) - Pepe Iborra +* Bump haddock-library to 1.10.0 (#1598) - Potato Hatsue +* Use CiInterface/SkInterface for typeclass symbols (#1592) - FW +* Relax ghcides upper bound on base16-bytestring (#1595) - maralorn +* Regularize custom config of plugins (#1576) - Potato Hatsue +* Avoid duplicating known targets and import paths (#1590) - Pepe Iborra +* Update homepage and other urls for ghcide (#1580) - Felix Yan +* Add custom code action kinds for import related code actions (#1570) - Potato Hatsue +* Use TextEdit to insert new imports (#1554) - Potato Hatsue + +### 1.1.0 (2021-03-09) +* Add an option to control progress reporting (#1513) - Pepe Iborra +* Fix missing parens of auto extending imports (#1526) - Potato Hatsue +* Avoid redundant work in diagnostics pass (#1514) - Pepe Iborra +* Avoid always rerunning GetModificationTime for interface files too (#1506) - Pepe Iborra +* Demote implicit cradle warn to logging (#1511) - Javier Neira +* Drive GetModificationTime using watched file events (#1487) - Pepe Iborra +* Make type lenses plugin configurable (#1491) - Potato Hatsue +* Context-aware ExactPrint grafting for HsExpr (#1489) - Sandy Maguire +* Register IDE configuration when called via the command line (#1495) - wz1000 +* Faster ModSummary fingerprints (#1485) - Pepe Iborra +* Make sure to give the correct DynFlags to the recompilation checker (#1459) - Pepe Iborra +* Customize the unitId used for the fake internal component (#1435) - Pepe Iborra +* Extract the qualified name from already imported module (#1445) - Potato Hatsue +* Add code action for importing class methods (#1428) - Potato Hatsue +* Reformat all files (#1439) - Junyoung/Clare Jang +* Minor performance optimizations (#1432) - Pepe Iborra + +### 1.0.0 (2021-03-04) +* Fix the handling of default HLS config again (#1419) - Pepe Iborra +* Hlint hints. 
(#1227) - Peter Wicks Stringfield +* Use object code for TH+UnboxedTuples/Sums (#1382) - wz1000 +* Add a pre commit hook for code formatting (#1384) - Junyoung/Clare Jang +### 0.7.5 (2021-02-17) +* Tone down some logInfos to logDebug (#1385) - Pepe Iborra +* Show window message when auto extending import lists (#1371) - Potato Hatsue +* Catch GHC errors in listing module names (#1367) - Potato Hatsue +* Upgrade to lsp-1.0 (#1284) - wz1000 +* Added Development.IDE.Main (#1338) - Pepe Iborra +* Fix completion snippets on DuplicateRecordFields (#1360) - Potato Hatsue +* Add code action for hiding shadowed identifiers from imports (#1322) - Potato Hatsue +* Make find-definition work better with multi-components (#1357) - wz1000 +* Index files on first open (#1358) - wz1000 +* Fix code actions regression (#1349) - Pepe Iborra + +### 0.7.4 (2021-02-08) +* Support for references via hiedb (#704) - wz1000 +* Fix space leak on cradle reloads (#1316) - Pepe Iborra +* Trigger extending import only when the item is not in scope (#1309) - Potato Hatsue +* Don't extend the import list with child if the parent has already been imported as (..) (#1302) - Potato Hatsue +* FindImports typo (minor) (#1291) - Andy +* Reenable auto extend imports and drop snippets for infix completions (#1266) - Pepe Iborra +* ghcide: Implements a CodeAction to disambiguate ambiguous symbols (#1264) - Hiromi Ishii +* Restore code actions order (#1273) - Pepe Iborra + +### 0.7.3 (2021-02-04) +* Add custom cache layer for session loading (#1197) - (fendor) +* Remove invalid exports (#1193) - (Kostas Dermentzis) +* Use exact print to suggestExtendImport - (Potato Hatsue) +* Add code actions for disabling a warning in the current file (#1235) - (George Thomas) +* Limit completions to top 40 (#1218) - (Pepe Iborra) +* Add traces for HLS providers (#1222) - (Pepe Iborra) +* Use exact print for suggest missing constraint code actions (#1221) - (Pepe Iborra) +* Parenthesise type operators when extending import lists (#1212) - (Thomas Winant) + +### 0.7.2 (2021-01-14) +* Expose shakeOptions used - (Pepe Iborra) + +### 0.7.1 (2021-01-13) +* Fix sticky diagnostics bug (#1188) - (Pepe Iborra) +* Use completionSnippetsOn flag (#1195) - (Yuya Kono) +* Update tested-with GHC in cabal config - (jneira) +* Do not disable parallel GC by default (#1190) - (Pepe Iborra) +* Fix module outline becoming stale after switching branches (#1189) - (Pepe Iborra) +* Make adding missing constraint work in presence of 'forall' (fixes #1164) (#1177) - (Jan Hrcek) +* Bump haskell-lsp to 0.23 (#1146) - (Potato Hatsue) +* Fix #723 (Instance declarations in hs-boot files result in GHC errors) (#781) - (Ben Simms) +* Also suggest importing methods without parent class (#766) - (Thomas Winant) +* Update links to issues/PRs in ghcide tests. 
(#1142) - (Peter Wicks Stringfield) +* fix suggestAddTypeAnnotation regex (#760) - (Kostas Dermentzis) + +### 0.7.0 (2021-01-03) +* Ghcide now loads HLS plugins internally - (Pepe Iborra) +* Retry a failed cradle if the cradle descriptor changes (#762) - (Pepe Iborra) +* Fix extend imports regression (#769) - (Pepe Iborra) +* Perform memory measurement on SIGUSR1 (#761) - (Pepe Iborra) + +### 0.6.0.2 (2020-12-26) +* Fix disappearing diagnostics bug (#959) - (Pepe Iborra) +* Deduplicate module not found diagnostics (#952) - (Pepe Iborra) +* Use qualified module name from diagnostics in suggestNewImport (#945) - (Potato Hatsue) +* Disable auto extend import snippets in completions (these need a bit more work) + +### 0.6.0.1 (2020-12-13) +* Fix build with GHC 8.8.2 and 8.8.3 - (Javier Neira) +* Update old URLs still pointing to digital-asset - (Jan Hrcek) + +### 0.6.0 (2020-12-06) +* Completions: extend explicit import list automatically (#930) - (Guru Devanla) +* Completions for identifiers not in explicit import lists (#919) - (Guru Devanla) +* Completions for record fields (#900) - (Guru Devanla) +* Bugfix: add constructors to import lists correctly (#916) - (Potato Hatsue) +* Bugfix: respect qualified identifiers (#938) - (Pepe Iborra) +* Bugfix: partial `pathToId` (#926) - (Samuel Ainsworth) +* Bugfix: import suggestions when there's more than one option (#913) - (Guru Devanla) +* Bugfix: parenthesize operators when exporting (#906) - (Potato Hatsue) +* Opentelemetry traces and heapsize memory analysis (#922) - (Michalis Pardalos / Pepe Iborra) +* Make Filetargets absolute before continue using them (#914) - (fendor) +* Do not enable every "unnecessary" warning by default (#907) - (Alejandro Serrano) +* Update implicit-hie to 0.3.0 (#905) - (Avi Dessauer) + +### 0.5.0 (2020-11-07) +* Use implicit-hie-0.1.2.0 (#880) - (Javier Neira) +* Clarify and downgrade implicit-hie message (#883) - (Avi Dessauer) +* Switch back to bytecode (#873) - (wz1000) +* Add code action for remove all redundant imports (#867) - (Potato Hatsue) +* Fix pretty printer for diagnostic ranges (#871) - (Martin Huschenbett) +* Canonicalize import dirs (#870) - (Pepe Iborra) +* Do not show internal hole names (#852) - (Alejandro Serrano) +* Downgrade file watch debug log to logDebug from logInfo (#848) - (Matthew Pickering) +* Pull in local bindings (#845) - (Sandy Maguire) +* Use object code for Template Haskell, emit desugarer warnings (#836) - (wz1000) +* Fix code action for adding missing constraints to type signatures (#839) - (Jan Hrcek) +* Fix duplicated completions (#837) - (Vitalii) +* FileExists: set one watcher instead of thousands (#831) - (Michael Peyton Jones) +* Drop 8.4 support (#834) - (wz1000) +* Add GetHieAsts rule, Replace SpanInfo, add support for DocumentHighlight and scope-aware completions for local variables (#784) - (wz1000) +* Tag unused warning as such (#815) - (Alejandro Serrano) +* Update instructions for stty error in windows (#825) - (Javier Neira) +* Fix docs tooltip for base libraries on Windows (#814) - (Nick Dunets) +* Fix documentation (or source) link when html file is less specific than module (#766) - (Nick Dunets) +* Add completion tests for records. 
(#804) - (Guru Devanla) +* Restore identifiers missing from hi file (#741) - (maralorn) +* Fix import suggestions when dot is typed (#800) - (Marcelo Lazaroni) + +### 0.4.0 (2020-09-15) +* Fixes for GHC source plugins: dotpreprocessor works now - (srid) +* Use implicit-hie when no explicit hie.yaml (#782) - (Javier Neira) +* Extend position mapping with fuzzy ranges (#785) - (wz1000) +* Sort import suggestions (#793) - (Pepe Iborra) +* Save source files with HIE files (#701) - (fendor) +* Fully asynchronous request handling (#767) - (Pepe Iborra) +* Refinement holes (#748) - (Pepe Iborra) +* Fix haddock to markdown conversion (#757) - (George Thomas) +* Expose `getCompletionsLSP` to allow completions in hls (#756) - (wz1000) +* Suggestions for missing imports from local modules (#739) - (Pepe Iborra) +* Dynamically load libm on Linux for each new session (#723) - (Luke Lau) +* Use InitializeParams.rootUri for initial session setup (#713) - (shaurya gupta) +* Show documentation on hover for symbols defined in the same module (#691) - (wz1000) +* Suggest open imports (#740) - (Pepe Iborra) +* module Development.IDE (#724) - (Pepe Iborra) +* Ignore -Werror (#738) - (Pepe Iborra) +* Fix issue #710: fix suggest delete binding (#728) - (Ray Shih) +* Generate doc file URL via LSP (to fix it for Windows) (#721) - (Nick Dunets) +* Fix `.hie` file location for `.hs-boot` files (#690) - (wz1000) +* Use argsVerbose to determine log level in test mode (#717) - (Ziyang Liu) +* output which cradle files were found (#716) - (Adam Sandberg Eriksson) +* Typecheck entire project on Initial Load and typecheck reverse dependencies of a file on saving (#688) - (wz1000) + +### 0.3.0 (2020-09-02) + +* CI: remove (internal) DA Slack notifications (#750) - (Gary Verhaegen) +* Add session-loader to hie.yaml (#714) - (Luke Lau) +* Codeaction for exporting unused top-level bindings (#711) - (shaurya gupta) +* Add links to haddock and hscolour pages in documentation (#699) - (Luke Lau) +* Expose GHC.Compat module (#709) - (Pepe Iborra) +* Move session loading logic into ghcide library (#697) - (Luke Lau) +* Code action: remove redundant constraints for type signature (#692) - (Denis Frezzato) +* Fix Binary instance of Q to handle empty file paths (#707) - (Moritz Kiefer) +* Populate ms_hs_date in GetModSummary rule (#694) - (Pepe Iborra) +* Allow GHC plugins to be called with an updated StringBuffer (#698) - (Alfredo Di Napoli) +* Relax upper bounds for GHC 8.10.1 (#705) - (Pepe Iborra) +* Obtain the GHC libdir at runtime (#696) - (Luke Lau) +* Expect bench experiments to fail with Cabal (#704) - (Pepe Iborra) +* Bump lodash from 4.17.15 to 4.17.19 in /extension (#702) - (dependabot[bot]) +* Update to hie-bios 0.6.1 (#693) - (fendor) +* Backport HIE files to GHC 8.6 (#689) - (wz1000) +* Performance improvements for GetSpanInfo (#681) - (Pepe Iborra) +* Code action add default type annotation to remove `-Wtype-defaults` warning (#680) - (Serhii) +* Use a global namecache to read `.hie` files (#677) - (wz1000) +* Completions need not depend on typecheck of the current file (#670) - (Pepe Iborra) +* Fix spaninfo Haddocks for local modules (#678) - (Pepe Iborra) +* Avoid excessive retypechecking of TH codebases (#673) - (Pepe Iborra) +* Use stale information if it's available to answer requests quickly (#624) - (Matthew Pickering) +* Code action: add constraint (#653) - (Denis Frezzato) +* Make BenchHist non buildable by default and save logs (#666) - (Pepe Iborra) +* Delete unused top level binding code action (#657) - 
(Serhii) +* stack810.yaml: bump (#651) - (Domen Kozar) +* Fix debouncer for 0 delay (#662) - (Pepe Iborra) +* Interface file fixes (#645) - (Pepe Iborra) +* Retry GHC 8.10 on Windows (#661) - (Moritz Kiefer) +* Finer dependencies for GhcSessionFun (#643) - (Pepe Iborra) +* Send WorkDoneProgressEnd only when work is done (#649) - (Pepe Iborra) +* Add a note on differential benchmarks (#647) - (Pepe Iborra) +* Cache a ghc session per file of interest (#630) - (Pepe Iborra) +* Remove `Strict` from the language extensions used for code actions (#638) - (Torsten Schmits) +* Report progress when setting up cradle (#644) - (Luke Lau) +* Fix crash when writing to a Barrier more than once (#637) - (Pepe Iborra) +* Write a cabal.project file in the benchmark example (#640) - (Pepe Iborra) +* Performance analysis over time (#629) - (Pepe Iborra) +* More benchmarks (#625) - (Pepe Iborra) +* Canonicalize the locations in the cradle tests (#628) - (Luke Lau) +* Add hie.yaml.stack and use none cradle for test data (#626) - (Javier Neira) +* Fix a bug in getHiFileRule (#623) - (Pepe Iborra) +* ghc initialization error handling (#609) - (Pepe Iborra) +* Fix regression in getSpanInfoRule (#622) - (Pepe Iborra) +* Restore Shake profiling (#621) - (Pepe Iborra) +* Use a better noRange (#612) - (Neil Mitchell) +* Add back a .ghci file (#607) - (Neil Mitchell) +* #573, make haddock errors warnings with the word Haddock in front (#608) - (Neil Mitchell) +* Implement Goto Type Definition (#533) - (Matthew Pickering) +* remove unnecessary FileExists dependency in GetHiFile (#589) - (Pepe Iborra) +* ShakeSession and shakeEnqueue (#554) - (Pepe Iborra) +* Benchmark suite (#590) - (Pepe Iborra) + +### 0.2.0 (2020-06-02) + +* Multi-component support (thanks @mpickering) +* Support for GHC 8.10 (thanks @sheaf and @chshersh) +* Fix some TH issues (thanks @mpickering) +* Automatically pick up changes to cradle dependencies (e.g. cabal + files) (thanks @jinwoo) +* Track dependencies when using `qAddDependentFile` (thanks @mpickering) +* Add record fields to document symbols outline (thanks @bubba) +* Fix some space leaks (thanks @mpickering) +* Strip redundant path information from diagnostics (thanks @tek) +* Fix import suggestions for operators (thanks @eddiemundo) +* Significant reductions in memory usage by using interfaces and `.hie` files (thanks + @pepeiborra) +* Minor improvements to completions +* More comprehensive suggestions for missing imports (thanks @pepeiborra) +* Group imports in document outline (thanks @fendor) +* Upgrade to haskell-lsp-0.22 (thanks @bubba) +* Upgrade to hie-bios 0.5 (thanks @fendor) + +### 0.1.0 (2020-02-04) + +* Code action for inserting new definitions (see #309). +* Better default GC settings (see #329 and #333). +* Various performance improvements (see #322 and #384). +* Improvements to hover information (see #317 and #338). +* Support GHC 8.8.2 (see #355). +* Include keywords in completions (see #351). +* Fix some issues with aborted requests (see #353). +* Use hie-bios 0.4.0 (see #382). +* Avoid stuck progress reporting (see #400). +* Only show progress notifications after 0.1s (see #392). +* Progress reporting is now in terms of the number of files rather + than the number of shake rules (see #379). + +### 0.0.6 (2020-01-10) + +* Fix type in hover information for do-notation and list + comprehensions (see #243). +* Fix hover and goto-definition for multi-clause definitions (see #252). 
+* Upgrade to `hie-bios-0.3` (see #257) +* Upgrade to `haskell-lsp-0.19` (see #254) +* Code lenses for missing signatures are displayed even if the warning + has not been enabled. The warning itself will not be shown if it is + not enabled. (see #232) +* Define `__GHCIDE__` when running CPP to allow for `ghcide`-specific + workarounds. (see #264) +* Fix some filepath normalization issues. (see #266) +* Fix build with `shake-0.18.4` (see #272) +* Fix hover for type constructors and type classes. (see #267) +* Support custom preprocessors (see #282) +* Add support for code completions (see #227) +* Code action for removing redundant symbols from imports (see #290) +* Support document symbol requests (see #293) +* Show CPP errors as diagnostics (see #296) +* Code action for adding suggested imports (see #295) + +### 0.0.5 (2019-12-12) + +* Support for GHC plugins (see #192) +* Update to haskell-lsp 0.18 (see #203) +* Initial support for `TemplateHaskell` (see #222) +* Code lenses for missing signatures. These are only shown if + `-Wmissing-signatures` is enabled. (see #224) +* Fix path normalisation on Windows (see #225) +* Fix flickering of the progress indicator (see #230) + +### 0.0.4 (2019-10-20) + +* Add a ``--version`` cli option (thanks @jacg) +* Update to use progress reporting as defined in LSP 3.15. The VSCode + extension has also been updated and should now be making use of + this. +* Properly declare that we should support code actions. This helps + with some clients that rely on this information to enable code + actions (thanks @jacg). +* Fix a race condition caused by sharing the finder cache between + concurrent compilations. +* Avoid normalizing include dirs. This avoids issues where the same + file ends up twice in the module graph, e.g., with different casing + for drive letters. + +### 0.0.3 (2019-09-21) diff --git a/ghcide/LICENSE b/ghcide/LICENSE new file mode 100644 index 0000000000..d1f5c9033f --- /dev/null +++ b/ghcide/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Digital Asset (Switzerland) GmbH and/or its affiliates + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ghcide/README.md b/ghcide/README.md new file mode 100644 index 0000000000..39f8bb8ee8 --- /dev/null +++ b/ghcide/README.md @@ -0,0 +1,340 @@ +# `ghcide` - A library for building Haskell IDE tooling + +Our vision is that you should build an IDE by combining: + +![vscode](https://raw.githubusercontent.com/haskell/ghcide/master/img/vscode2.png) + +* [`hie-bios`](https://github.com/mpickering/hie-bios) for determining where your files are, what are their dependencies, what extensions are enabled and so on; +* `ghcide` (i.e. this library) for defining how to type check, when to type check, and producing diagnostic messages; +* A bunch of plugins that haven't yet been written, e.g. 
[`hie-hlint`](https://github.com/ndmitchell/hlint) and [`hie-ormolu`](https://github.com/tweag/ormolu), to choose which features you want; +* [`haskell-lsp`](https://github.com/alanz/haskell-lsp) for sending those messages to a [Language Server Protocol (LSP)](https://microsoft.github.io/language-server-protocol/) server; +* An LSP client for your editor. + +There are more details about our approach [in this blog post](https://4ta.uk/p/shaking-up-the-ide). + +## Features + +`ghcide` already exports the following features via the lsp protocol: + +| Feature | LSP name | +| - | - | +| Display error messages (parse errors, typecheck errors, etc.) and enabled warnings. | diagnostics | +| Go to definition in local package | definition | +| Display type and source module of values | hover | +| Remove redundant imports, replace suggested typos for values and module imports, fill type holes, insert missing type signatures, add suggested ghc extensions | codeAction (quickfix) | + + +## Limitations to Multi-Component support + +`ghcide` supports loading multiple components into the same session so that +features such as go-to definition work across components. However, there are +some limitations to this. + +1. You will get much better results currently manually specifying the hie.yaml file. +Until tools like cabal and stack provide the right interface to support multi-component +projects, it is always advised to specify explicitly how your project partitions. +2. Cross-component features only work if you have loaded at least one file +from each component. +3. There is a known issue where if you have three components, such that A depends on B which depends on C +then if you load A and C into the session but not B then under certain situations you +can get strange errors about a type coming from two different places. See [this repo](https://github.com/fendor/ghcide-bad-interface-files) for +a simple reproduction of the bug. + +## Using it + +`ghcide` is not an end-user tool, [don't use `ghcide`](https://neilmitchell.blogspot.com/2020/09/dont-use-ghcide-anymore-directly.html) directly (more about the rationale [here](https://github.com/haskell/ghcide/pull/939)). + + [`haskell-language-server`](http://github.com/haskell/haskell-language-server) is an LSP server built on top of `ghcide` with additional features and a user friendly deployment model. To get it, simply install the [Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) in VS Code, or download prebuilt binaries from the [haskell-language-server](https://github.com/haskell/haskell-language-server) project page. + + +The instructions below are meant for developers interested in setting up ghcide as an LSP server for testing purposes. + +### Install `ghcide` + +#### With Nix + +Note that you need to compile `ghcide` with the same `ghc` as the project you are working on. + +1. If the `ghc` you are using matches the version (or better is) from `nixpkgs` it‘s easiest to use the `ghcide` from `nixpkgs`. You can do so via + ``` + nix-env -iA haskellPackages.ghcide + ``` + or e.g. including `pkgs.haskellPackages.ghcide` in your projects `shell.nix`. + Depending on your `nixpkgs` channel that might not be the newest `ghcide`, though. + +2. 
If your `ghc` does not match nixpkgs you should try the [ghcide-nix repository](https://github.com/cachix/ghcide-nix) + which provides a `ghcide` via the `haskell.nix` infrastructure. + +#### With Cabal or Stack + +First install the `ghcide` binary using `stack` or `cabal`, e.g. + +1. `git clone https://github.com/haskell/ghcide.git` +2. `cd ghcide` +3. `cabal install` or `stack install` (and make sure `~/.local/bin` is on your `$PATH`) + +It's important that `ghcide` is compiled with the same compiler you use to build your projects. + +### Test `ghcide` + +Next, check that `ghcide` is capable of loading your code. Change to the project directory and run `ghcide`, which will try and load everything using the same code as the IDE, but in a way that's much easier to understand. For example, taking the example of [`shake`](https://github.com/ndmitchell/shake), running `ghcide` gives some error messages and warnings before reporting at the end: + +```console +Files that failed: + * .\model\Main.hs + * .\model\Model.hs + * .\model\Test.hs + * .\model\Util.hs + * .\output\docs\Main.hs + * .\output\docs\Part_Architecture_md.hs +Completed (152 worked, 6 failed) +``` + +Of the 158 files in Shake, as of this moment, 152 can be loaded by the IDE, but 6 can't (error messages for the reasons they can't be loaded are given earlier). The failing files are all prototype work or test output, meaning I can confidently use Shake. + +The `ghcide` executable mostly relies on [`hie-bios`](https://github.com/mpickering/hie-bios) to do the difficult work of setting up your GHC environment. If it doesn't work, see [the `hie-bios` manual](https://github.com/mpickering/hie-bios#readme) to get it working. My default fallback is to figure it out by hand and create a `direct` style [`hie.yaml`](https://github.com/ndmitchell/shake/blob/master/hie.yaml) listing the command line arguments to load the project. + +If you can't get `ghcide` working outside the editor, see [this setup troubleshooting guide](docs/Setup.md). Once you have got `ghcide` working outside the editor, the next step is to pick which editor to integrate with. + +### Optimal project setup + +`ghcide` has been designed to handle projects with hundreds or thousands of modules. If `ghci` can handle it, then `ghcide` should be able to handle it. The only caveat is that this currently requires GHC >= 8.8, and that the first time a module is loaded in the editor will trigger generation of support files in the background if those do not already exist. + +### Using with VS Code + +The [Haskell](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) extension has a setting for ghcide. + +### Using with Atom + +You can follow the [instructions](https://github.com/moodmosaic/ide-haskell-ghcide#readme) to install with `apm`. 
+ +### Using with Sublime Text + +* Install [LSP](https://packagecontrol.io/packages/LSP) +* Press Ctrl+Shift+P or Cmd+Shift+P in Sublime Text and search for *Preferences: LSP Settings*, then paste these settings +``` +{ + "clients": + { + "ghcide": + { + "enabled" : true, + "languageId": "haskell", + "command" : ["ghcide", "--lsp"], + "scopes" : ["source.haskell"], + "syntaxes" : ["Packages/Haskell/Haskell.sublime-syntax"] + } + } +} +``` + +### Using with Emacs + +If you don't already have [MELPA](https://melpa.org/#/) package installation configured, visit MELPA [getting started](https://melpa.org/#/getting-started) page to get set up. Then, install [`use-package`](https://melpa.org/#/use-package). + +Now you have a choice of two different Emacs packages which can be used to communicate with the `ghcide` LSP server: + ++ `lsp-ui` ++ `eglot` (requires Emacs 26.1+) + +In each case, you can enable support by adding the shown lines to your `.emacs`: + +#### lsp-ui + +```elisp +;; LSP +(use-package flycheck + :ensure t + :init + (global-flycheck-mode t)) +(use-package yasnippet + :ensure t) +(use-package lsp-mode + :ensure t + :hook (haskell-mode . lsp) + :commands lsp) +(use-package lsp-ui + :ensure t + :commands lsp-ui-mode) +(use-package lsp-haskell + :ensure t + :config + (setq lsp-haskell-process-path-hie "ghcide") + (setq lsp-haskell-process-args-hie '()) + ;; Comment/uncomment this line to see interactions between lsp client/server. + ;;(setq lsp-log-io t) +) +``` + +#### eglot + +````elisp +(use-package eglot + :ensure t + :config + (add-to-list 'eglot-server-programs '(haskell-mode . ("ghcide" "--lsp")))) +```` + +### Using with Vim/Neovim + +#### LanguageClient-neovim +Install [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim) + +Add this to your vim config: +```vim +let g:LanguageClient_rootMarkers = ['*.cabal', 'stack.yaml'] +let g:LanguageClient_serverCommands = { + \ 'rust': ['rls'], + \ 'haskell': ['ghcide', '--lsp'], + \ } +``` + +Refer to `:he LanguageClient` for more details on usage and configuration. + +#### vim-lsp +Install [vim-lsp](https://github.com/prabirshrestha/vim-lsp). + +Add this to your vim config: + +```vim +au User lsp_setup call lsp#register_server({ + \ 'name': 'ghcide', + \ 'cmd': {server_info->['/your/path/to/ghcide', '--lsp']}, + \ 'whitelist': ['haskell'], + \ }) +``` + +To verify it works move your cursor over a symbol and run `:LspHover`. + +### coc.nvim + +Install [coc.nvim](https://github.com/neoclide/coc.nvim) + +Add this to your coc-settings.json (which you can edit with :CocConfig): + +```json +{ + "languageserver": { + "haskell": { + "command": "ghcide", + "args": [ + "--lsp" + ], + "rootPatterns": [ + ".stack.yaml", + ".hie-bios", + "BUILD.bazel", + "cabal.config", + "package.yaml" + ], + "filetypes": [ + "hs", + "lhs", + "haskell" + ] + } + } +} +``` + +Here's a nice article on setting up neovim and coc: [Vim and Haskell in +2019](http://marco-lopes.com/articles/Vim-and-Haskell-in-2019/) (this is actually for haskell-ide, not ghcide) + +Here is a Docker container that pins down the build and configuration for +Neovim and ghcide on a minimal Debian 10 base system: +[docker-ghcide-neovim](https://github.com/carlohamalainen/docker-ghcide-neovim/). 
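Every client above does the same thing underneath: it starts `ghcide --lsp` and speaks LSP over stdin/stdout. When one of these setups misbehaves, it can help to take the editor out of the loop and drive the server from Haskell with the `lsp-test` package, the same library the test suite uses. A rough sketch, assuming `ghcide` is on your `$PATH`, the current directory is the project root, `src/Main.hs` is a module the cradle can load, and `fullCaps` is the capabilities value exported by `lsp-test` (newer releases call it `fullLatestClientCaps`):

```haskell
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where

import Control.Monad.IO.Class (liftIO)
import Language.LSP.Protocol.Types (Position (..))
import Language.LSP.Test

main :: IO ()
main = runSession "ghcide --lsp" fullCaps "." $ do
  -- Open a document and ask for hover information, exactly as an editor would.
  doc <- openDoc "src/Main.hs" "haskell"
  hover <- getHover doc (Position 4 0)
  liftIO (print hover)
```

If this prints sensible hover information but your editor shows nothing, the problem is in the client configuration rather than in `ghcide` itself.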
+ +### SpaceVim + +In the `autocomplete` layer, add the `autocomplete_method` option to force the use of `coc`: + +```toml +[[layers]] + name = 'autocomplete' + auto-completion-return-key-behavior = "complete" + auto-completion-tab-key-behavior = "smart" + [options] + autocomplete_method = "coc" +``` + +Add this to your coc-settings.json (which you can edit with :CocConfig): + +```json +{ + "languageserver": { + "haskell": { + "command": "ghcide", + "args": [ + "--lsp" + ], + "rootPatterns": [ + ".stack.yaml", + ".hie-bios", + "BUILD.bazel", + "cabal.config", + "package.yaml" + ], + "filetypes": [ + "hs", + "lhs", + "haskell" + ] + } + } +} +``` + +This example above describes a setup in which `ghcide` is installed +using `stack install ghcide` within a project. + +### Using with Kakoune + +Install [kak-lsp](https://github.com/ul/kak-lsp). + +Change `kak-lsp.toml` to include this: + +```toml +[language.haskell] +filetypes = ["haskell"] +roots = ["Setup.hs", "stack.yaml", "*.cabal", "cabal.project", "hie.yaml"] +command = "ghcide" +args = ["--lsp"] +``` + +## Hacking on ghcide + +To build and work on `ghcide` itself, you should use cabal, e.g., +running `cabal test` will execute the test suite. You can use `stack test` too, but +note that some tests will fail, and none of the maintainers are currently using `stack`. + +If you are using Nix, there is a Cachix nix-shell cache for all the supported platforms: `cachix use haskell-ghcide`. + +If you are using Windows, you should disable the `auto.crlf` setting and configure your editor to use LF line endings, directly or making it use the existing `.editor-config`. + +If you are chasing down test failures, you can use the tasty-rerun feature by running tests as + + cabal test --test-options"--rerun" + +This writes a log file called `.tasty-rerun-log` of the failures, and only runs those. +See the [tasty-rerun](https://hackage.haskell.org/package/tasty-rerun-1.1.17/docs/Test-Tasty-Ingredients-Rerun.html) documentation for other options. + +If you are touching performance sensitive code, take the time to run a differential +benchmark between HEAD and master using the benchHist script. This assumes that +"master" points to the upstream master. + +Run the benchmarks with `cabal bench`. + +It should take around 15 minutes and the results will be stored in the `bench-results` folder. To interpret the results, see the comments in the `bench/hist/Main.hs` module. + +More details in [bench/README](bench/README.md) + + +## History and relationship to other Haskell IDE's + +The teams behind this project and the [`haskell-ide-engine`](https://github.com/haskell/haskell-ide-engine#readme) have agreed to join forces under the [`haskell-language-server` project](https://github.com/haskell/haskell-language-server), see the [original announcement](https://neilmitchell.blogspot.com/2020/01/one-haskell-ide-to-rule-them-all.html). The technical work is ongoing, with the likely model being that this project serves as the core, while plugins and integrations are kept in the [`haskell-language-server` project](https://github.com/haskell/haskell-language-server). + +The code behind `ghcide` was originally developed by [Digital Asset](https://digitalasset.com/) as part of the [DAML programming language](https://github.com/digital-asset/daml). 
DAML is a smart contract language targeting distributed-ledger runtimes, based on [GHC](https://www.haskell.org/ghc/) with custom language extensions. The DAML programming language has [an IDE](https://webide.daml.com/), and work was done to separate off a reusable Haskell-only IDE (what is now `ghcide`) which the [DAML IDE then builds upon](https://github.com/digital-asset/daml/tree/master/compiler/damlc). Since that time, there have been various [non-Digital Asset contributors](https://github.com/haskell/ghcide/graphs/contributors), in addition to continued investment by Digital Asset. The project has been handed over to Haskell.org as of September 2020. + +The Haskell community [has](https://github.com/DanielG/ghc-mod) [various](https://github.com/chrisdone/intero) [IDE](https://github.com/rikvdkleij/intellij-haskell) [choices](http://leksah.org/), but the one that had been gathering momentum is [`haskell-ide-engine`](https://github.com/haskell/haskell-ide-engine#readme). Our project owes a debt of gratitude to the `haskell-ide-engine`. We reuse libraries from their ecosystem, including [`hie-bios`](https://github.com/mpickering/hie-bios#readme) (a likely future environment setup layer in `haskell-ide-engine`), [`haskell-lsp`](https://github.com/alanz/haskell-lsp#readme) and [`lsp-test`](https://github.com/bubba/lsp-test#readme) (the `haskell-ide-engine` [LSP protocol](https://microsoft.github.io/language-server-protocol/) pieces). We make heavy use of their contributions to GHC itself, in particular the work to make GHC take string buffers rather than files. + +The best summary of the architecture of `ghcide` is available [this talk](https://www.youtube.com/watch?v=cijsaeWNf2E&list=PLxxF72uPfQVRdAsvj7THoys-nVj-oc4Ss) ([slides](https://ndmitchell.com/downloads/slides-making_a_haskell_ide-07_sep_2019.pdf)), given at [MuniHac 2019](https://munihac.de/2019.html). However, since that talk the project has renamed from `hie-core` to `ghcide`, and the repo has moved to [this location](https://github.com/haskell/ghcide/). diff --git a/ghcide/azure-pipelines.yml b/ghcide/azure-pipelines.yml new file mode 100644 index 0000000000..4021f118fc --- /dev/null +++ b/ghcide/azure-pipelines.yml @@ -0,0 +1,18 @@ +# Build master commits +trigger: + batch: false + branches: + include: + - master + - azure* + +# Enable PR triggers that target the master branch +pr: + autoCancel: true # cancel previous builds on push + branches: + include: + - master + +jobs: + - template: ./.azure/linux-stack.yml + - template: ./.azure/windows-stack.yml diff --git a/ghcide/docs/opentelemetry.md b/ghcide/docs/opentelemetry.md new file mode 100644 index 0000000000..49d65d5c2c --- /dev/null +++ b/ghcide/docs/opentelemetry.md @@ -0,0 +1,70 @@ +# Using opentelemetry + +`ghcide` has support for opentelemetry-based tracing. This allows for tracing +the execution of the process, seeing when Shake rules fire and for how long they +run, when LSP messages are received, and (currently WIP) measuring the memory +occupancy of different objects in memory. 
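All of this tracing rides on the GHC eventlog, so ordinary user events written with `Debug.Trace` from `base` show up in the same file as the spans `ghcide` emits through its own tracing layer. A minimal sketch of producing such events; this is plain `base`, not `ghcide`'s internal tracing helpers, and with older GHCs the program must be linked with `-eventlog`, as the build command below does:

```haskell
module Main (main) where

import Control.Concurrent (threadDelay)
import Debug.Trace (traceEventIO, traceMarkerIO)

main :: IO ()
main = do
  traceMarkerIO "session start"   -- a point-in-time marker
  traceEventIO "START typecheck"  -- bracket a region of interest
  threadDelay 100000              -- stand-in for real work
  traceEventIO "STOP typecheck"
```

Running the compiled program with `+RTS -l` writes these events to an `.eventlog` file, which the tooling described below can convert and display.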
+ +## Capture opentelemetry data + +Capturing of opentelemetry data can be enabled by first building ghcide with eventlog support: + +```sh +stack build --ghc-options -eventlog +``` + +Then, you can run `ghcide`, giving it a file to dump eventlog information into. + +```sh +ghcide +RTS -l -ol ghcide.eventlog -RTS +``` + +## Profiling the Shake cache + +The flag `--ot-memory-profiling` profiles the values map repeatedly with 1s pauses in between. + +```sh +ghcide --ot-memory-profiling +RTS -A4G -l -ol ghcide.eventlog -RTS +``` + +*Note:* This option, while functional, is extremely slow. You will notice this because the memory graph in the output will have datapoints spaced apart by a couple of minutes. The nursery must be big enough (-A1G or larger) or the measurements will self-abort. + +Another way to profile the heap is by sending a USR1 signal (`kill -s USR1`) to the process. + +## Viewing with tracy + +After installing `opentelemetry-extra` and `tracy`, you can view the opentelemetry output: + +```sh +eventlog-to-tracy ghcide.eventlog +``` + +If everything has been set up correctly, this should open a tracy window with the tracing data you captured. + +### Installing opentelemetry-extra + +This package includes a number of binaries for converting between the eventlog output and the formats that various opentelemetry viewers (like tracy) can display: + +```sh +cabal install opentelemetry-extra +``` + + + +### Building tracy + +1. Install the dependencies: `pkg-config` and `glfw, freetype, capstone, GTK3`, along + with their header files (`-dev` on most distros. On Arch the header + files are included with the normal packages). +2. Download tracy from https://github.com/wolfpld/tracy +3. `cd` into the directory containing the source you downloaded +4. Build the `import-chrome` and `Tracy` binaries: + ```sh + make -C profiler/build/unix release + make -C import-chrome/build/unix release + ``` +5. Copy the binaries to your `$PATH`: + ```sh + cp profiler/build/unix/Tracy-release ~/.local/bin/Tracy + cp import-chrome/build/unix/import-chrome-release ~/.local/bin/import-chrome + ``` diff --git a/ghcide/exe/Arguments.hs b/ghcide/exe/Arguments.hs new file mode 100644 index 0000000000..627c041970 --- /dev/null +++ b/ghcide/exe/Arguments.hs @@ -0,0 +1,47 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +module Arguments(Arguments(..), getArguments) where + +import Development.IDE (IdeState) +import Development.IDE.Main (Command (..), commandP) +import Ide.Types (IdePlugins) +import Options.Applicative + +data Arguments = Arguments + {argsCwd :: Maybe FilePath + ,argsVersion :: Bool + ,argsShakeProfiling :: Maybe FilePath + ,argsOTMemoryProfiling :: Bool + ,argsTesting :: Bool + ,argsDisableKick :: Bool + ,argsVerifyCoreFile :: Bool + ,argsThreads :: Int + ,argsVerbose :: Bool + ,argsCommand :: Command + ,argsConservativeChangeTracking :: Bool + } + +getArguments :: IdePlugins IdeState -> IO Arguments +getArguments plugins = execParser opts + where + opts = info (arguments plugins <**> helper) + ( fullDesc + <> header "ghcide - the core of a Haskell IDE") + +arguments :: IdePlugins IdeState -> Parser Arguments +arguments plugins = Arguments + <$> optional (strOption $ long "cwd" <> metavar "DIR" <> help "Change to this directory") + <*> switch (long "version" <> help "Show ghcide and GHC versions") + <*> optional (strOption $ long "shake-profiling" <> metavar "DIR" <> help "Dump profiling reports to this directory (env var: GHCIDE_BUILD_PROFILING)") + <*> switch (long "ot-memory-profiling" <> help "Record OpenTelemetry info to the eventlog. Needs the -l RTS flag to have an effect") + <*> switch (long "test" <> help "Enable additional lsp messages used by the testsuite") + <*> switch (long "test-no-kick" <> help "Disable kick. Useful for testing cancellation") + <*> switch (long "verify-core-file" <> help "Verify core trips by roundtripping after serialization. Slow, only useful for testing purposes") + <*> option auto (short 'j' <> help "Number of threads (0: automatic)" <> metavar "NUM" <> value 0 <> showDefault) + <*> switch (short 'd' <> long "verbose" <> help "Include internal events in logging output") + <*> (commandP plugins <|> lspCommand <|> checkCommand) + <*> switch (long "conservative-change-tracking" <> help "disable reactive change tracking (for testing/debugging)") + where + checkCommand = Check <$> many (argument str (metavar "FILES/DIRS...")) + lspCommand = LSP <$ flag' True (long "lsp" <> help "Start talking to an LSP client") diff --git a/ghcide/exe/Main.hs b/ghcide/exe/Main.hs new file mode 100644 index 0000000000..80913da190 --- /dev/null +++ b/ghcide/exe/Main.hs @@ -0,0 +1,138 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE TemplateHaskell #-} + +module Main(main) where + +import Arguments (Arguments (..), + getArguments) +import Control.Monad.IO.Class (liftIO) +import Data.Default (def) +import Data.Function ((&)) +import Data.Version (showVersion) +import Development.GitRev (gitHash) +import Development.IDE.Core.Rules (mainRule) +import qualified Development.IDE.Core.Rules as Rules +import Development.IDE.Core.Tracing (withTelemetryRecorder) +import qualified Development.IDE.Main as IDEMain +import qualified Development.IDE.Monitoring.OpenTelemetry as OpenTelemetry +import qualified Development.IDE.Plugin.HLS.GhcIde as GhcIde +import Development.IDE.Types.Options +import Ide.Logger (LoggingColumn (..), + Pretty (pretty), + Priority (Debug, Error, Info), + WithPriority (WithPriority, priority), + cfilter, + cmapWithPrio, + defaultLayoutOptions, + layoutPretty, + makeDefaultStderrRecorder, + renderStrict) +import qualified Ide.Logger as Logger +import Ide.Plugin.Config (Config (checkParents, checkProject)) +import Ide.PluginUtils (pluginDescToIdePlugins) +import Ide.Types (PluginDescriptor (pluginNotificationHandlers), + defaultPluginDescriptor, + mkPluginNotificationHandler) +import Language.LSP.Protocol.Message as LSP +import Language.LSP.Server as LSP +import Paths_ghcide (version) +import qualified System.Directory.Extra as IO +import System.Environment (getExecutablePath) +import System.Exit (exitSuccess) +import System.Info (compilerVersion) +import System.IO (hPutStrLn, stderr) + +data Log + = LogIDEMain IDEMain.Log + | LogRules Rules.Log + | LogGhcIde GhcIde.Log + +instance Pretty Log where + pretty = \case + LogIDEMain log -> pretty log + LogRules log -> pretty log + LogGhcIde log -> pretty log + +ghcideVersion :: IO String +ghcideVersion = do + path <- getExecutablePath + let gitHashSection = case $(gitHash) of + x | x == "UNKNOWN" -> "" + x -> " (GIT hash: " <> x <> ")" + return $ "ghcide version: " <> showVersion version + <> " (GHC: " <> showVersion compilerVersion + <> ") (PATH: " <> path <> ")" + <> gitHashSection + +main :: IO () +main = withTelemetryRecorder $ \telemetryRecorder -> do + -- stderr recorder just for plugin cli commands + pluginCliRecorder <- + cmapWithPrio pretty + <$> makeDefaultStderrRecorder (Just [ThreadIdColumn, PriorityColumn, DataColumn]) + + let hlsPlugins = pluginDescToIdePlugins (GhcIde.descriptors (cmapWithPrio LogGhcIde pluginCliRecorder)) + -- WARNING: If you write to stdout before runLanguageServer + -- then the language server will not work + Arguments{..} <- getArguments hlsPlugins + + if argsVersion then ghcideVersion >>= putStrLn >> exitSuccess + else hPutStrLn stderr {- see WARNING above -} =<< ghcideVersion + + -- if user uses --cwd option we need to make this path absolute (and set the current directory to it) + argsCwd <- case argsCwd of + Nothing -> IO.getCurrentDirectory + Just root -> IO.setCurrentDirectory root >> IO.getCurrentDirectory + + let minPriority = if argsVerbose then Debug else Info + + docWithPriorityRecorder <- makeDefaultStderrRecorder (Just [PriorityColumn, DataColumn]) + + (lspLogRecorder, cb1) <- Logger.withBacklog Logger.lspClientLogRecorder + (lspMessageRecorder, cb2) <- Logger.withBacklog Logger.lspClientMessageRecorder + -- This plugin just installs a handler for the `initialized` notification, which then + -- picks up the LSP environment and feeds it to our recorders + let lspRecorderPlugin = (defaultPluginDescriptor "LSPRecorderCallback" "Internal plugin") + { 
pluginNotificationHandlers = mkPluginNotificationHandler LSP.SMethod_Initialized $ \_ _ _ _ -> do + env <- LSP.getLspEnv + liftIO $ (cb1 <> cb2) env + } + + let docWithFilteredPriorityRecorder = + (docWithPriorityRecorder & cfilter (\WithPriority{ priority } -> priority >= minPriority)) <> + (lspLogRecorder & cmapWithPrio (renderStrict . layoutPretty defaultLayoutOptions) + & cfilter (\WithPriority{ priority } -> priority >= minPriority)) <> + (lspMessageRecorder & cmapWithPrio (renderStrict . layoutPretty defaultLayoutOptions) + & cfilter (\WithPriority{ priority } -> priority >= Error)) <> + telemetryRecorder + + let recorder = docWithFilteredPriorityRecorder + & cmapWithPrio pretty + + let arguments = + if argsTesting + then IDEMain.testing (cmapWithPrio LogIDEMain recorder) argsCwd hlsPlugins + else IDEMain.defaultArguments (cmapWithPrio LogIDEMain recorder) argsCwd hlsPlugins + + IDEMain.defaultMain (cmapWithPrio LogIDEMain recorder) arguments + { IDEMain.argsProjectRoot = argsCwd + , IDEMain.argCommand = argsCommand + , IDEMain.argsHlsPlugins = IDEMain.argsHlsPlugins arguments <> pluginDescToIdePlugins [lspRecorderPlugin] + + , IDEMain.argsRules = do + mainRule (cmapWithPrio LogRules recorder) def + + , IDEMain.argsThreads = case argsThreads of 0 -> Nothing ; i -> Just (fromIntegral i) + + , IDEMain.argsIdeOptions = \config sessionLoader -> + let defOptions = IDEMain.argsIdeOptions arguments config sessionLoader + in defOptions + { optShakeProfiling = argsShakeProfiling + , optCheckParents = pure $ checkParents config + , optCheckProject = pure $ checkProject config + , optRunSubset = not argsConservativeChangeTracking + , optVerifyCoreFile = argsVerifyCoreFile + } + , IDEMain.argsMonitoring = OpenTelemetry.monitoring + } diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal new file mode 100644 index 0000000000..7dd12f9fef --- /dev/null +++ b/ghcide/ghcide.cabal @@ -0,0 +1,249 @@ +cabal-version: 3.4 +build-type: Simple +category: Development +name: ghcide +version: 2.11.0.0 +license: Apache-2.0 +license-file: LICENSE +author: Digital Asset and Ghcide contributors +maintainer: Ghcide contributors +copyright: Digital Asset and Ghcide contributors 2018-2020 +synopsis: The core of an IDE +description: A library for building Haskell IDE's on top of the GHC API. 
+homepage: + https://github.com/haskell/haskell-language-server/tree/master/ghcide#readme + +bug-reports: https://github.com/haskell/haskell-language-server/issues +tested-with: GHC == {9.12.2, 9.10.1, 9.8.4, 9.6.7} +extra-source-files: + CHANGELOG.md + README.md + +source-repository head + type: git + location: https://github.com/haskell/haskell-language-server.git + +flag pedantic + description: Enable -Werror + default: False + manual: True + +common warnings + ghc-options: + -Werror=incomplete-patterns + -Wall + -Wincomplete-uni-patterns + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + -fno-ignore-asserts + +library + import: warnings + default-language: GHC2021 + build-depends: + , aeson + , array + , async + , base >=4.16 && <5 + , base16-bytestring >=0.1.1 && <1.1 + , binary + , bytestring + , case-insensitive + , co-log-core + , containers + , cryptohash-sha1 >=0.11.100 && <0.12 + , data-default + , deepseq + , dependent-map + , dependent-sum + , Diff ^>=0.5 || ^>=1.0.0 + , directory + , dlist + , enummapset + , exceptions + , extra >=1.7.14 + , filepath + , fingertree + , focus >=1.0.3.2 + , ghc >=9.2 + , ghc-boot + , ghc-boot-th + , ghc-trace-events + , Glob + , haddock-library >=1.8 && <1.12 + , hashable + , hie-bios ^>=0.17.0 + , hiedb ^>= 0.7.0.0 + , hls-graph == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , implicit-hie >= 0.1.4.0 && < 0.1.5 + , lens + , lens-aeson + , list-t + , lsp ^>=2.7 + , lsp-types ^>=2.3 + , mtl + , opentelemetry >=0.6.1 + , optparse-applicative + , os-string + , parallel + , prettyprinter >=1.7 + , prettyprinter-ansi-terminal + , random + , regex-tdfa >=1.3.1.0 + , safe-exceptions + , sorted-list + , sqlite-simple + , stm + , stm-containers + , syb + , text + , text-rope + , time + , transformers + , unliftio >=0.2.6 + , unliftio-core + , unordered-containers >=0.2.10.0 + , vector + + if os(windows) + build-depends: Win32 + + else + build-depends: unix + + default-extensions: + DataKinds + ExplicitNamespaces + LambdaCase + OverloadedStrings + RecordWildCards + ViewPatterns + + hs-source-dirs: src session-loader + exposed-modules: + Control.Concurrent.Strict + Development.IDE + Development.IDE.Core.Actions + Development.IDE.Core.Compile + Development.IDE.Core.Debouncer + Development.IDE.Core.FileStore + Development.IDE.Core.FileUtils + Development.IDE.Core.IdeConfiguration + Development.IDE.Core.OfInterest + Development.IDE.Core.PluginUtils + Development.IDE.Core.PositionMapping + Development.IDE.Core.Preprocessor + Development.IDE.Core.ProgressReporting + Development.IDE.Core.Rules + Development.IDE.Core.RuleTypes + Development.IDE.Core.Service + Development.IDE.Core.Shake + Development.IDE.Core.Tracing + Development.IDE.Core.UseStale + Development.IDE.Core.WorkerThread + Development.IDE.GHC.Compat + Development.IDE.GHC.Compat.Core + Development.IDE.GHC.Compat.CmdLine + Development.IDE.GHC.Compat.Driver + Development.IDE.GHC.Compat.Env + Development.IDE.GHC.Compat.Error + Development.IDE.GHC.Compat.Iface + Development.IDE.GHC.Compat.Logger + Development.IDE.GHC.Compat.Outputable + Development.IDE.GHC.Compat.Parser + Development.IDE.GHC.Compat.Plugins + Development.IDE.GHC.Compat.Units + Development.IDE.GHC.Compat.Util + Development.IDE.GHC.CoreFile + Development.IDE.GHC.Error + Development.IDE.GHC.Orphans + Development.IDE.GHC.Util + Development.IDE.Import.DependencyInformation + Development.IDE.Import.FindImports + Development.IDE.LSP.HoverDefinition + 
Development.IDE.LSP.LanguageServer + Development.IDE.LSP.Notifications + Development.IDE.LSP.Outline + Development.IDE.LSP.Server + Development.IDE.Main + Development.IDE.Main.HeapStats + Development.IDE.Monitoring.OpenTelemetry + Development.IDE.Plugin + Development.IDE.Plugin.Completions + Development.IDE.Plugin.Completions.Types + Development.IDE.Plugin.Completions.Logic + Development.IDE.Plugin.HLS + Development.IDE.Plugin.HLS.GhcIde + Development.IDE.Plugin.Test + Development.IDE.Plugin.TypeLenses + Development.IDE.Session + Development.IDE.Session.Diagnostics + Development.IDE.Session.Implicit + Development.IDE.Spans.AtPoint + Development.IDE.Spans.Common + Development.IDE.Spans.Documentation + Development.IDE.Spans.LocalBindings + Development.IDE.Spans.Pragmas + Development.IDE.Types.Diagnostics + Development.IDE.Types.Exports + Development.IDE.Types.HscEnvEq + Development.IDE.Types.KnownTargets + Development.IDE.Types.Location + Development.IDE.Types.Monitoring + Development.IDE.Types.Options + Development.IDE.Types.Shake + Generics.SYB.GHC + Text.Fuzzy.Parallel + + other-modules: + Development.IDE.Core.FileExists + Development.IDE.GHC.CPP + Development.IDE.GHC.Warnings + Development.IDE.Types.Action + + if flag(pedantic) + ghc-options: + -Werror + +flag executable + description: Build the ghcide executable + default: True + +executable ghcide + import: warnings + default-language: GHC2021 + hs-source-dirs: exe + ghc-options: -threaded -rtsopts "-with-rtsopts=-I0 -A128M -T" + + + -- allow user RTS overrides + -- disable idle GC + -- increase nursery size + -- Enable collection of heap statistics + main-is: Main.hs + build-depends: + , base >=4.16 && <5 + , data-default + , extra + , ghcide + , gitrev + , hls-plugin-api + , lsp + , lsp-types + , optparse-applicative + + other-modules: + Arguments + Paths_ghcide + + autogen-modules: Paths_ghcide + default-extensions: + LambdaCase + OverloadedStrings + RecordWildCards + ViewPatterns + + if !flag(executable) + buildable: False diff --git a/ghcide/img/vscode2.png b/ghcide/img/vscode2.png new file mode 100644 index 0000000000..f17de0aa88 Binary files /dev/null and b/ghcide/img/vscode2.png differ diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs new file mode 100644 index 0000000000..dde1cfdea5 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -0,0 +1,1258 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TypeFamilies #-} + +{-| +The logic for setting up a ghcide session by tapping into hie-bios. +-} +module Development.IDE.Session + (SessionLoadingOptions(..) + ,CacheDirs(..) + ,loadSessionWithOptions + ,getInitialGhcLibDirDefault + ,getHieDbLoc + ,retryOnSqliteBusy + ,retryOnException + ,Log(..) + ,runWithDb + ) where + +-- Unfortunately, we cannot use loadSession with ghc-lib since hie-bios uses +-- the real GHC library and the types are incompatible. Furthermore, when +-- building with ghc-lib we need to make this Haskell agnostic, so no hie-bios! 
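+--
+-- A rough sketch of the flow implemented below (orientation only, not an
+-- exhaustive description): 'loadSessionWithOptions' finds the 'hie.yaml'
+-- responsible for a file, loads the corresponding cradle via hie-bios, turns
+-- the resulting 'ComponentOptions' into 'DynFlags' (see 'setOptions'), and
+-- caches one shared 'HscEnv' per 'hie.yaml' so that all components of a
+-- project can live in a single multi-component session.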
+ +import Control.Concurrent.Strict +import Control.Exception.Safe as Safe +import Control.Monad +import Control.Monad.Extra as Extra +import Control.Monad.IO.Class +import qualified Crypto.Hash.SHA1 as H +import Data.Aeson hiding (Error) +import Data.Bifunctor +import qualified Data.ByteString.Base16 as B16 +import qualified Data.ByteString.Char8 as B +import Data.Default +import Data.Either.Extra +import Data.Function +import Data.Hashable hiding (hash) +import qualified Data.HashMap.Strict as HM +import Data.IORef +import Data.List +import Data.List.Extra as L +import Data.List.NonEmpty (NonEmpty (..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map +import Data.Maybe +import Data.Proxy +import qualified Data.Text as T +import Data.Time.Clock +import Data.Version +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake hiding (Log, knownTargets, + withHieDb) +import qualified Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Compat.CmdLine +import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, + TargetModule, Var, + Warning, getOptions) +import qualified Development.IDE.GHC.Compat.Core as GHC +import Development.IDE.GHC.Compat.Env hiding (Logger) +import Development.IDE.GHC.Compat.Units (UnitId) +import Development.IDE.GHC.Util +import Development.IDE.Graph (Action) +import qualified Development.IDE.Session.Implicit as GhcIde +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Exports +import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import GHC.ResponseFile +import qualified HIE.Bios as HieBios +import qualified HIE.Bios.Cradle.Utils as HieBios +import HIE.Bios.Environment hiding (getCacheDir) +import HIE.Bios.Types hiding (Log) +import qualified HIE.Bios.Types as HieBios +import Ide.Logger (Pretty (pretty), + Priority (Debug, Error, Info, Warning), + Recorder, WithPriority, + cmapWithPrio, logWith, + nest, + toCologActionWithPrio, + vcat, viaShow, (<+>)) +import Ide.Types (SessionLoadingPreferenceConfig (..), + sessionLoading) +import Language.LSP.Protocol.Message +import Language.LSP.Server +import System.Directory +import qualified System.Directory.Extra as IO +import System.FilePath +import System.Info + +import Control.Applicative (Alternative ((<|>))) +import Data.Void + +import Control.Concurrent.STM.Stats (atomically, modifyTVar', + readTVar, writeTVar) +import Control.Concurrent.STM.TQueue +import Control.DeepSeq +import Control.Exception (evaluate) +import Control.Monad.IO.Unlift (MonadUnliftIO) +import Control.Monad.Trans.Cont (ContT (ContT, runContT)) +import Data.Foldable (for_) +import Data.HashMap.Strict (HashMap) +import Data.HashSet (HashSet) +import qualified Data.HashSet as Set +import qualified Data.Set as OS +import Database.SQLite.Simple +import Development.IDE.Core.Tracing (withTrace) +import Development.IDE.Core.WorkerThread (awaitRunInThread, + withWorkerQueue) +import qualified Development.IDE.GHC.Compat.Util as Compat +import Development.IDE.Session.Diagnostics (renderCradleError) +import Development.IDE.Types.Shake (WithHieDb, + WithHieDbShield (..), + toNoFileKey) +import GHC.Data.Graph.Directed +import HieDb.Create +import HieDb.Types +import Ide.PluginUtils (toAbsolute) +import qualified System.Random as Random +import System.Random (RandomGen) +import Text.ParserCombinators.ReadP (readP_to_S) + +import GHC.Driver.Env (hsc_all_home_unit_ids) +import 
GHC.Driver.Errors.Types +import GHC.Types.Error (errMsgDiagnostic, + singleMessage) +import GHC.Unit.State + +#if MIN_VERSION_ghc(9,13,0) +import GHC.Driver.Make (checkHomeUnitsClosed) +#endif + +data Log + = LogSettingInitialDynFlags + | LogGetInitialGhcLibDirDefaultCradleFail !CradleError !FilePath !(Maybe FilePath) !(Cradle Void) + | LogGetInitialGhcLibDirDefaultCradleNone + | LogHieDbRetry !Int !Int !Int !SomeException + | LogHieDbRetriesExhausted !Int !Int !Int !SomeException + | LogHieDbWriterThreadSQLiteError !SQLError + | LogHieDbWriterThreadException !SomeException + | LogInterfaceFilesCacheDir !FilePath + | LogKnownFilesUpdated !(HashMap Target (HashSet NormalizedFilePath)) + | LogMakingNewHscEnv ![UnitId] + | LogDLLLoadError !String + | LogCradlePath !FilePath + | LogCradleNotFound !FilePath + | LogSessionLoadingResult !(Either [CradleError] (ComponentOptions, FilePath, String)) + | LogCradle !(Cradle Void) + | LogNoneCradleFound FilePath + | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) + | LogHieBios HieBios.Log + | LogSessionLoadingChanged +deriving instance Show Log + +instance Pretty Log where + pretty = \case + LogNoneCradleFound path -> + "None cradle found for" <+> pretty path <+> ", ignoring the file" + LogSettingInitialDynFlags -> + "Setting initial dynflags..." + LogGetInitialGhcLibDirDefaultCradleFail cradleError rootDirPath hieYamlPath cradle -> + nest 2 $ + vcat + [ "Couldn't load cradle for ghc libdir." + , "Cradle error:" <+> viaShow cradleError + , "Root dir path:" <+> pretty rootDirPath + , "hie.yaml path:" <+> pretty hieYamlPath + , "Cradle:" <+> viaShow cradle ] + LogGetInitialGhcLibDirDefaultCradleNone -> + "Couldn't load cradle. Cradle not found." + LogHieDbRetry delay maxDelay retriesRemaining e -> + nest 2 $ + vcat + [ "Retrying hiedb action..." + , "delay:" <+> pretty delay + , "maximum delay:" <+> pretty maxDelay + , "retries remaining:" <+> pretty retriesRemaining + , "SQLite error:" <+> pretty (displayException e) ] + LogHieDbRetriesExhausted baseDelay maxDelay retriesRemaining e -> + nest 2 $ + vcat + [ "Retries exhausted for hiedb action." + , "base delay:" <+> pretty baseDelay + , "maximum delay:" <+> pretty maxDelay + , "retries remaining:" <+> pretty retriesRemaining + , "Exception:" <+> pretty (displayException e) ] + LogHieDbWriterThreadSQLiteError e -> + nest 2 $ + vcat + [ "HieDb writer thread SQLite error:" + , pretty (displayException e) ] + LogHieDbWriterThreadException e -> + nest 2 $ + vcat + [ "HieDb writer thread exception:" + , pretty (displayException e) ] + LogInterfaceFilesCacheDir path -> + "Interface files cache directory:" <+> pretty path + LogKnownFilesUpdated targetToPathsMap -> + nest 2 $ + vcat + [ "Known files updated:" + , viaShow $ (HM.map . Set.map) fromNormalizedFilePath targetToPathsMap + ] + LogMakingNewHscEnv inPlaceUnitIds -> + "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) + LogDLLLoadError errorString -> + "Error dynamically loading libm.so.6:" <+> pretty errorString + LogCradlePath path -> + "Cradle path:" <+> pretty path + LogCradleNotFound path -> + vcat + [ "No [cradle](https://github.com/mpickering/hie-bios#hie-bios) found for" <+> pretty path <> "." + , "Proceeding with [implicit cradle](https://hackage.haskell.org/package/implicit-hie)." + , "You should ignore this message, unless you see a 'Multi Cradle: No prefixes matched' error." 
]
+        LogSessionLoadingResult e ->
+            "Session loading result:" <+> viaShow e
+        LogCradle cradle ->
+            "Cradle:" <+> viaShow cradle
+        LogNewComponentCache componentCache ->
+            "New component cache HscEnvEq:" <+> viaShow componentCache
+        LogHieBios msg -> pretty msg
+        LogSessionLoadingChanged ->
+            "Session Loading config changed, reloading the full session."
+
+-- | Bump this version number when making changes to the format of the data stored in hiedb
+hiedbDataVersion :: String
+hiedbDataVersion = "2"
+
+data CacheDirs = CacheDirs
+  { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath}
+
+data SessionLoadingOptions = SessionLoadingOptions
+  { findCradle             :: FilePath -> IO (Maybe FilePath)
+  -- | Load the cradle with an optional 'hie.yaml' location.
+  -- If a 'hie.yaml' is given, use it to load the cradle.
+  -- Otherwise, use the provided project root directory to determine the cradle type.
+  , loadCradle             :: Recorder (WithPriority Log) -> Maybe FilePath -> FilePath -> IO (HieBios.Cradle Void)
+  -- | Given the project name and a set of command line flags,
+  -- return the path for storing generated GHC artifacts,
+  -- or 'Nothing' to respect the cradle setting
+  , getCacheDirs           :: String -> [String] -> IO CacheDirs
+  -- | Return the GHC lib dir to use for the 'unsafeGlobalDynFlags'
+  , getInitialGhcLibDir    :: Recorder (WithPriority Log) -> FilePath -> IO (Maybe LibDir)
+  }
+
+instance Default SessionLoadingOptions where
+    def = SessionLoadingOptions
+        {findCradle          = HieBios.findCradle
+        ,loadCradle          = loadWithImplicitCradle
+        ,getCacheDirs        = getCacheDirsDefault
+        ,getInitialGhcLibDir = getInitialGhcLibDirDefault
+        }
+
+-- | Find the cradle for a given 'hie.yaml' configuration.
+--
+-- If a 'hie.yaml' is given, the cradle is read from the config.
+-- If this config does not comply with the "hie.yaml"
+-- specification, an error is raised.
+--
+-- If no location for "hie.yaml" is provided, the implicit config is used
+-- using the provided root directory for discovering the project.
+-- The implicit config uses different heuristics to determine the type
+-- of the project that may or may not be accurate.
+loadWithImplicitCradle
+  :: Recorder (WithPriority Log)
+  -> Maybe FilePath
+  -- ^ Optional 'hie.yaml' location. Will be used if given.
+  -> FilePath
+  -- ^ Root directory of the project. Required as a fallback
+  -- if no 'hie.yaml' location is given.
+  -> IO (HieBios.Cradle Void)
+loadWithImplicitCradle recorder mHieYaml rootDir = do
+  let logger = toCologActionWithPrio (cmapWithPrio LogHieBios recorder)
+  case mHieYaml of
+    Just yaml -> HieBios.loadCradle logger yaml
+    Nothing   -> GhcIde.loadImplicitCradle logger rootDir
+
+getInitialGhcLibDirDefault :: Recorder (WithPriority Log) -> FilePath -> IO (Maybe LibDir)
+getInitialGhcLibDirDefault recorder rootDir = do
+  hieYaml <- findCradle def (rootDir </> "a")
+  cradle <- loadCradle def recorder hieYaml rootDir
+  libDirRes <- getRuntimeGhcLibDir cradle
+  case libDirRes of
+      CradleSuccess libdir -> pure $ Just $ LibDir libdir
+      CradleFail err -> do
+        logWith recorder Error $ LogGetInitialGhcLibDirDefaultCradleFail err rootDir hieYaml cradle
+        pure Nothing
+      CradleNone -> do
+        logWith recorder Warning LogGetInitialGhcLibDirDefaultCradleNone
+        pure Nothing
+
+-- | If the action throws an exception that satisfies the predicate then we sleep for
+-- a duration determined by the random exponential backoff formula,
+-- `uniformRandom(0, min (maxDelay, (baseDelay * 2) ^ retryAttempt))`, and try
+-- the action again for a maximum of `maxRetryCount` times.
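+--
+-- As a rough illustration (using the defaults that 'retryOnSqliteBusy' below
+-- passes in: 1 ms base delay, 1 s maximum delay, 10 retries): the first retry
+-- sleeps for a duration drawn uniformly from [0, 2 ms], the second from
+-- [0, 4 ms], the third from [0, 8 ms], and so on, until the upper bound is
+-- capped at 1 s or the retry budget is exhausted.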
+-- `MonadIO`, `MonadCatch` are used as constraints because there are a few +-- HieDb functions that don't return IO values. +retryOnException + :: (MonadIO m, MonadCatch m, RandomGen g, Exception e) + => (e -> Maybe e) -- ^ only retry on exception if this predicate returns Just + -> Recorder (WithPriority Log) + -> Int -- ^ maximum backoff delay in microseconds + -> Int -- ^ base backoff delay in microseconds + -> Int -- ^ maximum number of times to retry + -> g -- ^ random number generator + -> m a -- ^ action that may throw exception + -> m a +retryOnException exceptionPred recorder maxDelay !baseDelay !maxTimesRetry rng action = do + result <- tryJust exceptionPred action + case result of + Left e + | maxTimesRetry > 0 -> do + -- multiply by 2 because baseDelay is midpoint of uniform range + let newBaseDelay = min maxDelay (baseDelay * 2) + let (delay, newRng) = Random.randomR (0, newBaseDelay) rng + let newMaxTimesRetry = maxTimesRetry - 1 + liftIO $ do + logWith recorder Warning $ LogHieDbRetry delay maxDelay newMaxTimesRetry (toException e) + threadDelay delay + retryOnException exceptionPred recorder maxDelay newBaseDelay newMaxTimesRetry newRng action + + | otherwise -> do + liftIO $ do + logWith recorder Warning $ LogHieDbRetriesExhausted baseDelay maxDelay maxTimesRetry (toException e) + throwIO e + + Right b -> pure b + +-- | in microseconds +oneSecond :: Int +oneSecond = 1000000 + +-- | in microseconds +oneMillisecond :: Int +oneMillisecond = 1000 + +-- | default maximum number of times to retry hiedb call +maxRetryCount :: Int +maxRetryCount = 10 + +retryOnSqliteBusy :: (MonadIO m, MonadCatch m, RandomGen g) + => Recorder (WithPriority Log) -> g -> m a -> m a +retryOnSqliteBusy recorder rng action = + let isErrorBusy e + | SQLError{ sqlError = ErrorBusy } <- e = Just e + | otherwise = Nothing + in + retryOnException isErrorBusy recorder oneSecond oneMillisecond maxRetryCount rng action + +makeWithHieDbRetryable :: RandomGen g => Recorder (WithPriority Log) -> g -> HieDb -> WithHieDb +makeWithHieDbRetryable recorder rng hieDb f = + retryOnSqliteBusy recorder rng (f hieDb) + +-- | Wraps `withHieDb` to provide a database connection for reading, and a `HieWriterChan` for +-- writing. Actions are picked off one by one from the `HieWriterChan` and executed in serial +-- by a worker thread using a dedicated database connection. +-- This is done in order to serialize writes to the database, or else SQLite becomes unhappy +-- +-- Also see Note [Serializing runs in separate thread] +runWithDb :: Recorder (WithPriority Log) -> FilePath -> ContT () IO (WithHieDbShield, IndexQueue) +runWithDb recorder fp = ContT $ \k -> do + -- use non-deterministic seed because maybe multiple HLS start at same time + -- and send bursts of requests + rng <- Random.newStdGen + -- Delete the database if it has an incompatible schema version + retryOnSqliteBusy + recorder + rng + (withHieDb fp (const $ pure ()) `Safe.catch` \IncompatibleSchemaVersion{} -> removeFile fp) + + withHieDb fp $ \writedb -> do + -- the type signature is necessary to avoid concretizing the tyvar + -- e.g. 
`withWriteDbRetryable initConn` without type signature will
+    -- instantiate tyvar `a` to `()`
+    let withWriteDbRetryable :: WithHieDb
+        withWriteDbRetryable = makeWithHieDbRetryable recorder rng writedb
+    withWriteDbRetryable initConn
+
+
+    -- Clear the index of any files that might have been deleted since the last run
+    _ <- withWriteDbRetryable deleteMissingRealFiles
+    _ <- withWriteDbRetryable garbageCollectTypeNames
+
+    runContT (withWorkerQueue (writer withWriteDbRetryable)) $ \chan ->
+      withHieDb fp (\readDb -> k (WithHieDbShield $ makeWithHieDbRetryable recorder rng readDb, chan))
+  where
+    writer withHieDbRetryable l = do
+        -- TODO: probably should let exceptions be caught/logged/handled by top level handler
+        l withHieDbRetryable
+          `Safe.catch` \e@SQLError{} -> do
+            logWith recorder Error $ LogHieDbWriterThreadSQLiteError e
+          `Safe.catchAny` \f -> do
+            logWith recorder Error $ LogHieDbWriterThreadException f
+
+
+getHieDbLoc :: FilePath -> IO FilePath
+getHieDbLoc dir = do
+  let db = intercalate "-" [dirHash, takeBaseName dir, Compat.ghcVersionStr, hiedbDataVersion] <.> "hiedb"
+      dirHash = B.unpack $ B16.encode $ H.hash $ B.pack dir
+  cDir <- IO.getXdgDirectory IO.XdgCache cacheDir
+  createDirectoryIfMissing True cDir
+  pure (cDir </> db)
+
+-- | Given a root directory, return a Shake 'Action' which sets up an
+-- 'IdeGhcSession' given a file.
+-- Some of the many things this does:
+--
+-- * Find the cradle for the file
+-- * Get the session options,
+-- * Get the GHC lib directory
+-- * Make sure the GHC compile-time and runtime versions match
+-- * Restart the Shake session
+--
+-- This is the key function which implements multi-component support. All
+-- components mapping to the same hie.yaml file are mapped to the same
+-- HscEnv which is updated as new components are discovered.
+
+loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession)
+loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do
+  let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory]
+  cradle_files <- newIORef []
+  -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file
+  hscEnvs <- newVar Map.empty :: IO (Var HieMap)
+  -- Mapping from a Filepath to HscEnv
+  fileToFlags <- newVar Map.empty :: IO (Var FlagsMap)
+  -- Mapping from a Filepath to its 'hie.yaml' location.
+  -- Should hold the same Filepaths as 'fileToFlags', otherwise
+  -- they are inconsistent. So, everywhere you modify 'fileToFlags',
+  -- you have to modify 'filesMap' as well.
+  filesMap <- newVar HM.empty :: IO (Var FilesMap)
+  -- Version of the mappings above
+  version <- newVar 0
+  biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig))
+  let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar version)
+  -- This caches the mapping from Mod.hs -> hie.yaml
+  cradleLoc <- liftIO $ memoIO $ \v -> do
+      res <- findCradle v
+      -- Sometimes we get C:, sometimes we get c:, and sometimes we get a relative path
+      -- try and normalise that
+      -- e.g.
see https://github.com/haskell/ghcide/issues/126 + let res' = toAbsolutePath <$> res + return $ normalise <$> res' + + return $ do + clientConfig <- getClientConfigAction + extras@ShakeExtras{restartShakeSession, ideNc, knownTargetsVar, lspEnv + } <- getShakeExtras + let invalidateShakeCache = do + void $ modifyVar' version succ + return $ toNoFileKey GhcSessionIO + + IdeOptions{ optTesting = IdeTesting optTesting + , optCheckProject = getCheckProject + , optExtensions + , optHaddockParse + } <- getIdeOptions + + -- populate the knownTargetsVar with all the + -- files in the project so that `knownFiles` can learn about them and + -- we can generate a complete module graph + let extendKnownTargets newTargets = do + knownTargets <- concatForM newTargets $ \TargetDetails{..} -> + case targetTarget of + TargetFile f -> do + -- If a target file has multiple possible locations, then we + -- assume they are all separate file targets. + -- This happens with '.hs-boot' files if they are in the root directory of the project. + -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. + -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the + -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. + -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either + -- + -- * TargetFile Foo.hs-boot + -- * TargetModule Foo + -- + -- If we don't generate a TargetFile for each potential location, we will only have + -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' + -- and also not find 'TargetModule Foo'. + fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) + TargetModule _ -> do + found <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + return [(targetTarget, Set.fromList found)] + hasUpdate <- atomically $ do + known <- readTVar knownTargetsVar + let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) + hasUpdate = if known /= known' then Just (unhashed known') else Nothing + writeTVar knownTargetsVar known' + pure hasUpdate + for_ hasUpdate $ \x -> + logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) + return $ toNoFileKey GetKnownTargets + + -- Create a new HscEnv from a hieYaml root and a set of options + let packageSetup :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) + -> IO ([ComponentInfo], [ComponentInfo]) + packageSetup (hieYaml, cfp, opts, libDir) = do + -- Parse DynFlags for the newly discovered component + hscEnv <- emptyHscEnv ideNc libDir + newTargetDfs <- evalGhcEnv hscEnv $ setOptions optHaddockParse cfp opts (hsc_dflags hscEnv) rootDir + let deps = componentDependencies opts ++ maybeToList hieYaml + dep_info <- getDependencyInfo (fmap toAbsolutePath deps) + -- Now lookup to see whether we are combining with an existing HscEnv + -- or making a new one. 
The lookup returns the HscEnv and a list of + -- information about other components loaded into the HscEnv + -- (unitId, DynFlag, Targets) + modifyVar hscEnvs $ \m -> do + -- Just deps if there's already an HscEnv + -- Nothing is it's the first time we are making an HscEnv + let oldDeps = Map.lookup hieYaml m + let -- Add the raw information about this component to the list + -- We will modify the unitId and DynFlags used for + -- compilation but these are the true source of + -- information. + new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newTargetDfs + all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps + -- Get all the unit-ids for things in this component + _inplace = map rawComponentUnitId $ NE.toList all_deps + + all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do + let prefix = show rawComponentUnitId + -- See Note [Avoiding bad interface files] + let cacheDirOpts = componentOptions opts + cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts + processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags + -- The final component information, mostly the same but the DynFlags don't + -- contain any packages which are also loaded + -- into the same component. + pure $ ComponentInfo + { componentUnitId = rawComponentUnitId + , componentDynFlags = processed_df + , componentTargets = rawComponentTargets + , componentFP = rawComponentFP + , componentCOptions = rawComponentCOptions + , componentDependencyInfo = rawComponentDependencyInfo + } + -- Modify the map so the hieYaml now maps to the newly updated + -- ComponentInfos + -- Returns + -- . The information for the new component which caused this cache miss + -- . The modified information (without -inplace flags) for + -- existing packages + let (new,old) = NE.splitAt (NE.length new_deps) all_deps' + pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + + + let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) + -> IO (IdeResult HscEnvEq,[FilePath]) + session args@(hieYaml, _cfp, _opts, _libDir) = do + (new_deps, old_deps) <- packageSetup args + + -- For each component, now make a new HscEnvEq which contains the + -- HscEnv for the hie.yaml file but the DynFlags for that component + -- For GHC's supporting multi component sessions, we create a shared + -- HscEnv but set the active component accordingly + hscEnv <- emptyHscEnv ideNc _libDir + let new_cache = newComponentCache recorder optExtensions _cfp hscEnv + all_target_details <- new_cache old_deps new_deps + + this_dep_info <- getDependencyInfo $ maybeToList hieYaml + let (all_targets, this_flags_map, this_options) + = case HM.lookup _cfp flags_map' of + Just this -> (all_targets', flags_map', this) + Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) + where all_targets' = concat all_target_details + flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + this_target_details = TargetDetails (TargetFile _cfp) this_error_env this_dep_info [_cfp] + this_flags = (this_error_env, this_dep_info) + this_error_env = ([this_error], Nothing) + this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) _cfp + (T.unlines + [ "No cradle target found. Is this file listed in the targets of your cradle?" 
+ , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" + ]) + Nothing + + void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map + void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) + -- The VFS doesn't change on cradle edits, re-use the old one. + -- Invalidate all the existing GhcSession build nodes by restarting the Shake session + keys2 <- invalidateShakeCache + restartShakeSession VFSUnmodified "new component" [] $ do + keys1 <- extendKnownTargets all_targets + return [keys1, keys2] + + -- Typecheck all files in the project on startup + checkProject <- getCheckProject + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) + void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + + return $ second Map.keys this_options + + let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + consultCradle hieYaml cfp = do + let lfpLog = makeRelative rootDir cfp + logWith recorder Info $ LogCradlePath lfpLog + when (isNothing hieYaml) $ + logWith recorder Warning $ LogCradleNotFound lfpLog + cradle <- loadCradle recorder hieYaml rootDir + when optTesting $ mRunLspT lspEnv $ + sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) + + -- Display a user friendly progress message here: They probably don't know what a cradle is + let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) + <> " (for " <> T.pack lfpLog <> ")" + eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ + withTrace "Load cradle" $ \addTag -> do + addTag "file" lfpLog + old_files <- readIORef cradle_files + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp old_files + addTag "result" (show res) + return res + + logWith recorder Debug $ LogSessionLoadingResult eopts + case eopts of + -- The cradle gave us some options so get to work turning them + -- into and HscEnv. 
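+          -- Sanity check before building a session: the GHC version the cradle
+          -- reports at runtime must match the GHC version ghcide was compiled
+          -- with, otherwise we refuse and report 'GhcVersionMismatch' below.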
+ Right (opts, libDir, version) -> do + let compileTime = fullCompilerVersion + case reverse $ readP_to_S parseVersion version of + [] -> error $ "GHC version could not be parsed: " <> version + ((runTime, _):_) + | compileTime == runTime -> do + atomicModifyIORef' cradle_files (\xs -> (cfp:xs,())) + session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) + -- Failure case, either a cradle error or the none cradle + Left err -> do + dep_info <- getDependencyInfo (maybeToList hieYaml) + let ncfp = toNormalizedFilePath' cfp + let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) + void $ modifyVar' fileToFlags $ + Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) + void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + + let + -- | We allow users to specify a loading strategy. + -- Check whether this config was changed since the last time we have loaded + -- a session. + -- + -- If the loading configuration changed, we likely should restart the session + -- in its entirety. + didSessionLoadingPreferenceConfigChange :: IO Bool + didSessionLoadingPreferenceConfigChange = do + mLoadingConfig <- readVar biosSessionLoadingVar + case mLoadingConfig of + Nothing -> do + writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure False + Just loadingConfig -> do + writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure (loadingConfig /= sessionLoading clientConfig) + + -- This caches the mapping from hie.yaml + Mod.hs -> [String] + -- Returns the Ghc session and the cradle dependencies + let sessionOpts :: (Maybe FilePath, FilePath) + -> IO (IdeResult HscEnvEq, [FilePath]) + sessionOpts (hieYaml, file) = do + Extra.whenM didSessionLoadingPreferenceConfigChange $ do + logWith recorder Info LogSessionLoadingChanged + -- If the dependencies are out of date then clear both caches and start + -- again. + modifyVar_ fileToFlags (const (return Map.empty)) + modifyVar_ filesMap (const (return HM.empty)) + -- Don't even keep the name cache, we start from scratch here! + modifyVar_ hscEnvs (const (return Map.empty)) + + v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags + let cfp = toAbsolutePath file + case HM.lookup (toNormalizedFilePath' cfp) v of + Just (opts, old_di) -> do + deps_ok <- checkDependencyInfo old_di + if not deps_ok + then do + -- If the dependencies are out of date then clear both caches and start + -- again. + modifyVar_ fileToFlags (const (return Map.empty)) + modifyVar_ filesMap (const (return HM.empty)) + -- Keep the same name cache + modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) + consultCradle hieYaml cfp + else return (opts, Map.keys old_di) + Nothing -> consultCradle hieYaml cfp + + -- The main function which gets options for a file. We only want one of these running + -- at a time. Therefore the IORef contains the currently running cradle, if we try + -- to get some more options then we wait for the currently running action to finish + -- before attempting to do so. 
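+      -- (Concretely, every call to 'getOptions' is funnelled through
+      -- 'awaitRunInThread' on the session loader queue 'que' at the bottom of
+      -- this function, so at most one cradle load runs at a time; see
+      -- Note [Serializing runs in separate thread].)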
+ let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + getOptions file = do + let ncfp = toNormalizedFilePath' (toAbsolutePath file) + cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap + hieYaml <- cradleLoc file + let + -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action + -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. + -- The GlobPattern of a FileSystemWatcher can be absolute or relative. + -- We use the absolute one because it is supported by more LSP clients. + -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. + absolutePathsCradleDeps (eq, deps) + = (eq, fmap toAbsolutePath deps) + (absolutePathsCradleDeps <$> sessionOpts (join cachedHieYamlLocation <|> hieYaml, file)) `Safe.catch` \e -> + return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) + + returnWithVersion $ \file -> do + -- see Note [Serializing runs in separate thread] + awaitRunInThread que $ getOptions file + +-- | Run the specific cradle on a specific FilePath via hie-bios. +-- This then builds dependencies or whatever based on the cradle, gets the +-- GHC options/dynflags needed for the session and the GHC library directory +cradleToOptsAndLibDir :: Recorder (WithPriority Log) -> SessionLoadingPreferenceConfig -> Cradle Void -> FilePath -> [FilePath] + -> IO (Either [CradleError] (ComponentOptions, FilePath, String)) +cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do + -- let noneCradleFoundMessage :: FilePath -> T.Text + -- noneCradleFoundMessage f = T.pack $ "none cradle found for " <> f <> ", ignoring the file" + -- Start off by getting the session options + logWith recorder Debug $ LogCradle cradle + cradleRes <- HieBios.getCompilerOptions file loadStyle cradle + case cradleRes of + CradleSuccess r -> do + -- Now get the GHC lib dir + libDirRes <- getRuntimeGhcLibDir cradle + versionRes <- getRuntimeGhcVersion cradle + case liftA2 (,) libDirRes versionRes of + -- This is the successful path + (CradleSuccess (libDir, version)) -> pure (Right (r, libDir, version)) + CradleFail err -> return (Left [err]) + CradleNone -> do + logWith recorder Info $ LogNoneCradleFound file + return (Left []) + + CradleFail err -> return (Left [err]) + CradleNone -> do + logWith recorder Info $ LogNoneCradleFound file + return (Left []) + + where + loadStyle = case loadConfig of + PreferSingleComponentLoading -> LoadFile + PreferMultiComponentLoading -> LoadWithContext old_fps + +emptyHscEnv :: NameCache -> FilePath -> IO HscEnv +emptyHscEnv nc libDir = do + -- We call setSessionDynFlags so that the loader is initialised + -- We need to do this before we call initUnits. 
+  env <- runGhc (Just libDir) $
+    getSessionDynFlags >>= setSessionDynFlags >> getSession
+  pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env)
+
+data TargetDetails = TargetDetails
+  {
+    targetTarget    :: !Target,
+    targetEnv       :: !(IdeResult HscEnvEq),
+    targetDepends   :: !DependencyInfo,
+    targetLocations :: ![NormalizedFilePath]
+  }
+
+fromTargetId :: [FilePath]          -- ^ import paths
+             -> [String]            -- ^ extensions to consider
+             -> TargetId
+             -> IdeResult HscEnvEq
+             -> DependencyInfo
+             -> IO [TargetDetails]
+-- For a target module we consider all the import paths
+fromTargetId is exts (GHC.TargetModule modName) env dep = do
+    let fps = [i </> moduleNameSlashes modName -<.> ext <> boot
+              | ext <- exts
+              , i <- is
+              , boot <- ["", "-boot"]
+              ]
+    let locs = fmap toNormalizedFilePath' fps
+    return [TargetDetails (TargetModule modName) env dep locs]
+-- For a 'TargetFile' we consider all the possible module names
+fromTargetId _ _ (GHC.TargetFile f _) env deps = do
+    let nf = toNormalizedFilePath' f
+    let other
+          | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf)
+          | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot")
+    return [TargetDetails (TargetFile nf) env deps [nf, other]]
+
+toFlagsMap :: TargetDetails -> [(NormalizedFilePath, (IdeResult HscEnvEq, DependencyInfo))]
+toFlagsMap TargetDetails{..} =
+    [ (l, (targetEnv, targetDepends)) | l <- targetLocations]
+
+
+setNameCache :: NameCache -> HscEnv -> HscEnv
+setNameCache nc hsc = hsc { hsc_NC = nc }
+
+#if MIN_VERSION_ghc(9,13,0)
+-- Moved back to implementation in GHC.
+checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages]
+checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue
+#else
+-- This function checks the important property that if both p and q are home units
+-- then any dependency of p, which transitively depends on q is also a home unit.
+-- GHC had an implementation of this function, but it was horribly inefficient
+-- We should move back to the GHC implementation on compilers where
+-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included
+checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage)
+checkHomeUnitsClosed' ue home_id_set
+    | OS.null bad_unit_ids = Nothing
+    | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids))
+  where
+    bad_unit_ids = upwards_closure OS.\\ home_id_set
+    rootLoc = mkGeneralSrcSpan (Compat.fsLit "<command line>")
+
+    graph :: Graph (Node UnitId UnitId)
+    graph = graphFromEdgedVerticesUniq graphNodes
+
+    -- downwards closure of graph
+    downwards_closure
+      = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps)
+                                   | (uid, deps) <- Map.toList (allReachable graph node_key)]
+
+    inverse_closure = transposeG downwards_closure
+
+    upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set]
+
+    all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId)
+    all_unit_direct_deps
+      = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue
+      where
+        go rest this this_uis =
+          plusUniqMap_C OS.union
+            (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps))
+            rest
+          where
+            external_depends = mapUniqMap (OS.fromList .
unitDepends) +#if !MIN_VERSION_ghc(9,7,0) + $ listToUniqMap $ Map.toList +#endif + + $ unitInfoMap this_units + this_units = homeUnitEnv_units this_uis + this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] + + graphNodes :: [Node UnitId UnitId] + graphNodes = go OS.empty home_id_set + where + go done todo + = case OS.minView todo of + Nothing -> [] + Just (uid, todo') + | OS.member uid done -> go done todo' + | otherwise -> case lookupUniqMap all_unit_direct_deps uid of + Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) + Just depends -> + let todo'' = (depends OS.\\ done) `OS.union` todo' + in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' +#endif + +-- | Create a mapping from FilePaths to HscEnvEqs +-- This combines all the components we know about into +-- an appropriate session, which is a multi component +-- session on GHC 9.4+ +newComponentCache + :: Recorder (WithPriority Log) + -> [String] -- ^ File extensions to consider + -> NormalizedFilePath -- ^ Path to file that caused the creation of this component + -> HscEnv -- ^ An empty HscEnv + -> [ComponentInfo] -- ^ New components to be loaded + -> [ComponentInfo] -- ^ old, already existing components + -> IO [ [TargetDetails] ] +newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do + let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) + -- When we have multiple components with the same uid, + -- prefer the new one over the old. + -- However, we might have added some targets to the old unit + -- (see special target), so preserve those + unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } + mkMap = Map.fromListWith unionCIs . map (\ci -> (componentUnitId ci, ci)) + let dfs = map componentDynFlags $ Map.elems cis + uids = Map.keys cis + logWith recorder Info $ LogMakingNewHscEnv uids + hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 + Compat.initUnits dfs hsc_env + + let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') + closure_err_to_multi_err err = + ideErrorWithSource + (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp + (T.pack (Compat.printWithoutUniques (singleMessage err))) + (Just (fmap GhcDriverMessage err)) + multi_errs = map closure_err_to_multi_err closure_errs + bad_units = OS.fromList $ concat $ do + x <- map errMsgDiagnostic closure_errs + DriverHomePackagesNotClosed us <- pure x + pure us + isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units + -- Whenever we spin up a session on Linux, dynamically load libm.so.6 + -- in. We need this in case the binary is statically linked, in which + -- case the interactive session will fail when trying to load + -- ghc-prim, which happens whenever Template Haskell is being + -- evaluated or haskell-language-server's eval plugin tries to run + -- some code. If the binary is dynamically linked, then this will have + -- no effect. 
+ -- See https://github.com/haskell/haskell-language-server/issues/221 + -- We need to do this after the call to setSessionDynFlags initialises + -- the loader + when (os == "linux") $ do + initObjLinker hscEnv' + res <- loadDLL hscEnv' "libm.so.6" + case res of + Nothing -> pure () + Just err -> logWith recorder Error $ LogDLLLoadError err + + forM (Map.elems cis) $ \ci -> do + let df = componentDynFlags ci + thisEnv <- do + -- In GHC 9.4 we have multi component support, and we have initialised all the units + -- above. + -- We just need to set the current unit here + pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' + henv <- newHscEnvEq thisEnv + let targetEnv = (if isBad ci then multi_errs else [], Just henv) + targetDepends = componentDependencyInfo ci + logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) + evaluate $ liftRnf rwhnf $ componentTargets ci + + let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends + ctargets <- concatMapM mk (componentTargets ci) + + return (L.nubOrdOn targetTarget ctargets) + +{- Note [Avoiding bad interface files] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Originally, we set the cache directory for the various components once +on the first occurrence of the component. +This works fine if these components have no references to each other, +but you have components that depend on each other, the interface files are +updated for each component. +After restarting the session and only opening the component that depended +on the other, suddenly the interface files of this component are stale. +However, from the point of view of `ghcide`, they do not look stale, +thus, not regenerated and the IDE shows weird errors such as: +``` +typecheckIface +Declaration for Rep_ClientRunFlags +Axiom branches Rep_ClientRunFlags: + Failed to load interface for ‘Distribution.Simple.Flag’ + Use -v to see a list of the files searched for. +``` +and +``` +expectJust checkFamInstConsistency +CallStack (from HasCallStack): + error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes + expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst +``` + +and many more. + +To mitigate this, we set the cache directory for each component dependent +on the components of the current `HscEnv`, additionally to the component options +of the respective components. +Assume two components, c1, c2, where c2 depends on c1, and the options of the +respective components are co1, co2. +If we want to load component c2, followed by c1, we set the cache directory for +each component in this way: + + * Load component c2 + * (Cache Directory State) + - name of c2 + co2 + * Load component c1 + * (Cache Directory State) + - name of c2 + name of c1 + co2 + - name of c2 + name of c1 + co1 + +Overall, we created three cache directories. If we opened c1 first, then we +create a fourth cache directory. +This makes sure that interface files are always correctly updated. + +Since this causes a lot of recompilation, we only update the cache-directory, +if the dependencies of a component have really changed. +E.g. when you load two executables, they can not depend on each other. They +should be filtered out, such that we dont have to re-compile everything. +-} + +-- | Set the cache-directory based on the ComponentOptions and a list of +-- internal packages. +-- For the exact reason, see Note [Avoiding bad interface files]. 
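+--
+-- Purely as an illustration (the real directories come from 'getCacheDirs'):
+-- given @CacheDirs (Just d) (Just d) (Just d)@ with
+-- @d = "~/.cache/ghcide/<component>-<options-hash>"@, this overrides the
+-- user's @hiDir@, @hieDir@ and @objectDir@ settings so that ghcide's generated
+-- files do not conflict with the user's configured output directories (the
+-- same reason given on 'setHiDir' and 'setODir').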
+setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags +setCacheDirs recorder CacheDirs{..} dflags = do + logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir) + pure $ dflags + & maybe id setHiDir hiCacheDir + & maybe id setHieDir hieCacheDir + & maybe id setODir oCacheDir + +-- See Note [Multi Cradle Dependency Info] +type DependencyInfo = Map.Map FilePath (Maybe UTCTime) +type HieMap = Map.Map (Maybe FilePath) [RawComponentInfo] +-- | Maps a "hie.yaml" location to all its Target Filepaths and options. +type FlagsMap = Map.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) +-- | Maps a Filepath to its respective "hie.yaml" location. +-- It aims to be the reverse of 'FlagsMap'. +type FilesMap = HM.HashMap NormalizedFilePath (Maybe FilePath) + +-- This is pristine information about a component +data RawComponentInfo = RawComponentInfo + { rawComponentUnitId :: UnitId + -- | Unprocessed DynFlags. Contains inplace packages such as libraries. + -- We do not want to use them unprocessed. + , rawComponentDynFlags :: DynFlags + -- | All targets of this component. + , rawComponentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , rawComponentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , rawComponentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info]. + , rawComponentDependencyInfo :: DependencyInfo + } + +-- This is processed information about the component, in particular the dynflags will be modified. +data ComponentInfo = ComponentInfo + { componentUnitId :: UnitId + -- | Processed DynFlags. Does not contain inplace packages such as local + -- libraries. Can be used to actually load this Component. + , componentDynFlags :: DynFlags + -- | All targets of this component. + , componentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , componentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , componentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info] + , componentDependencyInfo :: DependencyInfo + } + +-- | Check if any dependency has been modified lately. +checkDependencyInfo :: DependencyInfo -> IO Bool +checkDependencyInfo old_di = do + di <- getDependencyInfo (Map.keys old_di) + return (di == old_di) + +-- Note [Multi Cradle Dependency Info] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Why do we implement our own file modification tracking here? +-- The primary reason is that the custom caching logic is quite complicated and going into shake +-- adds even more complexity and more indirection. I did try for about 5 hours to work out how to +-- use shake rules rather than IO but eventually gave up. + +-- | Computes a mapping from a filepath to its latest modification date. +-- See Note [Multi Cradle Dependency Info] for why we do this ourselves instead +-- of letting shake take care of it. +getDependencyInfo :: [FilePath] -> IO DependencyInfo +getDependencyInfo fs = Map.fromList <$> mapM do_one fs + + where + safeTryIO :: IO a -> IO (Either IOException a) + safeTryIO = Safe.try + + do_one :: FilePath -> IO (FilePath, Maybe UTCTime) + do_one fp = (fp,) .
eitherToMaybe <$> safeTryIO (getModificationTime fp) + +-- | This function removes all the -package flags which refer to packages we +-- are going to deal with ourselves. For example, if an executable depends +-- on a library component, then this function will remove the library flag +-- from the package flags for the executable +-- +-- There are several places in GHC (for example the call to hptInstances in +-- tcRnImports) which assume that all modules in the HPT have the same unit +-- ID. Therefore we create a fake one and give them all the same unit id. +_removeInplacePackages --Only used in ghc < 9.4 + :: UnitId -- ^ fake uid to use for our internal component + -> [UnitId] + -> DynFlags + -> (DynFlags, [UnitId]) +_removeInplacePackages fake_uid us df = (setHomeUnitId_ fake_uid $ + df { packageFlags = ps }, uids) + where + (uids, ps) = Compat.filterInplaceUnits us (packageFlags df) + +-- | Memoize an IO function, with the characteristics: +-- +-- * If multiple people ask for a result simultaneously, make sure you only compute it once. +-- +-- * If there are exceptions, repeatedly reraise them. +-- +-- * If the caller is aborted (async exception) finish computing it anyway. +memoIO :: Ord a => (a -> IO b) -> IO (a -> IO b) +memoIO op = do + ref <- newVar Map.empty + return $ \k -> join $ mask_ $ modifyVar ref $ \mp -> + case Map.lookup k mp of + Nothing -> do + res <- onceFork $ op k + return (Map.insert k res mp, res) + Just res -> return (mp, res) + +unit_flags :: [Flag (CmdLineP [String])] +unit_flags = [defFlag "unit" (SepArg addUnit)] + +addUnit :: String -> EwM (CmdLineP [String]) () +addUnit unit_str = liftEwM $ do + units <- getCmdLineState + putCmdLineState (unit_str : units) + +-- | Throws if package flags are unsatisfiable +setOptions :: GhcMonad m + => OptHaddockParse + -> NormalizedFilePath + -> ComponentOptions + -> DynFlags + -> FilePath -- ^ root dir, see Note [Root Directory] + -> m (NonEmpty (DynFlags, [GHC.Target])) +setOptions haddockOpt cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do + ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) + case NE.nonEmpty units of + Just us -> initMulti us + Nothing -> do + (df, targets) <- initOne (map unLoc theOpts') + -- A special target for the file which caused this wonderful + -- component to be created. In case the cradle doesn't list all the targets for + -- the component, in which case things will be horribly broken anyway. + -- + -- When we have a singleComponent that is caused to be loaded due to a + -- file, we assume the file is part of that component. This is useful + -- for bare GHC sessions, such as many of the ones used in the testsuite + -- + -- We don't do this when we have multiple components, because each + -- component better list all targets or there will be anarchy. + -- It is difficult to know which component to add our file to in + -- that case. + -- Multi unit arguments are likely to come from cabal, which + -- does list all targets. + -- + -- If we don't end up with a target for the current file in the end, then + -- we will report it as an error for that file + let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) + let special_target = Compat.mkSimpleTarget df abs_fp + pure $ (df, special_target : targets) :| [] + where + initMulti unitArgFiles = + forM unitArgFiles $ \f -> do + args <- liftIO $ expandResponse [f] + -- The response files may contain arguments like "+RTS", + -- and hie-bios doesn't expand the response files of @-unit@ arguments.
+ -- Thus, we need to do the stripping here. + initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args + initOne this_opts = do + (dflags', targets') <- addCmdOpts this_opts dflags + let dflags'' = + case unitIdString (homeUnitId_ dflags') of + -- cabal uses main for the unit id of all executable packages + -- This makes multi-component sessions confused about what + -- options to use for that component. + -- Solution: hash the options and use that as part of the unit id + -- This works because there won't be any dependencies on the + -- executable unit. + "main" -> + let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) + hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) + in setHomeUnitId_ hashed_uid dflags' + _ -> dflags' + + let targets = makeTargetsAbsolute root targets' + root = case workingDirectory dflags'' of + Nothing -> compRoot + Just wdir -> compRoot </> wdir + let dflags''' = + setWorkingDirectory root $ + disableWarningsAsErrors $ + -- disabled, generated directly by ghcide instead + flip gopt_unset Opt_WriteInterface $ + -- disabled, generated directly by ghcide instead + -- also, it can confuse the interface stale check + dontWriteHieFiles $ + setIgnoreInterfacePragmas $ + setBytecodeLinkerOptions $ + enableOptHaddock haddockOpt $ + disableOptimisation $ + Compat.setUpTypedHoles $ + makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory + dflags'' + return (dflags''', targets) + +setIgnoreInterfacePragmas :: DynFlags -> DynFlags +setIgnoreInterfacePragmas df = + gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges + +disableOptimisation :: DynFlags -> DynFlags +disableOptimisation df = updOptLevel 0 df + +-- | We always compile with '-haddock' unless explicitly disabled. +-- +-- This avoids inconsistencies when doing recompilation checking which was +-- observed in https://github.com/haskell/haskell-language-server/issues/4511 +enableOptHaddock :: OptHaddockParse -> DynFlags -> DynFlags +enableOptHaddock HaddockParse d = gopt_set d Opt_Haddock +enableOptHaddock NoHaddockParse d = d + +setHiDir :: FilePath -> DynFlags -> DynFlags +setHiDir f d = + -- override user settings to avoid conflicts leading to recompilation + d { hiDir = Just f} + +setODir :: FilePath -> DynFlags -> DynFlags +setODir f d = + -- override user settings to avoid conflicts leading to recompilation + d { objectDir = Just f} + +getCacheDirsDefault :: String -> [String] -> IO CacheDirs +getCacheDirsDefault prefix opts = do + dir <- Just <$> getXdgDirectory XdgCache (cacheDir </> prefix ++ "-" ++ opts_hash) + return $ CacheDirs dir dir dir + where + -- Create a unique folder per set of different GHC options, assuming that each different set of + -- GHC options will create incompatible interface files.
+ opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) + +-- | Sub directory for the cache path +cacheDir :: String +cacheDir = "ghcide" + +---------------------------------------------------------------------------------------------------- + +data PackageSetupException + = PackageSetupException + { message :: !String + } + | GhcVersionMismatch + { compileTime :: !Version + , runTime :: !Version + } + deriving (Eq, Show, Typeable) + +instance Exception PackageSetupException + +showPackageSetupException :: PackageSetupException -> String +showPackageSetupException GhcVersionMismatch{..} = unwords + ["ghcide compiled against GHC" + ,showVersion compileTime + ,"but currently using" + ,showVersion runTime + ,"\nThis is unsupported, ghcide must be compiled with the same GHC version as the project." + ] +showPackageSetupException PackageSetupException{..} = unwords + [ "ghcide compiled by GHC", showVersion fullCompilerVersion + , "failed to load packages:", message <> "." + , "\nPlease ensure that ghcide is compiled with the same GHC installation as the project."] + +renderPackageSetupException :: FilePath -> PackageSetupException -> FileDiagnostic +renderPackageSetupException fp e = + ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) (toNormalizedFilePath' fp) (T.pack $ showPackageSetupException e) Nothing diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs new file mode 100644 index 0000000000..2890c87966 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -0,0 +1,110 @@ +{-# LANGUAGE DeriveAnyClass #-} + +module Development.IDE.Session.Diagnostics where +import Control.Applicative +import Control.Lens +import Control.Monad +import qualified Data.Aeson as Aeson +import Data.List +import Data.List.Extra (split) +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import GHC.Generics +import qualified HIE.Bios.Cradle as HieBios +import HIE.Bios.Types hiding (Log) +import System.FilePath + +data CradleErrorDetails = + CradleErrorDetails + { cabalProjectFiles :: [FilePath] + -- ^ files related to the cradle error + -- i.e. .cabal, cabal.project, etc. + } deriving (Show, Eq, Ord, Read, Generic, Aeson.ToJSON, Aeson.FromJSON) + +{- | Takes a cradle error, the corresponding cradle and the file path where + the cradle error occurred (of the file we attempted to load). + Depicts the cradle error in a user-friendly way. 
+-} +renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic +renderCradleError cradleError cradle nfp = + let noDetails = + ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp (T.unlines $ map T.pack userFriendlyMessage) Nothing + in + if HieBios.isCabalCradle cradle + then noDetails & fdLspDiagnosticL %~ \diag -> diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}} + else noDetails + where + ms = cradleErrorStderr cradleError + + absDeps = fmap (cradleRootDir cradle </>) (cradleErrorDependencies cradleError) + userFriendlyMessage :: [String] + userFriendlyMessage + | HieBios.isCabalCradle cradle = fromMaybe ms $ fileMissingMessage <|> mkUnknownModuleMessage + | otherwise = ms + + mkUnknownModuleMessage :: Maybe [String] + mkUnknownModuleMessage + | any (isInfixOf "Failed extracting script block:") ms = + Just $ unknownModuleMessage (fromNormalizedFilePath nfp) + | otherwise = Nothing + + fileMissingMessage :: Maybe [String] + fileMissingMessage = + multiCradleErrMessage <$> parseMultiCradleErr ms + +-- | Information included in Multi Cradle error messages +data MultiCradleErr = MultiCradleErr + { mcPwd :: FilePath + , mcFilePath :: FilePath + , mcPrefixes :: [(FilePath, String)] + } deriving (Show) + +-- | Attempt to parse a multi-cradle message +parseMultiCradleErr :: [String] -> Maybe MultiCradleErr +parseMultiCradleErr ms = do + _ <- lineAfter "Multi Cradle: " + wd <- lineAfter "pwd: " + fp <- lineAfter "filepath: " + ps <- prefixes + pure $ MultiCradleErr wd fp ps + + where + lineAfter :: String -> Maybe String + lineAfter pre = listToMaybe $ mapMaybe (stripPrefix pre) ms + + prefixes :: Maybe [(FilePath, String)] + prefixes = do + pure $ mapMaybe tuple ms + + tuple :: String -> Maybe (String, String) + tuple line = do + line' <- surround '(' line ')' + [f, s] <- pure $ split (==',') line' + pure (f, s) + + -- extracts the string surrounded by required characters + surround :: Char -> String -> Char -> Maybe String + surround start s end = do + guard (listToMaybe s == Just start) + guard (listToMaybe (reverse s) == Just end) + pure $ drop 1 $ take (length s - 1) s + +multiCradleErrMessage :: MultiCradleErr -> [String] +multiCradleErrMessage e = + unknownModuleMessage (mcFilePath e) + <> [""] + <> map prefix (mcPrefixes e) + where + prefix (f, r) = f <> " - " <> r + +unknownModuleMessage :: String -> [String] +unknownModuleMessage moduleFileName = + [ "Loading the module '" <> moduleFileName <> "' failed." + , "" + , "It may not be listed in your .cabal file!" + , "Perhaps you need to add `"<> dropExtension (takeFileName moduleFileName) <> "` to other-modules or exposed-modules."
+ , "" + , "For more information, visit: https://cabal.readthedocs.io/en/3.4/developing-packages.html#modules-included-in-the-package" + ] diff --git a/ghcide/session-loader/Development/IDE/Session/Implicit.hs b/ghcide/session-loader/Development/IDE/Session/Implicit.hs new file mode 100644 index 0000000000..c7a6402a9f --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Implicit.hs @@ -0,0 +1,155 @@ +module Development.IDE.Session.Implicit + ( loadImplicitCradle + ) where + + +import Control.Applicative ((<|>)) +import Control.Exception (handleJust) +import Control.Monad +import Control.Monad.IO.Class +import Control.Monad.Trans.Maybe +import Data.Bifunctor +import Data.Functor ((<&>)) +import Data.Maybe +import Data.Void +import System.Directory hiding (findFile) +import System.FilePath +import System.IO.Error + +import Colog.Core (LogAction (..), WithSeverity (..)) +import HIE.Bios.Config +import HIE.Bios.Cradle (defaultCradle, getCradle) +import HIE.Bios.Types hiding (ActionName (..)) + +import Hie.Cabal.Parser +import Hie.Locate +import qualified Hie.Yaml as Implicit + +loadImplicitCradle :: Show a => LogAction IO (WithSeverity Log) -> FilePath -> IO (Cradle a) +loadImplicitCradle l wfile = do + is_dir <- doesDirectoryExist wfile + let wdir | is_dir = wfile + | otherwise = takeDirectory wfile + cfg <- runMaybeT (implicitConfig wdir) + case cfg of + Just bc -> getCradle l absurd bc + Nothing -> return $ defaultCradle l wdir + +-- | Wraps up the cradle inferred by @inferCradleTree@ as a @CradleConfig@ with no dependencies +implicitConfig :: FilePath -> MaybeT IO (CradleConfig a, FilePath) +implicitConfig = (fmap . first) (CradleConfig noDeps) . inferCradleTree + where + noDeps :: [FilePath] + noDeps = [] + + +inferCradleTree :: FilePath -> MaybeT IO (CradleTree a, FilePath) +inferCradleTree start_dir = + maybeItsBios + -- If we have both a config file (cabal.project/stack.yaml) and a work dir + -- (dist-newstyle/.stack-work), prefer that + <|> (cabalExecutable >> cabalConfigDir start_dir >>= \dir -> cabalWorkDir dir >> pure (simpleCabalCradle dir)) + <|> (stackExecutable >> stackConfigDir start_dir >>= \dir -> stackWorkDir dir >> stackCradle dir) + -- If we have a cabal.project OR we have a .cabal and dist-newstyle, prefer cabal + <|> (cabalExecutable >> (cabalConfigDir start_dir <|> cabalFileAndWorkDir) <&> simpleCabalCradle) + -- If we have a stack.yaml, use stack + <|> (stackExecutable >> stackConfigDir start_dir >>= stackCradle) + -- If we have a cabal file, use cabal + <|> (cabalExecutable >> cabalFileDir start_dir <&> simpleCabalCradle) + + where + maybeItsBios = (\wdir -> (Bios (Program $ wdir </> ".hie-bios") Nothing Nothing, wdir)) <$> biosWorkDir start_dir + + cabalFileAndWorkDir = cabalFileDir start_dir >>= (\dir -> cabalWorkDir dir >> pure dir) + +-- | Generate a stack cradle given a filepath. +-- +-- Since we assume there was proof that this file belongs to a stack cradle +-- we look immediately for the relevant @*.cabal@ and @stack.yaml@ files. +-- We do not look for package.yaml, as we assume the corresponding .cabal has +-- been generated already. +-- +-- We parse the @stack.yaml@ to find relevant @*.cabal@ file locations, then +-- we parse the @*.cabal@ files to generate a mapping from @hs-source-dirs@ to +-- component names.
+stackCradle :: FilePath -> MaybeT IO (CradleTree a, FilePath) +stackCradle fp = do + pkgs <- stackYamlPkgs fp + pkgsWithComps <- liftIO $ catMaybes <$> mapM (nestedPkg fp) pkgs + let yaml = fp </> "stack.yaml" + pure $ (,fp) $ case pkgsWithComps of + [] -> Stack (StackType Nothing (Just yaml)) + ps -> StackMulti mempty $ do + Package n cs <- ps + c <- cs + let (prefix, comp) = Implicit.stackComponent n c + pure (prefix, StackType (Just comp) (Just yaml)) + +-- | By default, we generate a simple cabal cradle which is equivalent to the +-- following hie.yaml: +-- +-- @ +-- cradle: +-- cabal: +-- @ +-- +-- Note, this only works reliably for reasonably modern cabal versions >= 3.2. +simpleCabalCradle :: FilePath -> (CradleTree a, FilePath) +simpleCabalCradle fp = (Cabal $ CabalType Nothing Nothing, fp) + +cabalExecutable :: MaybeT IO FilePath +cabalExecutable = MaybeT $ findExecutable "cabal" + +stackExecutable :: MaybeT IO FilePath +stackExecutable = MaybeT $ findExecutable "stack" + +biosWorkDir :: FilePath -> MaybeT IO FilePath +biosWorkDir = findFileUpwards (".hie-bios" ==) + +cabalWorkDir :: FilePath -> MaybeT IO () +cabalWorkDir wdir = do + check <- liftIO $ doesDirectoryExist (wdir </> "dist-newstyle") + unless check $ fail "No dist-newstyle" + +stackWorkDir :: FilePath -> MaybeT IO () +stackWorkDir wdir = do + check <- liftIO $ doesDirectoryExist (wdir </> ".stack-work") + unless check $ fail "No .stack-work" + +cabalConfigDir :: FilePath -> MaybeT IO FilePath +cabalConfigDir = findFileUpwards (\fp -> fp == "cabal.project" || fp == "cabal.project.local") + +cabalFileDir :: FilePath -> MaybeT IO FilePath +cabalFileDir = findFileUpwards (\fp -> takeExtension fp == ".cabal") + +stackConfigDir :: FilePath -> MaybeT IO FilePath +stackConfigDir = findFileUpwards isStack + where + isStack name = name == "stack.yaml" + +-- | Searches upwards for the first directory containing a file to match +-- the predicate. +findFileUpwards :: (FilePath -> Bool) -> FilePath -> MaybeT IO FilePath +findFileUpwards p dir = do + cnts <- + liftIO + $ handleJust + -- Catch permission errors + (\(e :: IOError) -> if isPermissionError e then Just [] else Nothing) + pure + (findFile p dir) + + case cnts of + [] | dir' == dir -> fail "No cabal files" + | otherwise -> findFileUpwards p dir' + _ : _ -> return dir + where dir' = takeDirectory dir + +-- | Sees if any file in the directory matches the predicate +findFile :: (FilePath -> Bool) -> FilePath -> IO [FilePath] +findFile p dir = do + b <- doesDirectoryExist dir + if b then getFiles >>= filterM doesPredFileExist else return [] + where + getFiles = filter p <$> getDirectoryContents dir + doesPredFileExist file = doesFileExist $ dir </> file diff --git a/ghcide/src/Control/Concurrent/Strict.hs b/ghcide/src/Control/Concurrent/Strict.hs new file mode 100644 index 0000000000..842252d51c --- /dev/null +++ b/ghcide/src/Control/Concurrent/Strict.hs @@ -0,0 +1,35 @@ +module Control.Concurrent.Strict + (modifyVar', modifyVarIO' + ,modifyVar, modifyVar_ + ,module Control.Concurrent.Extra + ) where + +import Control.Concurrent.Extra hiding (modifyVar, modifyVar', + modifyVar_) +import qualified Control.Concurrent.Extra as Extra +import Control.Exception (evaluate) +import Control.Monad (void) +import Data.Tuple.Extra (dupe) + +-- | Strict modification that returns the new value +modifyVar' :: Extra.Var a -> (a -> a) -> IO a +modifyVar' var upd = modifyVarIO' var (pure .
upd) + +-- | Strict modification that returns the new value +modifyVarIO' :: Extra.Var a -> (a -> IO a) -> IO a +modifyVarIO' var upd = do + res <- Extra.modifyVar var $ \v -> do + v' <- upd v + pure $ dupe v' + evaluate res + +modifyVar :: Extra.Var a -> (a -> IO (a, b)) -> IO b +modifyVar var upd = do + (new, res) <- Extra.modifyVar var $ \old -> do + (new,res) <- upd old + return (new, (new, res)) + void $ evaluate new + return res + +modifyVar_ :: Extra.Var a -> (a -> IO a) -> IO () +modifyVar_ var upd = void $ modifyVarIO' var upd diff --git a/ghcide/src/Development/IDE.hs b/ghcide/src/Development/IDE.hs new file mode 100644 index 0000000000..8741c98c37 --- /dev/null +++ b/ghcide/src/Development/IDE.hs @@ -0,0 +1,57 @@ +module Development.IDE +( + -- TODO It would be much nicer to enumerate all the exports + -- and organize them in sections + module X + +) where + +import Development.IDE.Core.Actions as X (getAtPoint, + getDefinition, + getTypeDefinition) +import Development.IDE.Core.FileExists as X (getFileExists) +import Development.IDE.Core.FileStore as X (getFileContents, + getFileModTimeContents, + getUriContents) +import Development.IDE.Core.IdeConfiguration as X (IdeConfiguration (..), + isWorkspaceFile) +import Development.IDE.Core.OfInterest as X (getFilesOfInterestUntracked) +import Development.IDE.Core.Rules as X (getClientConfigAction, + getParsedModule, + usePropertyAction) +import Development.IDE.Core.RuleTypes as X +import Development.IDE.Core.Service as X (runAction) +import Development.IDE.Core.Shake as X (FastResult (..), + IdeAction (..), + IdeRule, IdeState, + RuleBody (..), + ShakeExtras, + VFSModified (..), + actionLogger, + define, + defineEarlyCutoff, + defineNoDiagnostics, + getClientConfig, + getPluginConfigAction, + ideLogger, rootDir, + runIdeAction, + shakeExtras, use, + useNoFile, + useNoFile_, + useWithStale, + useWithStaleFast, + useWithStaleFast', + useWithStale_, + use_, uses, uses_) +import Development.IDE.GHC.Compat as X (GhcVersion (..), + ghcVersion) +import Development.IDE.GHC.Error as X +import Development.IDE.GHC.Util as X +import Development.IDE.Graph as X (Action, RuleResult, + Rules, action) +import Development.IDE.Plugin as X +import Development.IDE.Types.Diagnostics as X +import Development.IDE.Types.HscEnvEq as X (HscEnvEq (..), + hscEnv) +import Development.IDE.Types.Location as X +import Ide.Logger as X diff --git a/ghcide/src/Development/IDE/Core/Actions.hs b/ghcide/src/Development/IDE/Core/Actions.hs new file mode 100644 index 0000000000..61614cb0ca --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Actions.hs @@ -0,0 +1,153 @@ +{-# LANGUAGE TypeFamilies #-} +module Development.IDE.Core.Actions +( getAtPoint +, getDefinition +, getTypeDefinition +, getImplementationDefinition +, highlightAtPoint +, refsAtPoint +, workspaceSymbols +, lookupMod +) where + +import Control.Monad.Extra (mapMaybeM) +import Control.Monad.Reader +import Control.Monad.Trans.Maybe +import qualified Data.HashMap.Strict as HM +import Data.Maybe +import qualified Data.Text as T +import Data.Tuple.Extra +import Development.IDE.Core.OfInterest +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat hiding (writeHieFile) +import Development.IDE.Graph +import qualified Development.IDE.Spans.AtPoint as AtPoint +import Development.IDE.Types.HscEnvEq (hscEnv) +import Development.IDE.Types.Location 
+import GHC.Iface.Ext.Types (Identifier) +import qualified HieDb +import Language.LSP.Protocol.Types (DocumentHighlight (..), + SymbolInformation (..), + normalizedFilePathToUri, + uriToNormalizedFilePath) + + +-- | Eventually this will lookup/generate URIs for files in dependencies, but not in the +-- project. Right now, this is just a stub. +lookupMod + :: HieDbWriter -- ^ access the database + -> FilePath -- ^ The `.hie` file we got from the database + -> ModuleName + -> Unit + -> Bool -- ^ Is this file a boot file? + -> MaybeT IdeAction Uri +lookupMod _dbchan _hie_f _mod _uid _boot = MaybeT $ pure Nothing + + +-- IMPORTANT NOTE : make sure all rules `useWithStaleFastMT`d by these have a "Persistent Stale" rule defined, +-- so we can quickly answer as soon as the IDE is opened +-- Even if we don't have persistent information on disk for these rules, the persistent rule +-- should just return an empty result +-- It is imperative that the result of the persistent rule succeed in such a case, or we will +-- block waiting for the rule to be properly computed. + +-- | Try to get hover text for the name under point. +getAtPoint :: NormalizedFilePath -> Position -> IdeAction (Maybe (Maybe Range, [T.Text])) +getAtPoint file pos = runMaybeT $ do + ide <- ask + opts <- liftIO $ getIdeOptionsIO ide + + (hf, mapping) <- useWithStaleFastMT GetHieAst file + env <- hscEnv . fst <$> useWithStaleFastMT GhcSession file + dkMap <- lift $ maybe (DKMap mempty mempty) fst <$> runMaybeT (useWithStaleFastMT GetDocMap file) + + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + MaybeT $ liftIO $ fmap (first (toCurrentRange mapping =<<)) <$> AtPoint.atPoint opts hf dkMap env pos' + +-- | Converts locations in the source code to their current positions, +-- taking into account changes that may have occurred due to edits. +toCurrentLocation + :: PositionMapping + -> NormalizedFilePath + -> Location + -> IdeAction (Maybe Location) +toCurrentLocation mapping file (Location uri range) = + -- The Location we are going to might be in a different + -- file than the one we are calling gotoDefinition from. + -- So we check that the location file matches the file + -- we are in. + if nUri == normalizedFilePathToUri file + -- The Location matches the file, so use the PositionMapping + -- we have. + then pure $ Location uri <$> toCurrentRange mapping range + -- The Location does not match the file, so get the correct + -- PositionMapping and use that instead. + else do + otherLocationMapping <- fmap (fmap snd) $ runMaybeT $ do + otherLocationFile <- MaybeT $ pure $ uriToNormalizedFilePath nUri + useWithStaleFastMT GetHieAst otherLocationFile + pure $ Location uri <$> (flip toCurrentRange range =<< otherLocationMapping) + where + nUri :: NormalizedUri + nUri = toNormalizedUri uri + +-- | Goto Definition. 
+getDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [(Location, Identifier)]) +getDefinition file pos = runMaybeT $ do + ide@ShakeExtras{ withHieDb, hiedbWriter } <- ask + opts <- liftIO $ getIdeOptionsIO ide + (hf, mapping) <- useWithStaleFastMT GetHieAst file + (ImportMap imports, _) <- useWithStaleFastMT GetImportMap file + !pos' <- MaybeT (pure $ fromCurrentPosition mapping pos) + locationsWithIdentifier <- AtPoint.gotoDefinition withHieDb (lookupMod hiedbWriter) opts imports hf pos' + mapMaybeM (\(location, identifier) -> do + fixedLocation <- MaybeT $ toCurrentLocation mapping file location + pure $ Just (fixedLocation, identifier) + ) locationsWithIdentifier + + +getTypeDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [(Location, Identifier)]) +getTypeDefinition file pos = runMaybeT $ do + ide@ShakeExtras{ withHieDb, hiedbWriter } <- ask + opts <- liftIO $ getIdeOptionsIO ide + (hf, mapping) <- useWithStaleFastMT GetHieAst file + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + locationsWithIdentifier <- AtPoint.gotoTypeDefinition withHieDb (lookupMod hiedbWriter) opts hf pos' + mapMaybeM (\(location, identifier) -> do + fixedLocation <- MaybeT $ toCurrentLocation mapping file location + pure $ Just (fixedLocation, identifier) + ) locationsWithIdentifier + +getImplementationDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [Location]) +getImplementationDefinition file pos = runMaybeT $ do + ide@ShakeExtras{ withHieDb, hiedbWriter } <- ask + opts <- liftIO $ getIdeOptionsIO ide + (hf, mapping) <- useWithStaleFastMT GetHieAst file + !pos' <- MaybeT (pure $ fromCurrentPosition mapping pos) + locs <- AtPoint.gotoImplementation withHieDb (lookupMod hiedbWriter) opts hf pos' + traverse (MaybeT . toCurrentLocation mapping file) locs + +highlightAtPoint :: NormalizedFilePath -> Position -> IdeAction (Maybe [DocumentHighlight]) +highlightAtPoint file pos = runMaybeT $ do + (HAR _ hf rf _ _,mapping) <- useWithStaleFastMT GetHieAst file + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + let toCurrentHighlight (DocumentHighlight range t) = flip DocumentHighlight t <$> toCurrentRange mapping range + mapMaybe toCurrentHighlight <$>AtPoint.documentHighlight hf rf pos' + +-- Refs are not an IDE action, so it is OK to be slow and (more) accurate +refsAtPoint :: NormalizedFilePath -> Position -> Action [Location] +refsAtPoint file pos = do + ShakeExtras{withHieDb} <- getShakeExtras + fs <- HM.keys <$> getFilesOfInterestUntracked + asts <- HM.fromList . mapMaybe sequence . zip fs <$> usesWithStale GetHieAst fs + AtPoint.referencesAtPoint withHieDb file pos (AtPoint.FOIReferences asts) + +workspaceSymbols :: T.Text -> IdeAction (Maybe [SymbolInformation]) +workspaceSymbols query = runMaybeT $ do + ShakeExtras{withHieDb} <- ask + res <- liftIO $ withHieDb (\hieDb -> HieDb.searchDef hieDb $ T.unpack query) + pure $ mapMaybe AtPoint.defRowToSymbolInfo res diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs new file mode 100644 index 0000000000..48439e2ff3 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -0,0 +1,1658 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs #-} + +-- | Based on https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/API. 
+-- Given a list of paths to find libraries, and a file to compile, produce a list of 'CoreModule' values. +module Development.IDE.Core.Compile + ( TcModuleResult(..) + , RunSimplifier(..) + , compileModule + , parseModule + , typecheckModule + , computePackageDeps + , addRelativeImport + , mkHiFileResultCompile + , mkHiFileResultNoCompile + , generateObjectCode + , generateByteCode + , generateHieAsts + , writeAndIndexHieFile + , indexHieFile + , writeHiFile + , getModSummaryFromImports + , loadHieFile + , loadInterface + , RecompilationInfo(..) + , loadModulesHome + , getDocsBatch + , lookupName + , mergeEnvs + , ml_core_file + , coreFileToLinkable + , TypecheckHelpers(..) + , sourceTypecheck + , sourceParser + , shareUsages + , setNonHomeFCHook + ) where + +import Control.Concurrent.STM.Stats hiding (orElse) +import Control.DeepSeq (NFData (..), + force, rnf) +import Control.Exception (evaluate) +import Control.Exception.Safe +import Control.Lens hiding (List, pre, + (<.>)) +import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Trans.Except +import qualified Control.Monad.Trans.State.Strict as S +import Data.Aeson (toJSON) +import Data.Bifunctor (first, second) +import Data.Binary +import qualified Data.ByteString as BS +import Data.Coerce +import qualified Data.DList as DL +import Data.Functor +import Data.Generics.Aliases +import Data.Generics.Schemes +import qualified Data.HashMap.Strict as HashMap +import Data.IntMap (IntMap) +import Data.IORef +import Data.List.Extra +import qualified Data.Map.Strict as Map +import Data.Maybe +import Data.Proxy (Proxy (Proxy)) +import qualified Data.Text as T +import Data.Time (UTCTime (..)) +import Data.Tuple.Extra (dupe) +import Debug.Trace +import Development.IDE.Core.FileStore (resetInterfaceStore) +import Development.IDE.Core.Preprocessor +import Development.IDE.Core.ProgressReporting (progressUpdate) +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake +import Development.IDE.Core.Tracing (withTrace) +import qualified Development.IDE.GHC.Compat as Compat +import qualified Development.IDE.GHC.Compat as GHC +import Development.IDE.GHC.Compat.Driver (hscTypecheckRenameWithDiagnostics) +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.GHC.CoreFile +import Development.IDE.GHC.Error +import Development.IDE.GHC.Orphans () +import Development.IDE.GHC.Util +import Development.IDE.GHC.Warnings +import Development.IDE.Import.DependencyInformation +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import GHC (ForeignHValue, + GetDocsFailure (..), + ModLocation (..), + parsedSource) +import qualified GHC.LanguageExtensions as LangExt +import GHC.Serialized +import HieDb hiding (withHieDb) +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types (DiagnosticTag (..)) +import qualified Language.LSP.Server as LSP +import Prelude hiding (mod) +import System.Directory +import System.FilePath +import System.IO.Extra (fixIO, + newTempFileWithin) + +import qualified Data.Set as Set +import qualified GHC as G +import GHC.Core.Lint.Interactive +import GHC.Driver.Config.CoreToStg.Prep +import GHC.Iface.Ext.Types (HieASTs) +import qualified GHC.Runtime.Loader as Loader +import GHC.Tc.Gen.Splice +import GHC.Types.Error +import GHC.Types.ForeignStubs +import GHC.Types.HpcInfo +import GHC.Types.TypeEnv + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if 
MIN_VERSION_ghc(9,7,0) +import Data.Foldable (toList) +import GHC.Unit.Module.Warnings +#else +import Development.IDE.Core.FileStore (shareFilePath) +#endif + +#if MIN_VERSION_ghc(9,10,0) +import Development.IDE.GHC.Compat hiding (assert, + loadInterface, + parseHeader, + parseModule, + tcRnModule, + writeHieFile) +#else +import Development.IDE.GHC.Compat hiding + (loadInterface, + parseHeader, + parseModule, + tcRnModule, + writeHieFile) +#endif + +#if MIN_VERSION_ghc(9,11,0) +import qualified Data.List.NonEmpty as NE +import Data.Time (getCurrentTime) +import GHC.Driver.Env (hsc_all_home_unit_ids) +import GHC.Iface.Ext.Types (NameEntityInfo) +#endif + +#if MIN_VERSION_ghc(9,12,0) +import Development.IDE.Import.FindImports +#endif + +--Simple constants to make sure the source is consistently named +sourceTypecheck :: T.Text +sourceTypecheck = "typecheck" +sourceParser :: T.Text +sourceParser = "parser" + +-- | Given a string buffer, return the string (after preprocessing) and the 'ParsedModule'. +parseModule + :: IdeOptions + -> HscEnv + -> FilePath + -> ModSummary + -> IO (IdeResult ParsedModule) +parseModule IdeOptions{..} env filename ms = + fmap (either (, Nothing) id) $ + runExceptT $ do + (diag, modu) <- parseFileContents env optPreprocessor filename ms + return (diag, Just modu) + + +-- | Given a package identifier, what packages does it depend on +computePackageDeps + :: HscEnv + -> Unit + -> IO (Either [FileDiagnostic] [UnitId]) +computePackageDeps env pkg = do + case lookupUnit env pkg of + Nothing -> + return $ Left + [ ideErrorText + (toNormalizedFilePath' noFilePath) + (T.pack $ "unknown package: " ++ show pkg) + ] + Just pkgInfo -> return $ Right $ unitDepends pkgInfo + +data TypecheckHelpers + = TypecheckHelpers + { getLinkables :: [NormalizedFilePath] -> IO [LinkableResult] -- ^ hls-graph action to get linkables for files + , getModuleGraph :: IO DependencyInformation + } + +typecheckModule :: IdeDefer + -> HscEnv + -> TypecheckHelpers + -> ParsedModule + -> IO (IdeResult TcModuleResult) +typecheckModule (IdeDefer defer) hsc tc_helpers pm = do + let modSummary = pm_mod_summary pm + dflags = ms_hspp_opts modSummary + initialized <- catchSrcErrors (hsc_dflags hsc) "typecheck (initialize plugins)" + (Loader.initializePlugins (hscSetFlags (ms_hspp_opts modSummary) hsc)) + case initialized of + Left errs -> return (errs, Nothing) + Right hscEnv -> do + etcm <- + let + -- TODO: maybe setting ms_hspp_opts is unnecessary? + mod_summary' = modSummary { ms_hspp_opts = hsc_dflags hscEnv} + in + catchSrcErrors (hsc_dflags hscEnv) sourceTypecheck $ do + tcRnModule hscEnv tc_helpers $ demoteIfDefer pm{pm_mod_summary = mod_summary'} + case etcm of + Left errs -> return (errs, Nothing) + Right tcm -> + let addReason diag = + map (Just (diagnosticReason (errMsgDiagnostic diag)),) $ + diagFromErrMsg sourceTypecheck (hsc_dflags hscEnv) diag + errorPipeline = map (unDefer . hideDiag dflags . tagDiag) . 
addReason + diags = concatMap errorPipeline $ Compat.getMessages $ tmrWarnings tcm + deferredError = any fst diags + in + return (map snd diags, Just $ tcm{tmrDeferredError = deferredError}) + where + demoteIfDefer = if defer then demoteTypeErrorsToWarnings else id + +-- | Install hooks to capture the splices as well as the runtime module dependencies +captureSplicesAndDeps :: TypecheckHelpers -> HscEnv -> (HscEnv -> IO a) -> IO (a, Splices, ModuleEnv BS.ByteString) +captureSplicesAndDeps TypecheckHelpers{..} env k = do + splice_ref <- newIORef mempty + dep_ref <- newIORef emptyModuleEnv + res <- k (hscSetHooks (addSpliceHook splice_ref . addLinkableDepHook dep_ref $ hsc_hooks env) env) + splices <- readIORef splice_ref + needed_mods <- readIORef dep_ref + return (res, splices, needed_mods) + where + addLinkableDepHook :: IORef (ModuleEnv BS.ByteString) -> Hooks -> Hooks + addLinkableDepHook var h = h { hscCompileCoreExprHook = Just (compile_bco_hook var) } + + -- We want to record exactly which linkables/modules the typechecker needed at runtime + -- This is useful for recompilation checking. + -- See Note [Recompilation avoidance in the presence of TH] + -- + -- From hscCompileCoreExpr' in GHC + -- To update, copy hscCompileCoreExpr' (the implementation of + -- hscCompileCoreExprHook) verbatim, and add code to extract all the free + -- names in the compiled bytecode, recording the modules that those names + -- come from in the IORef,, as these are the modules on whose implementation + -- we depend. + compile_bco_hook :: IORef (ModuleEnv BS.ByteString) -> HscEnv -> SrcSpan -> CoreExpr + -> IO (ForeignHValue, [Linkable], PkgsLoaded) + compile_bco_hook var hsc_env srcspan ds_expr + = do { let dflags = hsc_dflags hsc_env + + {- Simplify it -} + ; simpl_expr <- simplifyExpr dflags hsc_env ds_expr + + {- Tidy it (temporary, until coreSat does cloning) -} + ; let tidy_expr = tidyExpr emptyTidyEnv simpl_expr + + {- Prepare for codegen -} + ; prepd_expr <- corePrepExpr dflags hsc_env tidy_expr + + {- Lint if necessary -} + ; lintInteractiveExpr "hscCompileExpr" hsc_env prepd_expr + + + ; let iNTERACTIVELoc = G.ModLocation{ ml_hs_file = Nothing, + ml_hi_file = panic "hscCompileCoreExpr':ml_hi_file", + ml_obj_file = panic "hscCompileCoreExpr':ml_obj_file", + ml_dyn_obj_file = panic "hscCompileCoreExpr':ml_dyn_obj_file", + ml_dyn_hi_file = panic "hscCompileCoreExpr':ml_dyn_hi_file", + ml_hie_file = panic "hscCompileCoreExpr':ml_hie_file" + } + ; let ictxt = hsc_IC hsc_env + + ; (binding_id, stg_expr, _, _) <- + myCoreToStgExpr (hsc_logger hsc_env) + (hsc_dflags hsc_env) + ictxt + True -- for bytecode + (icInteractiveModule ictxt) + iNTERACTIVELoc + prepd_expr + + {- Convert to BCOs -} + ; bcos <- byteCodeGen hsc_env + (icInteractiveModule ictxt) + stg_expr + [] Nothing +#if MIN_VERSION_ghc(9,11,0) + [] -- spt_entries +#endif + + -- Exclude wired-in names because we may not have read + -- their interface files, so getLinkDeps will fail + -- All wired-in names are in the base package, which we link + -- by default, so we can safely ignore them here. + + -- Find the linkables for the modules we need + ; let needed_mods = mkUniqSet [ + mod -- We need the whole module for 9.4 because of multiple home units modules may have different unit ids + + | n <- concatMap (uniqDSetToList . 
bcoFreeNames) $ bc_bcos bcos + , not (isWiredInName n) -- Exclude wired-in names + , Just mod <- [nameModule_maybe n] -- Names from other modules + , moduleUnitId mod `elem` home_unit_ids -- Only care about stuff from the home package set + ] + home_unit_ids = + map fst (hugElts $ hsc_HUG hsc_env) + mods_transitive = getTransitiveMods hsc_env needed_mods + + -- If we don't support multiple home units, ModuleNames are sufficient because all the units will be the same + mods_transitive_list = + mapMaybe nodeKeyToInstalledModule $ Set.toList mods_transitive + + ; moduleLocs <- getModuleGraph + ; lbs <- getLinkables [file + | installedMod <- mods_transitive_list + , let file = fromJust $ lookupModuleFile (installedMod { moduleUnit = RealUnit (Definite $ moduleUnit installedMod) }) moduleLocs + ] + ; let hsc_env' = loadModulesHome (map linkableHomeMod lbs) hsc_env + + {- load it -} +#if MIN_VERSION_ghc(9,11,0) + ; bco_time <- getCurrentTime + ; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan $ + Linkable bco_time (icInteractiveModule ictxt) $ NE.singleton $ BCOs bcos +#else + ; (fv_hvs, lbss, pkgs) <- loadDecls (hscInterp hsc_env') hsc_env' srcspan bcos +#endif + ; let hval = (expectJust "hscCompileCoreExpr'" $ lookup (idName binding_id) fv_hvs, lbss, pkgs) + + ; modifyIORef' var (flip extendModuleEnvList [(mi_module $ hm_iface hm, linkableHash lb) | lb <- lbs, let hm = linkableHomeMod lb]) + ; return hval } + + -- TODO: support backpack + nodeKeyToInstalledModule :: NodeKey -> Maybe InstalledModule + -- We shouldn't get boot files here, but to be safe, never map them to an installed module + -- because boot files don't have linkables we can load, and we will fail if we try to look + -- for them + nodeKeyToInstalledModule (NodeKey_Module (ModNodeKeyWithUid (GWIB _ IsBoot) _)) = Nothing + nodeKeyToInstalledModule (NodeKey_Module (ModNodeKeyWithUid (GWIB moduleName _) uid)) = Just $ mkModule uid moduleName + nodeKeyToInstalledModule _ = Nothing + moduleToNodeKey :: Module -> NodeKey + moduleToNodeKey mod = NodeKey_Module $ ModNodeKeyWithUid (GWIB (moduleName mod) NotBoot) (moduleUnitId mod) + + -- Compute the transitive set of linkables required + getTransitiveMods hsc_env needed_mods + = Set.unions (Set.fromList (map moduleToNodeKey mods) : [ dep | m <- mods + , Just dep <- [Map.lookup (moduleToNodeKey m) (mgTransDeps (hsc_mod_graph hsc_env))] + ]) + where mods = nonDetEltsUniqSet needed_mods -- OK because we put them into a set immediately after + + -- | Add a Hook to the DynFlags which captures and returns the + -- typechecked splices before they are run. This information + -- is used for hover. 
+ addSpliceHook :: IORef Splices -> Hooks -> Hooks + addSpliceHook var h = h { runMetaHook = Just (splice_hook (runMetaHook h) var) } + + splice_hook :: Maybe (MetaHook TcM) -> IORef Splices -> MetaHook TcM + splice_hook (fromMaybe defaultRunMeta -> hook) var metaReq e = case metaReq of + (MetaE f) -> do + expr' <- metaRequestE hook e + liftIO $ modifyIORef' var $ exprSplicesL %~ ((e, expr') :) + pure $ f expr' + (MetaP f) -> do + pat' <- metaRequestP hook e + liftIO $ modifyIORef' var $ patSplicesL %~ ((e, pat') :) + pure $ f pat' + (MetaT f) -> do + type' <- metaRequestT hook e + liftIO $ modifyIORef' var $ typeSplicesL %~ ((e, type') :) + pure $ f type' + (MetaD f) -> do + decl' <- metaRequestD hook e + liftIO $ modifyIORef' var $ declSplicesL %~ ((e, decl') :) + pure $ f decl' + (MetaAW f) -> do + aw' <- metaRequestAW hook e + liftIO $ modifyIORef' var $ awSplicesL %~ ((e, aw') :) + pure $ f aw' + + +tcRnModule + :: HscEnv + -> TypecheckHelpers -- ^ Program linkables not to unload + -> ParsedModule + -> IO TcModuleResult +tcRnModule hsc_env tc_helpers pmod = do + let ms = pm_mod_summary pmod + hsc_env_tmp = hscSetFlags (ms_hspp_opts ms) hsc_env + + (((tc_gbl_env', mrn_info), warning_messages), splices, mod_env) + <- captureSplicesAndDeps tc_helpers hsc_env_tmp $ \hscEnvTmp -> + do hscTypecheckRenameWithDiagnostics hscEnvTmp ms $ + HsParsedModule { hpm_module = parsedSource pmod + , hpm_src_files = pm_extra_src_files pmod + } + let rn_info = case mrn_info of + Just x -> x + Nothing -> error "no renamed info tcRnModule" + + -- Serialize mod_env so we can read it from the interface + mod_env_anns = map (\(mod, hash) -> Annotation (ModuleTarget mod) $ toSerialized BS.unpack hash) + (moduleEnvToList mod_env) + tc_gbl_env = tc_gbl_env' { tcg_ann_env = extendAnnEnvList (tcg_ann_env tc_gbl_env') mod_env_anns } + pure (TcModuleResult pmod rn_info tc_gbl_env splices False mod_env warning_messages) + + +-- Note [Clearing mi_globals after generating an iface] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- GHC populates the mi_global field in interfaces for GHCi if we are using the bytecode +-- interpreter. +-- However, this field is expensive in terms of heap usage, and we don't use it in HLS +-- anywhere. So we zero it out. +-- The field is not serialized or deserialised from disk, so we don't need to remove it +-- while reading an iface from disk, only if we just generated an iface in memory +-- + + + +-- | See https://github.com/haskell/haskell-language-server/issues/3450 +-- GHC's recompilation avoidance in the presense of TH is less precise than +-- HLS. To avoid GHC from pessimising HLS, we filter out certain dependency information +-- that we track ourselves. 
See also Note [Recompilation avoidance in the presence of TH] +filterUsages :: [Usage] -> [Usage] +filterUsages = filter $ \case UsageHomeModuleInterface{} -> False + _ -> True + +-- | Mitigation for https://gitlab.haskell.org/ghc/ghc/-/issues/22744 +-- Important to do this immediately after reading the unit before +-- anything else has a chance to read `mi_usages` +shareUsages :: ModIface -> ModIface +shareUsages iface + = iface +-- Fixed upstream in GHC 9.8 +#if !MIN_VERSION_ghc(9,7,0) + {mi_usages = usages} + where usages = map go (mi_usages iface) + go usg@UsageFile{} = usg {usg_file_path = fp} + where !fp = shareFilePath (usg_file_path usg) + go usg = usg +#endif + + +mkHiFileResultNoCompile :: HscEnv -> TcModuleResult -> IO HiFileResult +mkHiFileResultNoCompile session tcm = do + let hsc_env_tmp = hscSetFlags (ms_hspp_opts ms) session + ms = pm_mod_summary $ tmrParsed tcm + tcGblEnv = tmrTypechecked tcm + details <- makeSimpleDetails hsc_env_tmp tcGblEnv + sf <- finalSafeMode (ms_hspp_opts ms) tcGblEnv + iface' <- mkIfaceTc hsc_env_tmp sf details ms Nothing tcGblEnv + -- See Note [Clearing mi_globals after generating an iface] + let iface = iface' +#if MIN_VERSION_ghc(9,11,0) + & set_mi_top_env Nothing + & set_mi_usages (filterUsages (mi_usages iface')) +#else + { mi_globals = Nothing, mi_usages = filterUsages (mi_usages iface') } +#endif + pure $! mkHiFileResult ms iface details (tmrRuntimeModules tcm) Nothing + +mkHiFileResultCompile + :: ShakeExtras + -> HscEnv + -> TcModuleResult + -> ModGuts + -> IO (IdeResult HiFileResult) +mkHiFileResultCompile se session' tcm simplified_guts = catchErrs $ do + let session = hscSetFlags (ms_hspp_opts ms) session' + ms = pm_mod_summary $ tmrParsed tcm + + (details, guts) <- do + -- write core file + -- give variables unique OccNames + tidy_opts <- initTidyOpts session + (guts, details) <- tidyProgram tidy_opts simplified_guts + pure (details, guts) + + let !partial_iface = force $ mkPartialIface session + (cg_binds guts) + details + ms +#if MIN_VERSION_ghc(9,11,0) + (tcg_import_decls (tmrTypechecked tcm)) +#endif + simplified_guts + + final_iface' <- mkFullIface session partial_iface Nothing + Nothing +#if MIN_VERSION_ghc(9,11,0) + NoStubs [] +#endif + -- See Note [Clearing mi_globals after generating an iface] + let final_iface = final_iface' +#if MIN_VERSION_ghc(9,11,0) + & set_mi_top_env Nothing + & set_mi_usages (filterUsages (mi_usages final_iface')) +#else + {mi_globals = Nothing, mi_usages = filterUsages (mi_usages final_iface')} +#endif + + -- Write the core file now + core_file <- do + let core_fp = ml_core_file $ ms_location ms + core_file = codeGutsToCoreFile iface_hash guts + iface_hash = getModuleHash final_iface + core_hash1 <- atomicFileWrite se core_fp $ \fp -> + writeBinCoreFile (hsc_dflags session) fp core_file + -- We want to drop references to guts and read in a serialized, compact version + -- of the core file from disk (as it is deserialised lazily) + -- This is because we don't want to keep the guts in memory for every file in + -- the project as it becomes prohibitively expensive + -- The serialized file however is much more compact and only requires a few + -- hundred megabytes of memory total even in a large project with 1000s of + -- modules + (coreFile, !core_hash2) <- readBinCoreFile (mkUpdater $ hsc_NC session) core_fp + pure $ assert (core_hash1 == core_hash2) + $ Just (coreFile, fingerprintToBS core_hash2) + + -- Verify core file by roundtrip testing and comparison + 
IdeOptions{optVerifyCoreFile} <- getIdeOptionsIO se + case core_file of + Just (core, _) | optVerifyCoreFile -> do + let core_fp = ml_core_file $ ms_location ms + traceIO $ "Verifying " ++ core_fp + let CgGuts{cg_binds = unprep_binds, cg_tycons = tycons } = guts + mod = ms_mod ms + data_tycons = filter isDataTyCon tycons + CgGuts{cg_binds = unprep_binds'} <- coreFileToCgGuts session final_iface details core + cp_cfg <- initCorePrepConfig session + let corePrep = corePrepPgm + (hsc_logger session) cp_cfg (initCorePrepPgmConfig (hsc_dflags session) (interactiveInScope $ hsc_IC session)) + mod (ms_location ms) + + -- Run corePrep first as we want to test the final version of the program that will + -- get translated to STG/Bytecode + prepd_binds + <- corePrep unprep_binds data_tycons + prepd_binds' + <- corePrep unprep_binds' data_tycons + let binds = noUnfoldings $ (map flattenBinds . (:[])) prepd_binds + binds' = noUnfoldings $ (map flattenBinds . (:[])) prepd_binds' + + -- diffBinds is unreliable, sometimes it goes down the wrong track. + -- This fixes the order of the bindings so that it is less likely to do so. + diffs2 = concat $ flip S.evalState (mkRnEnv2 emptyInScopeSet) $ zipWithM go binds binds' + -- diffs1 = concat $ flip S.evalState (mkRnEnv2 emptyInScopeSet) $ zipWithM go (map (:[]) $ concat binds) (map (:[]) $ concat binds') + -- diffs3 = flip S.evalState (mkRnEnv2 emptyInScopeSet) $ go (concat binds) (concat binds') + + diffs = diffs2 + go x y = S.state $ \s -> diffBinds True s x y + + -- The roundtrip doesn't preserver OtherUnfolding or occInfo, but neither are of these + -- are used for generate core or bytecode, so we can safely ignore them + -- SYB is slow but fine given that this is only used for testing + noUnfoldings = everywhere $ mkT $ \v -> if isId v + then + let v' = if isOtherUnfolding (realIdUnfolding v) then setIdUnfolding v noUnfolding else v + in setIdOccInfo v' noOccInfo + else v + isOtherUnfolding (OtherCon _) = True + isOtherUnfolding _ = False + + + when (not $ null diffs) $ + panicDoc "verify core failed!" (vcat $ punctuate (text "\n\n") diffs) -- ++ [ppr binds , ppr binds'])) + _ -> pure () + + pure ([], Just $! mkHiFileResult ms final_iface details (tmrRuntimeModules tcm) core_file) + + where + dflags = hsc_dflags session' + source = "compile" + catchErrs x = x `catches` + [ Handler $ return . (,Nothing) . diagFromGhcException source dflags + , Handler $ \diag -> + return + ( diagFromString + source DiagnosticSeverity_Error (noSpan "") + ("Error during " ++ T.unpack source ++ show @SomeException diag) + Nothing + , Nothing + ) + ] + +-- | Whether we should run the -O0 simplifier when generating core. +-- +-- This is required for template Haskell to work but we disable this in DAML. +-- See #256 +newtype RunSimplifier = RunSimplifier Bool + +-- | Compile a single type-checked module to a 'CoreModule' value, or +-- provide errors. +compileModule + :: RunSimplifier + -> HscEnv + -> ModSummary + -> TcGblEnv + -> IO (IdeResult ModGuts) +compileModule (RunSimplifier simplify) session ms tcg = + fmap (either (, Nothing) (second Just)) $ + catchSrcErrors (hsc_dflags session) "compile" $ do + (warnings,desugared_guts) <- withWarnings "compile" $ \tweak -> do + -- Breakpoints don't survive roundtripping from disk + -- and this trips up the verify-core-files check + -- They may also lead to other problems. + -- We have to setBackend ghciBackend in 9.8 as otherwise + -- non-exported definitions are stripped out. 
+ -- However, setting this means breakpoints are generated. + -- Solution: prevent breakpoing generation by unsetting + -- Opt_InsertBreakpoints + let session' = tweak $ flip hscSetFlags session +#if MIN_VERSION_ghc(9,7,0) + $ flip gopt_unset Opt_InsertBreakpoints + $ setBackend ghciBackend +#endif + $ ms_hspp_opts ms + -- TODO: maybe settings ms_hspp_opts is unnecessary? + -- MP: the flags in ModSummary should be right, if they are wrong then + -- the correct place to fix this is when the ModSummary is created. + desugar <- hscDesugar session' (ms { ms_hspp_opts = hsc_dflags session' }) tcg + if simplify + then do + plugins <- readIORef (tcg_th_coreplugins tcg) + hscSimplify session' plugins desugar + else pure desugar + return (map snd warnings, desugared_guts) + +generateObjectCode :: HscEnv -> ModSummary -> CgGuts -> IO (IdeResult Linkable) +generateObjectCode session summary guts = do + fmap (either (, Nothing) (second Just)) $ + catchSrcErrors (hsc_dflags session) "object" $ do + let dot_o = ml_obj_file (ms_location summary) + mod = ms_mod summary + fp = replaceExtension dot_o "s" + createDirectoryIfMissing True (takeDirectory fp) + (warnings, dot_o_fp) <- + withWarnings "object" $ \tweak -> do + let env' = tweak (hscSetFlags (ms_hspp_opts summary) session) + target = platformDefaultBackend (hsc_dflags env') + newFlags = setBackend target $ updOptLevel 0 $ setOutputFile + (Just dot_o) + $ hsc_dflags env' + session' = hscSetFlags newFlags session + (outputFilename, _mStub, _foreign_files, _cinfos, _stgcinfos) <- hscGenHardCode session' guts + (ms_location summary) + fp + obj <- compileFile session' driverNoStop (outputFilename, Just (As False)) + case obj of + Nothing -> throwGhcExceptionIO $ Panic "compileFile didn't generate object code" + Just x -> pure x + -- Need time to be the modification time for recompilation checking + t <- liftIO $ getModificationTime dot_o_fp +#if MIN_VERSION_ghc(9,11,0) + let linkable = Linkable t mod (pure $ DotO dot_o_fp ModuleObject) +#else + let linkable = LM t mod [DotO dot_o_fp] +#endif + pure (map snd warnings, linkable) + +newtype CoreFileTime = CoreFileTime UTCTime + +generateByteCode :: CoreFileTime -> HscEnv -> ModSummary -> CgGuts -> IO (IdeResult Linkable) +generateByteCode (CoreFileTime time) hscEnv summary guts = do + fmap (either (, Nothing) (second Just)) $ + catchSrcErrors (hsc_dflags hscEnv) "bytecode" $ do + +#if MIN_VERSION_ghc(9,11,0) + (warnings, (_, bytecode)) <- + withWarnings "bytecode" $ \_tweak -> do + let session = _tweak (hscSetFlags (ms_hspp_opts summary) hscEnv) + -- TODO: maybe settings ms_hspp_opts is unnecessary? + summary' = summary { ms_hspp_opts = hsc_dflags session } + hscInteractive session (mkCgInteractiveGuts guts) + (ms_location summary') +#else + (warnings, (_, bytecode, sptEntries)) <- + withWarnings "bytecode" $ \_tweak -> do + let session = _tweak (hscSetFlags (ms_hspp_opts summary) hscEnv) + -- TODO: maybe settings ms_hspp_opts is unnecessary? + summary' = summary { ms_hspp_opts = hsc_dflags session } + hscInteractive session (mkCgInteractiveGuts guts) + (ms_location summary') +#endif + +#if MIN_VERSION_ghc(9,11,0) + let linkable = Linkable time (ms_mod summary) (pure $ BCOs bytecode) +#else + let linkable = LM time (ms_mod summary) [BCOs bytecode sptEntries] +#endif + + pure (map snd warnings, linkable) + +demoteTypeErrorsToWarnings :: ParsedModule -> ParsedModule +demoteTypeErrorsToWarnings = + (update_pm_mod_summary . 
update_hspp_opts) demoteTEsToWarns where + + demoteTEsToWarns :: DynFlags -> DynFlags + -- convert the errors into warnings, and also check the warnings are enabled + demoteTEsToWarns = (`wopt_set` Opt_WarnDeferredTypeErrors) + . (`wopt_set` Opt_WarnTypedHoles) + . (`wopt_set` Opt_WarnDeferredOutOfScopeVariables) + . (`gopt_set` Opt_DeferTypeErrors) + . (`gopt_set` Opt_DeferTypedHoles) + . (`gopt_set` Opt_DeferOutOfScopeVariables) + +update_hspp_opts :: (DynFlags -> DynFlags) -> ModSummary -> ModSummary +update_hspp_opts up ms = ms{ms_hspp_opts = up $ ms_hspp_opts ms} + +update_pm_mod_summary :: (ModSummary -> ModSummary) -> ParsedModule -> ParsedModule +update_pm_mod_summary up pm = + pm{pm_mod_summary = up $ pm_mod_summary pm} + +unDefer :: (Maybe DiagnosticReason, FileDiagnostic) -> (Bool, FileDiagnostic) +unDefer (Just (WarningWithFlag Opt_WarnDeferredTypeErrors) , fd) = (True, upgradeWarningToError fd) +unDefer (Just (WarningWithFlag Opt_WarnTypedHoles) , fd) = (True, upgradeWarningToError fd) +unDefer (Just (WarningWithFlag Opt_WarnDeferredOutOfScopeVariables), fd) = (True, upgradeWarningToError fd) +unDefer ( _ , fd) = (False, fd) + +upgradeWarningToError :: FileDiagnostic -> FileDiagnostic +upgradeWarningToError = + fdLspDiagnosticL %~ \diag -> diag {_severity = Just DiagnosticSeverity_Error, _message = warn2err $ _message diag} + where + warn2err :: T.Text -> T.Text + warn2err = T.intercalate ": error:" . T.splitOn ": warning:" + +hideDiag :: DynFlags -> (Maybe DiagnosticReason, FileDiagnostic) -> (Maybe DiagnosticReason, FileDiagnostic) +hideDiag originalFlags (w@(Just (WarningWithFlag warning)), fd) + | not (wopt warning originalFlags) + = (w, fd { fdShouldShowDiagnostic = HideDiag }) +hideDiag _originalFlags t = t + +-- | Warnings which lead to a diagnostic tag +unnecessaryDeprecationWarningFlags :: [WarningFlag] +unnecessaryDeprecationWarningFlags + = [ Opt_WarnUnusedTopBinds + , Opt_WarnUnusedLocalBinds + , Opt_WarnUnusedPatternBinds + , Opt_WarnUnusedImports + , Opt_WarnUnusedMatches + , Opt_WarnUnusedTypePatterns + , Opt_WarnUnusedForalls + , Opt_WarnUnusedRecordWildcards + , Opt_WarnInaccessibleCode +#if !MIN_VERSION_ghc(9,7,0) + , Opt_WarnWarningsDeprecations +#endif + ] + +-- | Add a unnecessary/deprecated tag to the required diagnostics. 
+tagDiag :: (Maybe DiagnosticReason, FileDiagnostic) -> (Maybe DiagnosticReason, FileDiagnostic) + +#if MIN_VERSION_ghc(9,7,0) +tagDiag (w@(Just (WarningWithCategory cat)), fd) + | cat == defaultWarningCategory -- default warning category is for deprecations + = (w, fd & fdLspDiagnosticL %~ \diag -> diag { _tags = Just $ DiagnosticTag_Deprecated : concat (_tags diag) }) +tagDiag (w@(Just (WarningWithFlags warnings)), fd) + | tags <- mapMaybe requiresTag (toList warnings) + = (w, fd & fdLspDiagnosticL %~ \diag -> diag { _tags = Just $ tags ++ concat (_tags diag) }) +#else +tagDiag (w@(Just (WarningWithFlag warning)), fd) + | Just tag <- requiresTag warning + = (w, fd & fdLspDiagnosticL %~ \diag -> diag { _tags = Just $ tag : concat (_tags diag) }) +#endif + where + requiresTag :: WarningFlag -> Maybe DiagnosticTag +#if !MIN_VERSION_ghc(9,7,0) + -- doesn't exist on 9.8, we use WarningWithCategory instead + requiresTag Opt_WarnWarningsDeprecations + = Just DiagnosticTag_Deprecated +#endif + requiresTag wflag -- deprecation was already considered above + | wflag `elem` unnecessaryDeprecationWarningFlags + = Just DiagnosticTag_Unnecessary + requiresTag _ = Nothing +-- other diagnostics are left unaffected +tagDiag t = t + +addRelativeImport :: NormalizedFilePath -> ModuleName -> DynFlags -> DynFlags +addRelativeImport fp modu dflags = dflags + {importPaths = nubOrd $ maybeToList (moduleImportPath fp modu) ++ importPaths dflags} + +-- | Also resets the interface store +atomicFileWrite :: ShakeExtras -> FilePath -> (FilePath -> IO a) -> IO a +atomicFileWrite se targetPath write = do + let dir = takeDirectory targetPath + createDirectoryIfMissing True dir + (tempFilePath, cleanUp) <- newTempFileWithin dir + (write tempFilePath >>= \x -> renameFile tempFilePath targetPath >> atomically (resetInterfaceStore se (toNormalizedFilePath' targetPath)) >> pure x) + `onException` cleanUp + +generateHieAsts :: HscEnv -> TcModuleResult +#if MIN_VERSION_ghc(9,11,0) + -> IO ([FileDiagnostic], Maybe (HieASTs Type, NameEntityInfo)) +#else + -> IO ([FileDiagnostic], Maybe (HieASTs Type)) +#endif +generateHieAsts hscEnv tcm = + handleGenerationErrors' dflags "extended interface generation" $ runHsc hscEnv $ do + -- These varBinds use unitDataConId but it could be anything as the id name is not used + -- during the hie file generation process. It's a workaround for the fact that the hie modules + -- don't export an interface which allows for additional information to be added to hie files. 
+ let fake_splice_binds = +#if !MIN_VERSION_ghc(9,11,0) + Util.listToBag $ +#endif + map (mkVarBind unitDataConId) (spliceExpressions $ tmrTopLevelSplices tcm) + real_binds = tcg_binds $ tmrTypechecked tcm + all_binds = +#if MIN_VERSION_ghc(9,11,0) + fake_splice_binds ++ real_binds +#else + fake_splice_binds `Util.unionBags` real_binds +#endif + ts = tmrTypechecked tcm :: TcGblEnv + top_ev_binds = tcg_ev_binds ts :: Util.Bag EvBind + insts = tcg_insts ts :: [ClsInst] + tcs = tcg_tcs ts :: [TyCon] + hie_asts = GHC.enrichHie all_binds (tmrRenamed tcm) top_ev_binds insts tcs + + pure $ Just $ +#if MIN_VERSION_ghc(9,11,0) + hie_asts (tcg_type_env ts) +#else + hie_asts +#endif + where + dflags = hsc_dflags hscEnv + +spliceExpressions :: Splices -> [LHsExpr GhcTc] +spliceExpressions Splices{..} = + DL.toList $ mconcat + [ DL.fromList $ map fst exprSplices + , DL.fromList $ map fst patSplices + , DL.fromList $ map fst typeSplices + , DL.fromList $ map fst declSplices + , DL.fromList $ map fst awSplices + ] + +-- | In addition to indexing the `.hie` file, this function is responsible for +-- maintaining the 'IndexQueue' state and notifying the user about indexing +-- progress. +-- +-- We maintain a record of all pending index operations in the 'indexPending' +-- TVar. +-- When 'indexHieFile' is called, it must check to ensure that the file hasn't +-- already be queued up for indexing. If it has, then we can just skip it +-- +-- Otherwise, we record the current file as pending and write an indexing +-- operation to the queue +-- +-- When the indexing operation is picked up and executed by the worker thread, +-- the first thing it does is ensure that a newer index for the same file hasn't +-- been scheduled by looking at 'indexPending'. If a newer index has been +-- scheduled, we can safely skip this one +-- +-- Otherwise, we start or continue a progress reporting session, telling it +-- about progress so far and the current file we are attempting to index. Then +-- we can go ahead and call in to hiedb to actually do the indexing operation +-- +-- Once this completes, we have to update the 'IndexQueue' state. First, we +-- must remove the just indexed file from 'indexPending' Then we check if +-- 'indexPending' is now empty. In that case, we end the progress session and +-- report the total number of file indexed. We also set the 'indexCompleted' +-- TVar to 0 in order to set it up for a fresh indexing session. Otherwise, we +-- can just increment the 'indexCompleted' TVar and exit. 
+-- +indexHieFile :: ShakeExtras -> ModSummary -> NormalizedFilePath -> Util.Fingerprint -> Compat.HieFile -> IO () +indexHieFile se mod_summary srcPath !hash hf = do + atomically $ do + pending <- readTVar indexPending + case HashMap.lookup srcPath pending of + Just pendingHash | pendingHash == hash -> pure () -- An index is already scheduled + _ -> do + -- hiedb doesn't use the Haskell src, so we clear it to avoid unnecessarily keeping it around + let !hf' = hf{hie_hs_src = mempty} + modifyTVar' indexPending $ HashMap.insert srcPath hash + writeTQueue indexQueue $ \withHieDb -> do + -- We are now in the worker thread + -- Check if a newer index of this file has been scheduled, and if so skip this one + newerScheduled <- atomically $ do + pendingOps <- readTVar indexPending + pure $ case HashMap.lookup srcPath pendingOps of + Nothing -> False + -- If the hash in the pending list doesn't match the current hash, then skip + Just pendingHash -> pendingHash /= hash + unless newerScheduled $ do + -- Using bracket, so even if an exception happen during withHieDb call, + -- the `post` (which clean the progress indicator) will still be called. + bracket_ pre post $ + withHieDb (\db -> HieDb.addRefsFromLoaded db targetPath (HieDb.RealFile $ fromNormalizedFilePath srcPath) hash hf') + where + mod_location = ms_location mod_summary + targetPath = Compat.ml_hie_file mod_location + HieDbWriter{..} = hiedbWriter se + + pre = progressUpdate indexProgressReporting ProgressStarted + -- Report the progress once we are done indexing this file + post = do + mdone <- atomically $ do + -- Remove current element from pending + pending <- stateTVar indexPending $ + dupe . HashMap.update (\pendingHash -> guard (pendingHash /= hash) $> pendingHash) srcPath + modifyTVar' indexCompleted (+1) + -- If we are done, report and reset completed + whenMaybe (HashMap.null pending) $ + swapTVar indexCompleted 0 + whenJust (lspEnv se) $ \env -> LSP.runLspT env $ + when (coerce $ ideTesting se) $ + LSP.sendNotification (LSP.SMethod_CustomMethod (Proxy @"ghcide/reference/ready")) $ + toJSON $ fromNormalizedFilePath srcPath + whenJust mdone $ \_ -> progressUpdate indexProgressReporting ProgressCompleted + +writeAndIndexHieFile + :: HscEnv -> ShakeExtras -> ModSummary -> NormalizedFilePath -> [GHC.AvailInfo] +#if MIN_VERSION_ghc(9,11,0) + -> (HieASTs Type, NameEntityInfo) +#else + -> HieASTs Type +#endif + -> BS.ByteString -> IO [FileDiagnostic] +writeAndIndexHieFile hscEnv se mod_summary srcPath exports ast source = + handleGenerationErrors dflags "extended interface write/compression" $ do + hf <- runHsc hscEnv $ + GHC.mkHieFile' mod_summary exports ast source + atomicFileWrite se targetPath $ flip GHC.writeHieFile hf + hash <- Util.getFileHash targetPath + indexHieFile se mod_summary srcPath hash hf + where + dflags = hsc_dflags hscEnv + mod_location = ms_location mod_summary + targetPath = Compat.ml_hie_file mod_location + +writeHiFile :: ShakeExtras -> HscEnv -> HiFileResult -> IO [FileDiagnostic] +writeHiFile se hscEnv tc = + handleGenerationErrors dflags "interface write" $ do + atomicFileWrite se targetPath $ \fp -> + writeIfaceFile hscEnv fp modIface + where + modIface = hirModIface tc + targetPath = ml_hi_file $ ms_location $ hirModSummary tc + dflags = hsc_dflags hscEnv + +handleGenerationErrors :: DynFlags -> T.Text -> IO () -> IO [FileDiagnostic] +handleGenerationErrors dflags source action = + action >> return [] `catches` + [ Handler $ return . 
diagFromGhcException source dflags + , Handler $ \(exception :: SomeException) -> return $ + diagFromString + source DiagnosticSeverity_Error (noSpan "") + ("Error during " ++ T.unpack source ++ show exception) + Nothing + ] + +handleGenerationErrors' :: DynFlags -> T.Text -> IO (Maybe a) -> IO ([FileDiagnostic], Maybe a) +handleGenerationErrors' dflags source action = + fmap ([],) action `catches` + [ Handler $ return . (,Nothing) . diagFromGhcException source dflags + , Handler $ \(exception :: SomeException) -> + return + ( diagFromString + source DiagnosticSeverity_Error (noSpan "") + ("Error during " ++ T.unpack source ++ show exception) + Nothing + , Nothing + ) + ] + +-- Merge the HPTs, module graphs and FinderCaches +-- See Note [GhcSessionDeps] in Development.IDE.Core.Rules +-- Add the current ModSummary to the graph, along with the +-- HomeModInfo's of all direct dependencies (by induction hypothesis all +-- transitive dependencies will be contained in envs) +#if MIN_VERSION_ghc(9,11,0) +mergeEnvs :: HscEnv + -> ModuleGraph + -> DependencyInformation + -> ModSummary + -> [HomeModInfo] + -> [HscEnv] + -> IO HscEnv +mergeEnvs env mg dep_info ms extraMods envs = do + return $! loadModulesHome extraMods $ + let newHug = foldl' mergeHUG (hsc_HUG env) (map hsc_HUG envs) in + (hscUpdateHUG (const newHug) env){ + hsc_mod_graph = mg, + hsc_FC = (hsc_FC env) + { addToFinderCache = \gwib@(GWIB im _) val -> + if moduleUnit im `elem` hsc_all_home_unit_ids env + then pure () + else addToFinderCache (hsc_FC env) gwib val + , lookupFinderCache = \gwib@(GWIB im _) -> + if moduleUnit im `elem` hsc_all_home_unit_ids env + then case lookupModuleFile (im { moduleUnit = RealUnit (Definite $ moduleUnit im) }) dep_info of + Nothing -> pure Nothing + Just fs -> let ml = fromJust $ do + id <- lookupPathToId (depPathIdMap dep_info) fs + artifactModLocation (idToModLocation (depPathIdMap dep_info) id) + in pure $ Just $ InstalledFound ml im + else lookupFinderCache (hsc_FC env) gwib + } + } + + where + mergeHUG (UnitEnvGraph a) (UnitEnvGraph b) = UnitEnvGraph $ Map.unionWith mergeHUE a b + mergeHUE a b = a { homeUnitEnv_hpt = mergeUDFM (homeUnitEnv_hpt a) (homeUnitEnv_hpt b) } + mergeUDFM = plusUDFM_C combineModules + + combineModules a b + | HsSrcFile <- mi_hsc_src (hm_iface a) = a + | otherwise = b + +#else +mergeEnvs :: HscEnv + -> ModuleGraph + -> DependencyInformation + -> ModSummary + -> [HomeModInfo] + -> [HscEnv] + -> IO HscEnv +mergeEnvs env mg _dep_info ms extraMods envs = do + let im = Compat.installedModule (toUnitId $ moduleUnit $ ms_mod ms) (moduleName (ms_mod ms)) + ifr = InstalledFound (ms_location ms) im + curFinderCache = Compat.extendInstalledModuleEnv Compat.emptyInstalledModuleEnv im ifr + newFinderCache <- concatFC curFinderCache (map hsc_FC envs) + return $! 
loadModulesHome extraMods $
+    let newHug = foldl' mergeHUG (hsc_HUG env) (map hsc_HUG envs) in
+    (hscUpdateHUG (const newHug) env){
+        hsc_FC = newFinderCache,
+        hsc_mod_graph = mg
+    }
+
+    where
+        mergeHUG (UnitEnvGraph a) (UnitEnvGraph b) = UnitEnvGraph $ Map.unionWith mergeHUE a b
+        mergeHUE a b = a { homeUnitEnv_hpt = mergeUDFM (homeUnitEnv_hpt a) (homeUnitEnv_hpt b) }
+        mergeUDFM = plusUDFM_C combineModules
+
+        combineModules a b
+          | HsSrcFile <- mi_hsc_src (hm_iface a) = a
+          | otherwise = b
+
+        -- Prefer non-boot files over boot files
+        -- otherwise we can get errors like https://gitlab.haskell.org/ghc/ghc/-/issues/19816
+        -- if a boot file shadows over a non-boot file
+        combineModuleLocations a@(InstalledFound ml _) _ | Just fp <- ml_hs_file ml, not ("boot" `isSuffixOf` fp) = a
+        combineModuleLocations _ b = b
+
+        concatFC :: FinderCacheState -> [FinderCache] -> IO FinderCache
+        concatFC cur xs = do
+          fcModules <- mapM (readIORef . fcModuleCache) xs
+          fcFiles <- mapM (readIORef . fcFileCache) xs
+          fcModules' <- newIORef $! foldl' (plusInstalledModuleEnv combineModuleLocations) cur fcModules
+          fcFiles' <- newIORef $! Map.unions fcFiles
+          pure $ FinderCache fcModules' fcFiles'
+#endif
+
+
+withBootSuffix :: HscSource -> ModLocation -> ModLocation
+withBootSuffix HsBootFile = addBootSuffixLocnOut
+withBootSuffix _ = id
+
+-- | Given a buffer, env and filepath, produce a module summary by parsing only the imports.
+-- Runs preprocessors as needed.
+getModSummaryFromImports
+  :: HscEnv
+  -> FilePath
+  -> UTCTime
+  -> Maybe Util.StringBuffer
+  -> ExceptT [FileDiagnostic] IO ModSummaryResult
+-- modTime is only used in GHC < 9.4
+getModSummaryFromImports env fp _modTime mContents = do
+-- src_hash is only used in GHC >= 9.4
+    (contents, opts, ppEnv, _src_hash) <- preprocessor env fp mContents
+
+    let dflags = hsc_dflags ppEnv
+
+    -- The warns will hopefully be reported when we actually parse the module
+    (_warns, L main_loc hsmod) <- parseHeader dflags fp contents
+
+    -- Copied from `HeaderInfo.getImports`, but we also need to keep the parsed imports
+    let mb_mod = hsmodName hsmod
+        imps = hsmodImports hsmod
+
+        mod = fmap unLoc mb_mod `Util.orElse` mAIN_NAME
+
+        (src_idecls, ord_idecls) = partition ((== IsBoot) . ideclSource.unLoc) imps
+
+        -- GHC.Prim doesn't exist physically, so don't go looking for it.
+        -- ghc_prim_imports is only used in GHC >= 9.4
+        (ordinary_imps, _ghc_prim_imports)
+          = partition ((/= moduleName gHC_PRIM) . unLoc
+                        . ideclName . 
unLoc) + ord_idecls + + implicit_prelude = xopt LangExt.ImplicitPrelude dflags + implicit_imports = mkPrelImports mod main_loc + implicit_prelude imps + + + convImport (L _ i) = ( + (ideclPkgQual i) + , reLoc $ ideclName i) + + msrImports = implicit_imports ++ imps + + rn_pkg_qual = renameRawPkgQual (hsc_unit_env ppEnv) + rn_imps = fmap (\(pk, lmn@(L _ mn)) -> (rn_pkg_qual mn pk, lmn)) + srcImports = rn_imps $ map convImport src_idecls + textualImports = rn_imps $ map convImport (implicit_imports ++ ordinary_imps) + ghc_prim_import = not (null _ghc_prim_imports) + + + -- Force bits that might keep the string buffer and DynFlags alive unnecessarily + liftIO $ evaluate $ rnf srcImports + liftIO $ evaluate $ rnf textualImports + + + modLoc <- liftIO $ if mod == mAIN_NAME + -- specially in tests it's common to have lots of nameless modules + -- mkHomeModLocation will map them to the same hi/hie locations + then mkHomeModLocation dflags (pathToModuleName fp) fp + else mkHomeModLocation dflags mod fp + + let modl = mkHomeModule (hscHomeUnit ppEnv) mod + sourceType = if "-boot" `isSuffixOf` takeExtension fp then HsBootFile else HsSrcFile + msrModSummary = + ModSummary + { ms_mod = modl + , ms_hie_date = Nothing + , ms_dyn_obj_date = Nothing + , ms_ghc_prim_import = ghc_prim_import + , ms_hs_hash = _src_hash + + , ms_hsc_src = sourceType + -- The contents are used by the GetModSummary rule + , ms_hspp_buf = Just contents + , ms_hspp_file = fp + , ms_hspp_opts = dflags + , ms_iface_date = Nothing + , ms_location = withBootSuffix sourceType modLoc + , ms_obj_date = Nothing + , ms_parsed_mod = Nothing + , ms_srcimps = srcImports + , ms_textual_imps = textualImports + } + + msrFingerprint <- liftIO $ computeFingerprint opts msrModSummary + msrHscEnv <- liftIO $ Loader.initializePlugins (hscSetFlags (ms_hspp_opts msrModSummary) ppEnv) + return ModSummaryResult{..} + where + -- Compute a fingerprint from the contents of `ModSummary`, + -- eliding the timestamps, the preprocessed source and other non relevant fields + computeFingerprint opts ModSummary{..} = do + fingerPrintImports <- fingerprintFromPut $ do + put $ Util.uniq $ moduleNameFS $ moduleName ms_mod + forM_ (ms_srcimps ++ ms_textual_imps) $ \(mb_p, m) -> do + put $ Util.uniq $ moduleNameFS $ unLoc m + case mb_p of + G.NoPkgQual -> pure () + G.ThisPkg uid -> put $ getKey $ getUnique uid + G.OtherPkg uid -> put $ getKey $ getUnique uid + return $! 
Util.fingerprintFingerprints $ + [ Util.fingerprintString fp + , fingerPrintImports + , modLocationFingerprint ms_location + ] ++ map Util.fingerprintString opts + + modLocationFingerprint :: ModLocation -> Util.Fingerprint + modLocationFingerprint ModLocation{..} = Util.fingerprintFingerprints $ + Util.fingerprintString <$> [ fromMaybe "" ml_hs_file + , ml_hi_file + , ml_dyn_hi_file + , ml_obj_file + , ml_dyn_obj_file + , ml_hie_file] + +-- | Parse only the module header +parseHeader + :: Monad m + => DynFlags -- ^ flags to use + -> FilePath -- ^ the filename (for source locations) + -> Util.StringBuffer -- ^ Haskell module source text (full Unicode is supported) + -> ExceptT [FileDiagnostic] m ([FileDiagnostic], Located (HsModule GhcPs)) +parseHeader dflags filename contents = do + let loc = mkRealSrcLoc (Util.mkFastString filename) 1 1 + case unP Compat.parseHeader (initParserState (initParserOpts dflags) contents loc) of + PFailedWithErrorMessages msgs -> + throwE $ diagFromGhcErrorMessages sourceParser dflags $ msgs dflags + POk pst rdr_module -> do + let (warns, errs) = renderMessages $ getPsMessages pst + + -- Just because we got a `POk`, it doesn't mean there + -- weren't errors! To clarify, the GHC parser + -- distinguishes between fatal and non-fatal + -- errors. Non-fatal errors are the sort that don't + -- prevent parsing from continuing (that is, a parse + -- tree can still be produced despite the error so that + -- further errors/warnings can be collected). Fatal + -- errors are those from which a parse tree just can't + -- be produced. + unless (null errs) $ + throwE $ diagFromGhcErrorMessages sourceParser dflags errs + + let warnings = diagFromGhcErrorMessages sourceParser dflags warns + return (warnings, rdr_module) + +-- | Given a buffer, flags, and file path, produce a +-- parsed module (or errors) and any parse warnings. Does not run any preprocessors +-- ModSummary must contain the (preprocessed) contents of the buffer +parseFileContents + :: HscEnv + -> (GHC.ParsedSource -> IdePreprocessedSource) + -> FilePath -- ^ the filename (for source locations) + -> ModSummary + -> ExceptT [FileDiagnostic] IO ([FileDiagnostic], ParsedModule) +parseFileContents env customPreprocessor filename ms = do + let loc = mkRealSrcLoc (Util.mkFastString filename) 1 1 + dflags = ms_hspp_opts ms + contents = fromJust $ ms_hspp_buf ms + case unP Compat.parseModule (initParserState (initParserOpts dflags) contents loc) of + PFailedWithErrorMessages msgs -> + throwE $ diagFromGhcErrorMessages sourceParser dflags $ msgs dflags + POk pst rdr_module -> + let + psMessages = getPsMessages pst + in + do + let IdePreprocessedSource preproc_warns preproc_errs parsed = customPreprocessor rdr_module + let attachNoStructuredError (span, msg) = (span, msg, Nothing) + + unless (null preproc_errs) $ + throwE $ + diagFromStrings + sourceParser + DiagnosticSeverity_Error + (fmap attachNoStructuredError preproc_errs) + + let preproc_warning_file_diagnostics = + diagFromStrings + sourceParser + DiagnosticSeverity_Warning + (fmap attachNoStructuredError preproc_warns) + (parsed', msgs) <- liftIO $ applyPluginsParsedResultAction env ms parsed psMessages + let (warns, errors) = renderMessages msgs + + -- Just because we got a `POk`, it doesn't mean there + -- weren't errors! To clarify, the GHC parser + -- distinguishes between fatal and non-fatal + -- errors. 
Non-fatal errors are the sort that don't
+                -- prevent parsing from continuing (that is, a parse
+                -- tree can still be produced despite the error so that
+                -- further errors/warnings can be collected). Fatal
+                -- errors are those from which a parse tree just can't
+                -- be produced.
+                unless (null errors) $
+                  throwE $ diagFromGhcErrorMessages sourceParser dflags errors
+
+                -- To get the list of extra source files, we take the list
+                -- that the parser gave us,
+                --   - eliminate files beginning with '<'. gcc likes to use
+                --     pseudo-filenames like "" and ""
+                --   - normalise them (eliminate differences between ./f and f)
+                --   - filter out the preprocessed source file
+                --   - filter out anything beginning with tmpdir
+                --   - remove duplicates
+                --   - filter out the .hs/.lhs source filename if we have one
+                --
+                let n_hspp = normalise filename
+                    TempDir tmp_dir = tmpDir dflags
+                    srcs0 = nubOrd $ filter (not . (tmp_dir `isPrefixOf`))
+                                   $ filter (/= n_hspp)
+                                   $ map normalise
+                                   $ filter (not . isPrefixOf "<")
+                                   $ map Util.unpackFS
+                                   $ srcfiles pst
+                    srcs1 = case ml_hs_file (ms_location ms) of
+                          Just f  -> filter (/= normalise f) srcs0
+                          Nothing -> srcs0
+
+                -- sometimes we see source files from earlier
+                -- preprocessing stages that cannot be found, so just
+                -- filter them out:
+                srcs2 <- liftIO $ filterM doesFileExist srcs1
+
+                let pm = ParsedModule ms parsed' srcs2
+                    warnings = diagFromGhcErrorMessages sourceParser dflags warns
+                pure (warnings ++ preproc_warning_file_diagnostics, pm)
+
+loadHieFile :: Compat.NameCacheUpdater -> FilePath -> IO GHC.HieFile
+loadHieFile ncu f = do
+  GHC.hie_file_result <$> GHC.readHieFile ncu f
+
+
+{- Note [Recompilation avoidance in the presence of TH]
+   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Most versions of GHC we currently support don't have a working implementation of
+code unloading for object code, and no version of GHC supports this on certain
+platforms like Windows. This makes it completely infeasible for interactive use,
+as symbols from previous compiles will shadow over all future compiles.
+
+This means that we need to use bytecode when generating code for Template
+Haskell. Unfortunately, we can't serialize bytecode, so we will always need
+to recompile when the IDE starts. However, we can put in place a much tighter
+recompilation avoidance scheme for subsequent compiles:
+
+1. If the source file changes, then we always need to recompile
+   a. For files of interest, we will get explicit `textDocument/change` events
+      that will let us invalidate our build products
+   b. For files we read from disk, we can detect source file changes by
+      comparing the `mtime` of the source file with the build product (.hi/.o) file
+      on disk.
+2. If GHC's recompilation avoidance scheme based on interface file hashes says
+   that we need to recompile, then we need to recompile.
+3. If the file in question requires code generation, then we need to recompile
+   if we don't have the appropriate kind of build products.
+   a. If we already have the build products in memory, and the conditions 1 and
+      2 above hold, then we don't need to recompile
+   b. If we are generating object code, then we can also search for it on
+      disk and ensure it is up to date. Notably, we did _not_ previously re-use
+      old bytecode from memory when `hls-graph`/`shake` decided to rebuild the
+      `HiFileResult` for some reason
+
+4. If the file in question used Template Haskell on the previous compile, then
+we need to recompile if any `Linkable` in its transitive closure changed. 
This +sounds bad, but it is possible to make some improvements. In particular, we only +need to recompile if any of the `Linkable`s actually used during the previous +compile change. + +How can we tell if a `Linkable` was actually used while running some TH? + +GHC provides a `hscCompileCoreExprHook` which lets us intercept bytecode as +it is being compiled and linked. We can inspect the bytecode to see which +`Linkable` dependencies it requires, and record this for use in +recompilation checking. +We record all the home package modules of the free names that occur in the +bytecode. The `Linkable`s required are then the transitive closure of these +modules in the home-package environment. This is the same scheme as used by +GHC to find the correct things to link in before running bytecode. + +This works fine if we already have previous build products in memory, but +what if we are reading an interface from disk? Well, we can smuggle in the +necessary information (linkable `Module`s required as well as the time they +were generated) using `Annotation`s, which provide a somewhat general purpose +way to serialise arbitrary information along with interface files. + +Then when deciding whether to recompile, we need to check that the versions +(i.e. hashes) of the linkables used during a previous compile match whatever is +currently in the HPT. + +As we always generate Linkables from core files, we use the core file hash +as a (hopefully) deterministic measure of whether the Linkable has changed. +This is better than using the object file hash (if we have one) because object +file generation is not deterministic. +-} + +data RecompilationInfo m + = RecompilationInfo + { source_version :: FileVersion + , old_value :: Maybe (HiFileResult, FileVersion) + , get_file_version :: NormalizedFilePath -> m (Maybe FileVersion) + , get_linkable_hashes :: [NormalizedFilePath] -> m [BS.ByteString] + , get_module_graph :: m DependencyInformation + , regenerate :: Maybe LinkableType -> m ([FileDiagnostic], Maybe HiFileResult) -- ^ Action to regenerate an interface + } + +-- | Either a regular GHC linkable or a core file that +-- can be later turned into a proper linkable +data IdeLinkable = GhcLinkable !Linkable | CoreLinkable !UTCTime !CoreFile + +instance NFData IdeLinkable where + rnf (GhcLinkable lb) = rnf lb + rnf (CoreLinkable time _) = rnf time + +ml_core_file :: ModLocation -> FilePath +ml_core_file ml = ml_hi_file ml <.> "core" + +-- | Returns an up-to-date module interface, regenerating if needed. +-- Assumes file exists. +-- Requires the 'HscEnv' to be set up with dependencies +-- See Note [Recompilation avoidance in the presence of TH] +loadInterface + :: (MonadIO m, MonadMask m) + => HscEnv + -> ModSummary + -> Maybe LinkableType + -> RecompilationInfo m + -> m ([FileDiagnostic], Maybe HiFileResult) +loadInterface session ms linkableNeeded RecompilationInfo{..} = do + let sessionWithMsDynFlags = hscSetFlags (ms_hspp_opts ms) session + mb_old_iface = hirModIface . 
fst <$> old_value + mb_old_version = snd <$> old_value + + core_file = ml_core_file (ms_location ms) + iface_file = ml_hi_file (ms_location ms) + + !mod = ms_mod ms + + mb_dest_version <- case mb_old_version of + Just ver -> pure $ Just ver + Nothing -> get_file_version (toNormalizedFilePath' iface_file) + + -- The source is modified if it is newer than the destination (iface file) + -- A more precise check for the core file is performed later + let _sourceMod = case mb_dest_version of -- sourceMod is only used in GHC < 9.4 + Nothing -> SourceModified -- destination file doesn't exist, assume modified source + Just dest_version + | source_version <= dest_version -> SourceUnmodified + | otherwise -> SourceModified + + -- old_iface is only used in GHC >= 9.4 + _old_iface <- case mb_old_iface of + Just iface -> pure (Just iface) + Nothing -> do + -- ncu and read_dflags are only used in GHC >= 9.4 + let _ncu = hsc_NC sessionWithMsDynFlags + _read_dflags = hsc_dflags sessionWithMsDynFlags + read_result <- liftIO $ readIface _read_dflags _ncu mod iface_file + case read_result of + Util.Failed{} -> return Nothing + -- important to call `shareUsages` here before checkOldIface + -- consults `mi_usages` + Util.Succeeded iface -> return $ Just (shareUsages iface) + + -- If mb_old_iface is nothing then checkOldIface will load it for us + -- given that the source is unmodified + (recomp_iface_reqd, mb_checked_iface) + <- liftIO $ checkOldIface sessionWithMsDynFlags ms _old_iface >>= \case + UpToDateItem x -> pure (UpToDate, Just x) + OutOfDateItem reason x -> pure (NeedsRecompile reason, x) + + let do_regenerate _reason = withTrace "regenerate interface" $ \setTag -> do + setTag "Module" $ moduleNameString $ moduleName mod + setTag "Reason" $ showReason _reason + liftIO $ traceMarkerIO $ "regenerate interface " ++ show (moduleNameString $ moduleName mod, showReason _reason) + regenerate linkableNeeded + + case (mb_checked_iface, recomp_iface_reqd) of + (Just iface, UpToDate) -> do + details <- liftIO $ mkDetailsFromIface sessionWithMsDynFlags iface + -- parse the runtime dependencies from the annotations + let runtime_deps + | not (mi_used_th iface) = emptyModuleEnv + | otherwise = parseRuntimeDeps (md_anns details) + -- Peform the fine grained recompilation check for TH + maybe_recomp <- checkLinkableDependencies get_linkable_hashes get_module_graph runtime_deps + case maybe_recomp of + Just msg -> do_regenerate msg + Nothing + | isJust linkableNeeded -> handleErrs $ do + (coreFile@CoreFile{cf_iface_hash}, core_hash) <- liftIO $ + readBinCoreFile (mkUpdater $ hsc_NC session) core_file + if cf_iface_hash == getModuleHash iface + then return ([], Just $ mkHiFileResult ms iface details runtime_deps (Just (coreFile, fingerprintToBS core_hash))) + else do_regenerate (recompBecause "Core file out of date (doesn't match iface hash)") + | otherwise -> return ([], Just $ mkHiFileResult ms iface details runtime_deps Nothing) + where handleErrs = flip catches + [Handler $ \(e :: IOException) -> do_regenerate (recompBecause $ "Reading core file failed (" ++ show e ++ ")") + ,Handler $ \(e :: GhcException) -> case e of + Signal _ -> throw e + Panic _ -> throw e + _ -> do_regenerate (recompBecause $ "Reading core file failed (" ++ show e ++ ")") + ] + (_, _reason) -> do_regenerate _reason + +-- | Find the runtime dependencies by looking at the annotations +-- serialized in the iface +-- The bytestrings are the hashes of the core files for modules we +-- required to run the TH splices in the given module. 
+-- See Note [Recompilation avoidance in the presence of TH] +parseRuntimeDeps :: [ModIfaceAnnotation] -> ModuleEnv BS.ByteString +parseRuntimeDeps anns = mkModuleEnv $ mapMaybe go anns + where + go (Annotation (ModuleTarget mod) payload) + | Just bs <- fromSerialized BS.pack payload + = Just (mod, bs) + go _ = Nothing + +-- | checkLinkableDependencies compares the core files in the build graph to +-- the runtime dependencies of the module, to check if any of them are out of date +-- Hopefully 'runtime_deps' will be empty if the module didn't actually use TH +-- See Note [Recompilation avoidance in the presence of TH] +checkLinkableDependencies :: MonadIO m => ([NormalizedFilePath] -> m [BS.ByteString]) -> m DependencyInformation -> ModuleEnv BS.ByteString -> m (Maybe RecompileRequired) +checkLinkableDependencies get_linkable_hashes get_module_graph runtime_deps = do + graph <- get_module_graph + let go (mod, hash) = (,hash) <$> lookupModuleFile mod graph + hs_files = mapM go (moduleEnvToList runtime_deps) + case hs_files of + Nothing -> error "invalid module graph" + Just fs -> do + store_hashes <- get_linkable_hashes (map fst fs) + let out_of_date = [core_file | ((core_file, expected_hash), actual_hash) <- zip fs store_hashes, expected_hash /= actual_hash] + case out_of_date of + [] -> pure Nothing + _ -> pure $ Just $ recompBecause + $ "out of date runtime dependencies: " ++ intercalate ", " (map show out_of_date) + +recompBecause :: String -> RecompileRequired +recompBecause = + NeedsRecompile . + RecompBecause + . CustomReason + +data SourceModified = SourceModified | SourceUnmodified deriving (Eq, Ord, Show) + +showReason :: RecompileRequired -> String +showReason UpToDate = "UpToDate" +showReason (NeedsRecompile MustCompile) = "MustCompile" +showReason (NeedsRecompile s) = printWithoutUniques s + +mkDetailsFromIface :: HscEnv -> ModIface -> IO ModDetails +mkDetailsFromIface session iface = do + fixIO $ \details -> do + let !hsc' = hscUpdateHPT (\hpt -> addToHpt hpt (moduleName $ mi_module iface) (HomeModInfo iface details emptyHomeModInfoLinkable)) session + initIfaceLoad hsc' (typecheckIface iface) + +coreFileToCgGuts :: HscEnv -> ModIface -> ModDetails -> CoreFile -> IO CgGuts +coreFileToCgGuts session iface details core_file = do + let act hpt = addToHpt hpt (moduleName this_mod) + (HomeModInfo iface details emptyHomeModInfoLinkable) + this_mod = mi_module iface + types_var <- newIORef (md_types details) + let hsc_env' = hscUpdateHPT act (session { + hsc_type_env_vars = knotVarsFromModuleEnv (mkModuleEnv [(this_mod, types_var)]) + }) + core_binds <- initIfaceCheck (text "l") hsc_env' $ typecheckCoreFile this_mod types_var core_file + -- Implicit binds aren't saved, so we need to regenerate them ourselves. 
+ let _implicit_binds = concatMap getImplicitBinds tyCons -- only used if GHC < 9.6 + tyCons = typeEnvTyCons (md_types details) + -- In GHC 9.6, the implicit binds are tidied and part of core_binds + pure $ CgGuts this_mod tyCons core_binds [] NoStubs [] mempty +#if !MIN_VERSION_ghc(9,11,0) + (emptyHpcInfo False) +#endif + Nothing [] + +coreFileToLinkable :: LinkableType -> HscEnv -> ModSummary -> ModIface -> ModDetails -> CoreFile -> UTCTime -> IO ([FileDiagnostic], Maybe HomeModInfo) +coreFileToLinkable linkableType session ms iface details core_file t = do + cgi_guts <- coreFileToCgGuts session iface details core_file + (warns, lb) <- case linkableType of + BCOLinkable -> fmap (maybe emptyHomeModInfoLinkable justBytecode) <$> generateByteCode (CoreFileTime t) session ms cgi_guts + ObjectLinkable -> fmap (maybe emptyHomeModInfoLinkable justObjects) <$> generateObjectCode session ms cgi_guts + pure (warns, Just $ HomeModInfo iface details lb) -- TODO wz1000 handle emptyHomeModInfoLinkable + +-- | Non-interactive, batch version of 'InteractiveEval.getDocs'. +-- The interactive paths create problems in ghc-lib builds +--- and leads to fun errors like "Cannot continue after interface file error". +getDocsBatch + :: HscEnv + -> [Name] + -> IO [Either String (Maybe [HsDoc GhcRn], IntMap (HsDoc GhcRn))] +getDocsBatch hsc_env _names = do + res <- initIfaceLoad hsc_env $ forM _names $ \name -> + case nameModule_maybe name of + Nothing -> return (Left $ NameHasNoModule name) + Just mod -> do + ModIface { + mi_docs = Just Docs{ docs_mod_hdr = mb_doc_hdr + , docs_decls = dmap + , docs_args = amap + } + } <- loadSysInterface (text "getModuleInterface") mod + if isNothing mb_doc_hdr && isNullUniqMap dmap && isNullUniqMap amap + then pure (Left (NoDocsInIface mod $ compiled name)) + else pure (Right ( + lookupUniqMap dmap name, + lookupWithDefaultUniqMap amap mempty name)) + return $ map (first $ T.unpack . printOutputable) res + where + compiled n = + -- TODO: Find a more direct indicator. + case nameSrcLoc n of + RealSrcLoc {} -> False + UnhelpfulLoc {} -> True + +-- | Non-interactive, batch version of 'InteractiveEval.lookupNames'. +-- The interactive paths create problems in ghc-lib builds +--- and leads to fun errors like "Cannot continue after interface file error". +lookupName :: HscEnv + -> Name + -> IO (Maybe TyThing) +lookupName _ name + | Nothing <- nameModule_maybe name = pure Nothing +lookupName hsc_env name = exceptionHandle $ do + mb_thing <- liftIO $ lookupType hsc_env name + case mb_thing of + x@(Just _) -> return x + Nothing + | x@(Just thing) <- wiredInNameTyThing_maybe name + -> do when (needWiredInHomeIface thing) + (initIfaceLoad hsc_env (loadWiredInHomeIface name)) + return x + | otherwise -> do + res <- initIfaceLoad hsc_env $ importDecl name + case res of + Util.Succeeded x -> return (Just x) + _ -> return Nothing + where + exceptionHandle x = x `catch` \(_ :: IOEnvFailure) -> pure Nothing + +pathToModuleName :: FilePath -> ModuleName +pathToModuleName = mkModuleName . 
map rep
+  where
+      rep c | isPathSeparator c = '_'
+      rep ':' = '_'
+      rep c = c
+
+-- | Initialising plugins looks in the finder cache, but we know that the plugin doesn't come from a home module, so don't
+-- error out when we don't find it.
+setNonHomeFCHook :: HscEnv -> HscEnv
+setNonHomeFCHook hsc_env =
+#if MIN_VERSION_ghc(9,11,0)
+  hsc_env { hsc_FC = (hsc_FC hsc_env)
+            { lookupFinderCache = \m@(GWIB im _) ->
+                if moduleUnit im `elem` hsc_all_home_unit_ids hsc_env
+                then pure (Just $ InstalledNotFound [] Nothing)
+                else lookupFinderCache (hsc_FC hsc_env) m
+            }
+          }
+#else
+  hsc_env
+#endif
+
+{- Note [Guidelines For Using CPP In GHCIDE Import Statements]
+   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+   GHCIDE's interface with GHC is extensive, and unfortunately, because we have
+   to work with multiple versions of GHC, we have several files that need to use
+   a lot of CPP. In order to simplify the CPP in the import section of every file
+   we have a few specific guidelines for using CPP in these sections.
+
+   - We don't want to nest CPP clauses, nor do we want to use else clauses. Both
+   nesting and else clauses end up drastically complicating the code, and require
+   significant mental stack to unwind.
+
+   - CPP clauses should be placed at the end of the imports section. The clauses
+   should be ordered by the GHC version they target from earlier to later versions,
+   with negative if clauses coming before positive if clauses of the same
+   version. (If you think about which GHC version a clause activates for, this
+   should make sense: `!MIN_VERSION_GHC(9,0,0)` refers to 8.10 and lower, which is
+   an earlier version than `MIN_VERSION_GHC(9,0,0)`, which refers to versions 9.0
+   and later.) In addition, there should be a space before and after each CPP
+   clause.
+
+   - In if clauses that use `&&` and depend on more than one statement, the
+   positive statement should come before the negative statement. In addition, the
+   clause should come after the single positive clause for that GHC version.
+
+   - There shouldn't be multiple identical CPP statements. The use of odd or even
+   GHC numbers is identical, with the only preference being to use what is
+   already there. (i.e. `MIN_VERSION_GHC(9,2,0)` and `MIN_VERSION_GHC(9,1,0)`
+   are functionally equivalent.)
+-}
diff --git a/ghcide/src/Development/IDE/Core/Debouncer.hs b/ghcide/src/Development/IDE/Core/Debouncer.hs
new file mode 100644
index 0000000000..d5c313c95f
--- /dev/null
+++ b/ghcide/src/Development/IDE/Core/Debouncer.hs
@@ -0,0 +1,58 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.Core.Debouncer
+    ( Debouncer
+    , registerEvent
+    , newAsyncDebouncer
+    , noopDebouncer
+    ) where
+
+import Control.Concurrent.Async
+import Control.Concurrent.STM.Stats (atomically, atomicallyNamed)
+import Control.Exception
+import Control.Monad (join)
+import Data.Foldable (traverse_)
+import Data.Hashable
+import qualified Focus
+import qualified StmContainers.Map as STM
+import System.Time.Extra
+
+-- | A debouncer can be used to avoid triggering many events
+-- (e.g. diagnostics) for the same key (e.g. the same file)
+-- within a short timeframe. This is accomplished
+-- by delaying each event for a given time. If another event
+-- is registered for the same key within that timeframe,
+-- only the new event will fire.
+--
+-- We abstract over the debouncer used so we can use a proper debouncer in the IDE but disable
+-- debouncing in the DAML CLI compiler.
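+--
+-- A rough usage sketch (illustrative only, not part of this patch; the
+-- 'publishDiagnosticsFor' action is hypothetical):
+--
+-- > debouncer <- newAsyncDebouncer
+-- > -- at most one event per key fires within the 0.1s window;
+-- > -- a later registration for the same key supersedes this one
+-- > registerEvent debouncer 0.1 file (publishDiagnosticsFor file)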
+newtype Debouncer k = Debouncer { registerEvent :: Seconds -> k -> IO () -> IO () } + +-- | Debouncer used in the IDE that delays events as expected. +newAsyncDebouncer :: (Eq k, Hashable k) => IO (Debouncer k) +newAsyncDebouncer = Debouncer . asyncRegisterEvent <$> STM.newIO + +-- | Register an event that will fire after the given delay if no other event +-- for the same key gets registered until then. +-- +-- If there is a pending event for the same key, the pending event will be killed. +-- Events are run unmasked so it is up to the user of `registerEvent` +-- to mask if required. +asyncRegisterEvent :: (Eq k, Hashable k) => STM.Map k (Async ()) -> Seconds -> k -> IO () -> IO () +asyncRegisterEvent d 0 k fire = do + join $ atomically $ do + prev <- STM.focus Focus.lookupAndDelete k d + return $ traverse_ cancel prev + fire +asyncRegisterEvent d delay k fire = mask_ $ do + a <- asyncWithUnmask $ \unmask -> unmask $ do + sleep delay + fire + atomically $ STM.delete k d + prev <- atomicallyNamed "debouncer" $ STM.focus (Focus.lookup <* Focus.insert a) k d + traverse_ cancel prev + +-- | Debouncer used in the DAML CLI compiler that emits events immediately. +noopDebouncer :: Debouncer k +noopDebouncer = Debouncer $ \_ _ a -> a diff --git a/ghcide/src/Development/IDE/Core/FileExists.hs b/ghcide/src/Development/IDE/Core/FileExists.hs new file mode 100644 index 0000000000..280cd14028 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/FileExists.hs @@ -0,0 +1,256 @@ +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +module Development.IDE.Core.FileExists + ( fileExistsRules + , modifyFileExists + , getFileExists + , watchedGlobs + , GetFileExists(..) + , Log(..) + ) +where + +import Control.Concurrent.STM.Stats (atomically, + atomicallyNamed) +import Control.Exception +import Control.Monad.Extra +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import Data.List (partition) +import Data.Maybe +import Development.IDE.Core.FileStore hiding (Log, LogShake) +import qualified Development.IDE.Core.FileStore as FileStore +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import Development.IDE.Types.Shake (toKey) +import qualified Focus +import Ide.Logger (Pretty (pretty), + Recorder, WithPriority, + cmapWithPrio) +import Ide.Plugin.Config (Config) +import Language.LSP.Protocol.Types +import Language.LSP.Server hiding (getVirtualFile) +import qualified StmContainers.Map as STM +import qualified System.Directory as Dir +import qualified System.FilePath.Glob as Glob + +{- Note [File existence cache and LSP file watchers] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Some LSP servers provide the ability to register file watches with the client, which will then notify +us of file changes. Some clients can do this more efficiently than us, or generally it's a tricky +problem + +Here we use this to maintain a quick lookup cache of file existence. How this works is: +- On startup, if the client supports it we ask it to watch some files (see below). +- When those files are created or deleted (we can also see change events, but we don't +care since we're only caching existence here) we get a notification from the client. 
+- The notification handler calls 'modifyFileExists' to update our cache. + +This means that the cache will only ever work for the files we have set up a watcher for. +So we pick the set that we mostly care about and which are likely to change existence +most often: the source files of the project (as determined by the source extensions +we're configured to care about). + +For all other files we fall back to the slow path. + +There are a few failure modes to think about: + +1. The client doesn't send us the notifications we asked for. + +There's not much we can do in this case: the whole point is to rely on the client so +we don't do the checking ourselves. If the client lets us down, we will just be wrong. + +2. Races between registering watchers, getting notifications, and file changes. + +If a file changes status between us asking for notifications and the client actually +setting up the notifications, we might not get told about it. But this is a relatively +small race window around startup, so we just don't worry about it. + +3. Using the fast path for files that we aren't watching. + +In this case we will fall back to the slow path, but cache that result forever (since +it won't get invalidated by a client notification). To prevent this we guard the +fast path by a check that the path also matches our watching patterns. +-} + +-- See Note [File existence cache and LSP file watchers] +-- | A map for tracking the file existence. +-- If a path maps to 'True' then it exists; if it maps to 'False' then it doesn't exist'; and +-- if it's not in the map then we don't know. +type FileExistsMap = STM.Map NormalizedFilePath Bool + +-- | A wrapper around a mutable 'FileExistsState' +newtype FileExistsMapVar = FileExistsMapVar FileExistsMap + +instance IsIdeGlobal FileExistsMapVar + +data Log + = LogFileStore FileStore.Log + | LogShake Shake.Log + deriving Show + +instance Pretty Log where + pretty = \case + LogFileStore msg -> pretty msg + LogShake msg -> pretty msg + +-- | Grab the current global value of 'FileExistsMap' without acquiring a dependency +getFileExistsMapUntracked :: Action FileExistsMap +getFileExistsMapUntracked = do + FileExistsMapVar v <- getIdeGlobalAction + return v + +-- | Modify the global store of file exists and return the keys that need to be marked as dirty +modifyFileExists :: IdeState -> [(NormalizedFilePath, FileChangeType)] -> IO [Key] +modifyFileExists state changes = do + FileExistsMapVar var <- getIdeGlobalState state + -- Masked to ensure that the previous values are flushed together with the map update + mask_ $ atomicallyNamed "modifyFileExists" $ do + forM_ changes $ \(f,c) -> + case fromChange c of + Just c' -> STM.focus (Focus.insert c') f var + Nothing -> pure () + -- See Note [Invalidating file existence results] + -- flush previous values + let (fileModifChanges, fileExistChanges) = + partition ((== FileChangeType_Changed) . snd) changes + keys0 <- concat <$> mapM (deleteValue (shakeExtras state) GetFileExists . fst) fileExistChanges + let keys1 = map (toKey GetFileExists . fst) fileExistChanges + let keys2 = map (toKey GetModificationTime . 
fst) fileModifChanges + return (keys0 <> keys1 <> keys2) + +fromChange :: FileChangeType -> Maybe Bool +fromChange FileChangeType_Created = Just True +fromChange FileChangeType_Deleted = Just False +fromChange FileChangeType_Changed = Nothing + +------------------------------------------------------------------------------------- + +-- | Returns True if the file exists +getFileExists :: NormalizedFilePath -> Action Bool +getFileExists fp = use_ GetFileExists fp + +{- Note [Which files should we watch?] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The watcher system gives us a lot of flexibility: we can set multiple watchers, and they can all watch on glob +patterns. + +We used to have a quite precise system, where we would register a watcher for a single file path only (and always) +when we actually looked to see if it existed. The downside of this is that it sends a *lot* of notifications +to the client (thousands on a large project), and this could lock up some clients like emacs +(https://github.com/emacs-lsp/lsp-mode/issues/2165). + +Now we take the opposite approach: we register a single, quite general watcher that looks for all files +with a predefined set of extensions. The consequences are: +- The client will have to watch more files. This is usually not too bad, since the pattern is a single glob, +and the clients typically call out to an optimized implementation of file watching that understands globs. +- The client will send us a lot more notifications. This isn't too bad in practice, since although +we're watching a lot of files in principle, they don't get created or destroyed that often. +- We won't ever hit the fast lookup path for files which aren't in our watch pattern, since the only way +files get into our map is when the client sends us a notification about them because we're watching them. +This is fine so long as we're watching the files we check most often, i.e. source files. +-} + +-- | The list of file globs that we ask the client to watch. +watchedGlobs :: IdeOptions -> [String] +watchedGlobs opts = [ "**/*." ++ ext | ext <- allExtensions opts] + +allExtensions :: IdeOptions -> [String] +allExtensions opts = [extIncBoot | ext <- optExtensions opts, extIncBoot <- [ext, ext ++ "-boot"]] + +-- | Installs the 'getFileExists' rules. +-- Provides a fast implementation if client supports dynamic watched files. +-- Creates a global state as a side effect in that case. +fileExistsRules :: Recorder (WithPriority Log) -> Maybe (LanguageContextEnv Config) -> Rules () +fileExistsRules recorder lspEnv = do + supportsWatchedFiles <- case lspEnv of + Nothing -> pure False + Just lspEnv' -> liftIO $ runLspT lspEnv' isWatchSupported + -- Create the global always, although it should only be used if we have fast rules. + -- But there's a chance someone will send unexpected notifications anyway, + -- e.g. https://github.com/haskell/ghcide/issues/599 + addIdeGlobal . 
FileExistsMapVar =<< liftIO STM.newIO
+
+  extras <- getShakeExtrasRules
+  opts <- liftIO $ getIdeOptionsIO extras
+  let globs = watchedGlobs opts
+      patterns = fmap Glob.compile globs
+      fpMatches fp = any (`Glob.match`fp) patterns
+      isWatched = if supportsWatchedFiles
+        then \f -> do
+            isWF <- isWorkspaceFile f
+            return $ isWF && fpMatches (fromNormalizedFilePath f)
+        else const $ pure False
+
+  if supportsWatchedFiles
+    then fileExistsRulesFast recorder isWatched
+    else fileExistsRulesSlow recorder
+
+  fileStoreRules (cmapWithPrio LogFileStore recorder) isWatched
+
+-- Requires an lsp client that provides WatchedFiles notifications, but assumes that this has already been checked.
+fileExistsRulesFast :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules ()
+fileExistsRulesFast recorder isWatched =
+    defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \GetFileExists file -> do
+        isWF <- isWatched file
+        if isWF
+            then fileExistsFast file
+            else fileExistsSlow file
+
+{- Note [Invalidating file existence results]
+   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+We have two mechanisms for getting file existence information:
+- The file existence cache
+- The VFS lookup
+
+Both of these affect the results of the 'GetFileExists' rule, so we need to make sure it
+is invalidated properly when things change.
+
+For the file existence cache, we manually flush the results of 'GetFileExists' when we
+modify it (i.e. when a notification comes from the client). This is faster than using
+'alwaysRerun' in the 'fileExistsFast', and we need it to be as fast as possible.
+
+For the VFS lookup, however, we won't get prompted to flush the result, so instead
+we use 'alwaysRerun'.
+-}
+
+fileExistsFast :: NormalizedFilePath -> Action (Maybe BS.ByteString, Maybe Bool)
+fileExistsFast file = do
+    -- Could in principle use 'alwaysRerun' here, but it's too slow, see Note [Invalidating file existence results]
+    mp <- getFileExistsMapUntracked
+
+    mbFilesWatched <- liftIO $ atomically $ STM.lookup file mp
+    exist <- case mbFilesWatched of
+      Just exist -> pure exist
+      -- We don't know about it: use the slow route.
+      -- Note that we do *not* call 'fileExistsSlow', as that would trigger 'alwaysRerun'.
+      Nothing -> getFileExistsVFS file
+    pure (summarizeExists exist, Just exist)
+
+summarizeExists :: Bool -> Maybe BS.ByteString
+summarizeExists x = Just $ if x then BS.singleton 1 else BS.empty
+
+fileExistsRulesSlow :: Recorder (WithPriority Log) -> Rules ()
+fileExistsRulesSlow recorder =
+  defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \GetFileExists file -> fileExistsSlow file
+
+fileExistsSlow :: NormalizedFilePath -> Action (Maybe BS.ByteString, Maybe Bool)
+fileExistsSlow file = do
+    -- See Note [Invalidating file existence results]
+    alwaysRerun
+    exist <- getFileExistsVFS file
+    pure (summarizeExists exist, Just exist)
+
+getFileExistsVFS :: NormalizedFilePath -> Action Bool
+getFileExistsVFS file = do
+    vf <- getVirtualFile file
+    if isJust vf
+        then pure True
+        else liftIO $ handle (\(_ :: IOException) -> return False) $
+                Dir.doesFileExist (fromNormalizedFilePath file)
diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs
new file mode 100644
index 0000000000..e545ec7b14
--- /dev/null
+++ b/ghcide/src/Development/IDE/Core/FileStore.hs
@@ -0,0 +1,362 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Core.FileStore( + getFileModTimeContents, + getFileContents, + getUriContents, + getVersionedTextDoc, + setFileModified, + setSomethingModified, + fileStoreRules, + modificationTime, + typecheckParents, + resetFileStore, + resetInterfaceStore, + getModificationTimeImpl, + addIdeGlobal, + getFileContentsImpl, + getModTime, + isWatchSupported, + registerFileWatches, + shareFilePath, + Log(..), + ) where + +import Control.Concurrent.STM.Stats (STM, atomically) +import Control.Concurrent.STM.TQueue (writeTQueue) +import Control.Exception +import Control.Lens ((^.)) +import Control.Monad.Extra +import Control.Monad.IO.Class +import qualified Data.Binary as B +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as LBS +import qualified Data.HashMap.Strict as HashMap +import Data.IORef +import qualified Data.Text as T +import qualified Data.Text as Text +import Data.Text.Utf16.Rope.Mixed (Rope) +import Data.Time +import Data.Time.Clock.POSIX +import Development.IDE.Core.FileUtils +import Development.IDE.Core.IdeConfiguration (isWorkspaceFile) +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Orphans () +import Development.IDE.Graph +import Development.IDE.Import.DependencyInformation +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import Development.IDE.Types.Shake (toKey) +import HieDb.Create (deleteMissingRealFiles) +import Ide.Logger (Pretty (pretty), + Priority (Info), + Recorder, + WithPriority, + cmapWithPrio, + logWith, viaShow, + (<+>)) +import qualified Ide.Logger as L +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (toUntypedRegistration) +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types (DidChangeWatchedFilesRegistrationOptions (DidChangeWatchedFilesRegistrationOptions), + FileSystemWatcher (..), + TextDocumentIdentifier (..), + VersionedTextDocumentIdentifier (..), + _watchers, + uriToNormalizedFilePath) +import qualified Language.LSP.Protocol.Types as LSP +import qualified Language.LSP.Server as LSP +import Language.LSP.VFS +import System.FilePath +import System.IO.Error +import System.IO.Unsafe + +data Log + = LogCouldNotIdentifyReverseDeps !NormalizedFilePath + | LogTypeCheckingReverseDeps !NormalizedFilePath !(Maybe [NormalizedFilePath]) + | LogShake Shake.Log + deriving Show + +instance Pretty Log where + pretty = \case + LogCouldNotIdentifyReverseDeps path -> + "Could not identify reverse dependencies for" <+> viaShow path + (LogTypeCheckingReverseDeps path reverseDepPaths) -> + "Typechecking reverse dependencies for" + <+> viaShow path + <> ":" + <+> pretty (fmap (fmap show) reverseDepPaths) + LogShake msg -> pretty msg + +addWatchedFileRule :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () +addWatchedFileRule recorder isWatched = defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \AddWatchedFile f -> do + isAlreadyWatched <- isWatched f + isWp <- isWorkspaceFile f + if isAlreadyWatched then pure (Just True) else + if not isWp then pure (Just False) else do + ShakeExtras{lspEnv} <- getShakeExtras + case lspEnv of + Just env -> fmap Just $ liftIO $ LSP.runLspT env $ + registerFileWatches [fromNormalizedFilePath f] + Nothing -> pure $ Just 
False + + +getModificationTimeRule :: Recorder (WithPriority Log) -> Rules () +getModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \(GetModificationTime_ missingFileDiags) file -> + getModificationTimeImpl missingFileDiags file + +getModificationTimeImpl + :: Bool + -> NormalizedFilePath + -> Action (Maybe BS.ByteString, ([FileDiagnostic], Maybe FileVersion)) +getModificationTimeImpl missingFileDiags file = do + let file' = fromNormalizedFilePath file + let wrap time = (Just $ LBS.toStrict $ B.encode $ toRational time, ([], Just $ ModificationTime time)) + mbVf <- getVirtualFile file + case mbVf of + Just (virtualFileVersion -> ver) -> do + alwaysRerun + pure (Just $ LBS.toStrict $ B.encode ver, ([], Just $ VFSVersion ver)) + Nothing -> do + isWF <- use_ AddWatchedFile file + if isWF + then -- the file is watched so we can rely on FileWatched notifications, + -- but also need a dependency on IsFileOfInterest to reinstall + -- alwaysRerun when the file becomes VFS + void (use_ IsFileOfInterest file) + else if isInterface file + then -- interface files are tracked specially using the closed world assumption + pure () + else -- in all other cases we will need to freshly check the file system + alwaysRerun + + liftIO $ fmap wrap (getModTime file') + `catch` \(e :: IOException) -> do + let err | isDoesNotExistError e = "File does not exist: " ++ file' + | otherwise = "IO error while reading " ++ file' ++ ", " ++ displayException e + diag = ideErrorText file (T.pack err) + if isDoesNotExistError e && not missingFileDiags + then return (Nothing, ([], Nothing)) + else return (Nothing, ([diag], Nothing)) + + +getPhysicalModificationTimeRule :: Recorder (WithPriority Log) -> Rules () +getPhysicalModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetPhysicalModificationTime file -> + getPhysicalModificationTimeImpl file + +getPhysicalModificationTimeImpl + :: NormalizedFilePath + -> Action (Maybe BS.ByteString, ([FileDiagnostic], Maybe FileVersion)) +getPhysicalModificationTimeImpl file = do + let file' = fromNormalizedFilePath file + let wrap time = (Just $ LBS.toStrict $ B.encode $ toRational time, ([], Just $ ModificationTime time)) + + alwaysRerun + + liftIO $ fmap wrap (getModTime file') + `catch` \(e :: IOException) -> do + let err | isDoesNotExistError e = "File does not exist: " ++ file' + | otherwise = "IO error while reading " ++ file' ++ ", " ++ displayException e + diag = ideErrorText file (T.pack err) + if isDoesNotExistError e + then return (Nothing, ([], Nothing)) + else return (Nothing, ([diag], Nothing)) + +-- | Interface files cannot be watched, since they live outside the workspace. +-- But interface files are private, in that only HLS writes them. +-- So we implement watching ourselves, and bypass the need for alwaysRerun. 
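+--
+-- For example (illustrative only, not part of this patch):
+--
+-- > isInterface (toNormalizedFilePath' "dist/Module.hie") == True
+-- > isInterface (toNormalizedFilePath' "src/Module.hs")   == False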
+isInterface :: NormalizedFilePath -> Bool +isInterface f = takeExtension (fromNormalizedFilePath f) `elem` [".hi", ".hi-boot", ".hie", ".hie-boot", ".core"] + +-- | Reset the GetModificationTime state of interface files +resetInterfaceStore :: ShakeExtras -> NormalizedFilePath -> STM [Key] +resetInterfaceStore state f = do + deleteValue state GetModificationTime f + +-- | Reset the GetModificationTime state of watched files +-- Assumes the list does not include any FOIs +resetFileStore :: IdeState -> [(NormalizedFilePath, LSP.FileChangeType)] -> IO [Key] +resetFileStore ideState changes = mask $ \_ -> do + -- we record FOIs document versions in all the stored values + -- so NEVER reset FOIs to avoid losing their versions + -- FOI filtering is done by the caller (LSP Notification handler) + fmap concat <$> + forM changes $ \(nfp, c) -> do + case c of + LSP.FileChangeType_Changed + -- already checked elsewhere | not $ HM.member nfp fois + -> + atomically $ do + ks <- deleteValue (shakeExtras ideState) GetModificationTime nfp + vs <- deleteValue (shakeExtras ideState) GetPhysicalModificationTime nfp + pure $ ks ++ vs + _ -> pure [] + + +modificationTime :: FileVersion -> Maybe UTCTime +modificationTime VFSVersion{} = Nothing +modificationTime (ModificationTime posix) = Just $ posixSecondsToUTCTime posix + +getFileContentsRule :: Recorder (WithPriority Log) -> Rules () +getFileContentsRule recorder = define (cmapWithPrio LogShake recorder) $ \GetFileContents file -> getFileContentsImpl file + +getFileContentsImpl + :: NormalizedFilePath + -> Action ([FileDiagnostic], Maybe (FileVersion, Maybe Rope)) +getFileContentsImpl file = do + -- need to depend on modification time to introduce a dependency with Cutoff + time <- use_ GetModificationTime file + res <- do + mbVirtual <- getVirtualFile file + pure $ _file_text <$> mbVirtual + pure ([], Just (time, res)) + +-- | Returns the modification time and the contents. +-- For VFS paths, the modification time is the current time. +getFileModTimeContents :: NormalizedFilePath -> Action (UTCTime, Maybe Rope) +getFileModTimeContents f = do + (fv, contents) <- use_ GetFileContents f + modTime <- case modificationTime fv of + Just t -> pure t + Nothing -> do + foi <- use_ IsFileOfInterest f + liftIO $ case foi of + IsFOI Modified{} -> getCurrentTime + _ -> do + posix <- getModTime $ fromNormalizedFilePath f + pure $ posixSecondsToUTCTime posix + return (modTime, contents) + +getFileContents :: NormalizedFilePath -> Action (Maybe Rope) +getFileContents f = snd <$> use_ GetFileContents f + +getUriContents :: NormalizedUri -> Action (Maybe Rope) +getUriContents uri = + join <$> traverse getFileContents (uriToNormalizedFilePath uri) + +-- | Given a text document identifier, annotate it with the latest version. +-- +-- Like Language.LSP.Server.Core.getVersionedTextDoc, but gets the virtual file +-- from the Shake VFS rather than the LSP VFS. +getVersionedTextDoc :: TextDocumentIdentifier -> Action VersionedTextDocumentIdentifier +getVersionedTextDoc doc = do + let uri = doc ^. 
L.uri + mvf <- + maybe (pure Nothing) getVirtualFile $ + uriToNormalizedFilePath $ toNormalizedUri uri + let ver = case mvf of + Just (VirtualFile lspver _ _) -> lspver + Nothing -> 0 + return (VersionedTextDocumentIdentifier uri ver) + +fileStoreRules :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () +fileStoreRules recorder isWatched = do + getModificationTimeRule recorder + getPhysicalModificationTimeRule recorder + getFileContentsRule recorder + addWatchedFileRule recorder isWatched + +-- | Note that some buffer for a specific file has been modified but not +-- with what changes. +setFileModified :: Recorder (WithPriority Log) + -> VFSModified + -> IdeState + -> Bool -- ^ Was the file saved? + -> NormalizedFilePath + -> IO [Key] + -> IO () +setFileModified recorder vfs state saved nfp actionBefore = do + ideOptions <- getIdeOptionsIO $ shakeExtras state + doCheckParents <- optCheckParents ideOptions + let checkParents = case doCheckParents of + AlwaysCheck -> True + CheckOnSave -> saved + _ -> False + restartShakeSession (shakeExtras state) vfs (fromNormalizedFilePath nfp ++ " (modified)") [] $ do + keys<-actionBefore + return (toKey GetModificationTime nfp:keys) + when checkParents $ + typecheckParents recorder state nfp + +typecheckParents :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO () +typecheckParents recorder state nfp = void $ shakeEnqueue (shakeExtras state) parents + where parents = mkDelayedAction "ParentTC" L.Debug (typecheckParentsAction recorder nfp) + +typecheckParentsAction :: Recorder (WithPriority Log) -> NormalizedFilePath -> Action () +typecheckParentsAction recorder nfp = do + revs <- transitiveReverseDependencies nfp <$> useWithSeparateFingerprintRule_ GetModuleGraphTransReverseDepsFingerprints GetModuleGraph nfp + case revs of + Nothing -> logWith recorder Info $ LogCouldNotIdentifyReverseDeps nfp + Just rs -> do + logWith recorder Info $ LogTypeCheckingReverseDeps nfp revs + void $ uses GetModIface rs + +-- | Note that some keys have been modified and restart the session +-- Only valid if the virtual file system was initialised by LSP, as that +-- independently tracks which files are modified. +setSomethingModified :: VFSModified -> IdeState -> String -> IO [Key] -> IO () +setSomethingModified vfs state reason actionBetweenSession = do + -- Update database to remove any files that might have been renamed/deleted + atomically $ writeTQueue (indexQueue $ hiedbWriter $ shakeExtras state) (\withHieDb -> withHieDb deleteMissingRealFiles) + void $ restartShakeSession (shakeExtras state) vfs reason [] actionBetweenSession + +registerFileWatches :: [String] -> LSP.LspT Config IO Bool +registerFileWatches globs = do + watchSupported <- isWatchSupported + if watchSupported + then do + let + regParams = LSP.RegistrationParams [toUntypedRegistration registration] + -- The registration ID is arbitrary and is only used in case we want to deregister (which we won't). + -- We could also use something like a random UUID, as some other servers do, but this works for + -- our purposes. 
+          registration = LSP.TRegistration { _id = "globalFileWatches"
+                                           , _method = LSP.SMethod_WorkspaceDidChangeWatchedFiles
+                                           , _registerOptions = Just regOptions}
+          regOptions =
+            DidChangeWatchedFilesRegistrationOptions { _watchers = watchers }
+          -- See Note [File existence cache and LSP file watchers] for why this exists, and the choice of watch kind
+          -- WatchKind_Custom 7 is for create, change, and delete
+          watchKind = LSP.WatchKind_Custom 7
+          -- See Note [Which files should we watch?] for an explanation of why the pattern is the way that it is
+          -- The patterns will be something like "**/*.hs", i.e. "any number of directory segments,
+          -- followed by a file with an extension 'hs'.
+          watcher glob = FileSystemWatcher { _globPattern = glob, _kind = Just watchKind }
+          -- We use multiple watchers instead of one using '{}' because lsp-test doesn't
+          -- support that: https://github.com/bubba/lsp-test/issues/77
+          watchers = [ watcher (LSP.GlobPattern (LSP.InL (LSP.Pattern (Text.pack glob)))) | glob <- globs ]
+
+        void $ LSP.sendRequest LSP.SMethod_ClientRegisterCapability regParams (const $ pure ()) -- TODO handle response
+        return True
+      else return False
+
+isWatchSupported :: LSP.LspT Config IO Bool
+isWatchSupported = do
+    clientCapabilities <- LSP.getClientCapabilities
+    pure $ case () of
+      _ | LSP.ClientCapabilities{_workspace} <- clientCapabilities
+        , Just LSP.WorkspaceClientCapabilities{_didChangeWatchedFiles} <- _workspace
+        , Just LSP.DidChangeWatchedFilesClientCapabilities{_dynamicRegistration} <- _didChangeWatchedFiles
+        , Just True <- _dynamicRegistration
+        -> True
+        | otherwise -> False
+
+filePathMap :: IORef (HashMap.HashMap FilePath FilePath)
+filePathMap = unsafePerformIO $ newIORef HashMap.empty
+{-# NOINLINE filePathMap #-}
+
+shareFilePath :: FilePath -> FilePath
+shareFilePath k = unsafePerformIO $ do
+  atomicModifyIORef' filePathMap $ \km ->
+    let new_key = HashMap.lookup k km
+    in case new_key of
+      Just v -> (km, v)
+      Nothing -> (HashMap.insert k k km, k)
+{-# NOINLINE shareFilePath #-}
diff --git a/ghcide/src/Development/IDE/Core/FileUtils.hs b/ghcide/src/Development/IDE/Core/FileUtils.hs
new file mode 100644
index 0000000000..e8ff7299b4
--- /dev/null
+++ b/ghcide/src/Development/IDE/Core/FileUtils.hs
@@ -0,0 +1,31 @@
+{-# LANGUAGE CPP #-}
+
+module Development.IDE.Core.FileUtils(
+  getModTime,
+  ) where
+
+
+import Data.Time.Clock.POSIX
+
+#ifdef mingw32_HOST_OS
+import qualified System.Directory as Dir
+#else
+import System.Posix.Files (getFileStatus, modificationTimeHiRes)
+#endif
+
+-- Dir.getModificationTime is surprisingly slow since it performs
+-- a ton of conversions. Since we do not actually care about
+-- the format of the time, we can get away with something cheaper.
+-- For now, we only try to do this on Unix systems where it seems to get the
+-- time spent checking file modifications (which happens on every change)
+-- from > 0.5s to ~0.15s.
+-- We might also want to try speeding this up on Windows at some point.
+-- TODO leverage DidChangeWatchedFile lsp notifications on clients that +-- support them, as done for GetFileExists +getModTime :: FilePath -> IO POSIXTime +getModTime f = +#ifdef mingw32_HOST_OS + utcTimeToPOSIXSeconds <$> Dir.getModificationTime f +#else + modificationTimeHiRes <$> getFileStatus f +#endif diff --git a/ghcide/src/Development/IDE/Core/IdeConfiguration.hs b/ghcide/src/Development/IDE/Core/IdeConfiguration.hs new file mode 100644 index 0000000000..eb42450bde --- /dev/null +++ b/ghcide/src/Development/IDE/Core/IdeConfiguration.hs @@ -0,0 +1,92 @@ +module Development.IDE.Core.IdeConfiguration + ( IdeConfiguration(..) + , registerIdeConfiguration + , getIdeConfiguration + , parseConfiguration + , parseWorkspaceFolder + , isWorkspaceFile + , modifyWorkspaceFolders + , modifyClientSettings + , getClientSettings + ) +where + +import Control.Concurrent.Strict + +import Control.Monad +import Control.Monad.IO.Class +import Data.Aeson.Types (Value) +import Data.Hashable (Hashed, hashed, unhashed) +import Data.HashSet (HashSet, singleton) +import Data.Text (isPrefixOf) +import Development.IDE.Core.Shake +import Development.IDE.Graph +import Development.IDE.Types.Location +import Language.LSP.Protocol.Types +import System.FilePath (isRelative) + +-- | Lsp client relevant configuration details +data IdeConfiguration = IdeConfiguration + { workspaceFolders :: HashSet NormalizedUri + , clientSettings :: Hashed (Maybe Value) + } + deriving (Show) + +newtype IdeConfigurationVar = IdeConfigurationVar {unIdeConfigurationRef :: Var IdeConfiguration} + +instance IsIdeGlobal IdeConfigurationVar + +registerIdeConfiguration :: ShakeExtras -> IdeConfiguration -> IO () +registerIdeConfiguration extras = + addIdeGlobalExtras extras . IdeConfigurationVar <=< newVar + +getIdeConfiguration :: Action IdeConfiguration +getIdeConfiguration = + getIdeGlobalAction >>= liftIO . readVar . unIdeConfigurationRef + +parseConfiguration :: InitializeParams -> IdeConfiguration +parseConfiguration InitializeParams {..} = + IdeConfiguration {..} + where + workspaceFolders = + foldMap (singleton . toNormalizedUri) (nullToMaybe _rootUri) + <> (foldMap . foldMap) + (singleton . parseWorkspaceFolder) + (nullToMaybe =<< _workspaceFolders) + clientSettings = hashed _initializationOptions + +parseWorkspaceFolder :: WorkspaceFolder -> NormalizedUri +parseWorkspaceFolder WorkspaceFolder{_uri} = + toNormalizedUri _uri + +modifyWorkspaceFolders + :: IdeState -> (HashSet NormalizedUri -> HashSet NormalizedUri) -> IO () +modifyWorkspaceFolders ide f = modifyIdeConfiguration ide f' + where f' (IdeConfiguration ws initOpts) = IdeConfiguration (f ws) initOpts + +modifyClientSettings + :: IdeState -> (Maybe Value -> Maybe Value) -> IO () +modifyClientSettings ide f = modifyIdeConfiguration ide f' + where f' (IdeConfiguration ws clientSettings) = + IdeConfiguration ws (hashed . f . unhashed $ clientSettings) + +modifyIdeConfiguration + :: IdeState -> (IdeConfiguration -> IdeConfiguration) -> IO () +modifyIdeConfiguration ide f = do + IdeConfigurationVar var <- getIdeGlobalState ide + void $ modifyVar' var f + +isWorkspaceFile :: NormalizedFilePath -> Action Bool +isWorkspaceFile file = + if isRelative (fromNormalizedFilePath file) + then return True + else do + IdeConfiguration {..} <- getIdeConfiguration + let toText = getUri . fromNormalizedUri + return $ + any + (\root -> toText root `isPrefixOf` toText (filePathToUri' file)) + workspaceFolders + +getClientSettings :: Action (Maybe Value) +getClientSettings = unhashed . 
clientSettings <$> getIdeConfiguration diff --git a/ghcide/src/Development/IDE/Core/OfInterest.hs b/ghcide/src/Development/IDE/Core/OfInterest.hs new file mode 100644 index 0000000000..19e0f40e24 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/OfInterest.hs @@ -0,0 +1,163 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE TypeFamilies #-} + +-- | Utilities and state for the files of interest - those which are currently +-- open in the editor. The rule is 'IsFileOfInterest' +module Development.IDE.Core.OfInterest( + ofInterestRules, + getFilesOfInterest, + getFilesOfInterestUntracked, + addFileOfInterest, + deleteFileOfInterest, + setFilesOfInterest, + kick, FileOfInterestStatus(..), + OfInterestVar(..), + scheduleGarbageCollection, + Log(..) + ) where + +import Control.Concurrent.Strict +import Control.Monad +import Control.Monad.IO.Class +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Proxy +import Development.IDE.Graph + +import Control.Concurrent.STM.Stats (atomically, + modifyTVar') +import Data.Aeson (toJSON) +import qualified Data.ByteString as BS +import Data.Maybe (catMaybes) +import Development.IDE.Core.ProgressReporting +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Plugin.Completions.Types +import Development.IDE.Types.Exports +import Development.IDE.Types.Location +import Development.IDE.Types.Options (IdeTesting (..)) +import Development.IDE.Types.Shake (toKey) +import GHC.TypeLits (KnownSymbol) +import Ide.Logger (Pretty (pretty), + Priority (..), + Recorder, + WithPriority, + cmapWithPrio, + logWith) +import qualified Language.LSP.Protocol.Message as LSP +import qualified Language.LSP.Server as LSP + +data Log = LogShake Shake.Log + deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + +newtype OfInterestVar = OfInterestVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) + +instance IsIdeGlobal OfInterestVar + +-- | The rule that initialises the files of interest state. +ofInterestRules :: Recorder (WithPriority Log) -> Rules () +ofInterestRules recorder = do + addIdeGlobal . OfInterestVar =<< liftIO (newVar HashMap.empty) + addIdeGlobal . GarbageCollectVar =<< liftIO (newVar False) + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsFileOfInterest f -> do + alwaysRerun + filesOfInterest <- getFilesOfInterestUntracked + let foi = maybe NotFOI IsFOI $ f `HashMap.lookup` filesOfInterest + fp = summarize foi + res = (Just fp, Just foi) + return res + where + summarize NotFOI = BS.singleton 0 + summarize (IsFOI OnDisk) = BS.singleton 1 + summarize (IsFOI (Modified False)) = BS.singleton 2 + summarize (IsFOI (Modified True)) = BS.singleton 3 + +------------------------------------------------------------ +newtype GarbageCollectVar = GarbageCollectVar (Var Bool) +instance IsIdeGlobal GarbageCollectVar + +------------------------------------------------------------ +-- Exposed API + +getFilesOfInterest :: IdeState -> IO( HashMap NormalizedFilePath FileOfInterestStatus) +getFilesOfInterest state = do + OfInterestVar var <- getIdeGlobalState state + readVar var + +-- | Set the files-of-interest - not usually necessary or advisable. +-- The LSP client will keep this information up to date. 
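+--
+-- In normal operation the did-open/did-close notification handlers maintain
+-- this map via 'addFileOfInterest' and 'deleteFileOfInterest'; calling this
+-- directly is mainly useful in headless or test setups. A hypothetical example
+-- (@ide@ and @nfp@ are placeholders):
+--
+-- > setFilesOfInterest ide (HashMap.fromList [(nfp, Modified{firstOpen = True})])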
+setFilesOfInterest :: IdeState -> HashMap NormalizedFilePath FileOfInterestStatus -> IO () +setFilesOfInterest state files = do + OfInterestVar var <- getIdeGlobalState state + writeVar var files + +getFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) +getFilesOfInterestUntracked = do + OfInterestVar var <- getIdeGlobalAction + liftIO $ readVar var + +addFileOfInterest :: IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO [Key] +addFileOfInterest state f v = do + OfInterestVar var <- getIdeGlobalState state + (prev, files) <- modifyVar var $ \dict -> do + let (prev, new) = HashMap.alterF (, Just v) f dict + pure (new, (prev, new)) + if prev /= Just v + then do + logWith (ideLogger state) Debug $ + LogSetFilesOfInterest (HashMap.toList files) + return [toKey IsFileOfInterest f] + else return [] + +deleteFileOfInterest :: IdeState -> NormalizedFilePath -> IO [Key] +deleteFileOfInterest state f = do + OfInterestVar var <- getIdeGlobalState state + files <- modifyVar' var $ HashMap.delete f + logWith (ideLogger state) Debug $ + LogSetFilesOfInterest (HashMap.toList files) + return [toKey IsFileOfInterest f] +scheduleGarbageCollection :: IdeState -> IO () +scheduleGarbageCollection state = do + GarbageCollectVar var <- getIdeGlobalState state + writeVar var True + +-- | Typecheck all the files of interest. +-- Could be improved +kick :: Action () +kick = do + files <- HashMap.keys <$> getFilesOfInterestUntracked + ShakeExtras{exportsMap, ideTesting = IdeTesting testing, lspEnv, progress} <- getShakeExtras + let signal :: KnownSymbol s => Proxy s -> Action () + signal msg = when testing $ liftIO $ + mRunLspT lspEnv $ + LSP.sendNotification (LSP.SMethod_CustomMethod msg) $ + toJSON $ map fromNormalizedFilePath files + + signal (Proxy @"kick/start") + liftIO $ progressUpdate progress ProgressNewStarted + + -- Update the exports map + results <- uses GenerateCore files + <* uses GetHieAst files + -- needed to have non local completions on the first edit + -- when the first edit breaks the module header + <* uses NonLocalCompletions files + let mguts = catMaybes results + void $ liftIO $ atomically $ modifyTVar' exportsMap (updateExportsMapMg mguts) + + liftIO $ progressUpdate progress ProgressCompleted + + GarbageCollectVar var <- getIdeGlobalAction + garbageCollectionScheduled <- liftIO $ readVar var + when garbageCollectionScheduled $ do + void garbageCollectDirtyKeys + liftIO $ writeVar var False + + signal (Proxy @"kick/done") diff --git a/ghcide/src/Development/IDE/Core/PluginUtils.hs b/ghcide/src/Development/IDE/Core/PluginUtils.hs new file mode 100644 index 0000000000..6ba633df26 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/PluginUtils.hs @@ -0,0 +1,255 @@ +{-# LANGUAGE GADTs #-} +module Development.IDE.Core.PluginUtils +(-- * Wrapped Action functions + runActionE +, runActionMT +, useE +, useMT +, usesE +, usesMT +, useWithStaleE +, useWithStaleMT +-- * Wrapped IdeAction functions +, runIdeActionE +, runIdeActionMT +, useWithStaleFastE +, useWithStaleFastMT +, uriToFilePathE +-- * Wrapped PositionMapping functions +, toCurrentPositionE +, toCurrentPositionMT +, fromCurrentPositionE +, fromCurrentPositionMT +, toCurrentRangeE +, toCurrentRangeMT +, fromCurrentRangeE +, fromCurrentRangeMT +-- * Diagnostics +, activeDiagnosticsInRange +, activeDiagnosticsInRangeMT +-- * Formatting handlers +, mkFormattingHandlers) where + +import Control.Concurrent.STM +import Control.Lens ((^.)) +import Control.Monad.Error.Class (MonadError (throwError)) 
+import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Reader (runReaderT) +import Control.Monad.Trans.Except +import Control.Monad.Trans.Maybe +import Data.Functor.Identity +import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.Service (runAction) +import Development.IDE.Core.Shake (IdeAction, IdeRule, + IdeState (shakeExtras), + mkDelayedAction, + shakeEnqueue) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Orphans () +import Development.IDE.Graph hiding (ShakeValue) +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location (NormalizedFilePath) +import qualified Development.IDE.Types.Location as Location +import qualified Ide.Logger as Logger +import Ide.Plugin.Error +import Ide.PluginUtils (rangesOverlap) +import Ide.Types +import qualified Language.LSP.Protocol.Lens as LSP +import Language.LSP.Protocol.Message (SMethod (..)) +import qualified Language.LSP.Protocol.Types as LSP +import qualified StmContainers.Map as STM + +-- ---------------------------------------------------------------------------- +-- Action wrappers +-- ---------------------------------------------------------------------------- + +-- |ExceptT version of `runAction`, takes a ExceptT Action +runActionE :: MonadIO m => String -> IdeState -> ExceptT e Action a -> ExceptT e m a +runActionE herald ide act = + mapExceptT liftIO . ExceptT $ + join $ shakeEnqueue (shakeExtras ide) (mkDelayedAction herald Logger.Debug $ runExceptT act) + +-- |MaybeT version of `runAction`, takes a MaybeT Action +runActionMT :: MonadIO m => String -> IdeState -> MaybeT Action a -> MaybeT m a +runActionMT herald ide act = + mapMaybeT liftIO . MaybeT $ + join $ shakeEnqueue (shakeExtras ide) (mkDelayedAction herald Logger.Debug $ runMaybeT act) + +-- |ExceptT version of `use` that throws a PluginRuleFailed upon failure +useE :: IdeRule k v => k -> NormalizedFilePath -> ExceptT PluginError Action v +useE k = maybeToExceptT (PluginRuleFailed (T.pack $ show k)) . useMT k + +-- |MaybeT version of `use` +useMT :: IdeRule k v => k -> NormalizedFilePath -> MaybeT Action v +useMT k = MaybeT . Shake.use k + +-- |ExceptT version of `uses` that throws a PluginRuleFailed upon failure +usesE :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> ExceptT PluginError Action (f v) +usesE k = maybeToExceptT (PluginRuleFailed (T.pack $ show k)) . usesMT k + +-- |MaybeT version of `uses` +usesMT :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> MaybeT Action (f v) +usesMT k xs = MaybeT $ sequence <$> Shake.uses k xs + +-- |ExceptT version of `useWithStale` that throws a PluginRuleFailed upon +-- failure +useWithStaleE :: IdeRule k v + => k -> NormalizedFilePath -> ExceptT PluginError Action (v, PositionMapping) +useWithStaleE key = maybeToExceptT (PluginRuleFailed (T.pack $ show key)) . 
useWithStaleMT key + +-- |MaybeT version of `useWithStale` +useWithStaleMT :: IdeRule k v + => k -> NormalizedFilePath -> MaybeT Action (v, PositionMapping) +useWithStaleMT key file = MaybeT $ runIdentity <$> Shake.usesWithStale key (Identity file) + +-- ---------------------------------------------------------------------------- +-- IdeAction wrappers +-- ---------------------------------------------------------------------------- + +-- |ExceptT version of `runIdeAction`, takes a ExceptT IdeAction +runIdeActionE :: MonadIO m => String -> Shake.ShakeExtras -> ExceptT e IdeAction a -> ExceptT e m a +runIdeActionE _herald s i = ExceptT $ liftIO $ runReaderT (Shake.runIdeActionT $ runExceptT i) s + +-- |MaybeT version of `runIdeAction`, takes a MaybeT IdeAction +runIdeActionMT :: MonadIO m => String -> Shake.ShakeExtras -> MaybeT IdeAction a -> MaybeT m a +runIdeActionMT _herald s i = MaybeT $ liftIO $ runReaderT (Shake.runIdeActionT $ runMaybeT i) s + +-- |ExceptT version of `useWithStaleFast` that throws a PluginRuleFailed upon +-- failure +useWithStaleFastE :: IdeRule k v => k -> NormalizedFilePath -> ExceptT PluginError IdeAction (v, PositionMapping) +useWithStaleFastE k = maybeToExceptT (PluginRuleFailed (T.pack $ show k)) . useWithStaleFastMT k + +-- |MaybeT version of `useWithStaleFast` +useWithStaleFastMT :: IdeRule k v => k -> NormalizedFilePath -> MaybeT IdeAction (v, PositionMapping) +useWithStaleFastMT k = MaybeT . Shake.useWithStaleFast k + +-- ---------------------------------------------------------------------------- +-- Location wrappers +-- ---------------------------------------------------------------------------- + +-- |ExceptT version of `uriToFilePath` that throws a PluginInvalidParams upon +-- failure +uriToFilePathE :: Monad m => LSP.Uri -> ExceptT PluginError m FilePath +uriToFilePathE uri = maybeToExceptT (PluginInvalidParams (T.pack $ "uriToFilePath' failed. Uri:" <> show uri)) $ uriToFilePathMT uri + +-- |MaybeT version of `uriToFilePath` +uriToFilePathMT :: Monad m => LSP.Uri -> MaybeT m FilePath +uriToFilePathMT = MaybeT . pure . Location.uriToFilePath' + +-- ---------------------------------------------------------------------------- +-- PositionMapping wrappers +-- ---------------------------------------------------------------------------- + +-- |ExceptT version of `toCurrentPosition` that throws a PluginInvalidUserState +-- upon failure +toCurrentPositionE :: Monad m => PositionMapping -> LSP.Position -> ExceptT PluginError m LSP.Position +toCurrentPositionE mapping = maybeToExceptT (PluginInvalidUserState "toCurrentPosition"). toCurrentPositionMT mapping + +-- |MaybeT version of `toCurrentPosition` +toCurrentPositionMT :: Monad m => PositionMapping -> LSP.Position -> MaybeT m LSP.Position +toCurrentPositionMT mapping = MaybeT . pure . toCurrentPosition mapping + +-- |ExceptT version of `fromCurrentPosition` that throws a +-- PluginInvalidUserState upon failure +fromCurrentPositionE :: Monad m => PositionMapping -> LSP.Position -> ExceptT PluginError m LSP.Position +fromCurrentPositionE mapping = maybeToExceptT (PluginInvalidUserState "fromCurrentPosition") . fromCurrentPositionMT mapping + +-- |MaybeT version of `fromCurrentPosition` +fromCurrentPositionMT :: Monad m => PositionMapping -> LSP.Position -> MaybeT m LSP.Position +fromCurrentPositionMT mapping = MaybeT . pure . 
fromCurrentPosition mapping
+
+-- |ExceptT version of `toCurrentRange` that throws a PluginInvalidUserState
+-- upon failure
+toCurrentRangeE :: Monad m => PositionMapping -> LSP.Range -> ExceptT PluginError m LSP.Range
+toCurrentRangeE mapping = maybeToExceptT (PluginInvalidUserState "toCurrentRange") . toCurrentRangeMT mapping
+
+-- |MaybeT version of `toCurrentRange`
+toCurrentRangeMT :: Monad m => PositionMapping -> LSP.Range -> MaybeT m LSP.Range
+toCurrentRangeMT mapping = MaybeT . pure . toCurrentRange mapping
+
+-- |ExceptT version of `fromCurrentRange` that throws a PluginInvalidUserState
+-- upon failure
+fromCurrentRangeE :: Monad m => PositionMapping -> LSP.Range -> ExceptT PluginError m LSP.Range
+fromCurrentRangeE mapping = maybeToExceptT (PluginInvalidUserState "fromCurrentRange") . fromCurrentRangeMT mapping
+
+-- |MaybeT version of `fromCurrentRange`
+fromCurrentRangeMT :: Monad m => PositionMapping -> LSP.Range -> MaybeT m LSP.Range
+fromCurrentRangeMT mapping = MaybeT . pure . fromCurrentRange mapping
+
+-- ----------------------------------------------------------------------------
+-- Diagnostics
+-- ----------------------------------------------------------------------------
+
+-- | @'activeDiagnosticsInRangeMT' shakeExtras nfp range@ computes the
+-- 'FileDiagnostic's that HLS produced and that overlap with the given @range@.
+--
+-- This function is to be used whenever we need an authoritative source of truth
+-- for which diagnostics are shown to the user.
+-- These diagnostics can be used to provide various IDE features, for example
+-- CodeActions, CodeLenses, or refactorings.
+--
+-- However, why do we need this when computing 'CodeAction's? A 'CodeActionParam'
+-- has the 'CodeActionContext' which already contains the diagnostics!
+-- But according to the LSP docs, the server shouldn't rely on these diagnostics
+-- being up-to-date or accurately reflecting the state of the document.
+--
+-- From the LSP docs:
+-- > An array of diagnostics known on the client side overlapping the range
+-- > provided to the `textDocument/codeAction` request. They are provided so
+-- > that the server knows which errors are currently presented to the user
+-- > for the given range. There is no guarantee that these accurately reflect
+-- > the error state of the resource. The primary parameter
+-- > to compute code actions is the provided range.
+--
+-- Thus, even when the client sends us the context, we should compute the
+-- diagnostics on the server side.
+activeDiagnosticsInRangeMT :: MonadIO m => Shake.ShakeExtras -> NormalizedFilePath -> LSP.Range -> MaybeT m [FileDiagnostic]
+activeDiagnosticsInRangeMT ide nfp range = do
+  MaybeT $ liftIO $ atomically $ do
+    mDiags <- STM.lookup (LSP.normalizedFilePathToUri nfp) (Shake.publishedDiagnostics ide)
+    case mDiags of
+      Nothing -> pure Nothing
+      Just fileDiags -> do
+        pure $ Just $ filter diagRangeOverlaps fileDiags
+  where
+    diagRangeOverlaps = \fileDiag ->
+      rangesOverlap range (fileDiag ^. fdLspDiagnosticL . LSP.range)
+
+-- | Just like 'activeDiagnosticsInRangeMT'. See the docs of 'activeDiagnosticsInRangeMT' for details.
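+--
+-- A hypothetical use inside a request handler (@ide@, @nfp@ and @range@ are
+-- placeholders):
+--
+-- > mDiags <- activeDiagnosticsInRange (shakeExtras ide) nfp range
+-- > let diags = fromMaybe [] mDiags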
+activeDiagnosticsInRange :: MonadIO m => Shake.ShakeExtras -> NormalizedFilePath -> LSP.Range -> m (Maybe [FileDiagnostic]) +activeDiagnosticsInRange ide nfp range = runMaybeT (activeDiagnosticsInRangeMT ide nfp range) + +-- ---------------------------------------------------------------------------- +-- Formatting handlers +-- ---------------------------------------------------------------------------- + +-- `mkFormattingHandlers` was moved here from hls-plugin-api package so that +-- `mkFormattingHandlers` can refer to `IdeState`. `IdeState` is defined in the +-- ghcide package, but hls-plugin-api does not depend on ghcide, so `IdeState` +-- is not in scope there. + +mkFormattingHandlers :: FormattingHandler IdeState -> PluginHandlers IdeState +mkFormattingHandlers f = mkPluginHandler SMethod_TextDocumentFormatting ( provider SMethod_TextDocumentFormatting) + <> mkPluginHandler SMethod_TextDocumentRangeFormatting (provider SMethod_TextDocumentRangeFormatting) + where + provider :: forall m. FormattingMethod m => SMethod m -> PluginMethodHandler IdeState m + provider m ide _pid params + | Just nfp <- LSP.uriToNormalizedFilePath $ LSP.toNormalizedUri uri = do + contentsMaybe <- liftIO $ runAction "mkFormattingHandlers" ide $ getFileContents nfp + case contentsMaybe of + Just contents -> do + let (typ, mtoken) = case m of + SMethod_TextDocumentFormatting -> (FormatText, params ^. LSP.workDoneToken) + SMethod_TextDocumentRangeFormatting -> (FormatRange (params ^. LSP.range), params ^. LSP.workDoneToken) + _ -> Prelude.error "mkFormattingHandlers: impossible" + f ide mtoken typ (Rope.toText contents) nfp opts + Nothing -> throwError $ PluginInvalidParams $ T.pack $ "Formatter plugin: could not get file contents for " ++ show uri + + | otherwise = throwError $ PluginInvalidParams $ T.pack $ "Formatter plugin: uriToFilePath failed for: " ++ show uri + where + uri = params ^. LSP.textDocument . LSP.uri + opts = params ^. LSP.options diff --git a/ghcide/src/Development/IDE/Core/PositionMapping.hs b/ghcide/src/Development/IDE/Core/PositionMapping.hs new file mode 100644 index 0000000000..de02f5b1f7 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/PositionMapping.hs @@ -0,0 +1,231 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +module Development.IDE.Core.PositionMapping + ( PositionMapping(..) + , PositionResult(..) + , lowerRange + , upperRange + , positionResultToMaybe + , fromCurrentPosition + , toCurrentPosition + , PositionDelta(..) + , addOldDelta + , idDelta + , composeDelta + , mkDelta + , toCurrentRange + , fromCurrentRange + , applyChange + , zeroMapping + , deltaFromDiff + -- toCurrent and fromCurrent are mainly exposed for testing + , toCurrent + , fromCurrent + ) where + +import Control.DeepSeq +import Control.Lens ((^.)) +import Control.Monad +import Data.Algorithm.Diff +import Data.Bifunctor +import Data.List +import qualified Data.Text as T +import qualified Data.Vector.Unboxed as V +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), + TextDocumentContentChangeEvent (TextDocumentContentChangeEvent), + UInt, type (|?) 
(InL)) + +-- | Either an exact position, or the range of text that was substituted +data PositionResult a + = PositionRange -- ^ Fields need to be non-strict otherwise bind is exponential + { unsafeLowerRange :: a + , unsafeUpperRange :: a } + | PositionExact !a + deriving (Eq,Ord,Show,Functor) + +lowerRange :: PositionResult a -> a +lowerRange (PositionExact a) = a +lowerRange (PositionRange lower _) = lower + +upperRange :: PositionResult a -> a +upperRange (PositionExact a) = a +upperRange (PositionRange _ upper) = upper + +positionResultToMaybe :: PositionResult a -> Maybe a +positionResultToMaybe (PositionExact a) = Just a +positionResultToMaybe _ = Nothing + +instance Applicative PositionResult where + pure = PositionExact + (PositionExact f) <*> a = fmap f a + (PositionRange f g) <*> (PositionExact a) = PositionRange (f a) (g a) + (PositionRange f g) <*> (PositionRange lower upper) = PositionRange (f lower) (g upper) + +instance Monad PositionResult where + (PositionExact a) >>= f = f a + (PositionRange lower upper) >>= f = PositionRange lower' upper' + where + lower' = lowerRange $ f lower + upper' = upperRange $ f upper + +-- The position delta is the difference between two versions +data PositionDelta = PositionDelta + { toDelta :: !(Position -> PositionResult Position) + , fromDelta :: !(Position -> PositionResult Position) + } + +instance Show PositionDelta where + show PositionDelta{} = "PositionDelta{..}" + +instance NFData PositionDelta where + rnf (PositionDelta a b) = a `seq` b `seq` () + +fromCurrentPosition :: PositionMapping -> Position -> Maybe Position +fromCurrentPosition (PositionMapping pm) = positionResultToMaybe . fromDelta pm + +toCurrentPosition :: PositionMapping -> Position -> Maybe Position +toCurrentPosition (PositionMapping pm) = positionResultToMaybe . toDelta pm + +-- A position mapping is the difference from the current version to +-- a specific version +newtype PositionMapping = PositionMapping PositionDelta + +toCurrentRange :: PositionMapping -> Range -> Maybe Range +toCurrentRange mapping (Range a b) = + Range <$> toCurrentPosition mapping a <*> toCurrentPosition mapping b + +fromCurrentRange :: PositionMapping -> Range -> Maybe Range +fromCurrentRange mapping (Range a b) = + Range <$> fromCurrentPosition mapping a <*> fromCurrentPosition mapping b + +zeroMapping :: PositionMapping +zeroMapping = PositionMapping idDelta + +-- | Compose two position mappings. Composes in the same way as function +-- composition (ie the second argument is applied to the position first). 
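+--
+-- An illustrative sketch with hypothetical names: if @d12@ is the delta from
+-- version 1 to version 2 and @d23@ the delta from version 2 to version 3, then
+--
+-- > d13 = composeDelta d23 d12
+--
+-- is the delta from version 1 to version 3 (@d12@ is applied first).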
+composeDelta :: PositionDelta + -> PositionDelta + -> PositionDelta +composeDelta (PositionDelta to1 from1) (PositionDelta to2 from2) = + PositionDelta (to1 <=< to2) + (from1 >=> from2) + +idDelta :: PositionDelta +idDelta = PositionDelta pure pure + +-- | Convert a set of changes into a delta from k to k + 1 +mkDelta :: [TextDocumentContentChangeEvent] -> PositionDelta +mkDelta cs = foldl' applyChange idDelta cs + +-- | addOldDelta +-- Add a old delta onto a Mapping k n to make a Mapping (k - 1) n +addOldDelta :: + PositionDelta -- ^ delta from version k - 1 to version k + -> PositionMapping -- ^ The input mapping is from version k to version n + -> PositionMapping -- ^ The output mapping is from version k - 1 to version n +addOldDelta delta (PositionMapping pm) = PositionMapping (composeDelta pm delta) + +-- TODO: We currently ignore the right hand side (if there is only text), as +-- that was what was done with lsp* 1.6 packages +applyChange :: PositionDelta -> TextDocumentContentChangeEvent -> PositionDelta +applyChange PositionDelta{..} (TextDocumentContentChangeEvent (InL x)) = PositionDelta + { toDelta = toCurrent (x ^. L.range) (x ^. L.text) <=< toDelta + , fromDelta = fromDelta <=< fromCurrent (x ^. L.range) (x ^. L.text) + } +applyChange posMapping _ = posMapping + +toCurrent :: Range -> T.Text -> Position -> PositionResult Position +toCurrent (Range start@(Position startLine startColumn) end@(Position endLine endColumn)) t (Position line column) + | line < startLine || line == startLine && column < startColumn = + -- Position is before the change and thereby unchanged. + PositionExact $ Position line column + | line > endLine || line == endLine && column >= endColumn = + -- Position is after the change so increase line and column number + -- as necessary. + PositionExact $ newLine `seq` newColumn `seq` Position newLine newColumn + | otherwise = PositionRange start end + -- Position is in the region that was changed. + where + lineDiff = linesNew - linesOld + linesNew = T.count "\n" t + linesOld = fromIntegral endLine - fromIntegral startLine + newEndColumn :: UInt + newEndColumn + | linesNew == 0 = fromIntegral $ fromIntegral startColumn + T.length t + | otherwise = fromIntegral $ T.length $ T.takeWhileEnd (/= '\n') t + newColumn :: UInt + newColumn + | line == endLine = fromIntegral $ (fromIntegral column + newEndColumn) - fromIntegral endColumn + | otherwise = column + newLine :: UInt + newLine = fromIntegral $ fromIntegral line + lineDiff + +fromCurrent :: Range -> T.Text -> Position -> PositionResult Position +fromCurrent (Range start@(Position startLine startColumn) end@(Position endLine endColumn)) t (Position line column) + | line < startLine || line == startLine && column < startColumn = + -- Position is before the change and thereby unchanged + PositionExact $ Position line column + | line > newEndLine || line == newEndLine && column >= newEndColumn = + -- Position is after the change so increase line and column number + -- as necessary. + PositionExact $ newLine `seq` newColumn `seq` Position newLine newColumn + | otherwise = PositionRange start end + -- Position is in the region that was changed. 
+ where + lineDiff = linesNew - linesOld + linesNew = T.count "\n" t + linesOld = fromIntegral endLine - fromIntegral startLine + newEndLine :: UInt + newEndLine = fromIntegral $ fromIntegral endLine + lineDiff + newEndColumn :: UInt + newEndColumn + | linesNew == 0 = fromIntegral $ fromIntegral startColumn + T.length t + | otherwise = fromIntegral $ T.length $ T.takeWhileEnd (/= '\n') t + newColumn :: UInt + newColumn + | line == newEndLine = fromIntegral $ (fromIntegral column + fromIntegral endColumn) - newEndColumn + | otherwise = column + newLine :: UInt + newLine = fromIntegral $ fromIntegral line - lineDiff + +deltaFromDiff :: T.Text -> T.Text -> PositionDelta +deltaFromDiff (T.lines -> old) (T.lines -> new) = + PositionDelta (lookupPos (fromIntegral lnew) o2nPrevs o2nNexts old2new) (lookupPos (fromIntegral lold) n2oPrevs n2oNexts new2old) + where + !lnew = length new + !lold = length old + + diff = getDiff old new + + (V.fromList -> !old2new, V.fromList -> !new2old) = go diff 0 0 + + -- Compute previous and next lines that mapped successfully + !o2nPrevs = V.prescanl' f (-1) old2new + !o2nNexts = V.prescanr' (flip f) lnew old2new + + !n2oPrevs = V.prescanl' f (-1) new2old + !n2oNexts = V.prescanr' (flip f) lold new2old + + f :: Int -> Int -> Int + f !a !b = if b == -1 then a else b + + lookupPos :: UInt -> V.Vector Int -> V.Vector Int -> V.Vector Int -> Position -> PositionResult Position + lookupPos end prevs nexts xs (Position line col) + | line >= fromIntegral (V.length xs) = PositionRange (Position end 0) (Position end 0) + | otherwise = case V.unsafeIndex xs (fromIntegral line) of + -1 -> + -- look for the previous and next lines that mapped successfully + let !prev = 1 + V.unsafeIndex prevs (fromIntegral line) + !next = V.unsafeIndex nexts (fromIntegral line) + in PositionRange (Position (fromIntegral prev) 0) (Position (fromIntegral next) 0) + line' -> PositionExact (Position (fromIntegral line') col) + + -- Construct a mapping between lines in the diff + -- -1 for unsuccessful mapping + go :: [Diff T.Text] -> Int -> Int -> ([Int], [Int]) + go [] _ _ = ([],[]) + go (Both _ _ : xs) !glold !glnew = bimap (glnew :) (glold :) $ go xs (glold+1) (glnew+1) + go (First _ : xs) !glold !glnew = first (-1 :) $ go xs (glold+1) glnew + go (Second _ : xs) !glold !glnew = second (-1 :) $ go xs glold (glnew+1) diff --git a/ghcide/src/Development/IDE/Core/Preprocessor.hs b/ghcide/src/Development/IDE/Core/Preprocessor.hs new file mode 100644 index 0000000000..b3614d89ad --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Preprocessor.hs @@ -0,0 +1,243 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} + +module Development.IDE.Core.Preprocessor + ( preprocessor + ) where + +import Development.IDE.GHC.Compat +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.GHC.CPP +import Development.IDE.GHC.Orphans () +import qualified Development.IDE.GHC.Util as Util + +import Control.DeepSeq (NFData (rnf)) +import Control.Exception (evaluate) +import Control.Exception.Safe (catch, throw) +import Control.Monad.IO.Class +import Control.Monad.Trans.Except +import Data.Char +import Data.IORef (IORef, modifyIORef, + newIORef, readIORef) +import Data.List.Extra +import Data.Maybe +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE.GHC.Error +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import qualified GHC.LanguageExtensions as LangExt +import qualified GHC.Runtime.Loader as Loader +import GHC.Utils.Logger (LogFlags (..)) +import System.FilePath +import System.IO.Extra + +-- | Given a file and some contents, apply any necessary preprocessors, +-- e.g. unlit/cpp. Return the resulting buffer and the DynFlags it implies. +preprocessor :: HscEnv -> FilePath -> Maybe Util.StringBuffer -> ExceptT [FileDiagnostic] IO (Util.StringBuffer, [String], HscEnv, Util.Fingerprint) +preprocessor env filename mbContents = do + -- Perform unlit + (isOnDisk, contents) <- + if isLiterate filename then do + newcontent <- liftIO $ runLhs env filename mbContents + return (False, newcontent) + else do + contents <- liftIO $ maybe (Util.hGetStringBuffer filename) return mbContents + let isOnDisk = isNothing mbContents + return (isOnDisk, contents) + + -- Compute the source hash before the preprocessor because this is + -- how GHC does it. 
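+  -- (Presumably this keeps the fingerprint comparable with the source hash
+  -- GHC records in interface files, so recompilation checking agrees.)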
+ !src_hash <- liftIO $ Util.fingerprintFromStringBuffer contents + + -- Perform cpp + (opts, pEnv) <- ExceptT $ parsePragmasIntoHscEnv env filename contents + let dflags = hsc_dflags pEnv + let logger = hsc_logger pEnv + (newIsOnDisk, newContents, newOpts, newEnv) <- + if not $ xopt LangExt.Cpp dflags then + return (isOnDisk, contents, opts, pEnv) + else do + cppLogs <- liftIO $ newIORef [] + let newLogger = pushLogHook (const (logActionCompat $ logAction cppLogs)) logger + con <- ExceptT + $ (Right <$> (runCpp (putLogHook newLogger pEnv) filename + $ if isOnDisk then Nothing else Just contents)) + `catch` + ( \(e :: Util.GhcException) -> do + logs <- readIORef cppLogs + case diagsFromCPPLogs filename (reverse logs) of + [] -> throw e + diags -> return $ Left diags + ) + (options, hscEnv) <- ExceptT $ parsePragmasIntoHscEnv pEnv filename con + return (False, con, options, hscEnv) + + -- Perform preprocessor + if not $ gopt Opt_Pp dflags then + return (newContents, newOpts, newEnv, src_hash) + else do + con <- liftIO $ runPreprocessor newEnv filename $ if newIsOnDisk then Nothing else Just newContents + (options, hscEnv) <- ExceptT $ parsePragmasIntoHscEnv newEnv filename con + return (con, options, hscEnv, src_hash) + where + logAction :: IORef [CPPLog] -> LogActionCompat + logAction cppLogs dflags _reason severity srcSpan _style msg = do + let cppLog = CPPLog (fromMaybe SevWarning severity) srcSpan $ T.pack $ renderWithContext (log_default_user_context dflags) msg + modifyIORef cppLogs (cppLog :) + + + +data CPPLog = CPPLog Severity SrcSpan Text + deriving (Show) + + +data CPPDiag + = CPPDiag + { cdRange :: Range, + cdSeverity :: Maybe DiagnosticSeverity, + cdMessage :: [Text] + } + deriving (Show) + + +diagsFromCPPLogs :: FilePath -> [CPPLog] -> [FileDiagnostic] +diagsFromCPPLogs filename logs = + map (\d -> ideErrorFromLspDiag (cppDiagToDiagnostic d) (toNormalizedFilePath' filename) Nothing) $ + go [] logs + where + -- On errors, CPP calls logAction with a real span for the initial log and + -- then additional informational logs with `UnhelpfulSpan`. Collect those + -- informational log messages and attaches them to the initial log message. + go :: [CPPDiag] -> [CPPLog] -> [CPPDiag] + go acc [] = reverse $ map (\d -> d {cdMessage = reverse $ cdMessage d}) acc + go acc (CPPLog sev (RealSrcSpan rSpan _) msg : gLogs) = + let diag = CPPDiag (realSrcSpanToRange rSpan) (toDSeverity sev) [msg] + in go (diag : acc) gLogs + go (diag : diags) (CPPLog _sev (UnhelpfulSpan _) msg : gLogs) = + go (diag {cdMessage = msg : cdMessage diag} : diags) gLogs + go [] (CPPLog _sev (UnhelpfulSpan _) _msg : gLogs) = go [] gLogs + cppDiagToDiagnostic :: CPPDiag -> Diagnostic + cppDiagToDiagnostic d = + Diagnostic + { _range = cdRange d, + _severity = cdSeverity d, + _code = Nothing, + _source = Just "CPP", + _message = T.unlines $ cdMessage d, + _relatedInformation = Nothing, + _tags = Nothing, + _codeDescription = Nothing, + _data_ = Nothing + } + + +isLiterate :: FilePath -> Bool +isLiterate x = takeExtension x `elem` [".lhs",".lhs-boot"] + + +-- | This reads the pragma information directly from the provided buffer. 
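+--
+-- For example (illustrative only), a buffer beginning with
+--
+-- > {-# LANGUAGE CPP #-}
+-- > {-# OPTIONS_GHC -Wno-unused-imports #-}
+--
+-- yields those options, which are then applied to the 'HscEnv' via
+-- 'parseDynamicFilePragma' before plugins are initialised.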
+parsePragmasIntoHscEnv
+    :: HscEnv
+    -> FilePath
+    -> Util.StringBuffer
+    -> IO (Either [FileDiagnostic] ([String], HscEnv))
+parsePragmasIntoHscEnv env fp contents = catchSrcErrors dflags0 "pragmas" $ do
+    let (_warns,opts) = getOptions (initParserOpts dflags0) contents fp
+
+    -- Force bits that might keep the dflags and stringBuffer alive unnecessarily
+    evaluate $ rnf opts
+
+    (dflags, _, _) <- parseDynamicFilePragma dflags0 opts
+    hsc_env' <- Loader.initializePlugins (hscSetFlags dflags env)
+    return (map unLoc opts, hscSetFlags (disableWarningsAsErrors $ hsc_dflags hsc_env') hsc_env')
+    where dflags0 = hsc_dflags env
+
+-- | Run (unlit) literate haskell preprocessor on a file, or buffer if set
+runLhs :: HscEnv -> FilePath -> Maybe Util.StringBuffer -> IO Util.StringBuffer
+runLhs env filename contents = withTempDir $ \dir -> do
+    let fout = dir </> takeFileName filename <.> "unlit"
+    filesrc <- case contents of
+        Nothing -> return filename
+        Just cnts -> do
+            let fsrc = dir </> takeFileName filename <.> "literate"
+            withBinaryFile fsrc WriteMode $ \h ->
+                hPutStringBuffer h cnts
+            return fsrc
+    unlit filesrc fout
+    Util.hGetStringBuffer fout
+  where
+    logger = hsc_logger env
+    dflags = hsc_dflags env
+
+    unlit filein fileout = runUnlit logger dflags (args filein fileout)
+    args filein fileout = [
+          Option "-h"
+        , Option (escape filename) -- name this file
+        , FileOption "" filein -- input file
+        , FileOption "" fileout ] -- output file
+    -- taken from ghc's DriverPipeline.hs
+    escape ('\\':cs) = '\\':'\\': escape cs
+    escape ('\"':cs) = '\\':'\"': escape cs
+    escape ('\'':cs) = '\\':'\'': escape cs
+    escape (c:cs) = c : escape cs
+    escape [] = []
+
+-- | Run CPP on a file
+runCpp :: HscEnv -> FilePath -> Maybe Util.StringBuffer -> IO Util.StringBuffer
+runCpp env0 filename mbContents = withTempDir $ \dir -> do
+    let out = dir </> takeFileName filename <.> "out"
+    let dflags1 = addOptP "-D__GHCIDE__" (hsc_dflags env0)
+    let env1 = hscSetFlags dflags1 env0
+
+    case mbContents of
+        Nothing -> do
+            -- Happy case, file is not modified, so run CPP on it in-place
+            -- which also makes things like relative #include files work
+            -- and means location information is correct
+            doCpp env1 filename out
+            liftIO $ Util.hGetStringBuffer out
+
+        Just contents -> do
+            -- Sad path, we have to create a version of the path in a temp dir
+            -- __FILE__ macro is wrong, ignoring that for now (likely not a real issue)
+
+            -- Relative includes aren't going to work, so we fix that by adding to the include path.
+            let dflags2 = addIncludePathsQuote (takeDirectory filename) dflags1
+            let env2 = hscSetFlags dflags2 env0
+            -- Location information is wrong, so we fix that by patching it afterwards.
+            let inp = dir </> "___GHCIDE_MAGIC___"
+            withBinaryFile inp WriteMode $ \h ->
+                hPutStringBuffer h contents
+            doCpp env2 inp out
+
+            -- Fix up the filename in lines like:
+            -- # 1 "C:/Temp/extra-dir-914611385186/___GHCIDE_MAGIC___"
+            let tweak x
+                    | Just y <- stripPrefix "# " x
+                    , "___GHCIDE_MAGIC___" `isInfixOf` y
+                    , let num = takeWhile (not . isSpace) y
+                    -- important to use /, and never \ for paths, even on Windows, since then C escapes them
+                    -- and GHC gets all confused
+                        = "# " <> num <> " \"" <> map (\z -> if isPathSeparator z then '/' else z) filename <> "\""
+                    | otherwise = x
+            Util.stringToStringBuffer . unlines . map tweak .
lines <$> readFileUTF8' out
+
+
+-- | Run a preprocessor on a file
+runPreprocessor :: HscEnv -> FilePath -> Maybe Util.StringBuffer -> IO Util.StringBuffer
+runPreprocessor env filename mbContents = withTempDir $ \dir -> do
+    let out = dir </> takeFileName filename <.> "out"
+    inp <- case mbContents of
+        Nothing -> return filename
+        Just contents -> do
+            let inp = dir </> takeFileName filename <.> "hs"
+            withBinaryFile inp WriteMode $ \h ->
+                hPutStringBuffer h contents
+            return inp
+    runPp logger dflags [Option filename, Option inp, FileOption "" out]
+    Util.hGetStringBuffer out
+  where
+    logger = hsc_logger env
+    dflags = hsc_dflags env
diff --git a/ghcide/src/Development/IDE/Core/ProgressReporting.hs b/ghcide/src/Development/IDE/Core/ProgressReporting.hs
new file mode 100644
index 0000000000..3d8a2bf989
--- /dev/null
+++ b/ghcide/src/Development/IDE/Core/ProgressReporting.hs
@@ -0,0 +1,236 @@
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Development.IDE.Core.ProgressReporting
+  ( ProgressEvent (..),
+    PerFileProgressReporting (..),
+    ProgressReporting,
+    noPerFileProgressReporting,
+    progressReporting,
+    progressReportingNoTrace,
+    -- utilities, reexported for use in Core.Shake
+    mRunLspT,
+    mRunLspTCallback,
+    -- for tests
+    recordProgress,
+    InProgressState (..),
+    progressStop,
+    progressUpdate
+  )
+where
+
+import Control.Concurrent.STM (STM)
+import Control.Concurrent.STM.Stats (TVar, atomically,
+                                     atomicallyNamed, modifyTVar',
+                                     newTVarIO, readTVar, retry)
+import Control.Concurrent.Strict (modifyVar_, newVar,
+                                  threadDelay)
+import Control.Monad.Extra hiding (loop)
+import Control.Monad.IO.Class
+import Control.Monad.Trans.Class (lift)
+import Data.Functor (($>))
+import qualified Data.Text as T
+import Development.IDE.GHC.Orphans ()
+import Development.IDE.Types.Location
+import Development.IDE.Types.Options
+import qualified Focus
+import Language.LSP.Protocol.Types
+import Language.LSP.Server (ProgressAmount (..),
+                            ProgressCancellable (..),
+                            withProgress)
+import qualified Language.LSP.Server as LSP
+import qualified StmContainers.Map as STM
+import UnliftIO (Async, async, bracket, cancel)
+
+data ProgressEvent
+  = ProgressNewStarted
+  | ProgressCompleted
+  | ProgressStarted
+
+data ProgressReporting = ProgressReporting
+  { _progressUpdate :: ProgressEvent -> IO (),
+    _progressStop :: IO ()
+    -- ^ we are using IO here because creating and stopping the `ProgressReporting`
+    -- is different from how we use it.
+  }
+
+data PerFileProgressReporting = PerFileProgressReporting
+  {
+    inProgress :: forall a. NormalizedFilePath -> IO a -> IO a,
+    -- ^ see Note [ProgressReporting API and InProgressState]
+    progressReportingInner :: ProgressReporting
+  }
+
+class ProgressReporter a where
+  progressUpdate :: a -> ProgressEvent -> IO ()
+  progressStop :: a -> IO ()
+
+instance ProgressReporter ProgressReporting where
+  progressUpdate = _progressUpdate
+  progressStop = _progressStop
+
+instance ProgressReporter PerFileProgressReporting where
+  progressUpdate = _progressUpdate . progressReportingInner
+  progressStop = _progressStop . progressReportingInner
+
+{- Note [ProgressReporting API and InProgressState]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The progress of tasks can be tracked in two ways:
+
+1. `ProgressReporting`: we have an internal state that actively tracks the progress.
+   Changes to the progress are made directly to this state.
+
+2. `ProgressReporting`: there is an external state that tracks the progress.
+ The external state is converted into an STM Int for the purpose of reporting progress. + +The `inProgress` function is only useful when we are using `ProgressReporting`. +-} + +noProgressReporting :: ProgressReporting +noProgressReporting = ProgressReporting + { _progressUpdate = const $ pure (), + _progressStop = pure () + } +noPerFileProgressReporting :: IO PerFileProgressReporting +noPerFileProgressReporting = + return $ + PerFileProgressReporting + { inProgress = const id, + progressReportingInner = noProgressReporting + } + +-- | State used in 'delayedProgressReporting' +data State + = NotStarted + | Stopped + | Running (Async ()) + +-- | State transitions used in 'delayedProgressReporting' +data Transition = Event ProgressEvent | StopProgress + +updateState :: IO () -> Transition -> State -> IO State +updateState _ _ Stopped = pure Stopped +updateState start (Event ProgressNewStarted) NotStarted = Running <$> async start +updateState start (Event ProgressNewStarted) (Running job) = cancel job >> Running <$> async start +updateState start (Event ProgressStarted) NotStarted = Running <$> async start +updateState _ (Event ProgressStarted) (Running job) = return (Running job) +updateState _ (Event ProgressCompleted) (Running job) = cancel job $> NotStarted +updateState _ (Event ProgressCompleted) st = pure st +updateState _ StopProgress (Running job) = cancel job $> Stopped +updateState _ StopProgress st = pure st + +-- | Data structure to track progress across the project +-- see Note [ProgressReporting API and InProgressState] +data InProgressState + = InProgressState + { -- | Number of files to do + todoVar :: TVar Int, + -- | Number of files done + doneVar :: TVar Int, + currentVar :: STM.Map NormalizedFilePath Int + } + +newInProgress :: IO InProgressState +newInProgress = InProgressState <$> newTVarIO 0 <*> newTVarIO 0 <*> STM.newIO + +recordProgress :: InProgressState -> NormalizedFilePath -> (Int -> Int) -> IO () +recordProgress InProgressState {..} file shift = do + (prev, new) <- atomicallyNamed "recordProgress" $ STM.focus alterPrevAndNew file currentVar + atomicallyNamed "recordProgress2" $ case (prev, new) of + (Nothing, 0) -> modifyTVar' doneVar (+ 1) >> modifyTVar' todoVar (+ 1) + (Nothing, _) -> modifyTVar' todoVar (+ 1) + (Just 0, 0) -> pure () + (Just 0, _) -> modifyTVar' doneVar pred + (Just _, 0) -> modifyTVar' doneVar (+ 1) + (Just _, _) -> pure () + where + alterPrevAndNew = do + prev <- Focus.lookup + Focus.alter alter + new <- Focus.lookupWithDefault 0 + return (prev, new) + alter x = let x' = maybe (shift 0) shift x in Just x' + + +-- | `progressReportingNoTrace` initiates a new progress reporting session. +-- It functions similarly to `progressReporting`, but it utilizes an external state for progress tracking. +-- Refer to Note [ProgressReporting API and InProgressState] for more details. +progressReportingNoTrace :: + STM Int -> + STM Int -> + Maybe (LSP.LanguageContextEnv c) -> + T.Text -> + ProgressReportingStyle -> + IO ProgressReporting +progressReportingNoTrace _ _ Nothing _title _optProgressStyle = return noProgressReporting +progressReportingNoTrace todo done (Just lspEnv) title optProgressStyle = do + progressState <- newVar NotStarted + let _progressUpdate event = liftIO $ updateStateVar $ Event event + _progressStop = updateStateVar StopProgress + updateStateVar = modifyVar_ progressState . 
updateState (progressCounter lspEnv title optProgressStyle todo done) + return ProgressReporting {..} + +-- | `progressReporting` initiates a new progress reporting session. +-- It necessitates the active tracking of progress using the `inProgress` function. +-- Refer to Note [ProgressReporting API and InProgressState] for more details. +progressReporting :: + Maybe (LSP.LanguageContextEnv c) -> + T.Text -> + ProgressReportingStyle -> + IO PerFileProgressReporting +progressReporting Nothing _title _optProgressStyle = noPerFileProgressReporting +progressReporting (Just lspEnv) title optProgressStyle = do + inProgressState <- newInProgress + progressReportingInner <- progressReportingNoTrace (readTVar $ todoVar inProgressState) + (readTVar $ doneVar inProgressState) (Just lspEnv) title optProgressStyle + let + inProgress :: NormalizedFilePath -> IO a -> IO a + inProgress = updateStateForFile inProgressState + return PerFileProgressReporting {..} + where + updateStateForFile inProgress file = UnliftIO.bracket (liftIO $ f succ) (const $ liftIO $ f pred) . const + where + -- This functions are deliberately eta-expanded to avoid space leaks. + -- Do not remove the eta-expansion without profiling a session with at + -- least 1000 modifications. + + f = recordProgress inProgress file + +-- Kill this to complete the progress session +progressCounter :: + LSP.LanguageContextEnv c -> + T.Text -> + ProgressReportingStyle -> + STM Int -> + STM Int -> + IO () +progressCounter lspEnv title optProgressStyle getTodo getDone = + LSP.runLspT lspEnv $ withProgress title Nothing NotCancellable $ \update -> loop update 0 + where + loop _ _ | optProgressStyle == NoProgress = forever $ liftIO $ threadDelay maxBound + loop update prevPct = do + (todo, done, nextPct) <- liftIO $ atomically $ do + todo <- getTodo + done <- getDone + let nextFrac :: Double + nextFrac = if todo == 0 then 0 else fromIntegral done / fromIntegral todo + nextPct :: UInt + nextPct = floor $ 100 * nextFrac + when (nextPct == prevPct) retry + pure (todo, done, nextPct) + + _ <- update (ProgressAmount (Just nextPct) (Just $ T.pack $ show done <> "/" <> show todo)) + loop update nextPct + +mRunLspT :: (Applicative m) => Maybe (LSP.LanguageContextEnv c) -> LSP.LspT c m () -> m () +mRunLspT (Just lspEnv) f = LSP.runLspT lspEnv f +mRunLspT Nothing _ = pure () + +mRunLspTCallback :: + (Monad m) => + Maybe (LSP.LanguageContextEnv c) -> + (LSP.LspT c m a -> LSP.LspT c m a) -> + m a -> + m a +mRunLspTCallback (Just lspEnv) f g = LSP.runLspT lspEnv $ f (lift g) +mRunLspTCallback Nothing _ g = g diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs new file mode 100644 index 0000000000..a13e6de14c --- /dev/null +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -0,0 +1,592 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeFamilies #-} + +-- | A Shake implementation of the compiler service, built +-- using the "Shaker" abstraction layer for in-memory use. 
+-- +module Development.IDE.Core.RuleTypes( + GhcSessionDeps(.., GhcSessionDeps), + module Development.IDE.Core.RuleTypes + ) where + +import Control.DeepSeq +import qualified Control.Exception as E +import Control.Lens +import Data.Aeson.Types (Value) +import Data.Hashable +import qualified Data.Map as M +import Data.Time.Clock.POSIX +import Data.Typeable +import Development.IDE.GHC.Compat hiding + (HieFileResult, + assert) +import Development.IDE.GHC.Compat.Util +import Development.IDE.GHC.CoreFile +import Development.IDE.GHC.Util +import Development.IDE.Graph +import Development.IDE.Import.DependencyInformation +import Development.IDE.Types.HscEnvEq (HscEnvEq) +import Development.IDE.Types.KnownTargets +import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (HieASTs, + TypeIndex) +import GHC.Iface.Ext.Utils (RefMap) + +import Data.ByteString (ByteString) +import Data.Text.Utf16.Rope.Mixed (Rope) +import Development.IDE.Import.FindImports (ArtifactsLocation) +import Development.IDE.Spans.Common +import Development.IDE.Spans.LocalBindings +import Development.IDE.Types.Diagnostics +import GHC.Driver.Errors.Types (WarningMessages) +import GHC.Serialized (Serialized) +import Ide.Logger (Pretty (..), + viaShow) +import Language.LSP.Protocol.Types (Int32, + NormalizedFilePath) + +data LinkableType = ObjectLinkable | BCOLinkable + deriving (Eq,Ord,Show, Generic) +instance Hashable LinkableType +instance NFData LinkableType + +-- | Encode the linkable into an ordered bytestring. +-- This is used to drive an ordered "newness" predicate in the +-- 'NeedsCompilation' build rule. +encodeLinkableType :: Maybe LinkableType -> ByteString +encodeLinkableType Nothing = "0" +encodeLinkableType (Just BCOLinkable) = "1" +encodeLinkableType (Just ObjectLinkable) = "2" + +-- NOTATION +-- Foo+ means Foo for the dependencies +-- Foo* means Foo for me and Foo+ + +-- | The parse tree for the file using GetFileContents +type instance RuleResult GetParsedModule = ParsedModule + +-- | The parse tree for the file using GetFileContents, +-- all comments included using Opt_KeepRawTokenStream +type instance RuleResult GetParsedModuleWithComments = ParsedModule + +type instance RuleResult GetModuleGraph = DependencyInformation + +-- | it only compute the fingerprint of the module graph for a file and its dependencies +-- we need this to trigger recompilation when the sub module graph for a file changes +type instance RuleResult GetModuleGraphTransDepsFingerprints = Fingerprint +type instance RuleResult GetModuleGraphTransReverseDepsFingerprints = Fingerprint +type instance RuleResult GetModuleGraphImmediateReverseDepsFingerprints = Fingerprint + +data GetKnownTargets = GetKnownTargets + deriving (Show, Generic, Eq, Ord) +instance Hashable GetKnownTargets +instance NFData GetKnownTargets +type instance RuleResult GetKnownTargets = KnownTargets + +-- | Convert to Core, requires TypeCheck* +type instance RuleResult GenerateCore = ModGuts + +data GenerateCore = GenerateCore + deriving (Eq, Show, Generic) +instance Hashable GenerateCore +instance NFData GenerateCore + +type instance RuleResult GetLinkable = LinkableResult + +data LinkableResult + = LinkableResult + { linkableHomeMod :: !HomeModInfo + , linkableHash :: !ByteString + -- ^ The hash of the core file + } + +instance Show LinkableResult where + show = show . mi_module . hm_iface . 
linkableHomeMod + +instance NFData LinkableResult where + rnf = rwhnf + +data GetLinkable = GetLinkable + deriving (Eq, Show, Generic) +instance Hashable GetLinkable +instance NFData GetLinkable + +data GetImportMap = GetImportMap + deriving (Eq, Show, Generic) +instance Hashable GetImportMap +instance NFData GetImportMap + +type instance RuleResult GetImportMap = ImportMap +newtype ImportMap = ImportMap + { importMap :: M.Map ModuleName NormalizedFilePath -- ^ Where are the modules imported by this file located? + } deriving stock Show + deriving newtype NFData + +data Splices = Splices + { exprSplices :: [(LHsExpr GhcTc, LHsExpr GhcPs)] + , patSplices :: [(LHsExpr GhcTc, LPat GhcPs)] + , typeSplices :: [(LHsExpr GhcTc, LHsType GhcPs)] + , declSplices :: [(LHsExpr GhcTc, [LHsDecl GhcPs])] + , awSplices :: [(LHsExpr GhcTc, Serialized)] + } + +instance Semigroup Splices where + Splices e p t d aw <> Splices e' p' t' d' aw' = + Splices + (e <> e') + (p <> p') + (t <> t') + (d <> d') + (aw <> aw') + +instance Monoid Splices where + mempty = Splices mempty mempty mempty mempty mempty + +instance NFData Splices where + rnf Splices {..} = + liftRnf rwhnf exprSplices `seq` + liftRnf rwhnf patSplices `seq` + liftRnf rwhnf typeSplices `seq` liftRnf rwhnf declSplices `seq` () + +-- | Contains the typechecked module and the OrigNameCache entry for +-- that module. +data TcModuleResult = TcModuleResult + { tmrParsed :: ParsedModule + , tmrRenamed :: RenamedSource + , tmrTypechecked :: TcGblEnv + , tmrTopLevelSplices :: Splices + -- ^ Typechecked splice information + , tmrDeferredError :: !Bool + -- ^ Did we defer any type errors for this module? + , tmrRuntimeModules :: !(ModuleEnv ByteString) + -- ^ Which modules did we need at runtime while compiling this file? + -- Used for recompilation checking in the presence of TH + -- Stores the hash of their core file + , tmrWarnings :: WarningMessages + -- ^ Structured warnings for this module. + } +instance Show TcModuleResult where + show = show . pm_mod_summary . tmrParsed + +instance NFData TcModuleResult where + rnf = rwhnf + +tmrModSummary :: TcModuleResult -> ModSummary +tmrModSummary = pm_mod_summary . tmrParsed + +data HiFileResult = HiFileResult + { hirModSummary :: !ModSummary + -- Bang patterns here are important to stop the result retaining + -- a reference to a typechecked module + , hirModIface :: !ModIface + , hirModDetails :: ModDetails + -- ^ Populated lazily + , hirIfaceFp :: !ByteString + -- ^ Fingerprint for the ModIface + , hirRuntimeModules :: !(ModuleEnv ByteString) + -- ^ same as tmrRuntimeModules + , hirCoreFp :: !(Maybe (CoreFile, ByteString)) + -- ^ If we wrote a core file for this module, then its contents (lazily deserialised) + -- along with its hash + } + +hiFileFingerPrint :: HiFileResult -> ByteString +hiFileFingerPrint HiFileResult{..} = hirIfaceFp <> maybe "" snd hirCoreFp + +mkHiFileResult :: ModSummary -> ModIface -> ModDetails -> ModuleEnv ByteString -> Maybe (CoreFile, ByteString) -> HiFileResult +mkHiFileResult hirModSummary hirModIface hirModDetails hirRuntimeModules hirCoreFp = + E.assert (case hirCoreFp of + Just (CoreFile{cf_iface_hash}, _) -> getModuleHash hirModIface == cf_iface_hash + _ -> True) + HiFileResult{..} + where + hirIfaceFp = fingerprintToBS . getModuleHash $ hirModIface -- will always be two bytes + +instance NFData HiFileResult where + rnf = rwhnf + +instance Show HiFileResult where + show = show . 
hirModSummary + +-- | Save the uncompressed AST here, we compress it just before writing to disk +data HieAstResult + = forall a . (Typeable a) => HAR + { hieModule :: Module + , hieAst :: !(HieASTs a) + , refMap :: RefMap a + -- ^ Lazy because its value only depends on the hieAst, which is bundled in this type + -- Lazyness can't cause leaks here because the lifetime of `refMap` will be the same + -- as that of `hieAst` + , typeRefs :: M.Map Name [RealSrcSpan] + -- ^ type references in this file + , hieKind :: !(HieKind a) + -- ^ Is this hie file loaded from the disk, or freshly computed? + } + +data HieKind a where + HieFromDisk :: !HieFile -> HieKind TypeIndex + HieFresh :: HieKind Type + +instance NFData (HieKind a) where + rnf (HieFromDisk hf) = rnf hf + rnf HieFresh = () + +instance NFData HieAstResult where + rnf (HAR m hf _rm _tr kind) = rnf m `seq` rwhnf hf `seq` rnf kind + +instance Show HieAstResult where + show = show . hieModule + +-- | The type checked version of this file, requires TypeCheck+ +type instance RuleResult TypeCheck = TcModuleResult + +-- | The uncompressed HieAST +type instance RuleResult GetHieAst = HieAstResult + +-- | A IntervalMap telling us what is in scope at each point +type instance RuleResult GetBindings = Bindings + +data DocAndTyThingMap = DKMap {getDocMap :: !DocMap, getTyThingMap :: !TyThingMap} +instance NFData DocAndTyThingMap where + rnf (DKMap a b) = rwhnf a `seq` rwhnf b + +instance Show DocAndTyThingMap where + show = const "docmap" + +type instance RuleResult GetDocMap = DocAndTyThingMap + +-- | A GHC session that we reuse. +type instance RuleResult GhcSession = HscEnvEq + +-- | A GHC session preloaded with all the dependencies +-- This rule is also responsible for calling ReportImportCycles for the direct dependencies +type instance RuleResult GhcSessionDeps = HscEnvEq + +-- | Resolve the imports in a module to the file path of a module in the same package +type instance RuleResult GetLocatedImports = [(Located ModuleName, Maybe ArtifactsLocation)] + +-- | This rule is used to report import cycles. It depends on GetModuleGraph. +-- We cannot report the cycles directly from GetModuleGraph since +-- we can only report diagnostics for the current file. +type instance RuleResult ReportImportCycles = () + +-- | Read the module interface file from disk. Throws an error for VFS files. +-- This is an internal rule, use 'GetModIface' instead. +type instance RuleResult GetModIfaceFromDisk = HiFileResult + +-- | GetModIfaceFromDisk and index the `.hie` file into the database. +-- This is an internal rule, use 'GetModIface' instead. +type instance RuleResult GetModIfaceFromDiskAndIndex = HiFileResult + +-- | Get a module interface details, either from an interface file or a typechecked module +type instance RuleResult GetModIface = HiFileResult + +-- | Get the contents of a file, either dirty (if the buffer is modified) or Nothing to mean use from disk. 
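+-- A rule body would typically consume this as, roughly:
+--
+-- > (_version, mbDirtyContents) <- use_ GetFileContents file
+-- > case mbDirtyContents of
+-- >   Just rope -> pure (Rope.toText rope)  -- buffer is dirty, use the in-memory contents
+-- >   Nothing   -> ...                       -- fall back to reading the file from disk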
+type instance RuleResult GetFileContents = (FileVersion, Maybe Rope) + +type instance RuleResult GetFileExists = Bool + +type instance RuleResult GetFileHash = Fingerprint + +type instance RuleResult AddWatchedFile = Bool + + +-- The Shake key type for getModificationTime queries +newtype GetModificationTime = GetModificationTime_ + { missingFileDiagnostics :: Bool + -- ^ If false, missing file diagnostics are not reported + } + deriving (Generic) + +instance Show GetModificationTime where + show _ = "GetModificationTime" + +instance Eq GetModificationTime where + -- Since the diagnostics are not part of the answer, the query identity is + -- independent from the 'missingFileDiagnostics' field + _ == _ = True + +instance Hashable GetModificationTime where + -- Since the diagnostics are not part of the answer, the query identity is + -- independent from the 'missingFileDiagnostics' field + hashWithSalt salt _ = salt + +instance NFData GetModificationTime + +data GetPhysicalModificationTime = GetPhysicalModificationTime + deriving (Generic, Show, Eq) + deriving anyclass (Hashable, NFData) + +-- | Get the modification time of a file on disk, ignoring any version in the VFS. +type instance RuleResult GetPhysicalModificationTime = FileVersion + +pattern GetModificationTime :: GetModificationTime +pattern GetModificationTime = GetModificationTime_ {missingFileDiagnostics=True} + +-- | Get the modification time of a file. +type instance RuleResult GetModificationTime = FileVersion + +-- | Either the mtime from disk or an LSP version +-- LSP versions always compare as greater than on disk versions +data FileVersion + = ModificationTime !POSIXTime -- order of constructors is relevant + | VFSVersion !Int32 + deriving (Show, Generic, Eq, Ord) + +instance NFData FileVersion + +vfsVersion :: FileVersion -> Maybe Int32 +vfsVersion (VFSVersion i) = Just i +vfsVersion ModificationTime{} = Nothing + +data GetFileContents = GetFileContents + deriving (Eq, Show, Generic) +instance Hashable GetFileContents +instance NFData GetFileContents + +data GetFileExists = GetFileExists + deriving (Eq, Show, Generic) + +instance NFData GetFileExists +instance Hashable GetFileExists + +data GetFileHash = GetFileHash + deriving (Eq, Show, Generic) + +instance NFData GetFileHash +instance Hashable GetFileHash + +data FileOfInterestStatus + = OnDisk + | Modified { firstOpen :: !Bool -- ^ was this file just opened + } + deriving (Eq, Show, Generic) +instance Hashable FileOfInterestStatus +instance NFData FileOfInterestStatus + +instance Pretty FileOfInterestStatus where + pretty = viaShow + +data IsFileOfInterestResult = NotFOI | IsFOI FileOfInterestStatus + deriving (Eq, Show, Generic) +instance Hashable IsFileOfInterestResult +instance NFData IsFileOfInterestResult + +type instance RuleResult IsFileOfInterest = IsFileOfInterestResult + +data ModSummaryResult = ModSummaryResult + { msrModSummary :: !ModSummary + , msrImports :: [LImportDecl GhcPs] + , msrFingerprint :: !Fingerprint + , msrHscEnv :: !HscEnv + -- ^ HscEnv for this particular ModSummary. + -- Contains initialised plugins, parsed options, etc... + -- + -- Implicit assumption: DynFlags in 'msrModSummary' are the same as + -- the DynFlags in 'msrHscEnv'. + } + +instance Show ModSummaryResult where + show _ = "" +instance NFData ModSummaryResult where + rnf ModSummaryResult{..} = + rnf msrModSummary `seq` rnf msrImports `seq` rnf msrFingerprint + +-- | Generate a ModSummary that has enough information to be used to get .hi and .hie files. 
+-- without needing to parse the entire source +type instance RuleResult GetModSummary = ModSummaryResult + +-- | Generate a ModSummary with the timestamps and preprocessed content elided, for more successful early cutoff +type instance RuleResult GetModSummaryWithoutTimestamps = ModSummaryResult + +data GetParsedModule = GetParsedModule + deriving (Eq, Show, Generic) +instance Hashable GetParsedModule +instance NFData GetParsedModule + +data GetParsedModuleWithComments = GetParsedModuleWithComments + deriving (Eq, Show, Generic) +instance Hashable GetParsedModuleWithComments +instance NFData GetParsedModuleWithComments + +data GetLocatedImports = GetLocatedImports + deriving (Eq, Show, Generic) +instance Hashable GetLocatedImports +instance NFData GetLocatedImports + +-- | Does this module need to be compiled? +type instance RuleResult NeedsCompilation = Maybe LinkableType + +data NeedsCompilation = NeedsCompilation + deriving (Eq, Show, Generic) +instance Hashable NeedsCompilation +instance NFData NeedsCompilation + +data GetModuleGraph = GetModuleGraph + deriving (Eq, Show, Generic) +instance Hashable GetModuleGraph +instance NFData GetModuleGraph + +data GetModuleGraphTransDepsFingerprints = GetModuleGraphTransDepsFingerprints + deriving (Eq, Show, Generic) +instance Hashable GetModuleGraphTransDepsFingerprints +instance NFData GetModuleGraphTransDepsFingerprints + +data GetModuleGraphTransReverseDepsFingerprints = GetModuleGraphTransReverseDepsFingerprints + deriving (Eq, Show, Generic) +instance Hashable GetModuleGraphTransReverseDepsFingerprints +instance NFData GetModuleGraphTransReverseDepsFingerprints + +data GetModuleGraphImmediateReverseDepsFingerprints = GetModuleGraphImmediateReverseDepsFingerprints + deriving (Eq, Show, Generic) +instance Hashable GetModuleGraphImmediateReverseDepsFingerprints +instance NFData GetModuleGraphImmediateReverseDepsFingerprints + +data ReportImportCycles = ReportImportCycles + deriving (Eq, Show, Generic) +instance Hashable ReportImportCycles +instance NFData ReportImportCycles + +data TypeCheck = TypeCheck + deriving (Eq, Show, Generic) +instance Hashable TypeCheck +instance NFData TypeCheck + +data GetDocMap = GetDocMap + deriving (Eq, Show, Generic) +instance Hashable GetDocMap +instance NFData GetDocMap + +data GetHieAst = GetHieAst + deriving (Eq, Show, Generic) +instance Hashable GetHieAst +instance NFData GetHieAst + +data GetBindings = GetBindings + deriving (Eq, Show, Generic) +instance Hashable GetBindings +instance NFData GetBindings + +data GhcSession = GhcSession + deriving (Eq, Show, Generic) +instance Hashable GhcSession +instance NFData GhcSession + +newtype GhcSessionDeps = GhcSessionDeps_ + { -- | Load full ModSummary values in the GHC session. 
+ -- Required for interactive evaluation, but leads to more cache invalidations + fullModSummary :: Bool + } + deriving newtype (Eq, Hashable, NFData) + +instance Show GhcSessionDeps where + show (GhcSessionDeps_ False) = "GhcSessionDeps" + show (GhcSessionDeps_ True) = "GhcSessionDepsFull" + +pattern GhcSessionDeps :: GhcSessionDeps +pattern GhcSessionDeps = GhcSessionDeps_ False + +data GetModIfaceFromDisk = GetModIfaceFromDisk + deriving (Eq, Show, Generic) +instance Hashable GetModIfaceFromDisk +instance NFData GetModIfaceFromDisk + +data GetModIfaceFromDiskAndIndex = GetModIfaceFromDiskAndIndex + deriving (Eq, Show, Generic) +instance Hashable GetModIfaceFromDiskAndIndex +instance NFData GetModIfaceFromDiskAndIndex + +data GetModIface = GetModIface + deriving (Eq, Show, Generic) +instance Hashable GetModIface +instance NFData GetModIface + +data IsFileOfInterest = IsFileOfInterest + deriving (Eq, Show, Generic) +instance Hashable IsFileOfInterest +instance NFData IsFileOfInterest + +data GetModSummaryWithoutTimestamps = GetModSummaryWithoutTimestamps + deriving (Eq, Show, Generic) +instance Hashable GetModSummaryWithoutTimestamps +instance NFData GetModSummaryWithoutTimestamps + +data GetModSummary = GetModSummary + deriving (Eq, Show, Generic) +instance Hashable GetModSummary +instance NFData GetModSummary + +-- See Note [Client configuration in Rules] +-- | Get the client config stored in the ide state +data GetClientSettings = GetClientSettings + deriving (Eq, Show, Generic) +instance Hashable GetClientSettings +instance NFData GetClientSettings + +type instance RuleResult GetClientSettings = Hashed (Maybe Value) + +data AddWatchedFile = AddWatchedFile deriving (Eq, Show, Generic) +instance Hashable AddWatchedFile +instance NFData AddWatchedFile + + +-- A local rule type to get caching. We want to use newCache, but it has +-- thread killed exception issues, so we lift it to a full rule. +-- https://github.com/digital-asset/daml/pull/2808#issuecomment-529639547 +type instance RuleResult GhcSessionIO = IdeGhcSession + +data IdeGhcSession = IdeGhcSession + { loadSessionFun :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + -- ^ Returns the Ghc session and the cradle dependencies + , sessionVersion :: !Int + -- ^ Used as Shake key, versions must be unique and not reused + } + +instance Show IdeGhcSession where show _ = "IdeGhcSession" +instance NFData IdeGhcSession where rnf !_ = () + +data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Generic) +instance Hashable GhcSessionIO +instance NFData GhcSessionIO + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . (++ "L"))) + ''Splices + +{- Note [Client configuration in Rules] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The LSP client configuration is stored by `lsp` for us, and is accesible in +handlers through the LspT monad. + +This is all well and good, but what if we want to write a Rule that depends +on the configuration? For example, we might have a plugin that provides +diagnostics - if the configuration changes to turn off that plugin, then +we need to invalidate the Rule producing the diagnostics so that they go +away. More broadly, any time we define a Rule that really depends on the +configuration, such that the dependency needs to be tracked and the Rule +invalidated when the configuration changes, we have a problem. + +The solution is that we have to mirror the configuration into the state +that our build system knows about. 
That means that: +- We have a parallel record of the state in 'IdeConfiguration' +- We install a callback so that when the config changes we can update the +'IdeConfiguration' and mark the rule as dirty. + +Then we can define a Rule that gets the configuration, and build Actions +on top of that that behave properly. However, these should really only +be used if you need the dependency tracking - for normal usage in handlers +the config can simply be accessed directly from LspT. + +TODO(michaelpj): this is me writing down what I think the logic is, but +it doesn't make much sense to me. In particular, we *can* get the LspT +in an Action. So I don't know why we need to store it twice. We would +still need to invalidate the Rule otherwise we won't know it's changed, +though. See https://github.com/haskell/ghcide/pull/731 for some context. +-} diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs new file mode 100644 index 0000000000..c123c9d4a8 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -0,0 +1,1283 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE TypeFamilies #-} + +-- | A Shake implementation of the compiler service, built +-- using the "Shaker" abstraction layer for in-memory use. +-- +module Development.IDE.Core.Rules( + -- * Types + IdeState, GetParsedModule(..), TransitiveDependencies(..), + GhcSessionIO(..), GetClientSettings(..), + -- * Functions + runAction, + toIdeResult, + defineNoFile, + defineEarlyCutOffNoFile, + mainRule, + RulesConfig(..), + getParsedModule, + getParsedModuleWithComments, + getClientConfigAction, + usePropertyAction, + usePropertyByPathAction, + getHieFile, + -- * Rules + CompiledLinkables(..), + getParsedModuleRule, + getParsedModuleWithCommentsRule, + getLocatedImportsRule, + reportImportCyclesRule, + typeCheckRule, + getDocMapRule, + loadGhcSession, + getModIfaceFromDiskRule, + getModIfaceRule, + getModSummaryRule, + getModuleGraphRule, + knownFilesRule, + getClientSettingsRule, + getHieAstsRule, + getBindingsRule, + needsCompilationRule, + generateCoreRule, + getImportMapRule, + regenerateHiFile, + ghcSessionDepsDefinition, + getParsedModuleDefinition, + typeCheckRuleDefinition, + getRebuildCount, + getSourceFileSource, + currentLinkables, + GhcSessionDepsConfig(..), + Log(..), + DisplayTHWarning(..), + ) where + +import Control.Applicative +import Control.Concurrent.STM.Stats (atomically) +import Control.Concurrent.STM.TVar +import Control.Concurrent.Strict +import Control.DeepSeq +import Control.Exception (evaluate) +import Control.Exception.Safe +import Control.Lens ((%~), (&), (.~)) +import Control.Monad.Extra +import Control.Monad.IO.Unlift +import Control.Monad.Reader +import Control.Monad.State +import Control.Monad.Trans.Except (ExceptT, except, + runExceptT) +import Control.Monad.Trans.Maybe +import Data.Aeson (toJSON) +import qualified Data.Binary as B +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as LBS +import Data.Coerce +import Data.Default (Default, def) +import Data.Foldable +import Data.Hashable +import qualified Data.HashMap.Strict as HM +import qualified Data.HashSet as HashSet +import Data.IntMap.Strict (IntMap) +import qualified Data.IntMap.Strict as IntMap +import Data.IORef +import Data.List +import Data.List.Extra (nubOrdOn) +import qualified Data.Map as M +import Data.Maybe +import 
Data.Proxy +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Data.Time (UTCTime (..)) +import Data.Time.Clock.POSIX (posixSecondsToUTCTime) +import Data.Tuple.Extra +import Data.Typeable (cast) +import Development.IDE.Core.Compile +import Development.IDE.Core.FileExists hiding (Log, + LogShake) +import Development.IDE.Core.FileStore (getFileContents, + getFileModTimeContents, + getModTime) +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.OfInterest hiding (Log, + LogShake) +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service hiding (Log, + LogShake) +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat hiding + (TargetId (..), + Var, + loadInterface, + nest, + parseModule, + settings, vcat, + (<+>)) +import qualified Development.IDE.GHC.Compat as Compat hiding + (nest, + vcat) +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.GHC.Error +import Development.IDE.GHC.Util hiding + (modifyDynFlags) +import Development.IDE.Graph +import Development.IDE.Import.DependencyInformation +import Development.IDE.Import.FindImports +import qualified Development.IDE.Spans.AtPoint as AtPoint +import Development.IDE.Spans.Documentation +import Development.IDE.Spans.LocalBindings +import Development.IDE.Types.Diagnostics as Diag +import Development.IDE.Types.HscEnvEq +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import qualified Development.IDE.Types.Shake as Shake +import GHC.Iface.Ext.Types (HieASTs (..)) +import GHC.Iface.Ext.Utils (generateReferencesMap) +import qualified GHC.LanguageExtensions as LangExt +import HIE.Bios.Ghc.Gap (hostIsDynamic) +import qualified HieDb +import Ide.Logger (Pretty (pretty), + Recorder, + WithPriority, + cmapWithPrio, + logWith, nest, + vcat, (<+>)) +import qualified Ide.Logger as Logger +import Ide.Plugin.Config +import Ide.Plugin.Properties (HasProperty, + HasPropertyByPath, + KeyNamePath, + KeyNameProxy, + Properties, + ToHsType, + useProperty, + usePropertyByPath) +import Ide.Types (DynFlagsModifications (dynFlagsModifyGlobal, dynFlagsModifyParser), + PluginId) +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Message (SMethod (SMethod_CustomMethod, SMethod_WindowShowMessage)) +import Language.LSP.Protocol.Types (MessageType (MessageType_Info), + ShowMessageParams (ShowMessageParams)) +import Language.LSP.Server (LspT) +import qualified Language.LSP.Server as LSP +import Language.LSP.VFS +import Prelude hiding (mod) +import System.Directory (doesFileExist) +import System.Info.Extra (isWindows) + + +import qualified Data.IntMap as IM +import GHC.Fingerprint + +data Log + = LogShake Shake.Log + | LogReindexingHieFile !NormalizedFilePath + | LogLoadingHieFile !NormalizedFilePath + | LogLoadingHieFileFail !FilePath !SomeException + | LogLoadingHieFileSuccess !FilePath + | LogTypecheckedFOI !NormalizedFilePath + | LogDependencies !NormalizedFilePath [FilePath] + deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + LogReindexingHieFile path -> + "Re-indexing hie file for" <+> pretty (fromNormalizedFilePath path) + LogLoadingHieFile path -> + "LOADING HIE FILE FOR" <+> pretty (fromNormalizedFilePath path) + LogLoadingHieFileFail path e -> + nest 2 $ + vcat + [ "FAILED LOADING HIE FILE 
FOR" <+> pretty path + , pretty (displayException e) ] + LogLoadingHieFileSuccess path -> + "SUCCEEDED LOADING HIE FILE FOR" <+> pretty path + LogTypecheckedFOI path -> vcat + [ "Typechecked a file which is not currently open in the editor:" <+> pretty (fromNormalizedFilePath path) + , "This can indicate a bug which results in excessive memory usage." + , "This may be a spurious warning if you have recently closed the file." + , "If you haven't opened this file recently, please file a report on the issue tracker mentioning" + <+> "the HLS version being used, the plugins enabled, and if possible the codebase and file which" + <+> "triggered this warning." + ] + LogDependencies nfp deps -> + vcat + [ "Add dependency" <+> pretty (fromNormalizedFilePath nfp) + , nest 2 $ pretty deps + ] + +templateHaskellInstructions :: T.Text +templateHaskellInstructions = "https://haskell-language-server.readthedocs.io/en/latest/troubleshooting.html#static-binaries" + +-- | This is useful for rules to convert rules that can only produce errors or +-- a result into the more general IdeResult type that supports producing +-- warnings while also producing a result. +toIdeResult :: Either [FileDiagnostic] v -> IdeResult v +toIdeResult = either (, Nothing) (([],) . Just) + +------------------------------------------------------------ +-- Exposed API +------------------------------------------------------------ + +-- TODO: rename +-- TODO: return text --> return rope +getSourceFileSource :: NormalizedFilePath -> Action BS.ByteString +getSourceFileSource nfp = do + msource <- getFileContents nfp + case msource of + Nothing -> liftIO $ BS.readFile (fromNormalizedFilePath nfp) + Just source -> pure $ T.encodeUtf8 $ Rope.toText source + +-- | Parse the contents of a haskell file. +getParsedModule :: NormalizedFilePath -> Action (Maybe ParsedModule) +getParsedModule = use GetParsedModule + +-- | Parse the contents of a haskell file, +-- ensuring comments are preserved in annotations +getParsedModuleWithComments :: NormalizedFilePath -> Action (Maybe ParsedModule) +getParsedModuleWithComments = use GetParsedModuleWithComments + +------------------------------------------------------------ +-- Rules +-- These typically go from key to value and are oracles. + +-- | WARNING: +-- We currently parse the module both with and without Opt_Haddock, and +-- return the one with Haddocks if it -- succeeds. However, this may not work +-- for hlint or any client code that might need the parsed source with all +-- annotations, including comments. 
+-- For that use case you might want to use `getParsedModuleWithCommentsRule` +-- See https://github.com/haskell/ghcide/pull/350#discussion_r370878197 +-- and https://github.com/mpickering/ghcide/pull/22#issuecomment-625070490 +-- GHC wiki about: https://gitlab.haskell.org/ghc/ghc/-/wikis/api-annotations +getParsedModuleRule :: Recorder (WithPriority Log) -> Rules () +getParsedModuleRule recorder = + -- this rule does not have early cutoff since all its dependencies already have it + define (cmapWithPrio LogShake recorder) $ \GetParsedModule file -> do + ModSummaryResult{msrModSummary = ms', msrHscEnv = hsc} <- use_ GetModSummary file + opt <- getIdeOptions + modify_dflags <- getModifyDynFlags dynFlagsModifyParser + let ms = ms' { ms_hspp_opts = modify_dflags $ ms_hspp_opts ms' } + reset_ms pm = pm { pm_mod_summary = ms' } + + liftIO $ (fmap.fmap.fmap) reset_ms $ getParsedModuleDefinition hsc opt file ms + +withoutOptHaddock :: ModSummary -> ModSummary +withoutOptHaddock = withoutOption Opt_Haddock + +withOption :: GeneralFlag -> ModSummary -> ModSummary +withOption opt ms = ms{ms_hspp_opts= gopt_set (ms_hspp_opts ms) opt} + +withoutOption :: GeneralFlag -> ModSummary -> ModSummary +withoutOption opt ms = ms{ms_hspp_opts= gopt_unset (ms_hspp_opts ms) opt} + +-- | This rule provides a ParsedModule preserving all annotations, +-- including keywords, punctuation and comments. +-- So it is suitable for use cases where you need a perfect edit. +getParsedModuleWithCommentsRule :: Recorder (WithPriority Log) -> Rules () +getParsedModuleWithCommentsRule recorder = + -- The parse diagnostics are owned by the GetParsedModule rule + -- For this reason, this rule does not produce any diagnostics + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \GetParsedModuleWithComments file -> do + ModSummaryResult{msrModSummary = ms, msrHscEnv = hsc} <- use_ GetModSummary file + opt <- getIdeOptions + + let ms' = withoutOptHaddock $ withOption Opt_KeepRawTokenStream ms + modify_dflags <- getModifyDynFlags dynFlagsModifyParser + let ms'' = ms' { ms_hspp_opts = modify_dflags $ ms_hspp_opts ms' } + reset_ms pm = pm { pm_mod_summary = ms' } + + liftIO $ fmap (fmap reset_ms) $ snd <$> getParsedModuleDefinition hsc opt file ms'' + +getModifyDynFlags :: (DynFlagsModifications -> a) -> Action a +getModifyDynFlags f = do + opts <- getIdeOptions + cfg <- getClientConfigAction + pure $ f $ optModifyDynFlags opts cfg + + +getParsedModuleDefinition + :: HscEnv + -> IdeOptions + -> NormalizedFilePath + -> ModSummary -> IO ([FileDiagnostic], Maybe ParsedModule) +getParsedModuleDefinition packageState opt file ms = do + let fp = fromNormalizedFilePath file + (diag, res) <- parseModule opt packageState fp ms + case res of + Nothing -> pure (diag, Nothing) + Just modu -> pure (diag, Just modu) + +getLocatedImportsRule :: Recorder (WithPriority Log) -> Rules () +getLocatedImportsRule recorder = + define (cmapWithPrio LogShake recorder) $ \GetLocatedImports file -> do + ModSummaryResult{msrModSummary = ms} <- use_ GetModSummaryWithoutTimestamps file + (KnownTargets targets targetsMap) <- useNoFile_ GetKnownTargets + let imports = [(False, imp) | imp <- ms_textual_imps ms] ++ [(True, imp) | imp <- ms_srcimps ms] + env_eq <- use_ GhcSession file + let env = hscEnv env_eq + let import_dirs = map (second homeUnitEnv_dflags) $ hugElts $ hsc_HUG env + let dflags = hsc_dflags env + opt <- getIdeOptions + let getTargetFor modName nfp + | Just (TargetFile nfp') <- 
HM.lookup (TargetFile nfp) targetsMap = do + -- reuse the existing NormalizedFilePath in order to maximize sharing + itExists <- getFileExists nfp' + return $ if itExists then Just nfp' else Nothing + | Just tt <- HM.lookup (TargetModule modName) targets = do + -- reuse the existing NormalizedFilePath in order to maximize sharing + let ttmap = HM.mapWithKey const (HashSet.toMap tt) + nfp' = HM.lookupDefault nfp nfp ttmap + itExists <- getFileExists nfp' + return $ if itExists then Just nfp' else Nothing + | otherwise = do + itExists <- getFileExists nfp + return $ if itExists then Just nfp else Nothing + (diags, imports') <- fmap unzip $ forM imports $ \(isSource, (mbPkgName, modName)) -> do + diagOrImp <- locateModule (hscSetFlags dflags env) import_dirs (optExtensions opt) getTargetFor modName mbPkgName isSource + case diagOrImp of + Left diags -> pure (diags, Just (modName, Nothing)) + Right (FileImport path) -> pure ([], Just (modName, Just path)) + Right PackageImport -> pure ([], Nothing) + + {- IS THIS REALLY NEEDED? DOESNT SEEM SO + + -- does this module have an hs-boot file? If so add a direct dependency + let bootPath = toNormalizedFilePath' $ fromNormalizedFilePath file <.> "hs-boot" + boot <- use GetFileExists bootPath + bootArtifact <- if boot == Just True + then do + let modName = ms_mod_name ms + loc <- liftIO $ mkHomeModLocation dflags' modName (fromNormalizedFilePath bootPath) + return $ Just (noLoc modName, Just (ArtifactsLocation bootPath (Just loc) True)) + else pure Nothing + -} + let bootArtifact = Nothing + + let moduleImports = catMaybes $ bootArtifact : imports' + pure (concat diags, Just moduleImports) + +type RawDepM a = StateT (RawDependencyInformation, IntMap ArtifactsLocation) Action a + +execRawDepM :: Monad m => StateT (RawDependencyInformation, IntMap a1) m a2 -> m (RawDependencyInformation, IntMap a1) +execRawDepM act = + execStateT act + ( RawDependencyInformation IntMap.empty emptyPathIdMap IntMap.empty + , IntMap.empty + ) + +-- | Given a target file path, construct the raw dependency results by following +-- imports recursively. +rawDependencyInformation :: [NormalizedFilePath] -> Action (RawDependencyInformation, BootIdMap) +rawDependencyInformation fs = do + (rdi, ss) <- execRawDepM (goPlural fs) + let bm = IntMap.foldrWithKey (updateBootMap rdi) IntMap.empty ss + return (rdi, bm) + where + goPlural ff = do + mss <- lift $ (fmap.fmap) msrModSummary <$> uses GetModSummaryWithoutTimestamps ff + zipWithM go ff mss + + go :: NormalizedFilePath -- ^ Current module being processed + -> Maybe ModSummary -- ^ ModSummary of the module + -> RawDepM FilePathId + go f mbModSum = do + -- First check to see if we have already processed the FilePath + -- If we have, just return its Id but don't update any of the state. + -- Otherwise, we need to process its imports. + checkAlreadyProcessed f $ do + let al = modSummaryToArtifactsLocation f mbModSum + -- Get a fresh FilePathId for the new file + fId <- getFreshFid al + -- Record this module and its location + whenJust mbModSum $ \ms -> + modifyRawDepInfo (\rd -> rd { rawModuleMap = IntMap.insert (getFilePathId fId) + (ShowableModule $ ms_mod ms) + (rawModuleMap rd)}) + -- Adding an edge to the bootmap so we can make sure to + -- insert boot nodes before the real files. 
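+            -- ('addBootMap' only records the location when it is a boot file;
+            -- for ordinary modules it leaves the boot map untouched.)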
+ addBootMap al fId + -- Try to parse the imports of the file + importsOrErr <- lift $ use GetLocatedImports f + case importsOrErr of + Nothing -> do + -- File doesn't parse so add the module as a failure into the + -- dependency information, continue processing the other + -- elements in the queue + modifyRawDepInfo (insertImport fId (Left ModuleParseError)) + return fId + Just modImports -> do + -- Get NFPs of the imports which have corresponding files + -- Imports either come locally from a file or from a package. + let (no_file, with_file) = splitImports modImports + (mns, ls) = unzip with_file + -- Recursively process all the imports we just learnt about + -- and get back a list of their FilePathIds + fids <- goPlural $ map artifactFilePath ls + -- Associate together the ModuleName with the FilePathId + let moduleImports' = map (,Nothing) no_file ++ zip mns (map Just fids) + -- Insert into the map the information about this modules + -- imports. + modifyRawDepInfo $ insertImport fId (Right $ ModuleImports moduleImports') + return fId + + + checkAlreadyProcessed :: NormalizedFilePath -> RawDepM FilePathId -> RawDepM FilePathId + checkAlreadyProcessed nfp k = do + (rawDepInfo, _) <- get + maybe k return (lookupPathToId (rawPathIdMap rawDepInfo) nfp) + + modifyRawDepInfo :: (RawDependencyInformation -> RawDependencyInformation) -> RawDepM () + modifyRawDepInfo f = modify (first f) + + addBootMap :: ArtifactsLocation -> FilePathId -> RawDepM () + addBootMap al fId = + modify (\(rd, ss) -> (rd, if isBootLocation al + then IntMap.insert (getFilePathId fId) al ss + else ss)) + + getFreshFid :: ArtifactsLocation -> RawDepM FilePathId + getFreshFid al = do + (rawDepInfo, ss) <- get + let (fId, path_map) = getPathId al (rawPathIdMap rawDepInfo) + -- Insert the File into the bootmap if it's a boot module + let rawDepInfo' = rawDepInfo { rawPathIdMap = path_map } + put (rawDepInfo', ss) + return fId + + -- Split in (package imports, local imports) + splitImports :: [(Located ModuleName, Maybe ArtifactsLocation)] + -> ([Located ModuleName], [(Located ModuleName, ArtifactsLocation)]) + splitImports = foldr splitImportsLoop ([],[]) + + splitImportsLoop (imp, Nothing) (ns, ls) = (imp:ns, ls) + splitImportsLoop (imp, Just artifact) (ns, ls) = (ns, (imp,artifact) : ls) + + updateBootMap pm boot_mod_id ArtifactsLocation{..} bm = + if not artifactIsSource + then + let msource_mod_id = lookupPathToId (rawPathIdMap pm) (toNormalizedFilePath' $ dropBootSuffix $ fromNormalizedFilePath artifactFilePath) + in case msource_mod_id of + Just source_mod_id -> insertBootId source_mod_id (FilePathId boot_mod_id) bm + Nothing -> bm + else bm + + dropBootSuffix :: FilePath -> FilePath + dropBootSuffix hs_src = reverse . drop (length @[] "-boot") . reverse $ hs_src + +reportImportCyclesRule :: Recorder (WithPriority Log) -> Rules () +reportImportCyclesRule recorder = + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \ReportImportCycles file -> fmap (\errs -> if null errs then (Just "1",([], Just ())) else (Nothing, (errs, Nothing))) $ do + DependencyInformation{..} <- useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph file + case pathToId depPathIdMap file of + -- The header of the file does not parse, so it can't be part of any import cycles. 
+ Nothing -> pure [] + Just fileId -> + case IntMap.lookup (getFilePathId fileId) depErrorNodes of + Nothing -> pure [] + Just errs -> do + let cycles = mapMaybe (cycleErrorInFile fileId) (toList errs) + -- Convert cycles of files into cycles of module names + forM cycles $ \(imp, files) -> do + modNames <- forM files $ + getModuleName . idToPath depPathIdMap + pure $ toDiag imp $ sort modNames + where cycleErrorInFile f (PartOfCycle imp fs) + | f `elem` fs = Just (imp, fs) + cycleErrorInFile _ _ = Nothing + toDiag imp mods = + ideErrorWithSource (Just "Import cycle detection") (Just DiagnosticSeverity_Error) fp ("Cyclic module dependency between " <> showCycle mods) Nothing + & fdLspDiagnosticL %~ JL.range .~ rng + where rng = fromMaybe noRange $ srcSpanToRange (getLoc imp) + fp = toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename (getLoc imp) + getModuleName file = do + ms <- msrModSummary <$> use_ GetModSummaryWithoutTimestamps file + pure (moduleNameString . moduleName . ms_mod $ ms) + showCycle mods = T.intercalate ", " (map T.pack mods) + +getHieAstsRule :: Recorder (WithPriority Log) -> Rules () +getHieAstsRule recorder = + define (cmapWithPrio LogShake recorder) $ \GetHieAst f -> do + tmr <- use_ TypeCheck f + hsc <- hscEnv <$> use_ GhcSessionDeps f + getHieAstRuleDefinition f hsc tmr + +persistentHieFileRule :: Recorder (WithPriority Log) -> Rules () +persistentHieFileRule recorder = addPersistentRule GetHieAst $ \file -> runMaybeT $ do + res <- readHieFileForSrcFromDisk recorder file + vfsRef <- asks vfsVar + vfsData <- liftIO $ _vfsMap <$> readTVarIO vfsRef + (currentSource, ver) <- liftIO $ case M.lookup (filePathToUri' file) vfsData of + Nothing -> (,Nothing) . T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath file) + Just vf -> pure (virtualFileText vf, Just $ virtualFileVersion vf) + let refmap = generateReferencesMap . getAsts . Compat.hie_asts $ res + del = deltaFromDiff (T.decodeUtf8 $ Compat.hie_hs_src res) currentSource + pure (HAR (Compat.hie_module res) (Compat.hie_asts res) refmap mempty (HieFromDisk res),del,ver) + +getHieAstRuleDefinition :: NormalizedFilePath -> HscEnv -> TcModuleResult -> Action (IdeResult HieAstResult) +getHieAstRuleDefinition f hsc tmr = do + (diags, masts') <- liftIO $ generateHieAsts hsc tmr +#if MIN_VERSION_ghc(9,11,0) + let masts = fst <$> masts' +#else + let masts = masts' +#endif + se <- getShakeExtras + + isFoi <- use_ IsFileOfInterest f + diagsWrite <- case isFoi of + IsFOI Modified{firstOpen = False} -> do + when (coerce $ ideTesting se) $ liftIO $ mRunLspT (lspEnv se) $ + LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/reference/ready")) $ + toJSON $ fromNormalizedFilePath f + pure [] + _ | Just asts <- masts' -> do + source <- getSourceFileSource f + let exports = tcg_exports $ tmrTypechecked tmr + modSummary = tmrModSummary tmr + liftIO $ writeAndIndexHieFile hsc se modSummary f exports asts source + _ -> pure [] + + let refmap = generateReferencesMap . getAsts <$> masts + typemap = AtPoint.computeTypeReferences . getAsts <$> masts + pure (diags <> diagsWrite, HAR (ms_mod $ tmrModSummary tmr) <$> masts <*> refmap <*> typemap <*> pure HieFresh) + +getImportMapRule :: Recorder (WithPriority Log) -> Rules () +getImportMapRule recorder = define (cmapWithPrio LogShake recorder) $ \GetImportMap f -> do + im <- use GetLocatedImports f + let mkImports fileImports = M.fromList $ mapMaybe (\(m, mfp) -> (unLoc m,) . artifactFilePath <$> mfp) fileImports + pure ([], ImportMap . 
mkImports <$> im) + +-- | Ensure that go to definition doesn't block on startup +persistentImportMapRule :: Rules () +persistentImportMapRule = addPersistentRule GetImportMap $ \_ -> pure $ Just (ImportMap mempty, idDelta, Nothing) + +getBindingsRule :: Recorder (WithPriority Log) -> Rules () +getBindingsRule recorder = + define (cmapWithPrio LogShake recorder) $ \GetBindings f -> do + HAR{hieKind=kind, refMap=rm} <- use_ GetHieAst f + case kind of + HieFresh -> pure ([], Just $ bindings rm) + HieFromDisk _ -> pure ([], Nothing) + +getDocMapRule :: Recorder (WithPriority Log) -> Rules () +getDocMapRule recorder = + define (cmapWithPrio LogShake recorder) $ \GetDocMap file -> do + -- Stale data for the scenario where a broken module has previously typechecked + -- but we never generated a DocMap for it + (tmrTypechecked -> tc, _) <- useWithStale_ TypeCheck file + (hscEnv -> hsc, _) <- useWithStale_ GhcSessionDeps file + (HAR{refMap=rf}, _) <- useWithStale_ GetHieAst file + + dkMap <- liftIO $ mkDocMap hsc rf tc + return ([],Just dkMap) + +-- | Persistent rule to ensure that hover doesn't block on startup +persistentDocMapRule :: Rules () +persistentDocMapRule = addPersistentRule GetDocMap $ \_ -> pure $ Just (DKMap mempty mempty, idDelta, Nothing) + +readHieFileForSrcFromDisk :: Recorder (WithPriority Log) -> NormalizedFilePath -> MaybeT IdeAction Compat.HieFile +readHieFileForSrcFromDisk recorder file = do + ShakeExtras{withHieDb} <- ask + row <- MaybeT $ liftIO $ withHieDb (\hieDb -> HieDb.lookupHieFileFromSource hieDb $ fromNormalizedFilePath file) + let hie_loc = HieDb.hieModuleHieFile row + liftIO $ logWith recorder Logger.Debug $ LogLoadingHieFile file + exceptToMaybeT $ readHieFileFromDisk recorder hie_loc + +readHieFileFromDisk :: Recorder (WithPriority Log) -> FilePath -> ExceptT SomeException IdeAction Compat.HieFile +readHieFileFromDisk recorder hie_loc = do + nc <- asks ideNc + res <- liftIO $ tryAny $ loadHieFile (mkUpdater nc) hie_loc + case res of + Left e -> liftIO $ logWith recorder Logger.Debug $ LogLoadingHieFileFail hie_loc e + Right _ -> liftIO $ logWith recorder Logger.Debug $ LogLoadingHieFileSuccess hie_loc + except res + +-- | Typechecks a module. +typeCheckRule :: Recorder (WithPriority Log) -> Rules () +typeCheckRule recorder = define (cmapWithPrio LogShake recorder) $ \TypeCheck file -> do + pm <- use_ GetParsedModule file + hsc <- hscEnv <$> use_ GhcSessionDeps file + foi <- use_ IsFileOfInterest file + -- We should only call the typecheck rule for files of interest. + -- Keeping typechecked modules in memory for other files is + -- very expensive. 
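+    -- If this does happen we only log a warning (see 'LogTypecheckedFOI' for the
+    -- rendered message) and carry on typechecking; the warning can be spurious if
+    -- the file was closed very recently.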
+ when (foi == NotFOI) $ + logWith recorder Logger.Warning $ LogTypecheckedFOI file + typeCheckRuleDefinition hsc pm file + +knownFilesRule :: Recorder (WithPriority Log) -> Rules () +knownFilesRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GetKnownTargets -> do + alwaysRerun + fs <- knownTargets + pure (LBS.toStrict $ B.encode $ hash fs, unhashed fs) + +getFileHashRule :: Recorder (WithPriority Log) -> Rules () +getFileHashRule recorder = + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetFileHash file -> do + void $ use_ GetModificationTime file + fileHash <- liftIO $ Util.getFileHash (fromNormalizedFilePath file) + return (Just (fingerprintToBS fileHash), ([], Just fileHash)) + +getModuleGraphRule :: Recorder (WithPriority Log) -> Rules () +getModuleGraphRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GetModuleGraph -> do + fs <- toKnownFiles <$> useNoFile_ GetKnownTargets + dependencyInfoForFiles (HashSet.toList fs) + +dependencyInfoForFiles :: [NormalizedFilePath] -> Action (BS.ByteString, DependencyInformation) +dependencyInfoForFiles fs = do + (rawDepInfo, bm) <- rawDependencyInformation fs + let (all_fs, _all_ids) = unzip $ HM.toList $ pathToIdMap $ rawPathIdMap rawDepInfo + msrs <- uses GetModSummaryWithoutTimestamps all_fs + let mss = map (fmap msrModSummary) msrs + let deps = map (\i -> IM.lookup (getFilePathId i) (rawImports rawDepInfo)) _all_ids + nodeKeys = IM.fromList $ catMaybes $ zipWith (\fi mms -> (getFilePathId fi,) . NodeKey_Module . msKey <$> mms) _all_ids mss + mns = catMaybes $ zipWith go mss deps + go (Just ms) (Just (Right (ModuleImports xs))) = Just $ ModuleNode this_dep_keys ms + where this_dep_ids = mapMaybe snd xs + this_dep_keys = mapMaybe (\fi -> IM.lookup (getFilePathId fi) nodeKeys) this_dep_ids + go (Just ms) _ = Just $ ModuleNode [] ms + go _ _ = Nothing + mg = mkModuleGraph mns + let shallowFingers = IntMap.fromList $ foldr' (\(i, m) acc -> case m of + Just x -> (getFilePathId i,msrFingerprint x):acc + Nothing -> acc) [] $ zip _all_ids msrs + pure (fingerprintToBS $ Util.fingerprintFingerprints $ map (maybe fingerprint0 msrFingerprint) msrs, processDependencyInformation rawDepInfo bm mg shallowFingers) + +-- This is factored out so it can be directly called from the GetModIface +-- rule. Directly calling this rule means that on the initial load we can +-- garbage collect all the intermediate typechecked modules rather than +-- retain the information forever in the shake graph. +typeCheckRuleDefinition + :: HscEnv + -> ParsedModule + -> NormalizedFilePath + -> Action (IdeResult TcModuleResult) +typeCheckRuleDefinition hsc pm fp = do + IdeOptions { optDefer = defer } <- getIdeOptions + + unlift <- askUnliftIO + let dets = TypecheckHelpers + { getLinkables = unliftIO unlift . uses_ GetLinkable + , getModuleGraph = unliftIO unlift $ useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph fp + } + addUsageDependencies $ liftIO $ + typecheckModule defer hsc dets pm + where + addUsageDependencies :: Action (a, Maybe TcModuleResult) -> Action (a, Maybe TcModuleResult) + addUsageDependencies a = do + r@(_, mtc) <- a + forM_ mtc $ \tc -> do + used_files <- liftIO $ readIORef $ tcg_dependent_files $ tmrTypechecked tc + void $ uses_ GetModificationTime (map toNormalizedFilePath' used_files) + return r + +-- | Get all the linkables stored in the graph, i.e. the ones we *do not* need to unload. 
+-- Doesn't actually contain the code, since we don't need it to unload +currentLinkables :: Action (ModuleEnv UTCTime) +currentLinkables = do + compiledLinkables <- getCompiledLinkables <$> getIdeGlobalAction + liftIO $ readVar compiledLinkables + +loadGhcSession :: Recorder (WithPriority Log) -> GhcSessionDepsConfig -> Rules () +loadGhcSession recorder ghcSessionDepsConfig = do + -- This function should always be rerun because it tracks changes + -- to the version of the collection of HscEnv's. + defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GhcSessionIO -> do + alwaysRerun + opts <- getIdeOptions + config <- getClientConfigAction + res <- optGhcSession opts + + let fingerprint = LBS.toStrict $ LBS.concat + [ B.encode (hash (sessionVersion res)) + -- When the session version changes, reload all session + -- hsc env sessions + , B.encode (show (sessionLoading config)) + -- The loading config affects session loading. + -- Invalidate all build nodes. + -- Changing the session loading config will increment + -- the 'sessionVersion', thus we don't generate the same fingerprint + -- twice by accident. + ] + return (fingerprint, res) + + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GhcSession file -> do + IdeGhcSession{loadSessionFun} <- useNoFile_ GhcSessionIO + -- loading is always returning a absolute path now + (val,deps) <- liftIO $ loadSessionFun $ fromNormalizedFilePath file + + -- add the deps to the Shake graph + let addDependency fp = do + -- VSCode uses absolute paths in its filewatch notifications + let nfp = toNormalizedFilePath' fp + itExists <- getFileExists nfp + when itExists $ void $ do + use_ GetPhysicalModificationTime nfp + logWith recorder Logger.Info $ LogDependencies file deps + mapM_ addDependency deps + + let cutoffHash = LBS.toStrict $ B.encode (hash (snd val)) + return (Just cutoffHash, val) + + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \(GhcSessionDeps_ fullModSummary) file -> do + env <- use_ GhcSession file + ghcSessionDepsDefinition fullModSummary ghcSessionDepsConfig env file + +newtype GhcSessionDepsConfig = GhcSessionDepsConfig + { fullModuleGraph :: Bool + } +instance Default GhcSessionDepsConfig where + def = GhcSessionDepsConfig + { fullModuleGraph = True + } + +-- | Note [GhcSessionDeps] +-- ~~~~~~~~~~~~~~~~~~~~~ +-- For a file 'Foo', GhcSessionDeps "Foo.hs" results in an HscEnv which includes +-- 1. HomeModInfo's (in the HUG/HPT) for all modules in the transitive closure of "Foo", **NOT** including "Foo" itself. +-- 2. ModSummary's (in the ModuleGraph) for all modules in the transitive closure of "Foo", including "Foo" itself. +-- 3. ModLocation's (in the FinderCache) all modules in the transitive closure of "Foo", including "Foo" itself. +ghcSessionDepsDefinition + :: -- | full mod summary + Bool -> + GhcSessionDepsConfig -> HscEnvEq -> NormalizedFilePath -> Action (Maybe HscEnvEq) +ghcSessionDepsDefinition fullModSummary GhcSessionDepsConfig{..} env file = do + let hsc = hscEnv env + + mbdeps <- mapM(fmap artifactFilePath . 
snd) <$> use_ GetLocatedImports file + case mbdeps of + Nothing -> return Nothing + Just deps -> do + when fullModuleGraph $ void $ use_ ReportImportCycles file + ms <- msrModSummary <$> if fullModSummary + then use_ GetModSummary file + else use_ GetModSummaryWithoutTimestamps file + + depSessions <- map hscEnv <$> uses_ (GhcSessionDeps_ fullModSummary) deps + ifaces <- uses_ GetModIface deps + let inLoadOrder = map (\HiFileResult{..} -> HomeModInfo hirModIface hirModDetails emptyHomeModInfoLinkable) ifaces + de <- useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph file + mg <- do + if fullModuleGraph + then return $ depModuleGraph de + else do + let mgs = map hsc_mod_graph depSessions + -- On GHC 9.4+, the module graph contains not only ModSummary's but each `ModuleNode` in the graph + -- also points to all the direct descendants of the current module. To get the keys for the descendants + -- we must get their `ModSummary`s + !final_deps <- do + dep_mss <- map msrModSummary <$> uses_ GetModSummaryWithoutTimestamps deps + return $!! map (NodeKey_Module . msKey) dep_mss + let module_graph_nodes = + nubOrdOn mkNodeKey (ModuleNode final_deps ms : concatMap mgModSummaries' mgs) + liftIO $ evaluate $ liftRnf rwhnf module_graph_nodes + return $ mkModuleGraph module_graph_nodes + session' <- liftIO $ mergeEnvs hsc mg de ms inLoadOrder depSessions + + -- Here we avoid a call to to `newHscEnvEqWithImportPaths`, which creates a new + -- ExportsMap when it is called. We only need to create the ExportsMap once per + -- session, while `ghcSessionDepsDefinition` will be called for each file we need + -- to compile. `updateHscEnvEq` will refresh the HscEnv (session') and also + -- generate a new Unique. + Just <$> liftIO (updateHscEnvEq env session') + +-- | Load a iface from disk, or generate it if there isn't one or it is out of date +-- This rule also ensures that the `.hie` and `.o` (if needed) files are written out. +getModIfaceFromDiskRule :: Recorder (WithPriority Log) -> Rules () +getModIfaceFromDiskRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleWithOldValue $ \GetModIfaceFromDisk f old -> do + ms <- msrModSummary <$> use_ GetModSummary f + mb_session <- use GhcSessionDeps f + case mb_session of + Nothing -> return (Nothing, ([], Nothing)) + Just session -> do + linkableType <- getLinkableType f + ver <- use_ GetModificationTime f + let m_old = case old of + Shake.Succeeded (Just old_version) v -> Just (v, old_version) + Shake.Stale _ (Just old_version) v -> Just (v, old_version) + _ -> Nothing + recompInfo = RecompilationInfo + { source_version = ver + , old_value = m_old + , get_file_version = use GetModificationTime_{missingFileDiagnostics = False} + , get_linkable_hashes = \fs -> map (snd . fromJust . hirCoreFp) <$> uses_ GetModIface fs + , get_module_graph = useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph f + , regenerate = regenerateHiFile session f ms + } + hsc_env' <- setFileCacheHook (hscEnv session) + r <- loadInterface hsc_env' ms linkableType recompInfo + case r of + (diags, Nothing) -> return (Nothing, (diags, Nothing)) + (diags, Just x) -> do + let !fp = Just $! hiFileFingerPrint x + return (fp, (diags, Just x)) + +-- | Check state of hiedb after loading an iface from disk - have we indexed the corresponding `.hie` file? 
+-- This function is responsible for ensuring database consistency +-- Whenever we read a `.hi` file, we must check to ensure we have also +-- indexed the corresponding `.hie` file. If this is not the case (for example, +-- `ghcide` could be killed before indexing finishes), we must re-index the +-- `.hie` file. There should be an up2date `.hie` file on +-- disk since we are careful to write out the `.hie` file before writing the +-- `.hi` file +getModIfaceFromDiskAndIndexRule :: Recorder (WithPriority Log) -> Rules () +getModIfaceFromDiskAndIndexRule recorder = + -- doesn't need early cutoff since all its dependencies already have it + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \GetModIfaceFromDiskAndIndex f -> do + x <- use_ GetModIfaceFromDisk f + se@ShakeExtras{withHieDb} <- getShakeExtras + + -- GetModIfaceFromDisk should have written a `.hie` file, must check if it matches version in db + let ms = hirModSummary x + hie_loc = Compat.ml_hie_file $ ms_location ms + fileHash <- liftIO $ Util.getFileHash hie_loc + mrow <- liftIO $ withHieDb (\hieDb -> HieDb.lookupHieFileFromSource hieDb (fromNormalizedFilePath f)) + let hie_loc' = HieDb.hieModuleHieFile <$> mrow + case mrow of + Just row + | fileHash == HieDb.modInfoHash (HieDb.hieModInfo row) + && Just hie_loc == hie_loc' + -> do + -- All good, the db has indexed the file + when (coerce $ ideTesting se) $ liftIO $ mRunLspT (lspEnv se) $ + LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/reference/ready")) $ + toJSON $ fromNormalizedFilePath f + -- Not in db, must re-index + _ -> do + ehf <- liftIO $ runIdeAction "GetModIfaceFromDiskAndIndex" se $ runExceptT $ + readHieFileFromDisk recorder hie_loc + case ehf of + -- Uh oh, we failed to read the file for some reason, need to regenerate it + Left err -> fail $ "failed to read .hie file " ++ show hie_loc ++ ": " ++ displayException err + -- can just re-index the file we read from disk + Right hf -> liftIO $ do + logWith recorder Logger.Debug $ LogReindexingHieFile f + indexHieFile se ms f fileHash hf + + return (Just x) + +newtype DisplayTHWarning = DisplayTHWarning (IO()) +instance IsIdeGlobal DisplayTHWarning + +getModSummaryRule :: LspT Config IO () -> Recorder (WithPriority Log) -> Rules () +getModSummaryRule displayTHWarning recorder = do + menv <- lspEnv <$> getShakeExtrasRules + case menv of + Just env -> do + displayItOnce <- liftIO $ once $ LSP.runLspT env displayTHWarning + addIdeGlobal (DisplayTHWarning displayItOnce) + Nothing -> do + logItOnce <- liftIO $ once $ putStrLn "" + addIdeGlobal (DisplayTHWarning logItOnce) + + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModSummary f -> do + session' <- hscEnv <$> use_ GhcSession f + modify_dflags <- getModifyDynFlags dynFlagsModifyGlobal + let session = setNonHomeFCHook $ hscSetFlags (modify_dflags $ hsc_dflags session') session' -- TODO wz1000 + (modTime, mFileContent) <- getFileModTimeContents f + let fp = fromNormalizedFilePath f + modS <- liftIO $ runExceptT $ + getModSummaryFromImports session fp modTime (textToStringBuffer . 
Rope.toText <$> mFileContent) + case modS of + Right res -> do + -- Check for Template Haskell + when (uses_th_qq $ msrModSummary res) $ do + DisplayTHWarning act <- getIdeGlobalAction + liftIO act + let bufFingerPrint = ms_hs_hash (msrModSummary res) + let fingerPrint = Util.fingerprintFingerprints + [ msrFingerprint res, bufFingerPrint ] + return ( Just (fingerprintToBS fingerPrint) , ([], Just res)) + Left diags -> return (Nothing, (diags, Nothing)) + + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \GetModSummaryWithoutTimestamps f -> do + mbMs <- use GetModSummary f + case mbMs of + Just res@ModSummaryResult{..} -> do + let ms = msrModSummary { + ms_hspp_buf = error "use GetModSummary instead of GetModSummaryWithoutTimestamps" + } + fp = fingerprintToBS msrFingerprint + return (Just fp, Just res{msrModSummary = ms}) + Nothing -> return (Nothing, Nothing) + +generateCore :: RunSimplifier -> NormalizedFilePath -> Action (IdeResult ModGuts) +generateCore runSimplifier file = do + packageState <- hscEnv <$> use_ GhcSessionDeps file + hsc' <- setFileCacheHook packageState + tm <- use_ TypeCheck file + liftIO $ compileModule runSimplifier hsc' (tmrModSummary tm) (tmrTypechecked tm) + +generateCoreRule :: Recorder (WithPriority Log) -> Rules () +generateCoreRule recorder = + define (cmapWithPrio LogShake recorder) $ \GenerateCore -> generateCore (RunSimplifier True) + +getModIfaceRule :: Recorder (WithPriority Log) -> Rules () +getModIfaceRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModIface f -> do + fileOfInterest <- use_ IsFileOfInterest f + res <- case fileOfInterest of + IsFOI status -> do + -- Never load from disk for files of interest + tmr <- use_ TypeCheck f + linkableType <- getLinkableType f + hsc <- hscEnv <$> use_ GhcSessionDeps f + hsc' <- setFileCacheHook hsc + let compile = fmap ([],) $ use GenerateCore f + se <- getShakeExtras + (diags, !mbHiFile) <- writeCoreFileIfNeeded se hsc' linkableType compile tmr + let fp = hiFileFingerPrint <$> mbHiFile + hiDiags <- case mbHiFile of + Just hiFile + | OnDisk <- status + , not (tmrDeferredError tmr) -> liftIO $ writeHiFile se hsc' hiFile + _ -> pure [] + return (fp, (diags++hiDiags, mbHiFile)) + NotFOI -> do + hiFile <- use GetModIfaceFromDiskAndIndex f + let fp = hiFileFingerPrint <$> hiFile + return (fp, ([], hiFile)) + + pure res + +-- | Count of total times we asked GHC to recompile +newtype RebuildCounter = RebuildCounter { getRebuildCountVar :: TVar Int } +instance IsIdeGlobal RebuildCounter + +getRebuildCount :: Action Int +getRebuildCount = do + count <- getRebuildCountVar <$> getIdeGlobalAction + liftIO $ readTVarIO count + +incrementRebuildCount :: Action () +incrementRebuildCount = do + count <- getRebuildCountVar <$> getIdeGlobalAction + liftIO $ atomically $ modifyTVar' count (+1) + +setFileCacheHook :: HscEnv -> Action HscEnv +setFileCacheHook old_hsc_env = do +#if MIN_VERSION_ghc(9,11,0) + unlift <- askUnliftIO + return $ old_hsc_env { hsc_FC = (hsc_FC old_hsc_env) { lookupFileCache = unliftIO unlift . use_ GetFileHash . 
toNormalizedFilePath' } } +#else + return old_hsc_env +#endif + +-- | Also generates and indexes the `.hie` file, along with the `.o` file if needed +-- Invariant maintained is that if the `.hi` file was successfully written, then the +-- `.hie` and `.o` file (if needed) were also successfully written +regenerateHiFile :: HscEnvEq -> NormalizedFilePath -> ModSummary -> Maybe LinkableType -> Action ([FileDiagnostic], Maybe HiFileResult) +regenerateHiFile sess f ms compNeeded = do + hsc <- setFileCacheHook (hscEnv sess) + opt <- getIdeOptions + + -- By default, we parse with `-haddock` unless 'OptHaddockParse' is overwritten. + (diags, mb_pm) <- liftIO $ getParsedModuleDefinition hsc opt f ms + case mb_pm of + Nothing -> return (diags, Nothing) + Just pm -> do + -- Invoke typechecking directly to update it without incurring a dependency + -- on the parsed module and the typecheck rules + (diags', mtmr) <- typeCheckRuleDefinition hsc pm f + case mtmr of + Nothing -> pure (diags', Nothing) + Just tmr -> do + + let compile = liftIO $ compileModule (RunSimplifier True) hsc (pm_mod_summary pm) $ tmrTypechecked tmr + + se <- getShakeExtras + + -- Bang pattern is important to avoid leaking 'tmr' + (diags'', !res) <- writeCoreFileIfNeeded se hsc compNeeded compile tmr + + -- Write hi file + hiDiags <- case res of + Just !hiFile -> do + + -- Write hie file. Do this before writing the .hi file to + -- ensure that we always have a up2date .hie file if we have + -- a .hi file + se' <- getShakeExtras + (gDiags, masts) <- liftIO $ generateHieAsts hsc tmr + source <- getSourceFileSource f + wDiags <- forM masts $ \asts -> + liftIO $ writeAndIndexHieFile hsc se' (tmrModSummary tmr) f (tcg_exports $ tmrTypechecked tmr) asts source + + -- We don't write the `.hi` file if there are deferred errors, since we won't get + -- accurate diagnostics next time if we do + hiDiags <- if not $ tmrDeferredError tmr + then liftIO $ writeHiFile se' hsc hiFile + else pure [] + + pure (hiDiags <> gDiags <> concat wDiags) + Nothing -> pure [] + + return (diags <> diags' <> diags'' <> hiDiags, res) + + +-- | HscEnv should have deps included already +-- This writes the core file if a linkable is required +-- The actual linkable will be generated on demand when required by `GetLinkable` +writeCoreFileIfNeeded :: ShakeExtras -> HscEnv -> Maybe LinkableType -> Action (IdeResult ModGuts) -> TcModuleResult -> Action (IdeResult HiFileResult) +writeCoreFileIfNeeded _ hsc Nothing _ tmr = do + incrementRebuildCount + res <- liftIO $ mkHiFileResultNoCompile hsc tmr + pure ([], Just $! 
res) +writeCoreFileIfNeeded se hsc (Just _) getGuts tmr = do + incrementRebuildCount + (diags, mguts) <- getGuts + case mguts of + Nothing -> pure (diags, Nothing) + Just guts -> do + (diags', !res) <- liftIO $ mkHiFileResultCompile se hsc tmr guts + pure (diags++diags', res) + +-- See Note [Client configuration in Rules] +getClientSettingsRule :: Recorder (WithPriority Log) -> Rules () +getClientSettingsRule recorder = defineEarlyCutOffNoFile (cmapWithPrio LogShake recorder) $ \GetClientSettings -> do + alwaysRerun + settings <- clientSettings <$> getIdeConfiguration + return (LBS.toStrict $ B.encode $ hash settings, settings) + +usePropertyAction :: + (HasProperty s k t r) => + KeyNameProxy s -> + PluginId -> + Properties r -> + Action (ToHsType t) +usePropertyAction kn plId p = do + pluginConfig <- getPluginConfigAction plId + pure $ useProperty kn p $ plcConfig pluginConfig + +usePropertyByPathAction :: + (HasPropertyByPath props path t) => + KeyNamePath path -> + PluginId -> + Properties props -> + Action (ToHsType t) +usePropertyByPathAction path plId p = do + pluginConfig <- getPluginConfigAction plId + pure $ usePropertyByPath path p $ plcConfig pluginConfig + +-- --------------------------------------------------------------------- + +getLinkableRule :: Recorder (WithPriority Log) -> Rules () +getLinkableRule recorder = + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetLinkable f -> do + HiFileResult{hirModSummary, hirModIface, hirModDetails, hirCoreFp} <- use_ GetModIface f + let obj_file = ml_obj_file (ms_location hirModSummary) + core_file = ml_core_file (ms_location hirModSummary) +#if MIN_VERSION_ghc(9,11,0) + mkLinkable t mod l = Linkable t mod (pure l) + dotO o = DotO o ModuleObject +#else + mkLinkable t mod l = LM t mod [l] + dotO = DotO +#endif + case hirCoreFp of + Nothing -> error $ "called GetLinkable for a file without a linkable: " ++ show f + Just (bin_core, fileHash) -> do + session <- use_ GhcSessionDeps f + linkableType <- getLinkableType f >>= \case + Nothing -> error $ "called GetLinkable for a file which doesn't need compilation: " ++ show f + Just t -> pure t + -- Can't use `GetModificationTime` rule because the core file was possibly written in this + -- very session, so the results aren't reliable + core_t <- liftIO $ getModTime core_file + (warns, hmi) <- case linkableType of + -- Bytecode needs to be regenerated from the core file + BCOLinkable -> liftIO $ coreFileToLinkable linkableType (hscEnv session) hirModSummary hirModIface hirModDetails bin_core (posixSecondsToUTCTime core_t) + -- Object code can be read from the disk + ObjectLinkable -> do + -- object file is up to date if it is newer than the core file + -- Can't use a rule like 'GetModificationTime' or 'GetFileExists' because 'coreFileToLinkable' will write the object file, and + -- thus bump its modification time, forcing this rule to be rerun every time. 
+ exists <- liftIO $ doesFileExist obj_file + mobj_time <- liftIO $ + if exists + then Just <$> getModTime obj_file + else pure Nothing + case mobj_time of + Just obj_t + | obj_t >= core_t -> pure ([], Just $ HomeModInfo hirModIface hirModDetails (justObjects $ mkLinkable (posixSecondsToUTCTime obj_t) (ms_mod hirModSummary) (dotO obj_file))) + _ -> liftIO $ coreFileToLinkable linkableType (hscEnv session) hirModSummary hirModIface hirModDetails bin_core (error "object doesn't have time") + -- Record the linkable so we know not to unload it, and unload old versions + whenJust ((homeModInfoByteCode =<< hmi) <|> (homeModInfoObject =<< hmi)) +#if MIN_VERSION_ghc(9,11,0) + $ \(Linkable time mod _) -> do +#else + $ \(LM time mod _) -> do +#endif + compiledLinkables <- getCompiledLinkables <$> getIdeGlobalAction + liftIO $ modifyVar compiledLinkables $ \old -> do + let !to_keep = extendModuleEnv old mod time + --We need to unload old linkables before we can load in new linkables. However, + --the unload function in the GHC API takes a list of linkables to keep (i.e. + --not unload). Earlier we unloaded right before loading in new linkables, which + --is effectively once per splice. This can be slow as unload needs to walk over + --the list of all loaded linkables, for each splice. + -- + --Solution: now we unload old linkables right after we generate a new linkable and + --just before returning it to be loaded. This has a substantial effect on recompile + --times as the number of loaded modules and splices increases. + -- + --We use a dummy DotA linkable part to fake a NativeCode linkable. + --The unload function doesn't care about the exact linkable parts. + unload (hscEnv session) (map (\(mod', time') -> mkLinkable time' mod' (DotA "dummy")) $ moduleEnvToList to_keep) + return (to_keep, ()) + return (fileHash <$ hmi, (warns, LinkableResult <$> hmi <*> pure fileHash)) + +-- | For now we always use bytecode unless something uses unboxed sums and tuples along with TH +getLinkableType :: NormalizedFilePath -> Action (Maybe LinkableType) +getLinkableType f = use_ NeedsCompilation f + +needsCompilationRule :: NormalizedFilePath -> Action (IdeResultNoDiagnosticsEarlyCutoff (Maybe LinkableType)) +needsCompilationRule file + | "boot" `isSuffixOf` fromNormalizedFilePath file = + pure (Just $ encodeLinkableType Nothing, Just Nothing) +needsCompilationRule file = do + graph <- useWithSeparateFingerprintRule GetModuleGraphImmediateReverseDepsFingerprints GetModuleGraph file + res <- case graph of + -- Treat as False if some reverse dependency header fails to parse + Nothing -> pure Nothing + Just depinfo -> case immediateReverseDependencies file depinfo of + -- If we fail to get immediate reverse dependencies, fail with an error message + Nothing -> fail $ "Failed to get the immediate reverse dependencies of " ++ show file + Just revdeps -> do + -- It's important to use stale data here to avoid wasted work. + -- if NeedsCompilation fails for a module M its result will be under-approximated + -- to False in its dependencies. However, if M actually used TH, this will + -- cause a re-evaluation of GetModIface for all dependencies + -- (since we don't need to generate object code anymore). + -- Once M is fixed we will discover that we actually needed all the object code + -- that we just threw away, and thus have to recompile all dependencies once + -- again, this time keeping the object code. 
+ -- A file needs to be compiled if any file that depends on it uses TemplateHaskell or needs to be compiled + (modsums,needsComps) <- liftA2 + (,) (map (fmap (msrModSummary . fst)) <$> usesWithStale GetModSummaryWithoutTimestamps revdeps) + (uses NeedsCompilation revdeps) + pure $ computeLinkableType modsums (map join needsComps) + pure (Just $ encodeLinkableType res, Just res) + where + computeLinkableType :: [Maybe ModSummary] -> [Maybe LinkableType] -> Maybe LinkableType + computeLinkableType deps xs + | Just ObjectLinkable `elem` xs = Just ObjectLinkable -- If any dependent needs object code, so do we + | Just BCOLinkable `elem` xs = Just BCOLinkable -- If any dependent needs bytecode, then we need to be compiled + | any (maybe False uses_th_qq) deps = Just BCOLinkable -- If any dependent needs TH, then we need to be compiled + | otherwise = Nothing -- If none of these conditions are satisfied, we don't need to compile + +uses_th_qq :: ModSummary -> Bool +uses_th_qq (ms_hspp_opts -> dflags) = + xopt LangExt.TemplateHaskell dflags || xopt LangExt.QuasiQuotes dflags + +-- | Tracks which linkables are current, so we don't need to unload them +newtype CompiledLinkables = CompiledLinkables { getCompiledLinkables :: Var (ModuleEnv UTCTime) } +instance IsIdeGlobal CompiledLinkables + +data RulesConfig = RulesConfig + { -- | Share the computation for the entire module graph + -- We usually compute the full module graph for the project + -- and share it for all files. + -- However, in large projects it might not be desirable to wait + -- for computing the entire module graph before starting to + -- typecheck a particular file. + -- Disabling this drastically decreases sharing and is likely to + -- increase memory usage if you have multiple files open + -- Disabling this also disables checking for import cycles + fullModuleGraph :: Bool + -- | Disable TH for improved performance in large codebases + , enableTemplateHaskell :: Bool + -- | Warning to show when TH is not supported by the current HLS binary + , templateHaskellWarning :: LspT Config IO () + } + +instance Default RulesConfig where + def = RulesConfig True True displayTHWarning + where + displayTHWarning :: LspT c IO () + displayTHWarning + | not isWindows && not hostIsDynamic = do + LSP.sendNotification SMethod_WindowShowMessage $ + ShowMessageParams MessageType_Info thWarningMessage + | otherwise = return () + +thWarningMessage :: T.Text +thWarningMessage = T.unwords + [ "This HLS binary does not support Template Haskell." + , "Follow the [instructions](" <> templateHaskellInstructions <> ")" + , "to build an HLS binary with support for Template Haskell." 
+ ] + +-- | A rule that wires per-file rules together +mainRule :: Recorder (WithPriority Log) -> RulesConfig -> Rules () +mainRule recorder RulesConfig{..} = do + linkables <- liftIO $ newVar emptyModuleEnv + addIdeGlobal $ CompiledLinkables linkables + rebuildCountVar <- liftIO $ newTVarIO 0 + addIdeGlobal $ RebuildCounter rebuildCountVar + getParsedModuleRule recorder + getParsedModuleWithCommentsRule recorder + getLocatedImportsRule recorder + reportImportCyclesRule recorder + typeCheckRule recorder + getDocMapRule recorder + loadGhcSession recorder def{fullModuleGraph} + getModIfaceFromDiskRule recorder + getModIfaceFromDiskAndIndexRule recorder + getModIfaceRule recorder + getModSummaryRule templateHaskellWarning recorder + getModuleGraphRule recorder + getFileHashRule recorder + knownFilesRule recorder + getClientSettingsRule recorder + getHieAstsRule recorder + getBindingsRule recorder + -- This rule uses a custom newness check that relies on the encoding + -- produced by 'encodeLinkable'. This works as follows: + -- * -> + -- * ObjectLinkable -> BCOLinkable : the prev linkable can be reused, signal "no change" + -- * Object/BCO -> NoLinkable : the prev linkable can be ignored, signal "no change" + -- * otherwise : the prev linkable cannot be reused, signal "value has changed" + if enableTemplateHaskell + then defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleWithCustomNewnessCheck (<=) $ \NeedsCompilation file -> + needsCompilationRule file + else defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \NeedsCompilation _ -> return $ Just Nothing + generateCoreRule recorder + getImportMapRule recorder + persistentHieFileRule recorder + persistentDocMapRule + persistentImportMapRule + getLinkableRule recorder + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModuleGraphTransDepsFingerprints file -> do + di <- useNoFile_ GetModuleGraph + let finger = lookupFingerprint file di (depTransDepsFingerprints di) + return (fingerprintToBS <$> finger, ([], finger)) + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModuleGraphTransReverseDepsFingerprints file -> do + di <- useNoFile_ GetModuleGraph + let finger = lookupFingerprint file di (depTransReverseDepsFingerprints di) + return (fingerprintToBS <$> finger, ([], finger)) + defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModuleGraphImmediateReverseDepsFingerprints file -> do + di <- useNoFile_ GetModuleGraph + let finger = lookupFingerprint file di (depImmediateReverseDepsFingerprints di) + return (fingerprintToBS <$> finger, ([], finger)) + + +-- | Get HieFile for haskell file on NormalizedFilePath +getHieFile :: NormalizedFilePath -> Action (Maybe HieFile) +getHieFile nfp = runMaybeT $ do + HAR {hieAst} <- MaybeT $ use GetHieAst nfp + tmr <- MaybeT $ use TypeCheck nfp + ghc <- MaybeT $ use GhcSession nfp + msr <- MaybeT $ use GetModSummaryWithoutTimestamps nfp + source <- lift $ getSourceFileSource nfp + let exports = tcg_exports $ tmrTypechecked tmr + typedAst <- MaybeT $ pure $ cast hieAst + liftIO $ runHsc (hscEnv ghc) $ mkHieFile' (msrModSummary msr) exports typedAst source diff --git a/ghcide/src/Development/IDE/Core/Service.hs b/ghcide/src/Development/IDE/Core/Service.hs new file mode 100644 index 0000000000..52639aeb22 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Service.hs @@ -0,0 +1,107 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE TypeFamilies #-} + +-- | A Shake implementation of the compiler service, built +-- using the "Shaker" abstraction layer for in-memory use. +-- +module Development.IDE.Core.Service( + getIdeOptions, getIdeOptionsIO, + IdeState, initialise, shutdown, + runAction, + getDiagnostics, + ideLogger, + updatePositionMapping, + Log(..), + ) where + +import Control.Applicative ((<|>)) +import Development.IDE.Core.Debouncer +import Development.IDE.Core.FileExists (fileExistsRules) +import Development.IDE.Core.OfInterest hiding (Log, LogShake) +import Development.IDE.Graph +import Development.IDE.Types.Options (IdeOptions (..)) +import Ide.Logger as Logger (Pretty (pretty), + Priority (Debug), + Recorder, + WithPriority, + cmapWithPrio) +import Ide.Plugin.Config +import qualified Language.LSP.Protocol.Types as LSP +import qualified Language.LSP.Server as LSP + +import Control.Monad +import qualified Development.IDE.Core.FileExists as FileExists +import qualified Development.IDE.Core.OfInterest as OfInterest +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Types.Monitoring (Monitoring) +import Development.IDE.Types.Shake (WithHieDb) +import Ide.Types (IdePlugins) +import System.Environment (lookupEnv) + +data Log + = LogShake Shake.Log + | LogOfInterest OfInterest.Log + | LogFileExists FileExists.Log + deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + LogOfInterest msg -> pretty msg + LogFileExists msg -> pretty msg + + +------------------------------------------------------------ +-- Exposed API + +-- | Initialise the Compiler Service. +initialise :: Recorder (WithPriority Log) + -> Config + -> IdePlugins IdeState + -> Rules () + -> Maybe (LSP.LanguageContextEnv Config) + -> Debouncer LSP.NormalizedUri + -> IdeOptions + -> WithHieDb + -> ThreadQueue + -> Monitoring + -> FilePath -- ^ Root directory see Note [Root Directory] + -> IO IdeState +initialise recorder defaultConfig plugins mainRule lspEnv debouncer options withHieDb hiedbChan metrics rootDir = do + shakeProfiling <- do + let fromConf = optShakeProfiling options + fromEnv <- lookupEnv "GHCIDE_BUILD_PROFILING" + return $ fromConf <|> fromEnv + shakeOpen + (cmapWithPrio LogShake recorder) + lspEnv + defaultConfig + plugins + debouncer + shakeProfiling + (optReportProgress options) + (optTesting options) + withHieDb + hiedbChan + (optShakeOptions options) + metrics + (do + addIdeGlobal $ GlobalIdeOptions options + ofInterestRules (cmapWithPrio LogOfInterest recorder) + fileExistsRules (cmapWithPrio LogFileExists recorder) lspEnv + mainRule) + rootDir + +-- | Shutdown the Compiler Service. +shutdown :: IdeState -> IO () +shutdown = shakeShut + +-- This will return as soon as the result of the action is +-- available. There might still be other rules running at this point, +-- e.g., the ofInterestRule. +runAction :: String -> IdeState -> Action a -> IO a +runAction herald ide act = + join $ shakeEnqueue (shakeExtras ide) (mkDelayedAction herald Logger.Debug act) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs new file mode 100644 index 0000000000..6fc9a4d00e --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -0,0 +1,1483 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE PackageImports #-} +{-# LANGUAGE RecursiveDo #-} +{-# LANGUAGE TypeFamilies #-} + +-- | A Shake implementation of the compiler service. +-- +-- There are two primary locations where data lives, and both of +-- these contain much the same data: +-- +-- * The Shake database (inside 'shakeDb') stores a map of shake keys +-- to shake values. In our case, these are all of type 'Q' to 'A'. +-- During a single run all the values in the Shake database are consistent +-- so are used in conjunction with each other, e.g. in 'uses'. +-- +-- * The 'Values' type stores a map of keys to values. These values are +-- always stored as real Haskell values, whereas Shake serialises all 'A' values +-- between runs. To deserialise a Shake value, we just consult Values. +module Development.IDE.Core.Shake( + IdeState, shakeSessionInit, shakeExtras, shakeDb, rootDir, + ShakeExtras(..), getShakeExtras, getShakeExtrasRules, + KnownTargets(..), Target(..), toKnownFiles, unionKnownTargets, mkKnownTargets, + IdeRule, IdeResult, + GetModificationTime(GetModificationTime, GetModificationTime_, missingFileDiagnostics), + shakeOpen, shakeShut, + shakeEnqueue, + newSession, + use, useNoFile, uses, useWithStaleFast, useWithStaleFast', delayedAction, + useWithSeparateFingerprintRule, + useWithSeparateFingerprintRule_, + FastResult(..), + use_, useNoFile_, uses_, + useWithStale, usesWithStale, + useWithStale_, usesWithStale_, + BadDependency(..), + RuleBody(..), + define, defineNoDiagnostics, + defineEarlyCutoff, + defineNoFile, defineEarlyCutOffNoFile, + getDiagnostics, + mRunLspT, mRunLspTCallback, + getHiddenDiagnostics, + IsIdeGlobal, addIdeGlobal, addIdeGlobalExtras, getIdeGlobalState, getIdeGlobalAction, + getIdeGlobalExtras, + getIdeOptions, + getIdeOptionsIO, + GlobalIdeOptions(..), + HLS.getClientConfig, + getPluginConfigAction, + knownTargets, + ideLogger, + actionLogger, + getVirtualFile, + FileVersion(..), + updatePositionMapping, + updatePositionMappingHelper, + deleteValue, + WithProgressFunc, WithIndefiniteProgressFunc, + ProgressEvent(..), + DelayedAction, mkDelayedAction, + IdeAction(..), runIdeAction, + mkUpdater, + -- Exposed for testing. 
+ Q(..), + IndexQueue, + HieDb, + HieDbWriter(..), + addPersistentRule, + garbageCollectDirtyKeys, + garbageCollectDirtyKeysOlderThan, + Log(..), + VFSModified(..), getClientConfigAction, + ThreadQueue(..), + runWithSignal + ) where + +import Control.Concurrent.Async +import Control.Concurrent.STM +import Control.Concurrent.STM.Stats (atomicallyNamed) +import Control.Concurrent.Strict +import Control.DeepSeq +import Control.Exception.Extra hiding (bracket_) +import Control.Lens ((%~), (&), (?~)) +import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Reader +import Control.Monad.Trans.Maybe +import Data.Aeson (Result (Success), + toJSON) +import qualified Data.Aeson.Types as A +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Char8 as BS8 +import Data.Coerce (coerce) +import Data.Default +import Data.Dynamic +import Data.EnumMap.Strict (EnumMap) +import qualified Data.EnumMap.Strict as EM +import Data.Foldable (find, for_) +import Data.Functor ((<&>)) +import Data.Functor.Identity +import Data.Hashable +import qualified Data.HashMap.Strict as HMap +import Data.HashSet (HashSet) +import qualified Data.HashSet as HSet +import Data.List.Extra (foldl', partition, + takeEnd) +import qualified Data.Map.Strict as Map +import Data.Maybe +import qualified Data.SortedList as SL +import Data.String (fromString) +import qualified Data.Text as T +import Data.Time +import Data.Traversable +import Data.Tuple.Extra +import Data.Typeable +import Data.Unique +import Data.Vector (Vector) +import qualified Data.Vector as Vector +import Development.IDE.Core.Debouncer +import Development.IDE.Core.FileUtils (getModTime) +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.ProgressReporting +import Development.IDE.Core.RuleTypes +import Development.IDE.Types.Options as Options +import qualified Language.LSP.Protocol.Message as LSP +import qualified Language.LSP.Server as LSP + +import Development.IDE.Core.Tracing +import Development.IDE.Core.WorkerThread +import Development.IDE.GHC.Compat (NameCache, + NameCacheUpdater, + initNameCache, + knownKeyNames) +import Development.IDE.GHC.Orphans () +import Development.IDE.Graph hiding (ShakeValue, + action) +import qualified Development.IDE.Graph as Shake +import Development.IDE.Graph.Database (ShakeDatabase, + shakeGetBuildStep, + shakeGetDatabaseKeys, + shakeNewDatabase, + shakeProfileDatabase, + shakeRunDatabaseForKeys) +import Development.IDE.Graph.Rule +import Development.IDE.Types.Action +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Exports hiding (exportsMapSize) +import qualified Development.IDE.Types.Exports as ExportsMap +import Development.IDE.Types.KnownTargets +import Development.IDE.Types.Location +import Development.IDE.Types.Monitoring (Monitoring (..)) +import Development.IDE.Types.Shake +import qualified Focus +import GHC.Fingerprint +import GHC.Stack (HasCallStack) +import GHC.TypeLits (KnownSymbol) +import HieDb.Types +import Ide.Logger hiding (Priority) +import qualified Ide.Logger as Logger +import Ide.Plugin.Config +import qualified Ide.PluginUtils as HLS +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import qualified Language.LSP.Protocol.Types as LSP +import Language.LSP.VFS hiding (start) +import qualified "list-t" ListT +import OpenTelemetry.Eventlog hiding (addEvent) +import qualified Prettyprinter as Pretty +import qualified StmContainers.Map as 
STM +import System.FilePath hiding (makeRelative) +import System.IO.Unsafe (unsafePerformIO) +import System.Time.Extra +import UnliftIO (MonadUnliftIO (withRunInIO)) + + +data Log + = LogCreateHieDbExportsMapStart + | LogCreateHieDbExportsMapFinish !Int + | LogBuildSessionRestart !String ![DelayedActionInternal] !KeySet !Seconds !(Maybe FilePath) + | LogBuildSessionRestartTakingTooLong !Seconds + | LogDelayedAction !(DelayedAction ()) !Seconds + | LogBuildSessionFinish !(Maybe SomeException) + | LogDiagsDiffButNoLspEnv ![FileDiagnostic] + | LogDefineEarlyCutoffRuleNoDiagHasDiag !FileDiagnostic + | LogDefineEarlyCutoffRuleCustomNewnessHasDiag !FileDiagnostic + | LogCancelledAction !T.Text + | LogSessionInitialised + | LogLookupPersistentKey !T.Text + | LogShakeGarbageCollection !T.Text !Int !Seconds + -- * OfInterest Log messages + | LogSetFilesOfInterest ![(NormalizedFilePath, FileOfInterestStatus)] + deriving Show + +instance Pretty Log where + pretty = \case + LogCreateHieDbExportsMapStart -> + "Initializing exports map from hiedb" + LogCreateHieDbExportsMapFinish exportsMapSize -> + "Done initializing exports map from hiedb. Size:" <+> pretty exportsMapSize + LogBuildSessionRestart reason actionQueue keyBackLog abortDuration shakeProfilePath -> + vcat + [ "Restarting build session due to" <+> pretty reason + , "Action Queue:" <+> pretty (map actionName actionQueue) + , "Keys:" <+> pretty (map show $ toListKeySet keyBackLog) + , "Aborting previous build session took" <+> pretty (showDuration abortDuration) <+> pretty shakeProfilePath ] + LogBuildSessionRestartTakingTooLong seconds -> + "Build restart is taking too long (" <> pretty seconds <> " seconds)" + LogDelayedAction delayedAct seconds -> + hsep + [ "Finished:" <+> pretty (actionName delayedAct) + , "Took:" <+> pretty (showDuration seconds) ] + LogBuildSessionFinish e -> + vcat + [ "Finished build session" + , pretty (fmap displayException e) ] + LogDiagsDiffButNoLspEnv fileDiagnostics -> + "updateFileDiagnostics published different from new diagnostics - file diagnostics:" + <+> pretty (showDiagnosticsColored fileDiagnostics) + LogDefineEarlyCutoffRuleNoDiagHasDiag fileDiagnostic -> + "defineEarlyCutoff RuleNoDiagnostics - file diagnostic:" + <+> pretty (showDiagnosticsColored [fileDiagnostic]) + LogDefineEarlyCutoffRuleCustomNewnessHasDiag fileDiagnostic -> + "defineEarlyCutoff RuleWithCustomNewnessCheck - file diagnostic:" + <+> pretty (showDiagnosticsColored [fileDiagnostic]) + LogCancelledAction action -> + pretty action <+> "was cancelled" + LogSessionInitialised -> "Shake session initialized" + LogLookupPersistentKey key -> + "LOOKUP PERSISTENT FOR:" <+> pretty key + LogShakeGarbageCollection label number duration -> + pretty label <+> "of" <+> pretty number <+> "keys (took " <+> pretty (showDuration duration) <> ")" + LogSetFilesOfInterest ofInterest -> + "Set files of interest to" <> Pretty.line + <> indent 4 (pretty $ fmap (first fromNormalizedFilePath) ofInterest) + +-- | We need to serialize writes to the database, so we send any function that +-- needs to write to the database over the channel, where it will be picked up by +-- a worker thread.
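The comment above describes the one-writer discipline for hiedb: writers never touch the database directly, they enqueue an action and a single worker thread drains the queue. Below is a minimal, self-contained sketch of that shape, for orientation only and not part of the patch; Db, WriteQueue, startWorker and enqueueWrite are invented names, and the real IndexQueue additionally wraps each queued action in a retry layer.

import Control.Concurrent (forkIO)
import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, readTQueue, writeTQueue)
import Control.Monad (forever)

-- A stand-in for the hiedb connection; writer threads never see it directly.
newtype Db = Db FilePath

-- Writers enqueue actions that receive the database; only the worker
-- thread ever runs them, so all writes are serialized in FIFO order.
type WriteQueue = TQueue (Db -> IO ())

startWorker :: Db -> IO WriteQueue
startWorker db = do
  q <- newTQueueIO
  _ <- forkIO $ forever $ do
    act <- atomically (readTQueue q)
    act db
  pure q

enqueueWrite :: WriteQueue -> (Db -> IO ()) -> IO ()
enqueueWrite q act = atomically (writeTQueue q act)

Because only one thread ever holds the database, no locking is needed around individual writes; contention shows up only as queue depth, which is what indexPending/indexCompleted report as progress.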
+data HieDbWriter + = HieDbWriter + { indexQueue :: IndexQueue + , indexPending :: TVar (HMap.HashMap NormalizedFilePath Fingerprint) -- ^ Avoid unnecessary/out of date indexing + , indexCompleted :: TVar Int -- ^ to report progress + , indexProgressReporting :: ProgressReporting + } + +-- | Actions to queue up on the index worker thread +-- The inner `(HieDb -> IO ()) -> IO ()` wraps `HieDb -> IO ()` +-- with (currently) retry functionality +type IndexQueue = TQueue (((HieDb -> IO ()) -> IO ()) -> IO ()) + +data ThreadQueue = ThreadQueue { + tIndexQueue :: IndexQueue + , tRestartQueue :: TQueue (IO ()) + , tLoaderQueue :: TQueue (IO ()) +} + +-- Note [Semantic Tokens Cache Location] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Storing the semantic tokens cache for each file in shakeExtras might +-- not be ideal, since it is mostly used in LSP request handlers +-- instead of rules. We should consider moving it to a more +-- appropriate place in the future if we find one; we store it here for now. + +-- information we stash inside the shakeExtra field +data ShakeExtras = ShakeExtras + { --eventer :: LSP.FromServerMessage -> IO () + lspEnv :: Maybe (LSP.LanguageContextEnv Config) + ,debouncer :: Debouncer NormalizedUri + ,shakeRecorder :: Recorder (WithPriority Log) + ,idePlugins :: IdePlugins IdeState + ,globals :: TVar (HMap.HashMap TypeRep Dynamic) + -- ^ Registry of global state used by rules. + -- Small and immutable after startup, so not worth using an STM.Map. + ,state :: Values + ,diagnostics :: STMDiagnosticStore + ,hiddenDiagnostics :: STMDiagnosticStore + ,publishedDiagnostics :: STM.Map NormalizedUri [FileDiagnostic] + -- ^ This represents the set of diagnostics that we have published. + -- Due to debouncing not every change might get published. + + ,semanticTokensCache:: STM.Map NormalizedFilePath SemanticTokens + -- ^ Cache of last response of semantic tokens for each file, + -- so we can compute deltas for semantic tokens(SMethod_TextDocumentSemanticTokensFullDelta). + -- putting semantic tokens cache and id in shakeExtras might not be ideal + -- see Note [Semantic Tokens Cache Location] + ,semanticTokensId :: TVar Int + -- ^ semanticTokensId is used to generate unique ids for each lsp response of semantic tokens. + ,positionMapping :: STM.Map NormalizedUri (EnumMap Int32 (PositionDelta, PositionMapping)) + -- ^ Map from a text document version to a PositionMapping that describes how to map + -- positions in a version of that document to positions in the latest version + -- First mapping is delta from previous version and second one is an + -- accumulation to the current version. + ,progress :: PerFileProgressReporting + ,ideTesting :: IdeTesting + -- ^ Whether to enable additional lsp messages used by the test suite for checking invariants + ,restartShakeSession + :: VFSModified + -> String + -> [DelayedAction ()] + -> IO [Key] + -> IO () + ,ideNc :: NameCache + -- | A mapping of module name to known target (or candidate targets, if missing) + ,knownTargetsVar :: TVar (Hashed KnownTargets) + -- | A mapping of exported identifiers for local modules. Updated on kick + ,exportsMap :: TVar ExportsMap + -- | A work queue for actions added via 'runInShakeSession' + ,actionQueue :: ActionQueue + ,clientCapabilities :: ClientCapabilities + , withHieDb :: WithHieDb -- ^ Use only to read.
+ , hiedbWriter :: HieDbWriter -- ^ use to write + , persistentKeys :: TVar (KeyMap GetStalePersistent) + -- ^ Registry for functions that compute/get "stale" results for the rule + -- (possibly from disk) + , vfsVar :: TVar VFS + -- ^ A snapshot of the current state of the virtual file system. Updated on shakeRestart + -- VFS state is managed by LSP. However, the state according to the lsp library may be newer than the state of the current session, + -- leaving us vulnerable to subtle race conditions. To avoid this, we take a snapshot of the state of the VFS on every + -- restart, so that the whole session sees a single consistent view of the VFS. + -- We don't need a STM.Map because we never update individual keys ourselves. + , defaultConfig :: Config + -- ^ Default HLS config, only relevant if the client does not provide any Config + , dirtyKeys :: TVar KeySet + -- ^ Set of dirty rule keys since the last Shake run + , restartQueue :: TQueue (IO ()) + -- ^ Queue of restart actions to be run. + , loaderQueue :: TQueue (IO ()) + -- ^ Queue of loader actions to be run. + } + +type WithProgressFunc = forall a. + T.Text -> LSP.ProgressCancellable -> ((LSP.ProgressAmount -> IO ()) -> IO a) -> IO a +type WithIndefiniteProgressFunc = forall a. + T.Text -> LSP.ProgressCancellable -> IO a -> IO a + +type GetStalePersistent = NormalizedFilePath -> IdeAction (Maybe (Dynamic,PositionDelta,Maybe Int32)) + +getShakeExtras :: Action ShakeExtras +getShakeExtras = do + -- Will fail the action with a pattern match failure, but be caught + Just x <- getShakeExtra @ShakeExtras + return x + +getShakeExtrasRules :: Rules ShakeExtras +getShakeExtrasRules = do + mExtras <- getShakeExtraRules @ShakeExtras + case mExtras of + Just x -> return x + -- This will actually crash HLS + Nothing -> liftIO $ fail "missing ShakeExtras" + +-- See Note [Client configuration in Rules] +-- | Returns the client configuration, creating a build dependency. +-- You should always use this function when accessing client configuration +-- from build rules. +getClientConfigAction :: Action Config +getClientConfigAction = do + ShakeExtras{lspEnv, idePlugins} <- getShakeExtras + currentConfig <- (`LSP.runLspT` LSP.getConfig) `traverse` lspEnv + mbVal <- unhashed <$> useNoFile_ GetClientSettings + let defValue = fromMaybe def currentConfig + case A.parse (parseConfig idePlugins defValue) <$> mbVal of + Just (Success c) -> return c + _ -> return defValue + +getPluginConfigAction :: PluginId -> Action PluginConfig +getPluginConfigAction plId = do + config <- getClientConfigAction + ShakeExtras{idePlugins = IdePlugins plugins} <- getShakeExtras + let plugin = fromMaybe (error $ "Plugin not found: " <> show plId) $ + find (\p -> pluginId p == plId) plugins + return $ HLS.configForPlugin config plugin + +-- | Register a function that will be called to get the "stale" result of a rule, possibly from disk +-- This is called when we don't already have a result, or computing the rule failed. +-- The result of this function will always be marked as 'stale', and a 'proper' rebuild of the rule will +-- be queued if the rule hasn't run before. +addPersistentRule :: IdeRule k v => k -> (NormalizedFilePath -> IdeAction (Maybe (v,PositionDelta,Maybe Int32))) -> Rules () +addPersistentRule k getVal = do + ShakeExtras{persistentKeys} <- getShakeExtrasRules + void $ liftIO $ atomically $ modifyTVar' persistentKeys $ insertKeyMap (newKey k) (fmap (fmap (first3 toDyn)) . 
getVal) + +class Typeable a => IsIdeGlobal a where + +-- | Read a virtual file from the current snapshot +getVirtualFile :: NormalizedFilePath -> Action (Maybe VirtualFile) +getVirtualFile nf = do + vfs <- fmap _vfsMap . liftIO . readTVarIO . vfsVar =<< getShakeExtras + pure $! Map.lookup (filePathToUri' nf) vfs -- Don't leak a reference to the entire map + +-- Take a snapshot of the current LSP VFS +vfsSnapshot :: Maybe (LSP.LanguageContextEnv a) -> IO VFS +vfsSnapshot Nothing = pure $ VFS mempty +vfsSnapshot (Just lspEnv) = LSP.runLspT lspEnv LSP.getVirtualFiles + + +addIdeGlobal :: IsIdeGlobal a => a -> Rules () +addIdeGlobal x = do + extras <- getShakeExtrasRules + liftIO $ addIdeGlobalExtras extras x + +addIdeGlobalExtras :: IsIdeGlobal a => ShakeExtras -> a -> IO () +addIdeGlobalExtras ShakeExtras{globals} x@(typeOf -> ty) = + void $ liftIO $ atomically $ modifyTVar' globals $ \mp -> case HMap.lookup ty mp of + Just _ -> error $ "Internal error, addIdeGlobalExtras, got the same type twice for " ++ show ty + Nothing -> HMap.insert ty (toDyn x) mp + +getIdeGlobalExtras :: forall a . (HasCallStack, IsIdeGlobal a) => ShakeExtras -> IO a +getIdeGlobalExtras ShakeExtras{globals} = do + let typ = typeRep (Proxy :: Proxy a) + x <- HMap.lookup (typeRep (Proxy :: Proxy a)) <$> readTVarIO globals + case x of + Just y + | Just z <- fromDynamic y -> pure z + | otherwise -> errorIO $ "Internal error, getIdeGlobalExtras, wrong type for " ++ show typ ++ " (got " ++ show (dynTypeRep y) ++ ")" + Nothing -> errorIO $ "Internal error, getIdeGlobalExtras, no entry for " ++ show typ + +getIdeGlobalAction :: forall a . (HasCallStack, IsIdeGlobal a) => Action a +getIdeGlobalAction = liftIO . getIdeGlobalExtras =<< getShakeExtras + +getIdeGlobalState :: forall a . IsIdeGlobal a => IdeState -> IO a +getIdeGlobalState = getIdeGlobalExtras . shakeExtras + +newtype GlobalIdeOptions = GlobalIdeOptions IdeOptions +instance IsIdeGlobal GlobalIdeOptions + +getIdeOptions :: Action IdeOptions +getIdeOptions = do + GlobalIdeOptions x <- getIdeGlobalAction + mbEnv <- lspEnv <$> getShakeExtras + case mbEnv of + Nothing -> return x + Just env -> do + config <- liftIO $ LSP.runLspT env HLS.getClientConfig + return x{optCheckProject = pure $ checkProject config, + optCheckParents = pure $ checkParents config + } + +getIdeOptionsIO :: ShakeExtras -> IO IdeOptions +getIdeOptionsIO ide = do + GlobalIdeOptions x <- getIdeGlobalExtras ide + return x + +-- | Return the most recent, potentially stale, value and a PositionMapping +-- for the version of that value. +lastValueIO :: IdeRule k v => ShakeExtras -> k -> NormalizedFilePath -> IO (Maybe (v, PositionMapping)) +lastValueIO s@ShakeExtras{positionMapping,persistentKeys,state} k file = do + + let readPersistent + | IdeTesting testing <- ideTesting s -- Don't read stale persistent values in tests + , testing = pure Nothing + | otherwise = do + pmap <- readTVarIO persistentKeys + mv <- runMaybeT $ do + liftIO $ logWith (shakeRecorder s) Debug $ LogLookupPersistentKey (T.pack $ show k) + f <- MaybeT $ pure $ lookupKeyMap (newKey k) pmap + (dv,del,ver) <- MaybeT $ runIdeAction "lastValueIO" s $ f file + MaybeT $ pure $ (,del,ver) <$> fromDynamic dv + case mv of + Nothing -> atomicallyNamed "lastValueIO 1" $ do + STM.focus (Focus.alter (alterValue $ Failed True)) (toKey k file) state + return Nothing + Just (v,del,mbVer) -> do + actual_version <- case mbVer of + Just ver -> pure (Just $ VFSVersion ver) + Nothing -> (Just . 
ModificationTime <$> getModTime (fromNormalizedFilePath file)) + `catch` (\(_ :: IOException) -> pure Nothing) + atomicallyNamed "lastValueIO 2" $ do + STM.focus (Focus.alter (alterValue $ Stale (Just del) actual_version (toDyn v))) (toKey k file) state + Just . (v,) . addOldDelta del <$> mappingForVersion positionMapping file actual_version + + -- We got a new stale value from the persistent rule, insert it in the map without affecting diagnostics + alterValue new Nothing = Just (ValueWithDiagnostics new mempty) -- If it wasn't in the map, give it empty diagnostics + alterValue new (Just old@(ValueWithDiagnostics val diags)) = Just $ case val of + -- Old failed, we can update it preserving diagnostics + Failed{} -> ValueWithDiagnostics new diags + -- Something already succeeded before, leave it alone + _ -> old + + atomicallyNamed "lastValueIO 4" (STM.lookup (toKey k file) state) >>= \case + Nothing -> readPersistent + Just (ValueWithDiagnostics value _) -> case value of + Succeeded ver (fromDynamic -> Just v) -> + atomicallyNamed "lastValueIO 5" $ Just . (v,) <$> mappingForVersion positionMapping file ver + Stale del ver (fromDynamic -> Just v) -> + atomicallyNamed "lastValueIO 6" $ Just . (v,) . maybe id addOldDelta del <$> mappingForVersion positionMapping file ver + Failed p | not p -> readPersistent + _ -> pure Nothing + +-- | Return the most recent, potentially stale, value and a PositionMapping +-- for the version of that value. +lastValue :: IdeRule k v => k -> NormalizedFilePath -> Action (Maybe (v, PositionMapping)) +lastValue key file = do + s <- getShakeExtras + liftIO $ lastValueIO s key file + +mappingForVersion + :: STM.Map NormalizedUri (EnumMap Int32 (a, PositionMapping)) + -> NormalizedFilePath + -> Maybe FileVersion + -> STM PositionMapping +mappingForVersion allMappings file (Just (VFSVersion ver)) = do + mapping <- STM.lookup (filePathToUri' file) allMappings + return $ maybe zeroMapping snd $ EM.lookup ver =<< mapping +mappingForVersion _ _ _ = pure zeroMapping + +type IdeRule k v = + ( Shake.RuleResult k ~ v + , Shake.ShakeValue k + , Show v + , Typeable v + , NFData v + ) + +-- | A live Shake session with the ability to enqueue Actions for running. +-- Keeps the 'ShakeDatabase' open, so at most one 'ShakeSession' per database. +newtype ShakeSession = ShakeSession + { cancelShakeSession :: IO () + -- ^ Closes the Shake session + } + +-- Note [Root Directory] +-- ~~~~~~~~~~~~~~~~~~~~~ +-- We keep track of the root directory explicitly, which is the directory of the project root. +-- We might be setting it via these options with decreasing priority: +-- +-- 1. from LSP workspace root, `resRootPath` in `LanguageContextEnv`. +-- 2. command line (--cwd) +-- 3. default to the current directory. +-- +-- Using `getCurrentDirectory` makes it more difficult to run the tests, as we spawn one thread of HLS per test case. +-- If we modify the global Variable CWD, via `setCurrentDirectory`, all other test threads are suddenly affected, +-- forcing us to run all integration tests sequentially. +-- +-- Also, there might be a race condition if we depend on the current directory, as some plugin might change it. +-- e.g. stylish's `loadConfig`. 
https://github.com/haskell/haskell-language-server/issues/4234 +-- +-- But according to https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_workspaceFolders +-- The root dir is deprecated, that means we should cleanup dependency on the project root(Or $CWD) thing gradually, +-- so multi-workspaces can actually be supported when we use absolute path everywhere(might also need some high level design). +-- That might not be possible unless we have everything adapted to it, like 'hlint' and 'evaluation of template haskell'. +-- But we should still be working towards the goal. +-- +-- We can drop it in the future once: +-- 1. We can get rid all the usages of root directory in the codebase. +-- 2. LSP version we support actually removes the root directory from the protocol. +-- + +-- | A Shake database plus persistent store. Can be thought of as storing +-- mappings from @(FilePath, k)@ to @RuleResult k@. +data IdeState = IdeState + {shakeDb :: ShakeDatabase + ,shakeSession :: MVar ShakeSession + ,shakeExtras :: ShakeExtras + ,shakeDatabaseProfile :: ShakeDatabase -> IO (Maybe FilePath) + ,stopMonitoring :: IO () + -- | See Note [Root Directory] + ,rootDir :: FilePath + } + + + +-- This is debugging code that generates a series of profiles, if the Boolean is true +shakeDatabaseProfileIO :: Maybe FilePath -> IO(ShakeDatabase -> IO (Maybe FilePath)) +shakeDatabaseProfileIO mbProfileDir = do + profileStartTime <- formatTime defaultTimeLocale "%Y%m%d-%H%M%S" <$> getCurrentTime + profileCounter <- newVar (0::Int) + return $ \shakeDb -> + for mbProfileDir $ \dir -> do + count <- modifyVar profileCounter $ \x -> let !y = x+1 in return (y,y) + let file = "ide-" ++ profileStartTime ++ "-" ++ takeEnd 5 ("0000" ++ show count) <.> "html" + shakeProfileDatabase shakeDb $ dir file + return (dir file) + +setValues :: IdeRule k v + => Values + -> k + -> NormalizedFilePath + -> Value v + -> Vector FileDiagnostic + -> STM () +setValues state key file val diags = + STM.insert (ValueWithDiagnostics (fmap toDyn val) diags) (toKey key file) state + + +-- | Delete the value stored for a given ide build key +-- and return the key that was deleted. +deleteValue + :: Shake.ShakeValue k + => ShakeExtras + -> k + -> NormalizedFilePath + -> STM [Key] +deleteValue ShakeExtras{state} key file = do + STM.delete (toKey key file) state + return [toKey key file] + + +-- | We return Nothing if the rule has not run and Just Failed if it has failed to produce a value. +getValues :: + forall k v. + IdeRule k v => + Values -> + k -> + NormalizedFilePath -> + STM (Maybe (Value v, Vector FileDiagnostic)) +getValues state key file = do + STM.lookup (toKey key file) state >>= \case + Nothing -> pure Nothing + Just (ValueWithDiagnostics v diagsV) -> do + let !r = seqValue $ fmap (fromJust . fromDynamic @v) v + !res = (r,diagsV) + -- Force to make sure we do not retain a reference to the HashMap + -- and we blow up immediately if the fromJust should fail + -- (which would be an internal error). + return $ Just res + +-- | Get all the files in the project +knownTargets :: Action (Hashed KnownTargets) +knownTargets = do + ShakeExtras{knownTargetsVar} <- getShakeExtras + liftIO $ readTVarIO knownTargetsVar + +-- | Seq the result stored in the Shake value. This only +-- evaluates the value to WHNF not NF. We take care of the latter +-- elsewhere and doing it twice is expensive. 
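The comment above distinguishes weak head normal form (WHNF) from normal form (NF) and notes that full evaluation is done elsewhere, so repeating it here would only add cost. A tiny standalone illustration of the difference follows; it is not part of the patch, and whnfVsNf is an invented name.

import Control.DeepSeq (force)
import Control.Exception (evaluate)

-- 'evaluate'/'seq' stop at weak head normal form; 'force' walks the whole
-- structure to normal form, which is what gets expensive when repeated.
whnfVsNf :: IO ()
whnfVsNf = do
  let xs = [1 .. 10 :: Int]
      p  = (sum xs, product xs)
  _ <- evaluate p          -- WHNF: forces only the pair constructor,
                           -- both components remain unevaluated thunks
  _ <- evaluate (force p)  -- NF: forces the components as well
  pure ()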
+seqValue :: Value v -> Value v +seqValue val = case val of + Succeeded ver v -> rnf ver `seq` v `seq` val + Stale d ver v -> rnf d `seq` rnf ver `seq` v `seq` val + Failed _ -> val + +-- | Open a 'IdeState', should be shut using 'shakeShut'. +shakeOpen :: Recorder (WithPriority Log) + -> Maybe (LSP.LanguageContextEnv Config) + -> Config + -> IdePlugins IdeState + -> Debouncer NormalizedUri + -> Maybe FilePath + -> IdeReportProgress + -> IdeTesting + -> WithHieDb + -> ThreadQueue + -> ShakeOptions + -> Monitoring + -> Rules () + -> FilePath + -- ^ Root directory, this one might be picking up from `LanguageContextEnv`'s `resRootPath` + -- , see Note [Root Directory] + -> IO IdeState +shakeOpen recorder lspEnv defaultConfig idePlugins debouncer + shakeProfileDir (IdeReportProgress reportProgress) + ideTesting + withHieDb threadQueue opts monitoring rules rootDir = mdo + -- see Note [Serializing runs in separate thread] + let indexQueue = tIndexQueue threadQueue + restartQueue = tRestartQueue threadQueue + loaderQueue = tLoaderQueue threadQueue + + ideNc <- initNameCache 'r' knownKeyNames + shakeExtras <- do + globals <- newTVarIO HMap.empty + state <- STM.newIO + diagnostics <- STM.newIO + hiddenDiagnostics <- STM.newIO + publishedDiagnostics <- STM.newIO + semanticTokensCache <- STM.newIO + positionMapping <- STM.newIO + knownTargetsVar <- newTVarIO $ hashed emptyKnownTargets + let restartShakeSession = shakeRestart recorder ideState + persistentKeys <- newTVarIO mempty + indexPending <- newTVarIO HMap.empty + indexCompleted <- newTVarIO 0 + semanticTokensId <- newTVarIO 0 + indexProgressReporting <- progressReportingNoTrace + (liftM2 (+) (length <$> readTVar indexPending) (readTVar indexCompleted)) + (readTVar indexCompleted) + lspEnv "Indexing" optProgressStyle + let hiedbWriter = HieDbWriter{..} + exportsMap <- newTVarIO mempty + -- lazily initialize the exports map with the contents of the hiedb + -- TODO: exceptions can be swallowed here? + _ <- async $ do + logWith recorder Debug LogCreateHieDbExportsMapStart + em <- createExportsMapHieDb withHieDb + atomically $ modifyTVar' exportsMap (<> em) + logWith recorder Debug $ LogCreateHieDbExportsMapFinish (ExportsMap.size em) + + progress <- + if reportProgress + then progressReporting lspEnv "Processing" optProgressStyle + else noPerFileProgressReporting + actionQueue <- newQueue + + let clientCapabilities = maybe def LSP.resClientCapabilities lspEnv + dirtyKeys <- newTVarIO mempty + -- Take one VFS snapshot at the start + vfsVar <- newTVarIO =<< vfsSnapshot lspEnv + pure ShakeExtras{shakeRecorder = recorder, ..} + shakeDb <- + shakeNewDatabase + opts { shakeExtra = newShakeExtra shakeExtras } + rules + shakeSession <- newEmptyMVar + shakeDatabaseProfile <- shakeDatabaseProfileIO shakeProfileDir + + IdeOptions + { optProgressStyle + , optCheckParents + } <- getIdeOptionsIO shakeExtras + + checkParents <- optCheckParents + + -- monitoring + let readValuesCounter = fromIntegral . countRelevantKeys checkParents <$> getStateKeys shakeExtras + readDirtyKeys = fromIntegral . countRelevantKeys checkParents . toListKeySet <$> readTVarIO(dirtyKeys shakeExtras) + readIndexPending = fromIntegral . HMap.size <$> readTVarIO (indexPending $ hiedbWriter shakeExtras) + readExportsMap = fromIntegral . ExportsMap.exportsMapSize <$> readTVarIO (exportsMap shakeExtras) + readDatabaseCount = fromIntegral . countRelevantKeys checkParents . 
map fst <$> shakeGetDatabaseKeys shakeDb + readDatabaseStep = fromIntegral <$> shakeGetBuildStep shakeDb + + registerGauge monitoring "ghcide.values_count" readValuesCounter + registerGauge monitoring "ghcide.dirty_keys_count" readDirtyKeys + registerGauge monitoring "ghcide.indexing_pending_count" readIndexPending + registerGauge monitoring "ghcide.exports_map_count" readExportsMap + registerGauge monitoring "ghcide.database_count" readDatabaseCount + registerCounter monitoring "ghcide.num_builds" readDatabaseStep + + stopMonitoring <- start monitoring + + let ideState = IdeState{..} + return ideState + + +getStateKeys :: ShakeExtras -> IO [Key] +getStateKeys = (fmap.fmap) fst . atomically . ListT.toList . STM.listT . state + +-- | Must be called in the 'Initialized' handler and only once +shakeSessionInit :: Recorder (WithPriority Log) -> IdeState -> IO () +shakeSessionInit recorder IdeState{..} = do + -- Take a snapshot of the VFS - it should be empty as we've received no notifications + -- till now, but it can't hurt to be in sync with the `lsp` library. + vfs <- vfsSnapshot (lspEnv shakeExtras) + initSession <- newSession recorder shakeExtras (VFSModified vfs) shakeDb [] "shakeSessionInit" + putMVar shakeSession initSession + logWith recorder Debug LogSessionInitialised + +shakeShut :: IdeState -> IO () +shakeShut IdeState{..} = do + runner <- tryReadMVar shakeSession + -- Shake gets unhappy if you try to close when there is a running + -- request so we first abort that. + for_ runner cancelShakeSession + void $ shakeDatabaseProfile shakeDb + progressStop $ progress shakeExtras + progressStop $ indexProgressReporting $ hiedbWriter shakeExtras + stopMonitoring + + +-- | This is a variant of withMVar where the first argument is run unmasked and if it throws +-- an exception, the previous value is restored while the second argument is executed masked. +withMVar' :: MVar a -> (a -> IO b) -> (b -> IO (a, c)) -> IO c +withMVar' var unmasked masked = uninterruptibleMask $ \restore -> do + a <- takeMVar var + b <- restore (unmasked a) `onException` putMVar var a + (a', c) <- masked b + putMVar var a' + pure c + + +mkDelayedAction :: String -> Logger.Priority -> Action a -> DelayedAction a +mkDelayedAction = DelayedAction Nothing + +-- | These actions are run asynchronously after the current action is +-- finished running. For example, to trigger a key build after a rule +-- has already finished as is the case with useWithStaleFast +delayedAction :: DelayedAction a -> IdeAction (IO a) +delayedAction a = do + extras <- ask + liftIO $ shakeEnqueue extras a + + +-- | Restart the current 'ShakeSession' with the given system actions. +-- Any actions running in the current session will be aborted, +-- but actions added via 'shakeEnqueue' will be requeued. 
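The restart path built on 'withMVar'' above follows a fixed discipline: tear down the old session unmasked (so cancellation stays interruptible and the old value can be restored if it throws), then build and publish the new session under the mask so the MVar is never left empty. A small self-contained sketch of that shape, not part of the patch; Session, restartWith and mkNew are invented names.

import Control.Concurrent.MVar (MVar, putMVar, takeMVar)
import Control.Exception (onException, uninterruptibleMask)

-- A stand-in for 'ShakeSession': something that can be cancelled.
newtype Session = Session { cancelSession :: IO () }

-- Cancel the old session unmasked, restoring it on exception; install the
-- replacement while still masked so we cannot be killed in between.
restartWith :: MVar Session -> IO Session -> IO ()
restartWith var mkNew = uninterruptibleMask $ \restore -> do
  old <- takeMVar var
  restore (cancelSession old) `onException` putMVar var old
  new <- mkNew
  putMVar var new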
+shakeRestart :: Recorder (WithPriority Log) -> IdeState -> VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO () +shakeRestart recorder IdeState{..} vfs reason acts ioActionBetweenShakeSession = + void $ awaitRunInThread (restartQueue shakeExtras) $ do + withMVar' + shakeSession + (\runner -> do + (stopTime,()) <- duration $ logErrorAfter 10 $ cancelShakeSession runner + keys <- ioActionBetweenShakeSession + -- it is very important to update the dirty keys after we enter the critical section + -- see Note [Housekeeping rule cache and dirty key outside of hls-graph] + atomically $ modifyTVar' (dirtyKeys shakeExtras) $ \x -> foldl' (flip insertKeySet) x keys + res <- shakeDatabaseProfile shakeDb + backlog <- readTVarIO $ dirtyKeys shakeExtras + queue <- atomicallyNamed "actionQueue - peek" $ peekInProgress $ actionQueue shakeExtras + + -- this log is required by tests + logWith recorder Debug $ LogBuildSessionRestart reason queue backlog stopTime res + ) + -- It is crucial to be masked here, otherwise we can get killed + -- between spawning the new thread and updating shakeSession. + -- See https://github.com/haskell/ghcide/issues/79 + (\() -> do + (,()) <$> newSession recorder shakeExtras vfs shakeDb acts reason) + where + logErrorAfter :: Seconds -> IO () -> IO () + logErrorAfter seconds action = flip withAsync (const action) $ do + sleep seconds + logWith recorder Error (LogBuildSessionRestartTakingTooLong seconds) + +-- | Enqueue an action in the existing 'ShakeSession'. +-- Returns a computation to block until the action is run, propagating exceptions. +-- Assumes a 'ShakeSession' is available. +-- +-- Appropriate for user actions other than edits. +shakeEnqueue :: ShakeExtras -> DelayedAction a -> IO (IO a) +shakeEnqueue ShakeExtras{actionQueue, shakeRecorder} act = do + (b, dai) <- instantiateDelayedAction act + atomicallyNamed "actionQueue - push" $ pushQueue dai actionQueue + let wait' barrier = + waitBarrier barrier `catches` + [ Handler(\BlockedIndefinitelyOnMVar -> + fail $ "internal bug: forever blocked on MVar for " <> + actionName act) + , Handler (\e@AsyncCancelled -> do + logWith shakeRecorder Debug $ LogCancelledAction (T.pack $ actionName act) + + atomicallyNamed "actionQueue - abort" $ abortQueue dai actionQueue + throw e) + ] + return (wait' b >>= either throwIO return) + +data VFSModified = VFSUnmodified | VFSModified !VFS + +-- | Set up a new 'ShakeSession' with a set of initial actions. +-- Will crash if there is an existing 'ShakeSession' running.
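'shakeEnqueue' above returns an IO action that blocks until the queued work has actually run, rethrowing any exception it raised. A stripped-down sketch of that enqueue-and-wait shape, using an MVar as a one-shot barrier; it is illustrative only and not part of the patch (enqueueJob is an invented name, and the real implementation also aborts the queued action when the waiter itself is cancelled).

{-# LANGUAGE ScopedTypeVariables #-}

import Control.Concurrent.MVar (newEmptyMVar, putMVar, readMVar)
import Control.Concurrent.STM (TQueue, atomically, writeTQueue)
import Control.Exception (SomeException, throwIO, try)

-- Enqueue a job on a worker queue and hand back an IO action that blocks
-- until the job has run, propagating success or failure to the caller.
enqueueJob :: forall a. TQueue (IO ()) -> IO a -> IO (IO a)
enqueueJob queue job = do
  barrier <- newEmptyMVar
  atomically $ writeTQueue queue $ do
    res <- try job
    putMVar barrier (res :: Either SomeException a)
  pure (readMVar barrier >>= either throwIO pure)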
+newSession + :: Recorder (WithPriority Log) + -> ShakeExtras + -> VFSModified + -> ShakeDatabase + -> [DelayedActionInternal] + -> String + -> IO ShakeSession +newSession recorder extras@ShakeExtras{..} vfsMod shakeDb acts reason = do + + -- Take a new VFS snapshot + case vfsMod of + VFSUnmodified -> pure () + VFSModified vfs -> atomically $ writeTVar vfsVar vfs + + IdeOptions{optRunSubset} <- getIdeOptionsIO extras + reenqueued <- atomicallyNamed "actionQueue - peek" $ peekInProgress actionQueue + allPendingKeys <- + if optRunSubset + then Just <$> readTVarIO dirtyKeys + else return Nothing + let + -- A daemon-like action used to inject additional work + -- Runs actions from the work queue sequentially + pumpActionThread otSpan = do + d <- liftIO $ atomicallyNamed "action queue - pop" $ popQueue actionQueue + actionFork (run otSpan d) $ \_ -> pumpActionThread otSpan + + -- TODO figure out how to thread the otSpan into defineEarlyCutoff + run _otSpan d = do + start <- liftIO offsetTime + getAction d + liftIO $ atomicallyNamed "actionQueue - done" $ doneQueue d actionQueue + runTime <- liftIO start + logWith recorder (actionPriority d) $ LogDelayedAction d runTime + + -- The inferred type signature doesn't work in ghc >= 9.0.1 + workRun :: (forall b. IO b -> IO b) -> IO (IO ()) + workRun restore = withSpan "Shake session" $ \otSpan -> do + setTag otSpan "reason" (fromString reason) + setTag otSpan "queue" (fromString $ unlines $ map actionName reenqueued) + whenJust allPendingKeys $ \kk -> setTag otSpan "keys" (BS8.pack $ unlines $ map show $ toListKeySet kk) + let keysActs = pumpActionThread otSpan : map (run otSpan) (reenqueued ++ acts) + res <- try @SomeException $ + restore $ shakeRunDatabaseForKeys (toListKeySet <$> allPendingKeys) shakeDb keysActs + return $ do + let exception = + case res of + Left e -> Just e + _ -> Nothing + logWith recorder Debug $ LogBuildSessionFinish exception + + -- Do the work in a background thread + workThread <- asyncWithUnmask workRun + + -- run the wrap up in a separate thread since it contains interruptible + -- commands (and we are not using uninterruptible mask) + -- TODO: can possibly swallow exceptions? + _ <- async $ join $ wait workThread + + -- Cancelling is required to flush the Shake database when either + -- the filesystem or the Ghc configuration have changed + let cancelShakeSession :: IO () + cancelShakeSession = cancel workThread + + pure (ShakeSession{..}) + +instantiateDelayedAction + :: DelayedAction a + -> IO (Barrier (Either SomeException a), DelayedActionInternal) +instantiateDelayedAction (DelayedAction _ s p a) = do + u <- newUnique + b <- newBarrier + let a' = do + -- work gets reenqueued when the Shake session is restarted + -- it can happen that a work item finished just as it was reenqueued + -- in that case, skipping the work is fine + alreadyDone <- liftIO $ isJust <$> waitBarrierMaybe b + unless alreadyDone $ do + x <- actionCatch @SomeException (Right <$> a) (pure . 
Left) + -- ignore exceptions if the barrier has been filled concurrently + liftIO $ void $ try @SomeException $ signalBarrier b x + d' = DelayedAction (Just u) s p a' + return (b, d') + +getDiagnostics :: IdeState -> STM [FileDiagnostic] +getDiagnostics IdeState{shakeExtras = ShakeExtras{diagnostics}} = do + getAllDiagnostics diagnostics + +getHiddenDiagnostics :: IdeState -> STM [FileDiagnostic] +getHiddenDiagnostics IdeState{shakeExtras = ShakeExtras{hiddenDiagnostics}} = do + getAllDiagnostics hiddenDiagnostics + +-- | Find and release old keys from the state Hashmap +-- For the record, there are other state sources that this process does not release: +-- * diagnostics store (normal, hidden and published) +-- * position mapping store +-- * indexing queue +-- * exports map +garbageCollectDirtyKeys :: Action [Key] +garbageCollectDirtyKeys = do + IdeOptions{optCheckParents} <- getIdeOptions + checkParents <- liftIO optCheckParents + garbageCollectDirtyKeysOlderThan 0 checkParents + +garbageCollectDirtyKeysOlderThan :: Int -> CheckParents -> Action [Key] +garbageCollectDirtyKeysOlderThan maxAge checkParents = otTracedGarbageCollection "dirty GC" $ do + dirtySet <- getDirtySet + garbageCollectKeys "dirty GC" maxAge checkParents dirtySet + +garbageCollectKeys :: String -> Int -> CheckParents -> [(Key, Int)] -> Action [Key] +garbageCollectKeys label maxAge checkParents agedKeys = do + start <- liftIO offsetTime + ShakeExtras{state, dirtyKeys, lspEnv, shakeRecorder, ideTesting} <- getShakeExtras + (n::Int, garbage) <- liftIO $ + foldM (removeDirtyKey dirtyKeys state) (0,[]) agedKeys + t <- liftIO start + when (n>0) $ liftIO $ do + logWith shakeRecorder Debug $ LogShakeGarbageCollection (T.pack label) n t + when (coerce ideTesting) $ liftIO $ mRunLspT lspEnv $ + LSP.sendNotification (SMethod_CustomMethod (Proxy @"ghcide/GC")) + (toJSON $ mapMaybe (fmap showKey . fromKeyType) garbage) + return garbage + + where + showKey = show . Q + removeDirtyKey dk values st@(!counter, keys) (k, age) + | age > maxAge + , Just (kt,_) <- fromKeyType k + , not(kt `HSet.member` preservedKeys checkParents) + = atomicallyNamed "GC" $ do + gotIt <- STM.focus (Focus.member <* Focus.delete) k values + when gotIt $ + modifyTVar' dk (insertKeySet k) + return $ if gotIt then (counter+1, k:keys) else st + | otherwise = pure st + +countRelevantKeys :: CheckParents -> [Key] -> Int +countRelevantKeys checkParents = + Prelude.length . filter (maybe False (not . (`HSet.member` preservedKeys checkParents) . fst) . 
fromKeyType) + +preservedKeys :: CheckParents -> HashSet TypeRep +preservedKeys checkParents = HSet.fromList $ + -- always preserved + [ typeOf GetFileExists + , typeOf GetModificationTime + , typeOf IsFileOfInterest + , typeOf GhcSessionIO + , typeOf GetClientSettings + , typeOf AddWatchedFile + , typeOf GetKnownTargets + ] + ++ concat + -- preserved if CheckParents is enabled since we need to rebuild the ModuleGraph + [ [ typeOf GetModSummary + , typeOf GetModSummaryWithoutTimestamps + , typeOf GetLocatedImports + ] + | checkParents /= NeverCheck + ] + +-- | Define a new Rule without early cutoff +define + :: IdeRule k v + => Recorder (WithPriority Log) -> (k -> NormalizedFilePath -> Action (IdeResult v)) -> Rules () +define recorder op = defineEarlyCutoff recorder $ Rule $ \k v -> (Nothing,) <$> op k v + +defineNoDiagnostics + :: IdeRule k v + => Recorder (WithPriority Log) -> (k -> NormalizedFilePath -> Action (Maybe v)) -> Rules () +defineNoDiagnostics recorder op = defineEarlyCutoff recorder $ RuleNoDiagnostics $ \k v -> (Nothing,) <$> op k v + +-- | Request a Rule result if available +use :: IdeRule k v + => k -> NormalizedFilePath -> Action (Maybe v) +use key file = runIdentity <$> uses key (Identity file) + +-- | Request a Rule result; if not available, return the last computed result, if any, which may be stale +useWithStale :: IdeRule k v + => k -> NormalizedFilePath -> Action (Maybe (v, PositionMapping)) +useWithStale key file = runIdentity <$> usesWithStale key (Identity file) + +-- |Request a Rule result; if not available, return the last computed result, +-- which may be stale. +-- +-- Throws a `BadDependency` exception which is caught by the rule system if +-- none available. +-- +-- WARNING: Not suitable for PluginHandlers. Use `useWithStaleE` instead. +useWithStale_ :: IdeRule k v + => k -> NormalizedFilePath -> Action (v, PositionMapping) +useWithStale_ key file = runIdentity <$> usesWithStale_ key (Identity file) + +-- |Plural version of 'useWithStale_' +-- +-- Throws a `BadDependency` exception which is caught by the rule system if +-- none available. +-- +-- WARNING: Not suitable for PluginHandlers. +usesWithStale_ :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> Action (f (v, PositionMapping)) +usesWithStale_ key files = do + res <- usesWithStale key files + case sequence res of + Nothing -> liftIO $ throwIO $ BadDependency (show key) + Just v -> return v + +-- | IdeActions are used when we want to return a result immediately, even if it +-- is stale. Useful for UI actions like hover, completion where we don't want to +-- block. +-- +-- Run via 'runIdeAction'. +newtype IdeAction a = IdeAction { runIdeActionT :: (ReaderT ShakeExtras IO) a } + deriving newtype (MonadReader ShakeExtras, MonadIO, Functor, Applicative, Monad, Semigroup) + +runIdeAction :: String -> ShakeExtras -> IdeAction a -> IO a +runIdeAction _herald s i = runReaderT (runIdeActionT i) s + +askShake :: IdeAction ShakeExtras +askShake = ask + + +mkUpdater :: NameCache -> NameCacheUpdater +mkUpdater = id + +-- | A (maybe) stale result now, and an up to date one later +data FastResult a = FastResult { stale :: Maybe (a,PositionMapping), uptoDate :: IO (Maybe a) } + +-- | Lookup value in the database and return with the stale value immediately +-- Will queue an action to refresh the value. +-- Might block the first time the rule runs, but never blocks after that.
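'FastResult' above pairs a possibly stale value that is available immediately with an action that waits for a fresh one. A minimal sketch of that stale-now/fresh-later pattern over a plain TVar cache; it is illustrative only and not part of the patch (staleNowFreshLater and refresh are invented names — the real useWithStaleFast' consults the Values store and schedules the rule via delayedAction rather than running it inline).

import Control.Concurrent.STM (TVar, atomically, readTVarIO, writeTVar)

-- Return whatever cached value exists right now (never blocks), together
-- with an action that computes a fresh value and updates the cache.
staleNowFreshLater :: TVar (Maybe a) -> IO a -> IO (Maybe a, IO a)
staleNowFreshLater cache refresh = do
  stale <- readTVarIO cache
  let upToDate = do
        new <- refresh
        atomically $ writeTVar cache (Just new)
        pure new
  pure (stale, upToDate)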
+useWithStaleFast :: IdeRule k v => k -> NormalizedFilePath -> IdeAction (Maybe (v, PositionMapping)) +useWithStaleFast key file = stale <$> useWithStaleFast' key file + +-- | Same as useWithStaleFast but lets you wait for an up to date result +useWithStaleFast' :: IdeRule k v => k -> NormalizedFilePath -> IdeAction (FastResult v) +useWithStaleFast' key file = do + -- This lookup directly looks up the key in the shake database and + -- returns the last value that was computed for this key without + -- checking freshness. + + -- Async trigger the key to be built anyway because we want to + -- keep updating the value in the key. + waitValue <- delayedAction $ mkDelayedAction ("C:" ++ show key ++ ":" ++ fromNormalizedFilePath file) Debug $ use key file + + s@ShakeExtras{state} <- askShake + r <- liftIO $ atomicallyNamed "useStateFast" $ getValues state key file + liftIO $ case r of + -- block for the result if we haven't computed before + Nothing -> do + -- Check if we can get a stale value from disk + res <- lastValueIO s key file + case res of + Nothing -> do + a <- waitValue + pure $ FastResult ((,zeroMapping) <$> a) (pure a) + Just _ -> pure $ FastResult res waitValue + -- Otherwise, use the computed value even if it's out of date. + Just _ -> do + res <- lastValueIO s key file + pure $ FastResult res waitValue + +useNoFile :: IdeRule k v => k -> Action (Maybe v) +useNoFile key = use key emptyFilePath + +-- Requests a rule if available. +-- +-- Throws an `BadDependency` exception which is caught by the rule system if +-- none available. +-- +-- WARNING: Not suitable for PluginHandlers. Use `useE` instead. +use_ :: IdeRule k v => k -> NormalizedFilePath -> Action v +use_ key file = runIdentity <$> uses_ key (Identity file) + +useNoFile_ :: IdeRule k v => k -> Action v +useNoFile_ key = use_ key emptyFilePath + +-- |Plural version of `use_` +-- +-- Throws an `BadDependency` exception which is caught by the rule system if +-- none available. +-- +-- WARNING: Not suitable for PluginHandlers. Use `usesE` instead. +uses_ :: (Traversable f, IdeRule k v) => k -> f NormalizedFilePath -> Action (f v) +uses_ key files = do + res <- uses key files + case sequence res of + Nothing -> liftIO $ throwIO $ BadDependency (show key) + Just v -> return v + +-- | Plural version of 'use' +uses :: (Traversable f, IdeRule k v) + => k -> f NormalizedFilePath -> Action (f (Maybe v)) +uses key files = fmap (\(A value) -> currentValue value) <$> apply (fmap (Q . (key,)) files) + +-- | Return the last computed result which might be stale. +usesWithStale :: (Traversable f, IdeRule k v) + => k -> f NormalizedFilePath -> Action (f (Maybe (v, PositionMapping))) +usesWithStale key files = do + _ <- apply (fmap (Q . (key,)) files) + -- We don't look at the result of the 'apply' since 'lastValue' will + -- return the most recent successfully computed value regardless of + -- whether the rule succeeded or not. 
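+    -- (The 'apply' above still registers the dependency on the keys and
+    -- schedules them to be built; only the values handed back to the caller
+    -- come from 'lastValue'.)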
+ traverse (lastValue key) files + +-- we use separate fingerprint rules to trigger the rebuild of the rule +useWithSeparateFingerprintRule + :: (IdeRule k v, IdeRule k1 Fingerprint) + => k1 -> k -> NormalizedFilePath -> Action (Maybe v) +useWithSeparateFingerprintRule fingerKey key file = do + _ <- use fingerKey file + useWithoutDependency key emptyFilePath + +-- we use separate fingerprint rules to trigger the rebuild of the rule +useWithSeparateFingerprintRule_ + :: (IdeRule k v, IdeRule k1 Fingerprint) + => k1 -> k -> NormalizedFilePath -> Action v +useWithSeparateFingerprintRule_ fingerKey key file = do + useWithSeparateFingerprintRule fingerKey key file >>= \case + Just v -> return v + Nothing -> liftIO $ throwIO $ BadDependency (show key) + +useWithoutDependency :: IdeRule k v + => k -> NormalizedFilePath -> Action (Maybe v) +useWithoutDependency key file = + (\(Identity (A value)) -> currentValue value) <$> applyWithoutDependency (Identity (Q (key, file))) + +data RuleBody k v + = Rule (k -> NormalizedFilePath -> Action (Maybe BS.ByteString, IdeResult v)) + | RuleNoDiagnostics (k -> NormalizedFilePath -> Action (Maybe BS.ByteString, Maybe v)) + | RuleWithCustomNewnessCheck + { newnessCheck :: BS.ByteString -> BS.ByteString -> Bool + , build :: k -> NormalizedFilePath -> Action (Maybe BS.ByteString, Maybe v) + } + | RuleWithOldValue (k -> NormalizedFilePath -> Value v -> Action (Maybe BS.ByteString, IdeResult v)) + +-- | Define a new Rule with early cutoff +defineEarlyCutoff + :: IdeRule k v + => Recorder (WithPriority Log) + -> RuleBody k v + -> Rules () +defineEarlyCutoff recorder (Rule op) = addRule $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> otTracedAction key file mode traceA $ \traceDiagnostics -> do + extras <- getShakeExtras + let diagnostics ver diags = do + traceDiagnostics diags + updateFileDiagnostics recorder file ver (newKey key) extras diags + defineEarlyCutoff' diagnostics (==) key file old mode $ const $ op key file +defineEarlyCutoff recorder (RuleNoDiagnostics op) = addRule $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> otTracedAction key file mode traceA $ \traceDiagnostics -> do + let diagnostics _ver diags = do + traceDiagnostics diags + mapM_ (logWith recorder Warning . LogDefineEarlyCutoffRuleNoDiagHasDiag) diags + defineEarlyCutoff' diagnostics (==) key file old mode $ const $ second (mempty,) <$> op key file +defineEarlyCutoff recorder RuleWithCustomNewnessCheck{..} = + addRule $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> + otTracedAction key file mode traceA $ \ traceDiagnostics -> do + let diagnostics _ver diags = do + traceDiagnostics diags + mapM_ (logWith recorder Warning . 
LogDefineEarlyCutoffRuleCustomNewnessHasDiag) diags + defineEarlyCutoff' diagnostics newnessCheck key file old mode $ + const $ second (mempty,) <$> build key file +defineEarlyCutoff recorder (RuleWithOldValue op) = addRule $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> otTracedAction key file mode traceA $ \traceDiagnostics -> do + extras <- getShakeExtras + let diagnostics ver diags = do + traceDiagnostics diags + updateFileDiagnostics recorder file ver (newKey key) extras diags + defineEarlyCutoff' diagnostics (==) key file old mode $ op key file + +defineNoFile :: IdeRule k v => Recorder (WithPriority Log) -> (k -> Action v) -> Rules () +defineNoFile recorder f = defineNoDiagnostics recorder $ \k file -> do + if file == emptyFilePath then do res <- f k; return (Just res) else + fail $ "Rule " ++ show k ++ " should always be called with the empty string for a file" + +defineEarlyCutOffNoFile :: IdeRule k v => Recorder (WithPriority Log) -> (k -> Action (BS.ByteString, v)) -> Rules () +defineEarlyCutOffNoFile recorder f = defineEarlyCutoff recorder $ RuleNoDiagnostics $ \k file -> do + if file == emptyFilePath then do (hashString, res) <- f k; return (Just hashString, Just res) else + fail $ "Rule " ++ show k ++ " should always be called with the empty string for a file" + +defineEarlyCutoff' + :: forall k v. IdeRule k v + => (Maybe Int32 -> [FileDiagnostic] -> Action ()) -- ^ update diagnostics + -- | compare current and previous for freshness + -> (BS.ByteString -> BS.ByteString -> Bool) + -> k + -> NormalizedFilePath + -> Maybe BS.ByteString + -> RunMode + -> (Value v -> Action (Maybe BS.ByteString, IdeResult v)) + -> Action (RunResult (A (RuleResult k))) +defineEarlyCutoff' doDiagnostics cmp key file mbOld mode action = do + ShakeExtras{state, progress, dirtyKeys} <- getShakeExtras + options <- getIdeOptions + let trans g x = withRunInIO $ \run -> g (run x) + (if optSkipProgress options key then id else trans (inProgress progress file)) $ do + val <- case mbOld of + Just old | mode == RunDependenciesSame -> do + mbValue <- liftIO $ atomicallyNamed "define - read 1" $ getValues state key file + case mbValue of + -- No changes in the dependencies and we have + -- an existing successful result. 
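+                    -- Report 'ChangedNothing' so reverse dependencies are not
+                    -- invalidated and keep their cached results.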
+ Just (v@(Succeeded _ x), diags) -> do + ver <- estimateFileVersionUnsafely key (Just x) file + doDiagnostics (vfsVersion =<< ver) $ Vector.toList diags + return $ Just $ RunResult ChangedNothing old (A v) $ return () + _ -> return Nothing + _ -> + -- assert that a "clean" rule is never a cache miss + -- as this is likely a bug in the dirty key tracking + assert (mode /= RunDependenciesSame) $ return Nothing + res <- case val of + Just res -> return res + Nothing -> do + staleV <- liftIO $ atomicallyNamed "define -read 3" $ getValues state key file <&> \case + Nothing -> Failed False + Just (Succeeded ver v, _) -> Stale Nothing ver v + Just (Stale d ver v, _) -> Stale d ver v + Just (Failed b, _) -> Failed b + (mbBs, (diags, mbRes)) <- actionCatch + (do v <- action staleV; liftIO $ evaluate $ force v) $ + \(e :: SomeException) -> do + pure (Nothing, ([ideErrorText file (T.pack $ show (key, file) ++ show e) | not $ isBadDependency e],Nothing)) + + ver <- estimateFileVersionUnsafely key mbRes file + (bs, res) <- case mbRes of + Nothing -> do + pure (toShakeValue ShakeStale mbBs, staleV) + Just v -> pure (maybe ShakeNoCutoff ShakeResult mbBs, Succeeded ver v) + doDiagnostics (vfsVersion =<< ver) diags + let eq = case (bs, fmap decodeShakeValue mbOld) of + (ShakeResult a, Just (ShakeResult b)) -> cmp a b + (ShakeStale a, Just (ShakeStale b)) -> cmp a b + -- If we do not have a previous result + -- or we got ShakeNoCutoff we always return False. + _ -> False + return $ RunResult + (if eq then ChangedRecomputeSame else ChangedRecomputeDiff) + (encodeShakeValue bs) + (A res) $ do + -- this hook needs to be run in the same transaction as the key is marked clean + -- see Note [Housekeeping rule cache and dirty key outside of hls-graph] + setValues state key file res (Vector.fromList diags) + modifyTVar' dirtyKeys (deleteKeySet $ toKey key file) + return res + where + -- Highly unsafe helper to compute the version of a file + -- without creating a dependency on the GetModificationTime rule + -- (and without creating cycles in the build graph). + estimateFileVersionUnsafely + :: k + -> Maybe v + -> NormalizedFilePath + -> Action (Maybe FileVersion) + estimateFileVersionUnsafely _k v fp + | fp == emptyFilePath = pure Nothing + | Just Refl <- eqT @k @GetModificationTime = pure v + -- GetModificationTime depends on these rules, so avoid creating a cycle + | Just Refl <- eqT @k @AddWatchedFile = pure Nothing + | Just Refl <- eqT @k @IsFileOfInterest = pure Nothing + -- GetFileExists gets called for missing files + | Just Refl <- eqT @k @GetFileExists = pure Nothing + -- For all other rules - compute the version properly without: + -- * creating a dependency: If everything depends on GetModificationTime, we lose early cutoff + -- * creating bogus "file does not exists" diagnostics + | otherwise = useWithoutDependency (GetModificationTime_ False) fp + +-- Note [Housekeeping rule cache and dirty key outside of hls-graph] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Hls-graph contains its own internal running state for each key in the shakeDatabase. +-- ShakeExtras contains `state` field (rule result cache) and `dirtyKeys` (keys that became +-- dirty in between build sessions) that is not visible to the hls-graph +-- Essentially, we need to keep the rule cache and dirty key and hls-graph's internal state +-- in sync. + +-- 1. A dirty key collected in a session should not be removed from dirty keys in the same session. +-- Since if we clean out the dirty key in the same session, +-- 1.1. 
we will lose the chance to dirty its reverse dependencies. Since it only happens during session restart. +-- 1.2. a key might be marked as dirty in ShakeExtras while it's being recomputed by hls-graph which could lead to it's premature removal from dirtyKeys. +-- See issue https://github.com/haskell/haskell-language-server/issues/4093 for more details. + +-- 2. When a key is marked clean in the hls-graph's internal running +-- state, the rule cache and dirty keys are updated in the same transaction. +-- otherwise, some situations like the following can happen: +-- thread 1: hls-graph session run a key +-- thread 1: defineEarlyCutoff' run the action for the key +-- thread 1: the action is done, rule cache and dirty key are updated +-- thread 2: we restart the hls-graph session, thread 1 is killed, the +-- hls-graph's internal state is not updated. +-- This is problematic with early cut off because we are having a new rule cache matching the +-- old hls-graph's internal state, which might case it's reverse dependency to skip the recomputation. +-- See https://github.com/haskell/haskell-language-server/issues/4194 for more details. + +traceA :: A v -> String +traceA (A Failed{}) = "Failed" +traceA (A Stale{}) = "Stale" +traceA (A Succeeded{}) = "Success" + +updateFileDiagnostics :: MonadIO m + => Recorder (WithPriority Log) + -> NormalizedFilePath + -> Maybe Int32 + -> Key + -> ShakeExtras + -> [FileDiagnostic] -- ^ current results + -> m () +updateFileDiagnostics recorder fp ver k ShakeExtras{diagnostics, hiddenDiagnostics, publishedDiagnostics, debouncer, lspEnv, ideTesting} current0 = do + liftIO $ withTrace ("update diagnostics " <> fromString(fromNormalizedFilePath fp)) $ \ addTag -> do + addTag "key" (show k) + let (currentShown, currentHidden) = partition ((== ShowDiag) . fdShouldShowDiagnostic) current + uri = filePathToUri' fp + addTagUnsafe :: String -> String -> String -> a -> a + addTagUnsafe msg t x v = unsafePerformIO(addTag (msg <> t) x) `seq` v + update :: (forall a. String -> String -> a -> a) -> [FileDiagnostic] -> STMDiagnosticStore -> STM [FileDiagnostic] + update addTagUnsafeMethod new store = addTagUnsafeMethod "count" (show $ Prelude.length new) $ setStageDiagnostics addTagUnsafeMethod uri ver (renderKey k) new store + current = map (fdLspDiagnosticL %~ diagsFromRule) current0 + addTag "version" (show ver) + mask_ $ do + -- Mask async exceptions to ensure that updated diagnostics are always + -- published. Otherwise, we might never publish certain diagnostics if + -- an exception strikes between modifyVar but before + -- publishDiagnosticsNotification. + newDiags <- liftIO $ atomicallyNamed "diagnostics - update" $ update (addTagUnsafe "shown ") currentShown diagnostics + _ <- liftIO $ atomicallyNamed "diagnostics - hidden" $ update (addTagUnsafe "hidden ") currentHidden hiddenDiagnostics + let uri' = filePathToUri' fp + let delay = if null newDiags then 0.1 else 0 + registerEvent debouncer delay uri' $ withTrace ("report diagnostics " <> fromString (fromNormalizedFilePath fp)) $ \tag -> do + join $ mask_ $ do + lastPublish <- atomicallyNamed "diagnostics - publish" $ STM.focus (Focus.lookupWithDefault [] <* Focus.insert newDiags) uri' publishedDiagnostics + let action = when (lastPublish /= newDiags) $ case lspEnv of + Nothing -> -- Print an LSP event. 
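+                        -- (without an LSP environment there is no client to
+                        -- publish to, so the new diagnostics are only logged)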
+ logWith recorder Info $ LogDiagsDiffButNoLspEnv newDiags + Just env -> LSP.runLspT env $ do + liftIO $ tag "count" (show $ Prelude.length newDiags) + liftIO $ tag "key" (show k) + LSP.sendNotification SMethod_TextDocumentPublishDiagnostics $ + LSP.PublishDiagnosticsParams (fromNormalizedUri uri') (fmap fromIntegral ver) (map fdLspDiagnostic newDiags) + return action + where + diagsFromRule :: Diagnostic -> Diagnostic + diagsFromRule c@Diagnostic{_range} + | coerce ideTesting = c & L.relatedInformation ?~ + [ DiagnosticRelatedInformation + (Location + (filePathToUri $ fromNormalizedFilePath fp) + _range + ) + (T.pack $ show k) + ] + | otherwise = c + + +ideLogger :: IdeState -> Recorder (WithPriority Log) +ideLogger IdeState{shakeExtras=ShakeExtras{shakeRecorder}} = shakeRecorder + +actionLogger :: Action (Recorder (WithPriority Log)) +actionLogger = shakeRecorder <$> getShakeExtras + +-------------------------------------------------------------------------------- +type STMDiagnosticStore = STM.Map NormalizedUri StoreItem' +data StoreItem' = StoreItem' (Maybe Int32) FileDiagnosticsBySource +type FileDiagnosticsBySource = Map.Map (Maybe T.Text) (SL.SortedList FileDiagnostic) + +getDiagnosticsFromStore :: StoreItem' -> [FileDiagnostic] +getDiagnosticsFromStore (StoreItem' _ diags) = concatMap SL.fromSortedList $ Map.elems diags + +updateSTMDiagnostics :: + (forall a. String -> String -> a -> a) -> + STMDiagnosticStore -> + NormalizedUri -> + Maybe Int32 -> + FileDiagnosticsBySource -> + STM [FileDiagnostic] +updateSTMDiagnostics addTag store uri mv newDiagsBySource = + getDiagnosticsFromStore . fromJust <$> STM.focus (Focus.alter update *> Focus.lookup) uri store + where + update (Just(StoreItem' mvs dbs)) + | addTag "previous version" (show mvs) $ + addTag "previous count" (show $ Prelude.length $ filter (not.null) $ Map.elems dbs) False = undefined + | mvs == mv = Just (StoreItem' mv (newDiagsBySource <> dbs)) + update _ = Just (StoreItem' mv newDiagsBySource) + +-- | Sets the diagnostics for a file and compilation step +-- if you want to clear the diagnostics call this with an empty list +setStageDiagnostics + :: (forall a. String -> String -> a -> a) + -> NormalizedUri + -> Maybe Int32 -- ^ the time that the file these diagnostics originate from was last edited + -> T.Text + -> [FileDiagnostic] + -> STMDiagnosticStore + -> STM [FileDiagnostic] +setStageDiagnostics addTag uri ver stage diags ds = updateSTMDiagnostics addTag ds uri ver updatedDiags + where + !updatedDiags = Map.singleton (Just stage) $! SL.toSortedList diags + +getAllDiagnostics :: + STMDiagnosticStore -> + STM [FileDiagnostic] +getAllDiagnostics = + fmap (concatMap (\(_,v) -> getDiagnosticsFromStore v)) . ListT.toList . STM.listT + +updatePositionMapping :: IdeState -> VersionedTextDocumentIdentifier -> [TextDocumentContentChangeEvent] -> STM () +updatePositionMapping IdeState{shakeExtras = ShakeExtras{positionMapping}} VersionedTextDocumentIdentifier{..} changes = + STM.focus (Focus.alter f) uri positionMapping + where + uri = toNormalizedUri _uri + f = Just . updatePositionMappingHelper _version changes . 
fromMaybe mempty + + +updatePositionMappingHelper :: + Int32 + -> [TextDocumentContentChangeEvent] + -> EnumMap Int32 (PositionDelta, PositionMapping) + -> EnumMap Int32 (PositionDelta, PositionMapping) +updatePositionMappingHelper ver changes mappingForUri = snd $ + -- Very important to use mapAccum here so that the tails of + -- each mapping can be shared, otherwise quadratic space is + -- used which is evident in long running sessions. + EM.mapAccumRWithKey (\acc _k (delta, _) -> let new = addOldDelta delta acc in (new, (delta, acc))) + zeroMapping + (EM.insert ver (mkDelta changes, zeroMapping) mappingForUri) + +-- | sends a signal whenever shake session is run/restarted +-- being used in cabal and hlint plugin tests to know when its time +-- to look for file diagnostics +kickSignal :: KnownSymbol s => Bool -> Maybe (LSP.LanguageContextEnv c) -> [NormalizedFilePath] -> Proxy s -> Action () +kickSignal testing lspEnv files msg = when testing $ liftIO $ mRunLspT lspEnv $ + LSP.sendNotification (LSP.SMethod_CustomMethod msg) $ + toJSON $ map fromNormalizedFilePath files + +-- | Add kick start/done signal to rule +runWithSignal :: (KnownSymbol s0, KnownSymbol s1, IdeRule k v) => Proxy s0 -> Proxy s1 -> [NormalizedFilePath] -> k -> Action () +runWithSignal msgStart msgEnd files rule = do + ShakeExtras{ideTesting = Options.IdeTesting testing, lspEnv} <- getShakeExtras + kickSignal testing lspEnv files msgStart + void $ uses rule files + kickSignal testing lspEnv files msgEnd diff --git a/ghcide/src/Development/IDE/Core/Tracing.hs b/ghcide/src/Development/IDE/Core/Tracing.hs new file mode 100644 index 0000000000..34839faaee --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Tracing.hs @@ -0,0 +1,142 @@ + +module Development.IDE.Core.Tracing + ( otTracedHandler + , otTracedAction + , otTracedProvider + , otSetUri + , otTracedGarbageCollection + , withTrace + , withEventTrace + , withTelemetryRecorder + ) +where + +import Control.Exception.Safe (generalBracket) +import Control.Monad.Catch (ExitCase (..), MonadMask) +import Control.Monad.IO.Unlift +import Data.ByteString (ByteString) +import Data.ByteString.Char8 (pack) +import Data.String (IsString (fromString)) +import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) +import Data.Word (Word16) +import Debug.Trace.Flags (userTracingEnabled) +import Development.IDE.Graph (Action) +import Development.IDE.Graph.Rule +import Development.IDE.Types.Diagnostics (FileDiagnostic, + showDiagnostics) +import Development.IDE.Types.Location (Uri (..)) +import Ide.Logger +import Ide.Types (PluginId (..)) +import Language.LSP.Protocol.Types (NormalizedFilePath, + fromNormalizedFilePath) +import OpenTelemetry.Eventlog (SpanInFlight (..), addEvent, + beginSpan, endSpan, setTag, + withSpan) + + +withTrace :: (MonadMask m, MonadIO m) => String -> ((String -> String -> m ()) -> m a) -> m a +withTrace name act + | userTracingEnabled + = withSpan (fromString name) $ \sp -> do + let setSpan' k v = setTag sp (fromString k) (fromString v) + act setSpan' + | otherwise = act (\_ _ -> pure ()) + +withEventTrace :: (MonadMask m, MonadIO m) => String -> ((ByteString -> m ()) -> m a) -> m a +withEventTrace name act + | userTracingEnabled + = withSpan (fromString name) $ \sp -> do + act (addEvent sp "") + | otherwise = act (\_ -> pure ()) + +-- | Returns a logger that produces telemetry events in a single span +withTelemetryRecorder :: (MonadIO m, MonadMask m) => (Recorder (WithPriority (Doc a)) -> m c) -> m c +withTelemetryRecorder k = withSpan "Logger" $ 
\sp -> + -- Tracy doesn't like when we create a new span for every log line. + -- To workaround that, we create a single span for all log events. + -- This is fine since we don't care about the span itself, only about the events + k $ telemetryLogRecorder sp + +-- | Returns a logger that produces telemetry events in a single span. +telemetryLogRecorder :: SpanInFlight -> Recorder (WithPriority (Doc a)) +telemetryLogRecorder sp = Recorder $ \WithPriority {..} -> + liftIO $ addEvent sp (fromString $ show priority) (encodeUtf8 $ trim $ renderStrict $ layoutCompact payload) + where + -- eventlog message size is limited by EVENT_PAYLOAD_SIZE_MAX = STG_WORD16_MAX + trim = T.take (fromIntegral(maxBound :: Word16) - 10) + +-- | Trace a handler using OpenTelemetry. Adds various useful info into tags in the OpenTelemetry span. +otTracedHandler + :: MonadUnliftIO m + => String -- ^ Message type + -> String -- ^ Message label + -> (SpanInFlight -> m a) + -> m a +otTracedHandler requestType label act + | userTracingEnabled = do + let !name = + if null label + then requestType + else requestType <> ":" <> show label + -- Add an event so all requests can be quickly seen in the viewer without searching + runInIO <- askRunInIO + liftIO $ withSpan (fromString name) (\sp -> addEvent sp "" (fromString $ name <> " received") >> runInIO (act sp)) + | otherwise = act (SpanInFlight 0) + +otSetUri :: SpanInFlight -> Uri -> IO () +otSetUri sp (Uri t) = setTag sp "uri" (encodeUtf8 t) + +-- | Trace a Shake action using opentelemetry. +otTracedAction + :: Show k + => k -- ^ The Action's Key + -> NormalizedFilePath -- ^ Path to the file the action was run for + -> RunMode + -> (a -> String) + -> (([FileDiagnostic] -> Action ()) -> Action (RunResult a)) -- ^ The action + -> Action (RunResult a) +otTracedAction key file mode result act + | userTracingEnabled = fst <$> + generalBracket + (do + sp <- beginSpan (fromString (show key)) + setTag sp "File" (fromString $ fromNormalizedFilePath file) + setTag sp "Mode" (fromString $ show mode) + return sp + ) + (\sp ec -> do + case ec of + ExitCaseAbort -> setTag sp "aborted" "1" + ExitCaseException e -> setTag sp "exception" (pack $ show e) + ExitCaseSuccess res -> do + setTag sp "result" (pack $ result $ runValue res) + setTag sp "changed" $ case res of + RunResult x _ _ _ -> fromString $ show x + endSpan sp) + (\sp -> act (liftIO . setTag sp "diagnostics" . encodeUtf8 . 
showDiagnostics )) + | otherwise = act (\_ -> return ()) + +otTracedGarbageCollection :: (MonadMask f, MonadIO f, Show a) => ByteString -> f [a] -> f [a] +otTracedGarbageCollection label act + | userTracingEnabled = fst <$> + generalBracket + (beginSpan label) + (\sp ec -> do + case ec of + ExitCaseAbort -> setTag sp "aborted" "1" + ExitCaseException e -> setTag sp "exception" (pack $ show e) + ExitCaseSuccess res -> setTag sp "keys" (pack $ unlines $ map show res) + endSpan sp) + (const act) + | otherwise = act + +otTracedProvider :: MonadUnliftIO m => PluginId -> ByteString -> m a -> m a +otTracedProvider (PluginId pluginName) provider act + | userTracingEnabled = do + runInIO <- askRunInIO + liftIO $ withSpan (provider <> " provider") $ \sp -> do + setTag sp "plugin" (encodeUtf8 pluginName) + runInIO act + | otherwise = act + diff --git a/ghcide/src/Development/IDE/Core/UseStale.hs b/ghcide/src/Development/IDE/Core/UseStale.hs new file mode 100644 index 0000000000..498ea44bee --- /dev/null +++ b/ghcide/src/Development/IDE/Core/UseStale.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE DerivingVia #-} +{-# LANGUAGE GADTs #-} + +module Development.IDE.Core.UseStale + ( Age(..) + , Tracked + , unTrack + , PositionMap + , TrackedStale (..) + , untrackedStaleValue + , unsafeMkStale + , unsafeMkCurrent + , unsafeCopyAge + , MapAge (..) + , dualPositionMap + , useWithStale + , useWithStale_ + ) where + +import Control.Arrow +import Control.Category (Category) +import qualified Control.Category as C +import Control.DeepSeq (NFData) +import Data.Aeson +import Data.Coerce (coerce) +import Data.Functor ((<&>)) +import Data.Functor.Identity (Identity (Identity)) +import Data.Kind (Type) +import Data.String (fromString) +import Development.IDE (Action, IdeRule, + NormalizedFilePath, + Range, + rangeToRealSrcSpan, + realSrcSpanToRange) +import qualified Development.IDE.Core.PositionMapping as P +import qualified Development.IDE.Core.Shake as IDE +import Development.IDE.GHC.Compat (RealSrcSpan, srcSpanFile) +import Development.IDE.GHC.Compat.Util (unpackFS) + + +------------------------------------------------------------------------------ +-- | A data kind for 'Tracked'. +data Age = Current | Stale Type + + +------------------------------------------------------------------------------ +-- | Some value, tagged with its age. All 'Current' ages are considered to be +-- the same thing, but 'Stale' values are protected by an untouchable variable +-- to ensure they can't be unified. +newtype Tracked (age :: Age) a = UnsafeTracked + { unTrack :: a + } + deriving stock (Functor, Foldable, Traversable) + deriving newtype (Eq, Ord, Show, Read, ToJSON, FromJSON, NFData) + deriving (Applicative, Monad) via Identity + + +------------------------------------------------------------------------------ +-- | Like 'P.PositionMapping', but with annotated ages for how 'Tracked' values +-- change. Use the 'Category' instance to compose 'PositionMapping's in order +-- to transform between values of different stale ages. +newtype PositionMap (from :: Age) (to :: Age) = PositionMap + { _getPositionMapping :: P.PositionMapping + } + +instance Category PositionMap where + id = coerce P.zeroMapping + (.) = coerce P.composeDelta + + +------------------------------------------------------------------------------ +-- | Get a 'PositionMap' that runs in the opposite direction. 
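+--
+-- For instance (an illustrative specialisation only, 'backwards' is not part
+-- of this module):
+--
+-- > backwards :: PositionMap (Stale s) Current -> PositionMap Current (Stale s)
+-- > backwards = dualPositionMap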
+dualPositionMap :: PositionMap from to -> PositionMap to from +dualPositionMap (PositionMap (P.PositionMapping (P.PositionDelta from to))) = + PositionMap $ P.PositionMapping $ P.PositionDelta to from + + +------------------------------------------------------------------------------ +-- | A pair containing a @'Tracked' 'Stale'@ value, as well as +-- a 'PositionMapping' that will fast-forward values to the current age. +data TrackedStale a where + TrackedStale + :: Tracked (Stale s) a + -> PositionMap (Stale s) Current + -> TrackedStale a + +instance Functor TrackedStale where + fmap f (TrackedStale t pm) = TrackedStale (fmap f t) pm + + +untrackedStaleValue :: TrackedStale a -> a +untrackedStaleValue (TrackedStale ta _) = coerce ta + + +------------------------------------------------------------------------------ +-- | A class for which 'Tracked' values can be run across a 'PositionMapping' +-- to change their ages. +class MapAge a where + {-# MINIMAL mapAgeFrom | mapAgeTo #-} + mapAgeFrom :: PositionMap from to -> Tracked to a -> Maybe (Tracked from a) + mapAgeFrom = mapAgeTo . dualPositionMap + + mapAgeTo :: PositionMap from to -> Tracked from a -> Maybe (Tracked to a) + mapAgeTo = mapAgeFrom . dualPositionMap + + +instance MapAge Range where + mapAgeFrom = coerce P.fromCurrentRange + mapAgeTo = coerce P.toCurrentRange + + +instance MapAge RealSrcSpan where + mapAgeFrom = + invMapAge (\fs -> rangeToRealSrcSpan (fromString $ unpackFS fs)) + (srcSpanFile &&& realSrcSpanToRange) + . mapAgeFrom + + +------------------------------------------------------------------------------ +-- | Helper function for deriving 'MapAge' for values in terms of other +-- instances. +invMapAge + :: (c -> a -> b) + -> (b -> (c, a)) + -> (Tracked from a -> Maybe (Tracked to a)) + -> Tracked from b + -> Maybe (Tracked to b) +invMapAge to from f t = + let (c, t') = unTrack $ fmap from t + in fmap (fmap $ to c) $ f $ UnsafeTracked t' + + +unsafeMkCurrent :: age -> Tracked 'Current age +unsafeMkCurrent = coerce + + +unsafeMkStale :: age -> Tracked (Stale s) age +unsafeMkStale = coerce + + +unsafeCopyAge :: Tracked age a -> b -> Tracked age b +unsafeCopyAge _ = coerce + + +-- | Request a Rule result, it not available return the last computed result, if any, which may be stale +useWithStale :: IdeRule k v + => k -> NormalizedFilePath -> Action (Maybe (TrackedStale v)) +useWithStale key file = do + x <- IDE.useWithStale key file + pure $ x <&> \(v, pm) -> + TrackedStale (coerce v) (coerce pm) + +-- | Request a Rule result, it not available return the last computed result which may be stale. +-- Errors out if none available. +useWithStale_ :: IdeRule k v + => k -> NormalizedFilePath -> Action (TrackedStale v) +useWithStale_ key file = do + (v, pm) <- IDE.useWithStale_ key file + pure $ TrackedStale (coerce v) (coerce pm) + diff --git a/ghcide/src/Development/IDE/Core/WorkerThread.hs b/ghcide/src/Development/IDE/Core/WorkerThread.hs new file mode 100644 index 0000000000..6d141c7ef3 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/WorkerThread.hs @@ -0,0 +1,59 @@ +{- +Module : Development.IDE.Core.WorkerThread +Author : @soulomoon +SPDX-License-Identifier: Apache-2.0 + +Description : This module provides an API for managing worker threads in the IDE. 
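+
+A rough usage sketch (illustrative only; the names and values below are made up
+and nothing here is prescribed by the API):
+
+> main = flip runContT pure $ do
+>   q <- withWorkerQueue id      -- the worker simply runs each queued IO job
+>   n <- lift $ awaitRunInThread q (pure (21 * 2 :: Int))
+>   lift $ print n
+
+For the reasoning behind funnelling this kind of work through a single worker thread,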
+see Note [Serializing runs in separate thread]
+-}
+module Development.IDE.Core.WorkerThread
+    (withWorkerQueue, awaitRunInThread)
+    where
+
+import           Control.Concurrent.Async  (AsyncCancelled (AsyncCancelled),
+                                            withAsync)
+import           Control.Concurrent.STM
+import           Control.Concurrent.Strict (newBarrier, signalBarrier,
+                                            waitBarrier)
+import           Control.Exception.Safe    (Exception (fromException),
+                                            SomeException, throwIO, try)
+import           Control.Monad             (forever)
+import           Control.Monad.Cont        (ContT (ContT))
+
+{-
+Note [Serializing runs in separate thread]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+We often want to take long-running actions using some resource that cannot be shared.
+In this instance it is useful to have a queue of jobs to run using the resource.
+Examples are database writes, session loading in the session loader, and Shake session restarts.
+
+Originally we used various ways to implement this, but it was hard to maintain and error-prone.
+Moreover, we cannot stop these threads uniformly when shutting down the server.
+-}
+
+-- | 'withWorkerQueue' creates a new 'TQueue', and launches a worker
+-- thread which polls the queue for requests and runs the given worker
+-- function on them.
+withWorkerQueue :: (t -> IO a) -> ContT () IO (TQueue t)
+withWorkerQueue workerAction = ContT $ \mainAction -> do
+    q <- newTQueueIO
+    withAsync (writerThread q) $ \_ -> mainAction q
+    where
+        writerThread q =
+            forever $ do
+                l <- atomically $ readTQueue q
+                workerAction l
+
+-- | 'awaitRunInThread' queues up an 'IO' action to be run by a worker thread,
+-- and then blocks until the result is computed. If the action throws a
+-- non-async exception, it is rethrown in the calling thread.
+awaitRunInThread :: TQueue (IO ()) -> IO result -> IO result
+awaitRunInThread q act = do
+    -- Queue the action, then use a barrier to wait for its result
+    barrier <- newBarrier
+    atomically $ writeTQueue q $ try act >>= signalBarrier barrier
+    resultOrException <- waitBarrier barrier
+    case resultOrException of
+        Left e  -> throwIO (e :: SomeException)
+        Right r -> return r
diff --git a/ghcide/src/Development/IDE/GHC/CPP.hs b/ghcide/src/Development/IDE/GHC/CPP.hs
new file mode 100644
index 0000000000..c97afd90e7
--- /dev/null
+++ b/ghcide/src/Development/IDE/GHC/CPP.hs
@@ -0,0 +1,63 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE NondecreasingIndentation #-}
+
+-----------------------------------------------------------------------------
+--
+-- GHC Driver
+--
+-- (c) The University of Glasgow 2005
+--
+-----------------------------------------------------------------------------
+
+module Development.IDE.GHC.CPP(doCpp, addOptP)
+where
+
+import           Development.IDE.GHC.Compat      as Compat
+import           Development.IDE.GHC.Compat.Util
+import           GHC
+import           GHC.Settings
+import qualified GHC.SysTools.Cpp                as Pipeline
+
+-- See Note [Guidelines For Using CPP In GHCIDE Import Statements]
+
+
+#if MIN_VERSION_ghc(9,10,2)
+import qualified GHC.SysTools.Tasks              as Pipeline
+#endif
+
+#if MIN_VERSION_ghc(9,11,0)
+import qualified GHC.SysTools.Tasks              as Pipeline
+#endif
+
+addOptP :: String -> DynFlags -> DynFlags
+addOptP f = alterToolSettings $ \s -> s
+    { toolSettings_opt_P             = f : toolSettings_opt_P s
+    , toolSettings_opt_P_fingerprint = fingerprintStrings (f : toolSettings_opt_P s)
+    }
+  where
+    fingerprintStrings ss = fingerprintFingerprints $ map fingerprintString ss
+    alterToolSettings g dynFlags = dynFlags { toolSettings = g (toolSettings dynFlags) }
+
+doCpp :: HscEnv -> FilePath -> FilePath -> IO ()
+doCpp env input_fn output_fn =
+    -- See GHC commit a2f53ac8d968723417baadfab5be36a020ea6850
+    -- this function/Pipeline.doCpp previously had a raw parameter
+    -- always set to True that corresponded to these settings
+    let cpp_opts = Pipeline.CppOpts
+          { cppLinePragmas = True
+
+#if MIN_VERSION_ghc(9,10,2)
+          , sourceCodePreprocessor = Pipeline.SCPHsCpp
+#elif MIN_VERSION_ghc(9,10,0)
+          , useHsCpp = True
+#else
+          , cppUseCc = False
+#endif
+
+          } in
+
+    Pipeline.doCpp (hsc_logger env) (hsc_tmpfs env) (hsc_dflags env) (hsc_unit_env env) cpp_opts input_fn output_fn
+
diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs
new file mode 100644
index 0000000000..befd22c8de
--- /dev/null
+++ b/ghcide/src/Development/IDE/GHC/Compat.hs
@@ -0,0 +1,468 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE PatternSynonyms #-}
+
+-- | Attempt at hiding the GHC version differences we can.
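+--
+-- For example (a hypothetical, minimal illustration), downstream code can
+-- branch on the compiler version at the value level instead of using CPP:
+--
+-- > newCodePathEnabled :: Bool
+-- > newCodePathEnabled = ghcVersion >= GHC910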
+module Development.IDE.GHC.Compat( + hPutStringBuffer, + addIncludePathsQuote, + getModuleHash, + setUpTypedHoles, + lookupNameCache, + disableWarningsAsErrors, + reLoc, + reLocA, + renderMessages, + pattern PFailedWithErrorMessages, + myCoreToStgExpr, + Usage(..), + FastStringCompat, + bytesFS, + mkFastStringByteString, + nodeInfo', + getNodeIds, + getSourceNodeIds, + sourceNodeInfo, + generatedNodeInfo, + simpleNodeInfoCompat, + isAnnotationInNodeInfo, + nodeAnnotations, + mkAstNode, + combineRealSrcSpans, + isQualifiedImport, + GhcVersion(..), + ghcVersion, + ghcVersionStr, + -- * HIE Compat + HieFileResult(..), + HieFile(..), + hieExportNames, + mkHieFile', + enrichHie, + writeHieFile, + readHieFile, + setHieDir, + dontWriteHieFiles, + -- * Compat modules + module Development.IDE.GHC.Compat.Core, + module Development.IDE.GHC.Compat.Env, + module Development.IDE.GHC.Compat.Iface, + module Development.IDE.GHC.Compat.Logger, + module Development.IDE.GHC.Compat.Outputable, + module Development.IDE.GHC.Compat.Parser, + module Development.IDE.GHC.Compat.Plugins, + module Development.IDE.GHC.Compat.Units, + -- * Extras that rely on compat modules + -- * SysTools + Option (..), + runUnlit, + runPp, + + -- * Recompilation avoidance + hscCompileCoreExprHook, + CoreExpr, + simplifyExpr, + tidyExpr, + emptyTidyEnv, + corePrepExpr, + corePrepPgm, + lintInteractiveExpr, + icInteractiveModule, + HomePackageTable, + lookupHpt, + loadModulesHome, + bcoFreeNames, + ModIfaceAnnotation, + pattern Annotation, + AnnTarget(ModuleTarget), + extendAnnEnvList, + module UniqDSet, + module UniqSet, + module UniqDFM, + getDependentMods, + flattenBinds, + mkRnEnv2, + emptyInScopeSet, + Unfolding(..), + noUnfolding, + loadExpr, + byteCodeGen, + bc_bcos, + loadDecls, + hscInterp, + expectJust, + extract_cons, + recDotDot, + + + Dependencies(dep_direct_mods), + NameCacheUpdater, + + XModulePs(..), + +#if !MIN_VERSION_ghc(9,7,0) + liftZonkM, + nonDetFoldOccEnv, +#endif + +#if MIN_VERSION_ghc(9,7,0) + tcInitTidyEnv, +#endif + + ) where +import Control.Applicative ((<|>)) +import qualified Data.ByteString as BS +import Data.Coerce (coerce) +import Data.List (foldl') +import qualified Data.Map as Map +import qualified Data.Set as S +import Data.String (IsString (fromString)) +import Development.IDE.GHC.Compat.Core +import Development.IDE.GHC.Compat.Env +import Development.IDE.GHC.Compat.Iface +import Development.IDE.GHC.Compat.Logger +import Development.IDE.GHC.Compat.Outputable +import Development.IDE.GHC.Compat.Parser +import Development.IDE.GHC.Compat.Plugins +import Development.IDE.GHC.Compat.Units +import Development.IDE.GHC.Compat.Util +import GHC hiding (ModLocation, + RealSrcSpan, exprType, + getLoc, lookupName) +import Prelude hiding (mod) + +import qualified GHC.Core.Opt.Pipeline as GHC +import GHC.Core.Tidy (tidyExpr) +import GHC.CoreToStg.Prep (corePrepPgm) +import qualified GHC.CoreToStg.Prep as GHC +import GHC.Driver.Hooks (hscCompileCoreExprHook) +import GHC.Iface.Ext.Types hiding + (nodeAnnotations) +import qualified GHC.Iface.Ext.Types as GHC (nodeAnnotations) +import GHC.Iface.Ext.Utils + +import GHC.ByteCode.Asm (bcoFreeNames) +import GHC.Core +import GHC.Data.FastString +import GHC.Data.StringBuffer +import GHC.Driver.Session hiding (ExposePackage) +import GHC.Iface.Ext.Ast (enrichHie) +import GHC.Iface.Ext.Binary +import GHC.Iface.Make (mkIfaceExports) +import GHC.SysTools.Tasks (runPp, runUnlit) +import GHC.Types.Annotations (AnnTarget (ModuleTarget), + Annotation (..), + extendAnnEnvList) +import 
qualified GHC.Types.Avail as Avail +import GHC.Types.Unique.DFM as UniqDFM +import GHC.Types.Unique.DSet as UniqDSet +import GHC.Types.Unique.Set as UniqSet +import GHC.Types.Var.Env + +import GHC.Builtin.Uniques +import GHC.ByteCode.Types +import GHC.Core.Lint.Interactive (interactiveInScope) +import GHC.CoreToStg +import GHC.Data.Maybe +import GHC.Driver.Config.Core.Lint.Interactive (lintInteractiveExpr) +import GHC.Driver.Config.Core.Opt.Simplify (initSimplifyExprOpts) +import GHC.Driver.Config.CoreToStg (initCoreToStgOpts) +import GHC.Driver.Config.CoreToStg.Prep (initCorePrepConfig) +import GHC.Driver.Config.Stg.Pipeline +import GHC.Driver.Env as Env +import GHC.Iface.Env +import GHC.Linker.Loader (loadDecls, loadExpr) +import GHC.Runtime.Context (icInteractiveModule) +import GHC.Stg.Pipeline +import GHC.Stg.Syntax +import GHC.StgToByteCode +import GHC.Types.CostCentre +import GHC.Types.IPE +import GHC.Types.SrcLoc (combineRealSrcSpans) +import GHC.Unit.Home.ModInfo (HomePackageTable, + lookupHpt) +import GHC.Unit.Module.Deps (Dependencies (dep_direct_mods), + Usage (..)) +import GHC.Unit.Module.ModIface + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if MIN_VERSION_ghc(9,7,0) +import GHC.Tc.Zonk.TcType (tcInitTidyEnv) +#endif + +#if !MIN_VERSION_ghc(9,7,0) +liftZonkM :: a -> a +liftZonkM = id + +nonDetFoldOccEnv :: (a -> b -> b) -> b -> OccEnv a -> b +nonDetFoldOccEnv = foldOccEnv +#endif + + +type ModIfaceAnnotation = Annotation + + +myCoreToStgExpr :: Logger -> DynFlags -> InteractiveContext + -> Bool + -> Module -> ModLocation -> CoreExpr + -> IO ( Id + ,[CgStgTopBinding] -- output program + , InfoTableProvMap + , CollectedCCs ) +myCoreToStgExpr logger dflags ictxt + for_bytecode + this_mod ml prepd_expr = do + {- Create a temporary binding (just because myCoreToStg needs a + binding for the stg2stg step) -} + let bco_tmp_id = mkSysLocal (fsLit "BCO_toplevel") + (mkPseudoUniqueE 0) + ManyTy + (exprType prepd_expr) + (stg_binds, prov_map, collected_ccs) <- + myCoreToStg logger + dflags + ictxt + for_bytecode + this_mod + ml + [NonRec bco_tmp_id prepd_expr] + return (bco_tmp_id, stg_binds, prov_map, collected_ccs) + +myCoreToStg :: Logger -> DynFlags -> InteractiveContext + -> Bool + -> Module -> ModLocation -> CoreProgram + -> IO ( [CgStgTopBinding] -- output program + , InfoTableProvMap + , CollectedCCs ) -- CAF cost centre info (declared and used) +myCoreToStg logger dflags ictxt + for_bytecode + this_mod ml prepd_binds = do + let (stg_binds, denv, cost_centre_info) + = {-# SCC "Core2Stg" #-} + coreToStg + (initCoreToStgOpts dflags) + this_mod ml prepd_binds + +#if MIN_VERSION_ghc(9,8,0) + (unzip -> (stg_binds2,_),_) +#else + (stg_binds2,_) +#endif + <- {-# SCC "Stg2Stg" #-} + stg2stg logger + (interactiveInScope ictxt) + (initStgPipelineOpts dflags for_bytecode) this_mod stg_binds + + return (stg_binds2, denv, cost_centre_info) + +#if MIN_VERSION_ghc(9,9,0) +reLocA :: (HasLoc (GenLocated a e), HasAnnotation b) + => GenLocated a e -> GenLocated b e +reLocA = reLoc +#endif + +getDependentMods :: ModIface -> [ModuleName] +getDependentMods = map (gwib_mod . snd) . S.toList . dep_direct_mods . 
mi_deps + +simplifyExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr +simplifyExpr _ env = GHC.simplifyExpr (Development.IDE.GHC.Compat.Env.hsc_logger env) (ue_eps (Development.IDE.GHC.Compat.Env.hsc_unit_env env)) (initSimplifyExprOpts (hsc_dflags env) (hsc_IC env)) + +corePrepExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr +corePrepExpr _ env expr = do + cfg <- initCorePrepConfig env + GHC.corePrepExpr (Development.IDE.GHC.Compat.Env.hsc_logger env) cfg expr + +renderMessages :: PsMessages -> (Bag WarnMsg, Bag ErrMsg) +renderMessages msgs = + let renderMsgs extractor = (fmap . fmap) GhcPsMessage . getMessages $ extractor msgs + in (renderMsgs psWarnings, renderMsgs psErrors) + +pattern PFailedWithErrorMessages :: forall a b. (b -> Bag (MsgEnvelope GhcMessage)) -> ParseResult a +pattern PFailedWithErrorMessages msgs + <- PFailed (const . fmap (fmap GhcPsMessage) . getMessages . getPsErrorMessages -> msgs) +{-# COMPLETE POk, PFailedWithErrorMessages #-} + +hieExportNames :: HieFile -> [(SrcSpan, Name)] +hieExportNames = nameListFromAvails . hie_exports + +type NameCacheUpdater = NameCache + +mkHieFile' :: ModSummary + -> [Avail.AvailInfo] +#if MIN_VERSION_ghc(9,11,0) + -> (HieASTs Type, NameEntityInfo) +#else + -> HieASTs Type +#endif + -> BS.ByteString + -> Hsc HieFile +mkHieFile' ms exports +#if MIN_VERSION_ghc(9,11,0) + (asts, entityInfo) +#else + asts +#endif + src = do + let Just src_file = ml_hs_file $ ms_location ms + (asts',arr) = compressTypes asts + return $ HieFile + { hie_hs_file = src_file + , hie_module = ms_mod ms + , hie_types = arr + , hie_asts = asts' +#if MIN_VERSION_ghc(9,11,0) + , hie_entity_infos = entityInfo +#endif + -- mkIfaceExports sorts the AvailInfos for stability + , hie_exports = mkIfaceExports exports + , hie_hs_src = src + } + +addIncludePathsQuote :: FilePath -> DynFlags -> DynFlags +addIncludePathsQuote path x = x{includePaths = f $ includePaths x} + where f i = i{includePathsQuote = path : includePathsQuote i} + +setHieDir :: FilePath -> DynFlags -> DynFlags +setHieDir _f d = d { hieDir = Just _f} + +dontWriteHieFiles :: DynFlags -> DynFlags +dontWriteHieFiles d = gopt_unset d Opt_WriteHie + +setUpTypedHoles :: DynFlags -> DynFlags +setUpTypedHoles df + = flip gopt_unset Opt_AbstractRefHoleFits -- too spammy + $ flip gopt_unset Opt_ShowDocsOfHoleFits -- not used + $ flip gopt_unset Opt_ShowMatchesOfHoleFits -- nice but broken (forgets module qualifiers) + $ flip gopt_unset Opt_ShowProvOfHoleFits -- not used + $ flip gopt_unset Opt_ShowTypeAppOfHoleFits -- not used + $ flip gopt_unset Opt_ShowTypeAppVarsOfHoleFits -- not used + $ flip gopt_unset Opt_ShowTypeOfHoleFits -- massively simplifies parsing + $ flip gopt_set Opt_SortBySubsumHoleFits -- very nice and fast enough in most cases + $ flip gopt_unset Opt_SortValidHoleFits + $ flip gopt_unset Opt_UnclutterValidHoleFits + $ df + { refLevelHoleFits = refLevelHoleFits df <|> Just 1 -- becomes slow at higher levels + + -- Sometimes GHC can emit a lot of hole fits, this causes editors to be slow + -- or just crash, we limit the hole fits to 10. The number was chosen + -- arbirtarily by the author. + , maxRefHoleFits = maxRefHoleFits df <|> Just 10 + , maxValidHoleFits = maxValidHoleFits df <|> Just 10 + } + + +nameListFromAvails :: [Avail.AvailInfo] -> [(SrcSpan, Name)] +nameListFromAvails as = + map (\n -> (nameSrcSpan n, n)) (concatMap Avail.availNames as) + + +getModuleHash :: ModIface -> Fingerprint +getModuleHash = mi_mod_hash . 
mi_final_exts + + +disableWarningsAsErrors :: DynFlags -> DynFlags +disableWarningsAsErrors df = + flip gopt_unset Opt_WarnIsError $! foldl' wopt_unset_fatal df [toEnum 0 ..] + +isQualifiedImport :: ImportDecl a -> Bool +isQualifiedImport ImportDecl{ideclQualified = NotQualified} = False +isQualifiedImport ImportDecl{} = True +isQualifiedImport _ = False + +-- | Like getNodeIds but with generated node removed +getSourceNodeIds :: HieAST a -> Map.Map Identifier (IdentifierDetails a) +getSourceNodeIds = Map.foldl' combineNodeIds Map.empty . Map.filterWithKey (\k _ -> k == SourceInfo) . getSourcedNodeInfo . sourcedNodeInfo + +getNodeIds :: HieAST a -> Map.Map Identifier (IdentifierDetails a) +getNodeIds = Map.foldl' combineNodeIds Map.empty . getSourcedNodeInfo . sourcedNodeInfo + +combineNodeIds :: Map.Map Identifier (IdentifierDetails a) + -> NodeInfo a -> Map.Map Identifier (IdentifierDetails a) +ad `combineNodeIds` (NodeInfo _ _ bd) = Map.unionWith (<>) ad bd + +-- Copied from GHC and adjusted to accept TypeIndex instead of Type +-- nodeInfo' :: Ord a => HieAST a -> NodeInfo a +nodeInfo' :: HieAST TypeIndex -> NodeInfo TypeIndex +nodeInfo' = Map.foldl' combineNodeInfo' emptyNodeInfo . getSourcedNodeInfo . sourcedNodeInfo + +combineNodeInfo' :: Ord a => NodeInfo a -> NodeInfo a -> NodeInfo a +(NodeInfo as ai ad) `combineNodeInfo'` (NodeInfo bs bi bd) = + NodeInfo (S.union as bs) (mergeSorted ai bi) (Map.unionWith (<>) ad bd) + where + mergeSorted :: Ord a => [a] -> [a] -> [a] + mergeSorted la@(a:axs) lb@(b:bxs) = case compare a b of + LT -> a : mergeSorted axs lb + EQ -> a : mergeSorted axs bxs + GT -> b : mergeSorted la bxs + mergeSorted axs [] = axs + mergeSorted [] bxs = bxs + +sourceNodeInfo :: HieAST a -> Maybe (NodeInfo a) +sourceNodeInfo = Map.lookup SourceInfo . getSourcedNodeInfo . sourcedNodeInfo + +generatedNodeInfo :: HieAST a -> Maybe (NodeInfo a) +generatedNodeInfo = Map.lookup GeneratedInfo . getSourcedNodeInfo . sourcedNodeInfo + +data GhcVersion + = GHC96 + | GHC98 + | GHC910 + | GHC912 + deriving (Eq, Ord, Show, Enum) + +ghcVersionStr :: String +ghcVersionStr = VERSION_ghc + +ghcVersion :: GhcVersion +#if MIN_VERSION_GLASGOW_HASKELL(9,12,0,0) +ghcVersion = GHC912 +#elif MIN_VERSION_GLASGOW_HASKELL(9,10,0,0) +ghcVersion = GHC910 +#elif MIN_VERSION_GLASGOW_HASKELL(9,8,0,0) +ghcVersion = GHC98 +#else +ghcVersion = GHC96 +#endif + +simpleNodeInfoCompat :: FastStringCompat -> FastStringCompat -> NodeInfo a +simpleNodeInfoCompat ctor typ = simpleNodeInfo (coerce ctor) (coerce typ) + +isAnnotationInNodeInfo :: (FastStringCompat, FastStringCompat) -> NodeInfo a -> Bool +isAnnotationInNodeInfo p = S.member p . nodeAnnotations + +nodeAnnotations :: NodeInfo a -> S.Set (FastStringCompat, FastStringCompat) +nodeAnnotations = S.map (\(NodeAnnotation ctor typ) -> (coerce ctor, coerce typ)) . GHC.nodeAnnotations + +newtype FastStringCompat = FastStringCompat LexicalFastString + deriving (Show, Eq, Ord) + +instance IsString FastStringCompat where + fromString = FastStringCompat . LexicalFastString . fromString + +mkAstNode :: NodeInfo a -> Span -> [HieAST a] -> HieAST a +mkAstNode n = Node (SourcedNodeInfo $ Map.singleton GeneratedInfo n) + +-- | Load modules, quickly. Input doesn't need to be desugared. +-- A module must be loaded before dependent modules can be typechecked. +-- This variant of loadModuleHome will *never* cause recompilation, it just +-- modifies the session. +-- The order modules are loaded is important when there are hs-boot files. 
+-- In particular you should make sure to load the .hs version of a file after the +-- .hs-boot version. +loadModulesHome + :: [HomeModInfo] + -> HscEnv + -> HscEnv +loadModulesHome mod_infos e = + hscUpdateHUG (\hug -> foldl' (flip addHomeModInfoToHug) hug mod_infos) (e { hsc_type_env_vars = emptyKnotVars }) + +recDotDot :: HsRecFields (GhcPass p) arg -> Maybe Int +recDotDot x = + unRecFieldsDotDot <$> + unLoc <$> rec_dotdot x + +extract_cons (NewTypeCon x) = [x] +extract_cons (DataTypeCons _ xs) = xs diff --git a/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs b/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs new file mode 100644 index 0000000000..7c9efb37e8 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/CmdLine.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE CPP #-} + +-- | Compat module Interface file relevant code. +module Development.IDE.GHC.Compat.CmdLine ( + processCmdLineP + , CmdLineP (..) + , getCmdLineState + , putCmdLineState + , Flag(..) + , OptKind(..) + , EwM + , defFlag + , liftEwM + ) where + +import GHC.Driver.CmdLine +import GHC.Driver.Session (CmdLineP (..), getCmdLineState, + processCmdLineP, putCmdLineState) + diff --git a/ghcide/src/Development/IDE/GHC/Compat/Core.hs b/ghcide/src/Development/IDE/GHC/Compat/Core.hs new file mode 100644 index 0000000000..42f654b609 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Core.hs @@ -0,0 +1,753 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE PatternSynonyms #-} + +-- | Compat Core module that handles the GHC module hierarchy re-organization +-- by re-exporting everything we care about. +-- +-- This module provides no other compat mechanisms, except for simple +-- backward-compatible pattern synonyms. +module Development.IDE.GHC.Compat.Core ( + -- * Session + DynFlags, + extensions, + extensionFlags, + targetPlatform, + packageFlags, + generalFlags, + warningFlags, + topDir, + hiDir, + tmpDir, + importPaths, + useColor, + canUseColor, + useUnicode, + objectDir, + flagsForCompletion, + setImportPaths, + outputFile, + pluginModNames, + refLevelHoleFits, + maxRefHoleFits, + maxValidHoleFits, + setOutputFile, + lookupType, + needWiredInHomeIface, + loadWiredInHomeIface, + readIface, + loadSysInterface, + importDecl, + CommandLineOption, + sPgm_F, + settings, + gopt, + gopt_set, + gopt_unset, + wopt, + wopt_set, + xFlags, + xopt, + xopt_unset, + xopt_set, + FlagSpec(..), + WarningFlag(..), + GeneralFlag(..), + PackageFlag, + PackageArg(..), + ModRenaming(..), + pattern ExposePackage, + parseDynamicFlagsCmdLine, + parseDynamicFilePragma, + wWarningFlags, + updOptLevel, + -- slightly unsafe + setUnsafeGlobalDynFlags, + -- * Linear Haskell + scaledThing, + -- * Interface Files + IfaceExport, + IfaceTyCon(..), + ModIface, + ModIface_(..), +#if MIN_VERSION_ghc(9,11,0) + pattern ModIface, + set_mi_top_env, + set_mi_usages, +#endif + HscSource(..), + WhereFrom(..), + loadInterface, + loadModuleInterface, + RecompileRequired(..), + mkPartialIface, + mkFullIface, + IsBootInterface(..), + -- * Fixity + LexicalFixity(..), + Fixity (..), + mi_fix, + defaultFixity, + lookupFixityRn, + -- * ModSummary + ModSummary(..), + -- * HomeModInfo + HomeModInfo(..), + -- * ModGuts + ModGuts(..), + CgGuts(..), + -- * ModDetails + ModDetails(..), + -- * HsExpr, + -- * Var + Type ( + TyCoRep.TyVarTy, + TyCoRep.AppTy, + TyCoRep.TyConApp, + TyCoRep.ForAllTy, + -- Omitted on purpose + -- pattern Synonym right below it + -- TyCoRep.FunTy, + TyCoRep.LitTy, + TyCoRep.CastTy, + TyCoRep.CoercionTy + ), + pattern FunTy, + pattern ConPatIn, + conPatDetails, + 
mapConPatDetail, + -- * Specs + ImpDeclSpec(..), + ImportSpec(..), + -- * SourceText + SourceText(..), + -- * Ways + Way, + wayGeneralFlags, + wayUnsetGeneralFlags, + -- * AvailInfo + Avail.AvailInfo, + pattern AvailName, + pattern AvailFL, + pattern AvailTC, + Avail.availName, + Avail.availNames, +#if !MIN_VERSION_ghc(9,7,0) + Avail.availNamesWithSelectors, +#endif + Avail.availsToNameSet, + -- * TcGblEnv + TcGblEnv(..), + -- * Parsing and LExer types + HsModule(..), + GHC.ParsedSource, + GHC.RenamedSource, + -- * Compilation Main + HscEnv, + GHC.runGhc, + unGhc, + Session(..), + modifySession, + getSession, + GHC.setSessionDynFlags, + getSessionDynFlags, + GhcMonad, + Ghc, + runHsc, + compileFile, + Phase(..), + hscDesugar, + hscGenHardCode, + hscInteractive, + hscSimplify, + hscTypecheckRename, + hscUpdateHPT, + Development.IDE.GHC.Compat.Core.makeSimpleDetails, + -- * Typecheck utils + tcSplitForAllTyVars, + tcSplitForAllTyVarBinder_maybe, + typecheckIface, + Development.IDE.GHC.Compat.Core.mkIfaceTc, + Development.IDE.GHC.Compat.Core.mkBootModDetailsTc, + Development.IDE.GHC.Compat.Core.initTidyOpts, + driverNoStop, + tidyProgram, + ImportedModsVal(..), + importedByUser, + GHC.TypecheckedSource, + -- * Source Locations + HasSrcSpan, + SrcLoc.Located, + SrcLoc.unLoc, + getLoc, + GHC.getLocA, + GHC.locA, + GHC.noLocA, + unLocA, + LocatedAn, + GHC.LocatedA, + GHC.AnnListItem(..), + GHC.NameAnn(..), + SrcLoc.RealLocated, + SrcLoc.GenLocated(..), + SrcLoc.SrcSpan(SrcLoc.UnhelpfulSpan), + SrcLoc.RealSrcSpan, + pattern RealSrcSpan, + SrcLoc.RealSrcLoc, + pattern RealSrcLoc, + SrcLoc.SrcLoc(SrcLoc.UnhelpfulLoc), + BufSpan, +#if !MIN_VERSION_ghc(9,9,0) + GHC.SrcAnn, +#endif + SrcLoc.leftmost_smallest, + SrcLoc.containsSpan, + SrcLoc.mkGeneralSrcSpan, + SrcLoc.mkRealSrcSpan, + SrcLoc.mkRealSrcLoc, + SrcLoc.getRealSrcSpan, + SrcLoc.realSrcLocSpan, + SrcLoc.realSrcSpanStart, + SrcLoc.realSrcSpanEnd, + isSubspanOfA, + SrcLoc.isSubspanOf, + SrcLoc.wiredInSrcSpan, + SrcLoc.mkSrcSpan, + SrcLoc.srcSpanStart, + SrcLoc.srcSpanStartLine, + SrcLoc.srcSpanStartCol, + SrcLoc.srcSpanEnd, + SrcLoc.srcSpanEndLine, + SrcLoc.srcSpanEndCol, + SrcLoc.srcSpanFile, + SrcLoc.srcLocCol, + SrcLoc.srcLocFile, + SrcLoc.srcLocLine, + SrcLoc.noSrcSpan, + SrcLoc.noSrcLoc, + SrcLoc.noLoc, + SrcLoc.srcSpanToRealSrcSpan, + mapLoc, + -- * Finder + FindResult(..), + mkHomeModLocation, + findObjectLinkableMaybe, + InstalledFindResult(..), + -- * Module and Package + ModuleOrigin(..), + PackageName(..), + -- * Linker +#if MIN_VERSION_ghc(9,11,0) + LinkablePart(..), +#else + Unlinked(..), +#endif + Linkable(..), + unload, + -- * Hooks + Hooks, + runMetaHook, + MetaHook, + MetaRequest(..), + metaRequestE, + metaRequestP, + metaRequestT, + metaRequestD, + metaRequestAW, + -- * HPT + addToHpt, + addListToHpt, + -- * Driver-Make + Target(..), + TargetId(..), + mkSimpleTarget, + -- * GHCi + initObjLinker, + loadDLL, + InteractiveImport(..), + GHC.getContext, + GHC.setContext, + GHC.parseImportDecl, + GHC.runDecls, + Warn(..), + -- * ModLocation + GHC.ModLocation, + Module.ml_hs_file, + Module.ml_obj_file, + Module.ml_hi_file, + Module.ml_hie_file, + -- * DataCon + DataCon.dataConExTyCoVars, + -- * Role + Role(..), + -- * Panic + Plain.PlainGhcException, + -- * Other + GHC.CoreModule(..), + GHC.SafeHaskellMode(..), + pattern GRE, + gre_name, + gre_imp, + gre_lcl, + gre_par, + collectHsBindsBinders, + -- * Util Module re-exports + module GHC.Builtin.Names, + module GHC.Builtin.Types, + module GHC.Builtin.Types.Prim, + module 
GHC.Builtin.Utils, + module GHC.Core.Class, + module GHC.Core.Coercion, + module GHC.Core.ConLike, + module GHC.Core.DataCon, + module GHC.Core.FamInstEnv, + module GHC.Core.InstEnv, + module GHC.Types.Unique.FM, + module GHC.Core.PatSyn, + module GHC.Core.Predicate, + module GHC.Core.TyCon, + module GHC.Core.TyCo.Ppr, + module GHC.Core.Type, + module GHC.Core.Unify, + module GHC.Core.Utils, + + module GHC.HsToCore.Docs, + module GHC.HsToCore.Expr, + module GHC.HsToCore.Monad, + + module GHC.Iface.Syntax, + module GHC.Iface.Recomp, + + module GHC.Hs.Decls, + module GHC.Hs.Expr, + module GHC.Hs.Doc, + module GHC.Hs.Extension, + module GHC.Hs.ImpExp, + module GHC.Hs.Pat, + module GHC.Hs.Type, + module GHC.Hs.Utils, + module Language.Haskell.Syntax, + + module GHC.Rename.Names, + module GHC.Rename.Splice, + + module GHC.Tc.Instance.Family, + module GHC.Tc.Module, + module GHC.Tc.Types, + module GHC.Tc.Types.Evidence, + module GHC.Tc.Utils.Env, + module GHC.Tc.Utils.Monad, + + module GHC.Types.Basic, + module GHC.Types.Id, + module GHC.Types.Name, + module GHC.Types.Name.Set, + module GHC.Types.Name.Cache, + module GHC.Types.Name.Env, + module GHC.Types.Name.Reader, + module GHC.Utils.Error, +#if !MIN_VERSION_ghc(9,7,0) + module GHC.Types.Avail, +#endif + module GHC.Types.SourceFile, + module GHC.Types.SourceText, + module GHC.Types.TyThing, + module GHC.Types.TyThing.Ppr, + module GHC.Types.Unique.Supply, + module GHC.Types.Var, + module GHC.Unit.Module, + module GHC.Unit.Module.Graph, + -- * Syntax re-exports + module GHC.Hs, + module GHC.Hs.Binds, + module GHC.Parser, + module GHC.Parser.Header, + module GHC.Parser.Lexer, + module GHC.Utils.Panic, + CompileReason(..), + hsc_type_env_vars, + hscUpdateHUG, hsc_HUG, + GhcMessage(..), + getKey, + module GHC.Driver.Env.KnotVars, + module GHC.Linker.Types, + module GHC.Types.Unique.Map, + module GHC.Utils.TmpFs, + module GHC.Unit.Finder.Types, + module GHC.Unit.Env, + module GHC.Driver.Phases, + Extension(..), + mkCgInteractiveGuts, + justBytecode, + justObjects, + emptyHomeModInfoLinkable, + homeModInfoByteCode, + homeModInfoObject, + groupOrigin, + isVisibleFunArg, +#if MIN_VERSION_ghc(9,8,0) + lookupGlobalRdrEnv +#endif + ) where + +import qualified GHC + +-- NOTE(ozkutuk): Cpp clashes Phase.Cpp, so we hide it. +-- Not the greatest solution, but gets the job done +-- (until the CPP extension is actually needed). 
+import GHC.LanguageExtensions.Type hiding (Cpp) + +import GHC.Builtin.Names hiding (Unique, printName) +import GHC.Builtin.Types +import GHC.Builtin.Types.Prim +import GHC.Builtin.Utils +import GHC.Core (CoreProgram) +import GHC.Core.Class +import GHC.Core.Coercion +import GHC.Core.ConLike +import GHC.Core.DataCon hiding (dataConExTyCoVars) +import qualified GHC.Core.DataCon as DataCon +import GHC.Core.FamInstEnv hiding (pprFamInst) +import GHC.Core.InstEnv +import GHC.Core.PatSyn +import GHC.Core.Predicate +import GHC.Core.TyCo.Ppr +import qualified GHC.Core.TyCo.Rep as TyCoRep +import GHC.Core.TyCon +import GHC.Core.Type +import GHC.Core.Unify +import GHC.Core.Utils +import GHC.Driver.CmdLine (Warn (..)) +import GHC.Driver.Hooks +import GHC.Driver.Main as GHC +import GHC.Driver.Monad +import GHC.Driver.Phases +import GHC.Driver.Pipeline +import GHC.Driver.Plugins +import GHC.Driver.Session hiding (ExposePackage) +import qualified GHC.Driver.Session as DynFlags +import GHC.Hs.Binds +import GHC.HsToCore.Docs +import GHC.HsToCore.Expr +import GHC.HsToCore.Monad +import GHC.Iface.Load +import GHC.Iface.Make as GHC +import GHC.Iface.Recomp +import GHC.Iface.Syntax +import GHC.Iface.Tidy as GHC +import GHC.IfaceToCore +import GHC.Parser +import GHC.Parser.Header hiding (getImports) +import GHC.Rename.Fixity (lookupFixityRn) +import GHC.Rename.Names +import GHC.Rename.Splice +import qualified GHC.Runtime.Interpreter as GHCi +import GHC.Tc.Instance.Family +import GHC.Tc.Module +import GHC.Tc.Types +import GHC.Tc.Types.Evidence hiding ((<.>)) +import GHC.Tc.Utils.Env +import GHC.Tc.Utils.Monad hiding (Applicative (..), IORef, + MonadFix (..), MonadIO (..), allM, + anyM, concatMapM, mapMaybeM, foldMapM, + (<$>)) +import GHC.Tc.Utils.TcType as TcType +import qualified GHC.Types.Avail as Avail +import GHC.Types.Basic +import GHC.Types.Id +import GHC.Types.Name hiding (varName) +import GHC.Types.Name.Cache +import GHC.Types.Name.Env +import GHC.Types.Name.Reader hiding (GRE, gre_imp, gre_lcl, + gre_name, gre_par) +import qualified GHC.Types.Name.Reader as RdrName +import GHC.Types.SrcLoc (BufPos, BufSpan, + SrcLoc (UnhelpfulLoc), + SrcSpan (UnhelpfulSpan)) +import qualified GHC.Types.SrcLoc as SrcLoc +import GHC.Types.Unique.FM +import GHC.Types.Unique.Supply +import GHC.Types.Var (Var (varName), setTyVarUnique, + setVarUnique) + +import qualified GHC.Types.Var as TypesVar +import GHC.Unit.Info (PackageName (..)) +import GHC.Unit.Module hiding (ModLocation (..), UnitId, + moduleUnit, toUnitId) +import qualified GHC.Unit.Module as Module +import GHC.Unit.State (ModuleOrigin (..)) +import GHC.Utils.Error (Severity (..), emptyMessages) +import GHC.Utils.Panic hiding (try) +import qualified GHC.Utils.Panic.Plain as Plain + + +import Data.Foldable (toList) +import GHC.Core.Multiplicity (scaledThing) +import GHC.Data.Bag +import qualified GHC.Data.Strict as Strict +import qualified GHC.Driver.Config.Finder as GHC +import qualified GHC.Driver.Config.Tidy as GHC +import GHC.Driver.Env +import GHC.Driver.Env as GHCi +import GHC.Driver.Env.KnotVars +import GHC.Driver.Errors.Types +import GHC.Hs (HsModule (..)) +import GHC.Hs.Decls hiding (FunDep) +import GHC.Hs.Doc +import GHC.Hs.Expr +import GHC.Hs.Extension +import GHC.Hs.ImpExp +import GHC.Hs.Pat +import GHC.Hs.Type +import GHC.Hs.Utils hiding (collectHsBindsBinders) +import qualified GHC.Linker.Loader as Linker +import GHC.Linker.Types +import GHC.Parser.Annotation (EpAnn (..)) +import GHC.Parser.Lexer hiding (getPsMessages, + initParserState) +import 
GHC.Platform.Ways +import GHC.Runtime.Context (InteractiveImport (..)) +import GHC.Types.Fixity (Fixity (..), LexicalFixity (..), + defaultFixity) +import GHC.Types.Meta +import GHC.Types.Name.Set +import GHC.Types.SourceFile (HscSource (..)) +import GHC.Types.SourceText +import GHC.Types.Target (Target (..), TargetId (..)) +import GHC.Types.TyThing +import GHC.Types.TyThing.Ppr +import GHC.Types.Unique +import GHC.Types.Unique.Map +import GHC.Unit.Env +import GHC.Unit.Finder hiding (mkHomeModLocation) +import qualified GHC.Unit.Finder as GHC +import GHC.Unit.Finder.Types +import GHC.Unit.Home.ModInfo +import GHC.Unit.Module.Graph +import GHC.Unit.Module.Imported +import GHC.Unit.Module.ModDetails +import GHC.Unit.Module.ModGuts +#if !MIN_VERSION_ghc(9,9,0) +import GHC.Hs (SrcSpanAnn') +#endif +import GHC.Unit.Module.ModIface (IfaceExport, ModIface, + ModIface_ (..), mi_fix +#if MIN_VERSION_ghc(9,11,0) + , pattern ModIface + , set_mi_top_env + , set_mi_usages +#endif + ) +import GHC.Unit.Module.ModSummary (ModSummary (..)) +import GHC.Utils.Error (mkPlainErrorMsgEnvelope) +import GHC.Utils.Panic +import GHC.Utils.TmpFs +import Language.Haskell.Syntax hiding (FunDep) + + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if MIN_VERSION_ghc(9,11,0) +import System.OsPath +#endif + +#if !MIN_VERSION_ghc(9,7,0) +import GHC.Types.Avail (greNamePrintableName) +#endif + +#if !MIN_VERSION_ghc(9,9,0) +import GHC.Hs (SrcSpanAnn') +#endif + +mkHomeModLocation :: DynFlags -> ModuleName -> FilePath -> IO Module.ModLocation +#if MIN_VERSION_ghc(9,11,0) +mkHomeModLocation df mn f = + let osf = unsafeEncodeUtf f + in pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn osf +#else +mkHomeModLocation df mn f = pure $ GHC.mkHomeModLocation (GHC.initFinderOpts df) mn f +#endif + +pattern RealSrcSpan :: SrcLoc.RealSrcSpan -> Maybe BufSpan -> SrcLoc.SrcSpan + +pattern RealSrcSpan x y <- SrcLoc.RealSrcSpan x ((\case Strict.Nothing -> Nothing; Strict.Just a -> Just a) -> y) where + RealSrcSpan x y = SrcLoc.RealSrcSpan x (case y of Nothing -> Strict.Nothing; Just a -> Strict.Just a) + +{-# COMPLETE RealSrcSpan, UnhelpfulSpan #-} + +pattern RealSrcLoc :: SrcLoc.RealSrcLoc -> Strict.Maybe BufPos-> SrcLoc.SrcLoc +pattern RealSrcLoc x y = SrcLoc.RealSrcLoc x y +{-# COMPLETE RealSrcLoc, UnhelpfulLoc #-} + + +pattern AvailTC :: Name -> [Name] -> [FieldLabel] -> Avail.AvailInfo +#if __GLASGOW_HASKELL__ >= 907 +pattern AvailTC n names pieces <- Avail.AvailTC n ((,[]) -> (names,pieces)) +#else +pattern AvailTC n names pieces <- Avail.AvailTC n ((\gres -> foldr (\gre (names, pieces) -> case gre of + Avail.NormalGreName name -> (name: names, pieces) + Avail.FieldGreName label -> (names, label:pieces)) ([], []) gres) -> (names, pieces)) +#endif + +pattern AvailName :: Name -> Avail.AvailInfo +#if __GLASGOW_HASKELL__ >= 907 +pattern AvailName n <- Avail.Avail n +#else +pattern AvailName n <- Avail.Avail (Avail.NormalGreName n) +#endif + +pattern AvailFL :: FieldLabel -> Avail.AvailInfo +#if __GLASGOW_HASKELL__ >= 907 +pattern AvailFL fl <- (const Nothing -> Just fl) -- this pattern always fails as this field was removed in 9.7 +#else +pattern AvailFL fl <- Avail.Avail (Avail.FieldGreName fl) +#endif + +{-# COMPLETE AvailTC, AvailName, AvailFL #-} + +setImportPaths :: [FilePath] -> DynFlags -> DynFlags +setImportPaths importPaths flags = flags { importPaths = importPaths } + +pattern ExposePackage :: String -> PackageArg -> ModRenaming -> PackageFlag +-- 
https://github.com/facebook/fbghc +#ifdef __FACEBOOK_HASKELL__ +pattern ExposePackage s a mr <- DynFlags.ExposePackage s a _ mr +#else +pattern ExposePackage s a mr = DynFlags.ExposePackage s a mr +#endif + +isVisibleFunArg :: Development.IDE.GHC.Compat.Core.FunTyFlag -> Bool +isVisibleFunArg = TypesVar.isVisibleFunArg +type FunTyFlag = TypesVar.FunTyFlag +pattern FunTy :: Development.IDE.GHC.Compat.Core.FunTyFlag -> Type -> Type -> Type +pattern FunTy af arg res <- TyCoRep.FunTy {ft_af = af, ft_arg = arg, ft_res = res} + +class HasSrcSpan a where + getLoc :: a -> SrcSpan + +instance HasSrcSpan SrcSpan where + getLoc = id + +instance HasSrcSpan (SrcLoc.GenLocated SrcSpan a) where + getLoc = GHC.getLoc + +#if MIN_VERSION_ghc(9,9,0) +instance HasSrcSpan (EpAnn a) where + getLoc = GHC.getHasLoc +#endif + +#if MIN_VERSION_ghc(9,9,0) +instance HasSrcSpan (SrcLoc.GenLocated (EpAnn ann) a) where + getLoc (L l _) = getLoc l +instance HasSrcSpan (SrcLoc.GenLocated (GHC.EpaLocation) a) where + getLoc = GHC.getHasLoc +#else +instance HasSrcSpan (SrcSpanAnn' ann) where + getLoc = GHC.locA +instance HasSrcSpan (SrcLoc.GenLocated (SrcSpanAnn' ann) a) where + getLoc (L l _) = l +#endif + +pattern L :: HasSrcSpan a => SrcSpan -> e -> SrcLoc.GenLocated a e +pattern L l a <- GHC.L (getLoc -> l) a +{-# COMPLETE L #-} + +-- This is from the old api, but it still simplifies +pattern ConPatIn :: SrcLoc.Located (ConLikeP GhcPs) -> HsConPatDetails GhcPs -> Pat GhcPs +#if MIN_VERSION_ghc(9,9,0) +pattern ConPatIn con args <- ConPat _ (L _ (SrcLoc.noLoc -> con)) args + where + ConPatIn con args = ConPat GHC.noAnn (GHC.noLocA $ SrcLoc.unLoc con) args +#else +pattern ConPatIn con args <- ConPat EpAnnNotUsed (L _ (SrcLoc.noLoc -> con)) args + where + ConPatIn con args = ConPat EpAnnNotUsed (GHC.noLocA $ SrcLoc.unLoc con) args +#endif + +conPatDetails :: Pat p -> Maybe (HsConPatDetails p) +conPatDetails (ConPat _ _ args) = Just args +conPatDetails _ = Nothing + +mapConPatDetail :: (HsConPatDetails p -> Maybe (HsConPatDetails p)) -> Pat p -> Maybe (Pat p) +mapConPatDetail f pat@(ConPat _ _ args) = (\args' -> pat { pat_args = args'}) <$> f args +mapConPatDetail _ _ = Nothing + + +initObjLinker :: HscEnv -> IO () +initObjLinker env = + GHCi.initObjLinker (GHCi.hscInterp env) + +loadDLL :: HscEnv -> String -> IO (Maybe String) +loadDLL env str = do + res <- GHCi.loadDLL (GHCi.hscInterp env) str +#if MIN_VERSION_ghc(9,11,0) || (MIN_VERSION_ghc(9, 8, 3) && !MIN_VERSION_ghc(9, 9, 0)) || (MIN_VERSION_ghc(9, 10, 2) && !MIN_VERSION_ghc(9, 11, 0)) + pure $ + case res of + Left err_msg -> Just err_msg + Right _ -> Nothing +#else + pure res +#endif + +unload :: HscEnv -> [Linkable] -> IO () +unload hsc_env linkables = + Linker.unload + (GHCi.hscInterp hsc_env) + hsc_env linkables + + +isSubspanOfA :: LocatedAn la a -> LocatedAn lb b -> Bool +isSubspanOfA a b = SrcLoc.isSubspanOf (GHC.getLocA a) (GHC.getLocA b) + +type LocatedAn a = GHC.LocatedAn a + +unLocA :: forall pass a. 
XRec (GhcPass pass) a -> a +unLocA = unXRec @(GhcPass pass) + + +pattern GRE :: Name -> Parent -> Bool -> [ImportSpec] -> RdrName.GlobalRdrElt +{-# COMPLETE GRE #-} +pattern GRE{gre_name, gre_par, gre_lcl, gre_imp} <- RdrName.GRE +#if MIN_VERSION_ghc(9,7,0) + {gre_name = gre_name +#else + {gre_name = (greNamePrintableName -> gre_name) +#endif + ,gre_par, gre_lcl, gre_imp = (toList -> gre_imp)} + +collectHsBindsBinders :: CollectPass p => LHsBindsLR p idR -> [IdP p] +collectHsBindsBinders x = GHC.collectHsBindsBinders CollNoDictBinders x + + + +makeSimpleDetails :: HscEnv -> TcGblEnv -> IO ModDetails +makeSimpleDetails hsc_env = + GHC.makeSimpleDetails + (hsc_logger hsc_env) + +mkIfaceTc :: HscEnv -> GHC.SafeHaskellMode -> ModDetails -> ModSummary -> Maybe CoreProgram -> TcGblEnv -> IO ModIface +mkIfaceTc hscEnv shm md _ms _mcp = + GHC.mkIfaceTc hscEnv shm md _ms _mcp -- mcp::Maybe CoreProgram is only used in GHC >= 9.6 + +mkBootModDetailsTc :: HscEnv -> TcGblEnv -> IO ModDetails +mkBootModDetailsTc session = GHC.mkBootModDetailsTc + (hsc_logger session) + + +initTidyOpts :: HscEnv -> IO TidyOpts +initTidyOpts = + GHC.initTidyOpts + +driverNoStop :: StopPhase +driverNoStop = NoStop + +groupOrigin :: MatchGroup GhcRn body -> Origin +mapLoc :: (a -> b) -> SrcLoc.GenLocated l a -> SrcLoc.GenLocated l b +mapLoc = fmap +groupOrigin = mg_ext + +mkSimpleTarget :: DynFlags -> FilePath -> Target +mkSimpleTarget df fp = Target (TargetFile fp Nothing) True (homeUnitId_ df) Nothing + +#if MIN_VERSION_ghc(9,7,0) +lookupGlobalRdrEnv gre_env occ = lookupGRE gre_env (LookupOccName occ AllRelevantGREs) +#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Driver.hs b/ghcide/src/Development/IDE/GHC/Compat/Driver.hs new file mode 100644 index 0000000000..6ab1d26df2 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Driver.hs @@ -0,0 +1,142 @@ +-- ============================================================================ +-- DO NOT EDIT +-- This module copies parts of the driver code in GHC.Driver.Main to provide +-- `hscTypecheckRenameWithDiagnostics`. 
+-- Issue to add this function: https://gitlab.haskell.org/ghc/ghc/-/issues/24996 +-- MR to add this function: https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12891 +-- ============================================================================ + +{-# LANGUAGE CPP #-} + +module Development.IDE.GHC.Compat.Driver + ( hscTypecheckRenameWithDiagnostics + ) where + +#if MIN_VERSION_ghc(9,11,0) + +import GHC.Driver.Main (hscTypecheckRenameWithDiagnostics) + +#else + +import Control.Monad +import GHC.Core +import GHC.Data.FastString +import GHC.Data.Maybe +import GHC.Driver.Env +import GHC.Driver.Errors.Types +import GHC.Driver.Main +import GHC.Driver.Session +import GHC.Hs +import GHC.Hs.Dump +import GHC.Iface.Ext.Ast (mkHieFile) +import GHC.Iface.Ext.Binary (hie_file_result, readHieFile, + writeHieFile) +import GHC.Iface.Ext.Debug (diffFile, validateScopes) +import GHC.Iface.Ext.Types (getAsts, hie_asts, hie_module) +import GHC.Tc.Module +import GHC.Tc.Utils.Monad +import GHC.Types.SourceFile +import GHC.Types.SrcLoc +import GHC.Unit +import GHC.Unit.Module.ModDetails +import GHC.Unit.Module.ModIface +import GHC.Unit.Module.ModSummary +import GHC.Utils.Error +import GHC.Utils.Logger +import GHC.Utils.Outputable +import GHC.Utils.Panic.Plain + +hscTypecheckRenameWithDiagnostics :: HscEnv -> ModSummary -> HsParsedModule + -> IO ((TcGblEnv, RenamedStuff), Messages GhcMessage) +hscTypecheckRenameWithDiagnostics hsc_env mod_summary rdr_module = + runHsc' hsc_env $ hsc_typecheck True mod_summary (Just rdr_module) + +-- ============================================================================ +-- DO NOT EDIT - Refer to top of file +-- ============================================================================ +hsc_typecheck :: Bool -- ^ Keep renamed source? 
+ -> ModSummary -> Maybe HsParsedModule + -> Hsc (TcGblEnv, RenamedStuff) +hsc_typecheck keep_rn mod_summary mb_rdr_module = do + hsc_env <- getHscEnv + let hsc_src = ms_hsc_src mod_summary + dflags = hsc_dflags hsc_env + home_unit = hsc_home_unit hsc_env + outer_mod = ms_mod mod_summary + mod_name = moduleName outer_mod + outer_mod' = mkHomeModule home_unit mod_name + inner_mod = homeModuleNameInstantiation home_unit mod_name + src_filename = ms_hspp_file mod_summary + real_loc = realSrcLocSpan $ mkRealSrcLoc (mkFastString src_filename) 1 1 + keep_rn' = gopt Opt_WriteHie dflags || keep_rn + massert (isHomeModule home_unit outer_mod) + tc_result <- if hsc_src == HsigFile && not (isHoleModule inner_mod) + then ioMsgMaybe $ hoistTcRnMessage $ tcRnInstantiateSignature hsc_env outer_mod' real_loc + else + do hpm <- case mb_rdr_module of + Just hpm -> return hpm + Nothing -> hscParse' mod_summary + tc_result0 <- tcRnModule' mod_summary keep_rn' hpm + if hsc_src == HsigFile + then + do (iface, _) <- liftIO $ hscSimpleIface hsc_env Nothing tc_result0 mod_summary + ioMsgMaybe $ hoistTcRnMessage $ + tcRnMergeSignatures hsc_env hpm tc_result0 iface + else return tc_result0 + rn_info <- extract_renamed_stuff mod_summary tc_result + return (tc_result, rn_info) + +-- ============================================================================ +-- DO NOT EDIT - Refer to top of file +-- ============================================================================ +extract_renamed_stuff :: ModSummary -> TcGblEnv -> Hsc RenamedStuff +extract_renamed_stuff mod_summary tc_result = do + let rn_info = getRenamedStuff tc_result + + dflags <- getDynFlags + logger <- getLogger + liftIO $ putDumpFileMaybe logger Opt_D_dump_rn_ast "Renamer" + FormatHaskell (showAstData NoBlankSrcSpan NoBlankEpAnnotations rn_info) + + -- Create HIE files + when (gopt Opt_WriteHie dflags) $ do + -- I assume this fromJust is safe because `-fwrite-hie-file` + -- enables the option which keeps the renamed source. 
+ hieFile <- mkHieFile mod_summary tc_result (fromJust rn_info) + let out_file = ml_hie_file $ ms_location mod_summary + liftIO $ writeHieFile out_file hieFile + liftIO $ putDumpFileMaybe logger Opt_D_dump_hie "HIE AST" FormatHaskell (ppr $ hie_asts hieFile) + + -- Validate HIE files + when (gopt Opt_ValidateHie dflags) $ do + hs_env <- Hsc $ \e w -> return (e, w) + liftIO $ do + -- Validate Scopes + case validateScopes (hie_module hieFile) $ getAsts $ hie_asts hieFile of + [] -> putMsg logger $ text "Got valid scopes" + xs -> do + putMsg logger $ text "Got invalid scopes" + mapM_ (putMsg logger) xs + -- Roundtrip testing + file' <- readHieFile (hsc_NC hs_env) out_file + case diffFile hieFile (hie_file_result file') of + [] -> + putMsg logger $ text "Got no roundtrip errors" + xs -> do + putMsg logger $ text "Got roundtrip errors" + let logger' = updateLogFlags logger (log_set_dopt Opt_D_ppr_debug) + mapM_ (putMsg logger') xs + return rn_info + +-- ============================================================================ +-- DO NOT EDIT - Refer to top of file +-- ============================================================================ +hscSimpleIface :: HscEnv + -> Maybe CoreProgram + -> TcGblEnv + -> ModSummary + -> IO (ModIface, ModDetails) +hscSimpleIface hsc_env mb_core_program tc_result summary + = runHsc hsc_env $ hscSimpleIface' mb_core_program tc_result summary + +#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Env.hs b/ghcide/src/Development/IDE/GHC/Compat/Env.hs new file mode 100644 index 0000000000..cbccc1a3de --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Env.hs @@ -0,0 +1,147 @@ +{-# LANGUAGE CPP #-} + +-- | Compat module for the main Driver types, such as 'HscEnv', +-- 'UnitEnv' and some DynFlags compat functions. +module Development.IDE.GHC.Compat.Env ( + Env.HscEnv(hsc_FC, hsc_NC, hsc_IC, hsc_mod_graph + , hsc_type_env_vars + ), + Env.hsc_HPT, + InteractiveContext(..), + setInteractivePrintName, + setInteractiveDynFlags, + Env.hsc_dflags, + hsc_EPS, + Env.hsc_logger, + Env.hsc_tmpfs, + Env.hsc_unit_env, + Env.hsc_hooks, + hscSetHooks, + TmpFs, + -- * HomeUnit + hscHomeUnit, + HomeUnit, + setHomeUnitId_, + Home.mkHomeModule, + -- * Provide backwards Compatible + -- types and helper functions. 
+ Logger, + UnitEnv, + hscSetUnitEnv, + hscSetFlags, + initTempFs, + -- * Home Unit + Session.homeUnitId_, + -- * DynFlags Helper + setBytecodeLinkerOptions, + setInterpreterLinkerOptions, + Session.safeImportsOn, + -- * Ways + Ways, + Way, + hostFullWays, + setWays, + wayGeneralFlags, + wayUnsetGeneralFlags, + -- * Backend, backwards compatible + Backend, + setBackend, + ghciBackend, + Development.IDE.GHC.Compat.Env.platformDefaultBackend, + workingDirectory, + setWorkingDirectory, + hscSetActiveUnitId, + reexportedModules, + ) where + +import GHC (setInteractiveDynFlags) + +import GHC.Driver.Backend as Backend +import GHC.Driver.Env (HscEnv, hscSetActiveUnitId) +import qualified GHC.Driver.Env as Env +import GHC.Driver.Hooks (Hooks) +import GHC.Driver.Session +import qualified GHC.Driver.Session as Session +import GHC.Platform.Ways +import GHC.Runtime.Context +import GHC.Unit.Env (UnitEnv) +import GHC.Unit.Home as Home +import GHC.Unit.Types (UnitId) +import GHC.Utils.Logger +import GHC.Utils.TmpFs + + +hsc_EPS :: HscEnv -> UnitEnv +hsc_EPS = Env.hsc_unit_env + +setWorkingDirectory :: FilePath -> DynFlags -> DynFlags +setWorkingDirectory p d = d { workingDirectory = Just p } + +setHomeUnitId_ :: UnitId -> DynFlags -> DynFlags +setHomeUnitId_ uid df = df { Session.homeUnitId_ = uid } + +hscSetFlags :: DynFlags -> HscEnv -> HscEnv +hscSetFlags df env = env { Env.hsc_dflags = df } + +initTempFs :: HscEnv -> IO HscEnv +initTempFs env = do + tmpFs <- initTmpFs + pure env { Env.hsc_tmpfs = tmpFs } + +hscSetUnitEnv :: UnitEnv -> HscEnv -> HscEnv +hscSetUnitEnv ue env = env { Env.hsc_unit_env = ue } + +hscSetHooks :: Hooks -> HscEnv -> HscEnv +hscSetHooks hooks env = + env { Env.hsc_hooks = hooks } + +hscHomeUnit :: HscEnv -> HomeUnit +hscHomeUnit = + Env.hsc_home_unit + +-- | We don't want to generate object code so we compile to bytecode +-- (HscInterpreted) which implies LinkInMemory +-- HscInterpreted +setBytecodeLinkerOptions :: DynFlags -> DynFlags +setBytecodeLinkerOptions df = df { + ghcLink = LinkInMemory + , backend = noBackend + , ghcMode = CompManager + } + +setInterpreterLinkerOptions :: DynFlags -> DynFlags +setInterpreterLinkerOptions df = df { + ghcLink = LinkInMemory + , backend = interpreterBackend + , ghcMode = CompManager + } + +-- ------------------------------------------------------- +-- Ways helpers +-- ------------------------------------------------------- + + +setWays :: Ways -> DynFlags -> DynFlags +setWays newWays flags = + flags { Session.targetWays_ = newWays} + +-- ------------------------------------------------------- +-- Backend helpers +-- ------------------------------------------------------- + + +ghciBackend :: Backend +#if MIN_VERSION_ghc(9,6,0) +ghciBackend = interpreterBackend +#else +ghciBackend = Interpreter +#endif + +platformDefaultBackend :: DynFlags -> Backend +platformDefaultBackend = + Backend.platformDefaultBackend . 
targetPlatform + +setBackend :: Backend -> DynFlags -> DynFlags +setBackend backend flags = + flags { backend = backend } + diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs new file mode 100644 index 0000000000..de59afa146 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -0,0 +1,136 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TemplateHaskell #-} +module Development.IDE.GHC.Compat.Error ( + -- * Top-level error types and lens for easy access + MsgEnvelope(..), + msgEnvelopeErrorL, + GhcMessage(..), + -- * Error messages for the typechecking and renamer phase + TcRnMessage (..), + TcRnMessageDetailed (..), + Hole(..), + stripTcRnMessageContext, + -- * Parsing error message + PsMessage(..), + -- * Desugaring diagnostic + DsMessage (..), + -- * Driver error message + DriverMessage (..), + -- * General Diagnostics + Diagnostic(..), + -- * Prisms and lenses for error selection + _TcRnMessage, + _TcRnMessageWithCtx, + _GhcPsMessage, + _GhcDsMessage, + _GhcDriverMessage, + _ReportHoleError, + _TcRnIllegalWildcardInType, + _TcRnPartialTypeSignatures, + _TcRnMissingSignature, + _TcRnSolverReport, + _TcRnMessageWithInfo, + _TypeHole, + _ConstraintHole, + reportContextL, + reportContentL, + _MismatchMessage, + _TypeEqMismatchActual, + _TypeEqMismatchExpected, + ) where + +import Control.Lens +import Development.IDE.GHC.Compat (Type) +import GHC.Driver.Errors.Types +import GHC.HsToCore.Errors.Types +import GHC.Tc.Errors.Types +import GHC.Tc.Types.Constraint (Hole (..), HoleSort) +import GHC.Types.Error + +-- | Some 'TcRnMessage's are nested in other constructors for additional context. +-- For example, 'TcRnWithHsDocContext' and 'TcRnMessageWithInfo'. +-- However, in most occasions you don't need the additional context and you just want +-- the error message. @'_TcRnMessage'@ recursively unwraps these constructors, +-- until there are no more constructors with additional context. +-- +-- Use @'_TcRnMessageWithCtx'@ if you need the additional context. You can always +-- strip it later using @'stripTcRnMessageContext'@. +-- +_TcRnMessage :: Fold GhcMessage TcRnMessage +_TcRnMessage = _TcRnMessageWithCtx . to stripTcRnMessageContext + +_TcRnMessageWithCtx :: Prism' GhcMessage TcRnMessage +_TcRnMessageWithCtx = prism' GhcTcRnMessage (\case + GhcTcRnMessage tcRnMsg -> Just tcRnMsg + _ -> Nothing) + +_GhcPsMessage :: Prism' GhcMessage PsMessage +_GhcPsMessage = prism' GhcPsMessage (\case + GhcPsMessage psMsg -> Just psMsg + _ -> Nothing) + +_GhcDsMessage :: Prism' GhcMessage DsMessage +_GhcDsMessage = prism' GhcDsMessage (\case + GhcDsMessage dsMsg -> Just dsMsg + _ -> Nothing) + +_GhcDriverMessage :: Prism' GhcMessage DriverMessage +_GhcDriverMessage = prism' GhcDriverMessage (\case + GhcDriverMessage driverMsg -> Just driverMsg + _ -> Nothing) + +-- | Some 'TcRnMessage's are nested in other constructors for additional context. +-- For example, 'TcRnWithHsDocContext' and 'TcRnMessageWithInfo'. +-- However, in some occasions you don't need the additional context and you just want +-- the error message. @'stripTcRnMessageContext'@ recursively unwraps these constructors, +-- until there are no more constructors with additional context. 
+-- +stripTcRnMessageContext :: TcRnMessage -> TcRnMessage +stripTcRnMessageContext = \case +#if MIN_VERSION_ghc(9, 6, 1) + TcRnWithHsDocContext _ tcMsg -> stripTcRnMessageContext tcMsg +#endif + TcRnMessageWithInfo _ (TcRnMessageDetailed _ tcMsg) -> stripTcRnMessageContext tcMsg + msg -> msg + +msgEnvelopeErrorL :: Lens' (MsgEnvelope e) e +msgEnvelopeErrorL = lens errMsgDiagnostic (\envelope e -> envelope { errMsgDiagnostic = e } ) + +makePrisms ''TcRnMessage + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . (++ "L"))) + ''SolverReportWithCtxt + +makePrisms ''TcSolverReportMsg + +makePrisms ''HoleSort + +-- | Focus 'MismatchMsg' from 'TcSolverReportMsg'. Currently, 'MismatchMsg' can be +-- extracted from 'CannotUnifyVariable' and 'Mismatch' constructors. +_MismatchMessage :: Traversal' TcSolverReportMsg MismatchMsg +_MismatchMessage focus (Mismatch msg t a c) = (\msg' -> Mismatch msg' t a c) <$> focus msg +_MismatchMessage focus (CannotUnifyVariable msg a) = flip CannotUnifyVariable a <$> focus msg +_MismatchMessage _ report = pure report + +-- | Focus 'teq_mismatch_expected' from 'TypeEqMismatch'. +_TypeEqMismatchExpected :: Traversal' MismatchMsg Type +#if MIN_VERSION_ghc(9,10,2) +_TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ expected _ _ _) = + (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected +#else +_TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ _ expected _ _ _) = + (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected +#endif +_TypeEqMismatchExpected _ mismatch = pure mismatch + +-- | Focus 'teq_mismatch_actual' from 'TypeEqMismatch'. +_TypeEqMismatchActual :: Traversal' MismatchMsg Type +#if MIN_VERSION_ghc(9,10,2) +_TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ actual _ _) = + (\actual' -> mismatch { teq_mismatch_actual = actual' }) <$> focus actual +#else +_TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ _ actual _ _) = + (\actual' -> mismatch { teq_mismatch_actual = actual' }) <$> focus actual +#endif +_TypeEqMismatchActual _ mismatch = pure mismatch diff --git a/ghcide/src/Development/IDE/GHC/Compat/Iface.hs b/ghcide/src/Development/IDE/GHC/Compat/Iface.hs new file mode 100644 index 0000000000..39cf9e0d45 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Iface.hs @@ -0,0 +1,36 @@ +{-# LANGUAGE CPP #-} + +-- | Compat module Interface file relevant code.
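As a hypothetical usage sketch for the helpers defined below ('reportMissing' is a made-up name, and rendering via showSDoc is an assumption of the sketch, not something this patch does): turning a failed module lookup into a plain diagnostic string.

reportMissing :: HscEnv -> ModuleName -> FindResult -> String
reportMissing env modname fr =
  showSDoc (hsc_dflags env) (cannotFindModule env modname fr)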
+module Development.IDE.GHC.Compat.Iface ( + writeIfaceFile, + cannotFindModule, + ) where + +import Development.IDE.GHC.Compat.Env +import Development.IDE.GHC.Compat.Outputable +import GHC +import GHC.Driver.Session (targetProfile) +import qualified GHC.Iface.Load as Iface +import GHC.Unit.Finder.Types (FindResult) + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if MIN_VERSION_ghc(9,7,0) +import GHC.Iface.Errors.Ppr (missingInterfaceErrorDiagnostic) +import GHC.Iface.Errors.Types (IfaceMessage) +#endif + +writeIfaceFile :: HscEnv -> FilePath -> ModIface -> IO () +#if MIN_VERSION_ghc(9,11,0) +writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (targetProfile $ hsc_dflags env) (Iface.flagsToIfCompression $ hsc_dflags env) fp iface +#else +writeIfaceFile env fp iface = Iface.writeIface (hsc_logger env) (targetProfile $ hsc_dflags env) fp iface +#endif + +cannotFindModule :: HscEnv -> ModuleName -> FindResult -> SDoc +cannotFindModule env modname fr = +#if MIN_VERSION_ghc(9,7,0) + missingInterfaceErrorDiagnostic (defaultDiagnosticOpts @IfaceMessage) $ Iface.cannotFindModule env modname fr +#else + Iface.cannotFindModule env modname fr +#endif diff --git a/ghcide/src/Development/IDE/GHC/Compat/Logger.hs b/ghcide/src/Development/IDE/GHC/Compat/Logger.hs new file mode 100644 index 0000000000..c3cc5247d0 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Logger.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE CPP #-} +-- | Compat module for GHC 9.2 Logger infrastructure. +module Development.IDE.GHC.Compat.Logger ( + putLogHook, + Logger.pushLogHook, + -- * Logging stuff + LogActionCompat, + logActionCompat, + defaultLogActionHPutStrDoc, + ) where + +import Development.IDE.GHC.Compat.Core +import Development.IDE.GHC.Compat.Env as Env +import Development.IDE.GHC.Compat.Outputable + + +import GHC.Types.Error +import GHC.Utils.Logger as Logger +import GHC.Utils.Outputable + +putLogHook :: Logger -> HscEnv -> HscEnv +putLogHook logger env = + env { hsc_logger = logger } + +type LogActionCompat = LogFlags -> Maybe DiagnosticReason -> Maybe Severity -> SrcSpan -> PrintUnqualified -> SDoc -> IO () + +-- alwaysQualify seems to still do the right thing here, according to the "unqualified warnings" test. 
+logActionCompat :: LogActionCompat -> LogAction +#if MIN_VERSION_ghc(9,7,0) +logActionCompat logAction logFlags (MCDiagnostic severity (ResolvedDiagnosticReason wr) _) loc = logAction logFlags (Just wr) (Just severity) loc alwaysQualify +#else +logActionCompat logAction logFlags (MCDiagnostic severity wr _) loc = logAction logFlags (Just wr) (Just severity) loc alwaysQualify +#endif +logActionCompat logAction logFlags _cls loc = logAction logFlags Nothing Nothing loc alwaysQualify + diff --git a/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs new file mode 100644 index 0000000000..ccec23c9c3 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Outputable.hs @@ -0,0 +1,133 @@ +{-# LANGUAGE CPP #-} + +module Development.IDE.GHC.Compat.Outputable ( + SDoc, + Outputable, + showSDoc, + showSDocUnsafe, + showSDocForUser, + ppr, pprPanic, text, vcat, (<+>), ($$), empty, hang, nest, punctuate, + printSDocQualifiedUnsafe, + printWithoutUniques, + mkPrintUnqualifiedDefault, + PrintUnqualified, + defaultUserStyle, + withPprStyle, + -- * Parser errors + PsWarning, + PsError, + defaultDiagnosticOpts, + GhcMessage, + DriverMessage, + Messages, + initDiagOpts, + pprMessages, + DiagnosticReason(..), + renderDiagnosticMessageWithHints, + pprMsgEnvelopeBagWithLoc, + Error.getMessages, + renderWithContext, + defaultSDocContext, + errMsgDiagnostic, + unDecorated, + diagnosticMessage, + -- * Error infrastructure + DecoratedSDoc, + MsgEnvelope, + ErrMsg, + WarnMsg, + SourceError(..), + errMsgSpan, + errMsgSeverity, + formatErrorWithQual, + mkWarnMsg, + mkSrcErr, + srcErrorMessages, + textDoc, + ) where + +import Data.Maybe +import GHC.Driver.Config.Diagnostic +import GHC.Driver.Env +import GHC.Driver.Errors.Types (DriverMessage, GhcMessage) +import GHC.Driver.Ppr +import GHC.Driver.Session +import GHC.Parser.Errors.Types +import qualified GHC.Types.Error as Error +import GHC.Types.Name.Ppr +import GHC.Types.Name.Reader +import GHC.Types.SourceError +import GHC.Types.SrcLoc +import GHC.Unit.State +import GHC.Utils.Error +import GHC.Utils.Outputable as Out +import GHC.Utils.Panic + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if MIN_VERSION_ghc(9,7,0) +import GHC.Types.Error (defaultDiagnosticOpts) +#endif + +type PrintUnqualified = NamePprCtx + +-- | A compatible function to print `Outputable` instances +-- without unique symbols. +-- +-- It print with a user-friendly style like: `a_a4ME` as `a`. +printWithoutUniques :: Outputable a => a -> String +printWithoutUniques = + renderWithContext (defaultSDocContext + { + sdocStyle = defaultUserStyle + , sdocSuppressUniques = True + , sdocCanUseUnicode = True + }) . 
ppr + +printSDocQualifiedUnsafe :: PrintUnqualified -> SDoc -> String +printSDocQualifiedUnsafe unqual doc = + -- Taken from 'showSDocForUser' + renderWithContext (defaultSDocContext { sdocStyle = sty }) doc' + where + sty = mkUserStyle unqual AllTheWay + doc' = pprWithUnitState emptyUnitState doc + + + +formatErrorWithQual :: DynFlags -> MsgEnvelope DecoratedSDoc -> String +formatErrorWithQual dflags e = + showSDoc dflags (pprNoLocMsgEnvelope e) + +pprNoLocMsgEnvelope :: MsgEnvelope DecoratedSDoc -> SDoc +pprNoLocMsgEnvelope (MsgEnvelope { errMsgDiagnostic = e + , errMsgContext = unqual }) + = sdocWithContext $ \_ctx -> + withErrStyle unqual $ +#if MIN_VERSION_ghc(9,7,0) + formatBulleted e +#else + formatBulleted _ctx e +#endif + + + +type ErrMsg = MsgEnvelope GhcMessage +type WarnMsg = MsgEnvelope GhcMessage + +mkPrintUnqualifiedDefault :: HscEnv -> GlobalRdrEnv -> PrintUnqualified +mkPrintUnqualifiedDefault env = + mkNamePprCtx ptc (hsc_unit_env env) + where + ptc = initPromotionTickContext (hsc_dflags env) + +renderDiagnosticMessageWithHints :: forall a. Diagnostic a => a -> DecoratedSDoc +renderDiagnosticMessageWithHints a = Error.unionDecoratedSDoc + (diagnosticMessage + (defaultDiagnosticOpts @a) + a) (mkDecorated $ map ppr $ diagnosticHints a) + +mkWarnMsg :: DynFlags -> Maybe DiagnosticReason -> b -> SrcSpan -> PrintUnqualified -> SDoc -> MsgEnvelope DecoratedSDoc +mkWarnMsg df reason _logFlags l st doc = fmap renderDiagnosticMessageWithHints $ mkMsgEnvelope (initDiagOpts df) l st (mkPlainDiagnostic (fromMaybe WarningWithoutFlag reason) [] doc) + +textDoc :: String -> SDoc +textDoc = text diff --git a/ghcide/src/Development/IDE/GHC/Compat/Parser.hs b/ghcide/src/Development/IDE/GHC/Compat/Parser.hs new file mode 100644 index 0000000000..8e2967ed30 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Parser.hs @@ -0,0 +1,77 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE PatternSynonyms #-} + +-- | Parser compatibility module. 
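A hypothetical sketch of how the two wrappers defined below compose ('mkCompatPState' is a made-up name): building a lexer state for a string buffer from the session's DynFlags.

mkCompatPState :: DynFlags -> StringBuffer -> RealSrcLoc -> PState
mkCompatPState dflags buf loc =
  initParserState (initParserOpts dflags) buf loc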
+module Development.IDE.GHC.Compat.Parser ( + initParserOpts, + initParserState, + PsSpan(..), + pattern HsParsedModule, + type GHC.HsParsedModule, + Development.IDE.GHC.Compat.Parser.hpm_module, + Development.IDE.GHC.Compat.Parser.hpm_src_files, + pattern ParsedModule, + Development.IDE.GHC.Compat.Parser.pm_parsed_source, + type GHC.ParsedModule, + Development.IDE.GHC.Compat.Parser.pm_mod_summary, + Development.IDE.GHC.Compat.Parser.pm_extra_src_files, + -- * API Annotations +#if !MIN_VERSION_ghc(9,11,0) + Anno.AnnKeywordId(..), +#endif + pattern EpaLineComment, + pattern EpaBlockComment + ) where + +import Development.IDE.GHC.Compat.Core +import Development.IDE.GHC.Compat.Util +import qualified GHC.Parser.Annotation as Anno +import qualified GHC.Parser.Lexer as Lexer +import GHC.Types.SrcLoc (PsSpan (..)) + + + +import GHC (EpaCommentTok (..), + pm_extra_src_files, + pm_mod_summary, + pm_parsed_source) +import qualified GHC +import qualified GHC.Driver.Config.Parser as Config +import GHC.Hs (hpm_module, hpm_src_files) + + + +initParserOpts :: DynFlags -> ParserOpts +initParserOpts = + Config.initParserOpts + +initParserState :: ParserOpts -> StringBuffer -> RealSrcLoc -> PState +initParserState = + Lexer.initParserState + +pattern HsParsedModule :: Located (HsModule GhcPs) -> [FilePath] -> GHC.HsParsedModule +pattern HsParsedModule + { hpm_module + , hpm_src_files + } <- GHC.HsParsedModule{..} + where + HsParsedModule hpm_module hpm_src_files = + GHC.HsParsedModule hpm_module hpm_src_files + + +pattern ParsedModule :: ModSummary -> ParsedSource -> [FilePath] -> GHC.ParsedModule +pattern ParsedModule + { pm_mod_summary + , pm_parsed_source + , pm_extra_src_files + } <- GHC.ParsedModule{..} + where + ParsedModule ms parsed extra_src_files = + GHC.ParsedModule + { pm_mod_summary = ms + , pm_parsed_source = parsed + , pm_extra_src_files = extra_src_files + } +{-# COMPLETE ParsedModule :: GHC.ParsedModule #-} + + diff --git a/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs b/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs new file mode 100644 index 0000000000..35bf48374b --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Plugins.hs @@ -0,0 +1,49 @@ +{-# LANGUAGE CPP #-} + +-- | Plugin Compat utils. 
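A hypothetical sketch combining the helpers defined below ('postParse' is a made-up name): extracting the parser's accumulated messages from the final lexer state and letting plugin parsedResultActions rewrite the parse result.

postParse :: HscEnv -> ModSummary -> ParsedSource -> PState -> IO (ParsedSource, PsMessages)
postParse env ms parsed pst =
  applyPluginsParsedResultAction env ms parsed (getPsMessages pst)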
+module Development.IDE.GHC.Compat.Plugins ( + -- * Plugin Compat Types, and initialisation + Plugin(..), + defaultPlugin, + PluginWithArgs(..), + applyPluginsParsedResultAction, + + -- * Static plugins + StaticPlugin(..), + hsc_static_plugins, + + -- * Plugin messages + PsMessages(..), + getPsMessages + ) where + +import Development.IDE.GHC.Compat.Core +import Development.IDE.GHC.Compat.Parser as Parser + +import qualified GHC.Driver.Env as Env +import GHC.Driver.Plugins (ParsedResult (..), + Plugin (..), + PluginWithArgs (..), + PsMessages (..), + StaticPlugin (..), + defaultPlugin, + staticPlugins, withPlugins) +import qualified GHC.Parser.Lexer as Lexer + + +getPsMessages :: PState -> PsMessages +getPsMessages pst = + uncurry PsMessages $ Lexer.getPsMessages pst + +applyPluginsParsedResultAction :: HscEnv -> ModSummary -> ParsedSource -> PsMessages -> IO (ParsedSource, PsMessages) +applyPluginsParsedResultAction env ms parsed msgs = do + -- Apply parsedResultAction of plugins + let applyPluginAction p opts = parsedResultAction p opts ms + fmap (\result -> (hpm_module (parsedResultModule result), parsedResultMessages result)) $ runHsc env $ withPlugins + (Env.hsc_plugins env) + applyPluginAction + (ParsedResult (HsParsedModule parsed []) msgs) + + +hsc_static_plugins :: HscEnv -> [StaticPlugin] +hsc_static_plugins = staticPlugins . Env.hsc_plugins diff --git a/ghcide/src/Development/IDE/GHC/Compat/Units.hs b/ghcide/src/Development/IDE/GHC/Compat/Units.hs new file mode 100644 index 0000000000..f7f634e448 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Units.hs @@ -0,0 +1,194 @@ +{-# LANGUAGE CPP #-} + +-- | Compat module for 'UnitState' and 'UnitInfo'. +module Development.IDE.GHC.Compat.Units ( + -- * UnitState + UnitState, + initUnits, + unitState, + getUnitName, + explicitUnits, + preloadClosureUs, + listVisibleModuleNames, + LookupResult(..), + lookupModuleWithSuggestions, + -- * UnitInfoMap + UnitInfoMap, + getUnitInfoMap, + lookupUnit, + lookupUnit', + -- * UnitInfo + UnitInfo, + unitExposedModules, + unitDepends, + unitHaddockInterfaces, + mkUnit, + unitPackageNameString, + unitPackageVersion, + -- * UnitId helpers + UnitId, + Unit, + unitString, + stringToUnit, + definiteUnitId, + defUnitId, + installedModule, + -- * Module + toUnitId, + moduleUnitId, + moduleUnit, + -- * ExternalPackageState + ExternalPackageState(..), + -- * Utils + filterInplaceUnits, + FinderCache, + showSDocForUser', + findImportedModule, + ) where + +import Data.Either +import Development.IDE.GHC.Compat.Core +import Development.IDE.GHC.Compat.Env +import Development.IDE.GHC.Compat.Outputable +import Prelude hiding (mod) + +import Control.Monad +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map +import qualified GHC +import qualified GHC.Data.ShortText as ST +import qualified GHC.Driver.Session as DynFlags +import GHC.Types.PkgQual (PkgQual (NoPkgQual)) +import GHC.Types.Unique.Set +import GHC.Unit.External +import qualified GHC.Unit.Finder as GHC +import GHC.Unit.Home.ModInfo +import qualified GHC.Unit.Info as UnitInfo +import GHC.Unit.State (LookupResult, UnitInfo, + UnitInfoMap, + UnitState (unitInfoMap), + lookupUnit', mkUnit, + unitDepends, + unitExposedModules, + unitPackageNameString, + unitPackageVersion) +import qualified GHC.Unit.State as State +import GHC.Unit.Types + + +type PreloadUnitClosure = UniqSet UnitId + +unitState :: HscEnv -> UnitState +unitState = ue_units . 
hsc_unit_env + +createUnitEnvFromFlags :: NE.NonEmpty DynFlags -> HomeUnitGraph +createUnitEnvFromFlags unitDflags = + let + newInternalUnitEnv dflags = mkHomeUnitEnv dflags emptyHomePackageTable Nothing + unitEnvList = NE.map (\dflags -> (homeUnitId_ dflags, newInternalUnitEnv dflags)) unitDflags + in + unitEnv_new (Map.fromList (NE.toList unitEnvList)) + +initUnits :: [DynFlags] -> HscEnv -> IO HscEnv +initUnits unitDflags env = do + let dflags0 = hsc_dflags env + -- additionally, set checked dflags so we don't lose fixes + let initial_home_graph = createUnitEnvFromFlags (dflags0 NE.:| unitDflags) + home_units = unitEnv_keys initial_home_graph + home_unit_graph <- forM initial_home_graph $ \homeUnitEnv -> do + let cached_unit_dbs = homeUnitEnv_unit_dbs homeUnitEnv + dflags = homeUnitEnv_dflags homeUnitEnv + old_hpt = homeUnitEnv_hpt homeUnitEnv + + (dbs,unit_state,home_unit,mconstants) <- State.initUnits (hsc_logger env) dflags cached_unit_dbs home_units + + updated_dflags <- DynFlags.updatePlatformConstants dflags mconstants + pure HomeUnitEnv + { homeUnitEnv_units = unit_state + , homeUnitEnv_unit_dbs = Just dbs + , homeUnitEnv_dflags = updated_dflags + , homeUnitEnv_hpt = old_hpt + , homeUnitEnv_home_unit = Just home_unit + } + + let dflags1 = homeUnitEnv_dflags $ unitEnv_lookup (homeUnitId_ dflags0) home_unit_graph + let unit_env = UnitEnv + { ue_platform = targetPlatform dflags1 + , ue_namever = GHC.ghcNameVersion dflags1 + , ue_home_unit_graph = home_unit_graph + , ue_current_unit = homeUnitId_ dflags0 + , ue_eps = ue_eps (hsc_unit_env env) + } + pure $ hscSetFlags dflags1 $ hscSetUnitEnv unit_env env + + +explicitUnits :: UnitState -> [Unit] +explicitUnits ue = + map fst $ State.explicitUnits ue + +listVisibleModuleNames :: HscEnv -> [ModuleName] +listVisibleModuleNames env = + State.listVisibleModuleNames $ unitState env + +getUnitName :: HscEnv -> UnitId -> Maybe PackageName +getUnitName env i = + State.unitPackageName <$> State.lookupUnitId (unitState env) i + +lookupModuleWithSuggestions + :: HscEnv + -> ModuleName + -> GHC.PkgQual + -> LookupResult +lookupModuleWithSuggestions env modname mpkg = + State.lookupModuleWithSuggestions (unitState env) modname mpkg + +getUnitInfoMap :: HscEnv -> UnitInfoMap +getUnitInfoMap = + unitInfoMap . ue_units . hsc_unit_env + +lookupUnit :: HscEnv -> Unit -> Maybe UnitInfo +lookupUnit env pid = State.lookupUnit (unitState env) pid + +preloadClosureUs :: HscEnv -> PreloadUnitClosure +preloadClosureUs = State.preloadClosure . unitState + +unitHaddockInterfaces :: UnitInfo -> [FilePath] +unitHaddockInterfaces = + fmap ST.unpack . 
UnitInfo.unitHaddockInterfaces + +-- ------------------------------------------------------------------ +-- Backwards Compatible UnitState +-- ------------------------------------------------------------------ + +-- ------------------------------------------------------------------ +-- Patterns and helpful definitions +-- ------------------------------------------------------------------ + +definiteUnitId :: Definite uid -> GenUnit uid +definiteUnitId = RealUnit +defUnitId :: unit -> Definite unit +defUnitId = Definite +installedModule :: unit -> ModuleName -> GenModule unit +installedModule = Module + + +filterInplaceUnits :: [UnitId] -> [PackageFlag] -> ([UnitId], [PackageFlag]) +filterInplaceUnits us packageFlags = + partitionEithers (map isInplace packageFlags) + where + isInplace :: PackageFlag -> Either UnitId PackageFlag + isInplace p@(ExposePackage _ (UnitIdArg u) _) = + if toUnitId u `elem` us + then Left $ toUnitId u + else Right p + isInplace p = Right p + +showSDocForUser' :: HscEnv -> PrintUnqualified -> SDoc -> String +showSDocForUser' env = showSDocForUser (hsc_dflags env) (unitState env) + +findImportedModule :: HscEnv -> ModuleName -> IO (Maybe Module) +findImportedModule env mn = do + res <- GHC.findImportedModule env mn NoPkgQual + case res of + Found _ mod -> pure . pure $ mod + _ -> pure Nothing diff --git a/ghcide/src/Development/IDE/GHC/Compat/Util.hs b/ghcide/src/Development/IDE/GHC/Compat/Util.hs new file mode 100644 index 0000000000..1f9e3a1609 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Compat/Util.hs @@ -0,0 +1,82 @@ +{-# LANGUAGE CPP #-} +-- | GHC Utils and Datastructures re-exports. +-- +-- Mainly handles module hierarchy re-organisation of GHC +-- from version < 9.0 to >= 9.0. +-- +-- Some Functions, such as 'toList' shadow other function-names. +-- This way this module can be imported qualified more naturally. 
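A small usage sketch of the qualified-import style the comment above recommends (the 'Util' alias and the helper names are choices of this illustration only):

import qualified Development.IDE.GHC.Compat.Util as Util

-- Qualified, the shadowing names read unambiguously:
enumSetToList :: Enum a => Util.EnumSet a -> [a]
enumSetToList = Util.toList

fastStringToString :: Util.FastString -> String
fastStringToString = Util.unpackFS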
+module Development.IDE.GHC.Compat.Util ( + -- * Exception handling + MonadCatch, + GhcException, + handleGhcException, + catch, + try, + -- * Bags + Bag, + bagToList, + listToBag, + unionBags, + isEmptyBag, + -- * Boolean Formula + LBooleanFormula, + BooleanFormula(..), + -- * OverridingBool + OverridingBool(..), + -- * Maybes + MaybeErr(..), + orElse, + -- * Pair + Pair(..), + -- * EnumSet + EnumSet, + toList, + -- * FastString exports + FastString, + -- Export here, so we can coerce safely on consumer sites + LexicalFastString(..), + uniq, + unpackFS, + mkFastString, + fsLit, + pprHsString, + -- * Fingerprint + Fingerprint(..), + getFileHash, + fingerprintData, + fingerprintString, + fingerprintFingerprints, + -- * Unique + Uniquable, + nonDetCmpUnique, + getUnique, + Unique, + mkUnique, + newTagUnique, + -- * UniqDFM + emptyUDFM, + plusUDFM, + plusUDFM_C, + -- * String Buffer + StringBuffer(..), + hGetStringBuffer, + stringToStringBuffer, + nextChar, + atEnd, + ) where + +import Control.Exception.Safe (MonadCatch, catch, try) +import GHC.Data.Bag +import GHC.Data.Bool +import GHC.Data.BooleanFormula +import GHC.Data.EnumSet +import GHC.Data.FastString +import GHC.Data.Maybe +import GHC.Data.Pair +import GHC.Data.StringBuffer +import GHC.Types.Unique +import GHC.Types.Unique.DFM +import GHC.Utils.Fingerprint +import GHC.Utils.Outputable (pprHsString) +import GHC.Utils.Panic hiding (try) diff --git a/ghcide/src/Development/IDE/GHC/CoreFile.hs b/ghcide/src/Development/IDE/GHC/CoreFile.hs new file mode 100644 index 0000000000..99b7328770 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/CoreFile.hs @@ -0,0 +1,211 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} + +-- | CoreFiles let us serialize Core to a file in order to later recover it +-- without reparsing or retypechecking +module Development.IDE.GHC.CoreFile + ( CoreFile(..) 
+ , codeGutsToCoreFile + , typecheckCoreFile + , readBinCoreFile + , writeBinCoreFile + , getImplicitBinds + ) where + +import Control.Monad +import Control.Monad.IO.Class +import Data.Foldable +import Data.IORef +import Data.List (isPrefixOf) +import Data.Maybe +import Development.IDE.GHC.Compat +import qualified Development.IDE.GHC.Compat.Util as Util +import GHC.Core +import GHC.CoreToIface +import GHC.Fingerprint +import GHC.Iface.Binary +import GHC.Iface.Env +#if MIN_VERSION_ghc(9,11,0) +import qualified GHC.Iface.Load as Iface +#endif +import GHC.Iface.Recomp.Binary (fingerprintBinMem) +import GHC.IfaceToCore +import GHC.Types.Id.Make +import GHC.Types.TypeEnv +import GHC.Utils.Binary +import Prelude hiding (mod) + + +-- | Initial ram buffer to allocate for writing interface files +initBinMemSize :: Int +initBinMemSize = 1024 * 1024 + +data CoreFile + = CoreFile + { cf_bindings :: [TopIfaceBinding IfaceId] + -- ^ The actual core file bindings, deserialized lazily + , cf_iface_hash :: !Fingerprint + } + +-- | Like IfaceBinding, but lets us serialize internal names as well +data TopIfaceBinding v + = TopIfaceNonRec v IfaceExpr + | TopIfaceRec [(v, IfaceExpr)] + deriving (Functor, Foldable, Traversable) + +-- | GHC doesn't export 'tcIdDetails', 'tcIfaceInfo', or 'tcIfaceType', +-- but it does export 'tcIfaceDecl' +-- so we use `IfaceDecl` as a container for all of these +-- invariant: 'IfaceId' is always a 'IfaceId' constructor +type IfaceId = IfaceDecl + +instance Binary (TopIfaceBinding IfaceId) where + put_ bh (TopIfaceNonRec d e) = do + putByte bh 0 + put_ bh d + put_ bh e + put_ bh (TopIfaceRec vs) = do + putByte bh 1 + put_ bh vs + get bh = do + t <- getByte bh + case t of + 0 -> TopIfaceNonRec <$> get bh <*> get bh + 1 -> TopIfaceRec <$> get bh + _ -> error "Binary TopIfaceBinding" + +instance Binary CoreFile where + put_ bh (CoreFile core fp) = lazyPut bh core >> put_ bh fp + get bh = CoreFile <$> lazyGet bh <*> get bh + +readBinCoreFile + :: NameCacheUpdater + -> FilePath + -> IO (CoreFile, Fingerprint) +readBinCoreFile name_cache fat_hi_path = do + bh <- readBinMem fat_hi_path + file <- getWithUserData name_cache bh + !fp <- Util.getFileHash fat_hi_path + return (file, fp) + +-- | Write a core file +writeBinCoreFile :: DynFlags -> FilePath -> CoreFile -> IO Fingerprint +writeBinCoreFile _dflags core_path fat_iface = do + bh <- openBinMem initBinMemSize + + let quietTrace = + QuietBinIFace + + putWithUserData + quietTrace +#if MIN_VERSION_ghc(9,11,0) + (Iface.flagsToIfCompression _dflags) +#endif + bh + fat_iface + + -- And send the result to the file + writeBinMem bh core_path + + !fp <- fingerprintBinMem bh + pure fp + +-- Implicit binds aren't tidied, so we can't serialise them. 
+-- This isn't a problem however since we can regenerate them from the +-- original ModIface +codeGutsToCoreFile + :: Fingerprint -- ^ Hash of the interface this was generated from + -> CgGuts + -> CoreFile +-- In GHC 9.6, implicit binds are tidied and part of core binds +codeGutsToCoreFile hash CgGuts{..} = CoreFile (map (toIfaceTopBind1 cg_module) cg_binds) hash + +getImplicitBinds :: TyCon -> [CoreBind] +getImplicitBinds tc = cls_binds ++ getTyConImplicitBinds tc + where + cls_binds = maybe [] getClassImplicitBinds (tyConClass_maybe tc) + +getTyConImplicitBinds :: TyCon -> [CoreBind] +getTyConImplicitBinds tc + | isNewTyCon tc = [] -- See Note [Compulsory newtype unfolding] in MkId + | otherwise = map get_defn (mapMaybe dataConWrapId_maybe (tyConDataCons tc)) + +getClassImplicitBinds :: Class -> [CoreBind] +getClassImplicitBinds cls + = [ NonRec op (mkDictSelRhs cls val_index) + | (op, val_index) <- classAllSelIds cls `zip` [0..] ] + +get_defn :: Id -> CoreBind +get_defn identifier = NonRec identifier templ + where + templ = case maybeUnfoldingTemplate (realIdUnfolding identifier) of + Nothing -> error "get_dfn: no unfolding template" + Just x -> x + +toIfaceTopBndr1 :: Module -> Id -> IfaceId +toIfaceTopBndr1 mod identifier + = IfaceId (mangleDeclName mod $ getName identifier) + (toIfaceType (idType identifier)) + (toIfaceIdDetails (idDetails identifier)) + (toIfaceIdInfo (idInfo identifier)) + +toIfaceTopBind1 :: Module -> Bind Id -> TopIfaceBinding IfaceId +toIfaceTopBind1 mod (NonRec b r) = TopIfaceNonRec (toIfaceTopBndr1 mod b) (toIfaceExpr r) +toIfaceTopBind1 mod (Rec prs) = TopIfaceRec [(toIfaceTopBndr1 mod b, toIfaceExpr r) | (b,r) <- prs] + +typecheckCoreFile :: Module -> IORef TypeEnv -> CoreFile -> IfG CoreProgram +typecheckCoreFile this_mod type_var (CoreFile prepd_binding _) = + initIfaceLcl this_mod (text "typecheckCoreFile") NotBoot $ do + tcTopIfaceBindings1 type_var prepd_binding + +-- | Internal names can't be serialized, so we mange them +-- to an external name and restore at deserialization time +-- This is necessary because we rely on stuffing TopIfaceBindings into +-- a IfaceId because we don't have access to 'tcIfaceType' etc.. +mangleDeclName :: Module -> Name -> Name +mangleDeclName mod name + | isExternalName name = name + | otherwise = mkExternalName (nameUnique name) (mangleModule mod) (nameOccName name) (nameSrcSpan name) + +-- | Mangle the module name too to avoid conflicts +mangleModule :: Module -> Module +mangleModule mod = mkModule (moduleUnit mod) (mkModuleName $ "GHCIDEINTERNAL" ++ moduleNameString (moduleName mod)) + +isGhcideModule :: Module -> Bool +isGhcideModule mod = "GHCIDEINTERNAL" `isPrefixOf` (moduleNameString $ moduleName mod) + +-- Is this a fake external name that we need to make into an internal name? +isGhcideName :: Name -> Bool +isGhcideName = isGhcideModule . nameModule + +tcTopIfaceBindings1 :: IORef TypeEnv -> [TopIfaceBinding IfaceId] + -> IfL [CoreBind] +tcTopIfaceBindings1 ty_var ver_decls + = do + int <- mapM (traverse tcIfaceId) ver_decls + let all_ids = concatMap toList int + liftIO $ modifyIORef ty_var (flip extendTypeEnvList $ map AnId all_ids) + extendIfaceIdEnv all_ids $ mapM tc_iface_bindings int + +tcIfaceId :: IfaceId -> IfL Id +tcIfaceId = fmap getIfaceId . 
tcIfaceDecl False <=< unmangle_decl_name + where + unmangle_decl_name ifid@IfaceId{ ifName = name } + -- Check if the name is mangled + | isGhcideName name = do + name' <- newIfaceName (mkVarOcc $ getOccString name) + pure $ ifid{ ifName = name' } + | otherwise = pure ifid + unmangle_decl_name _ifid = error "tcIfaceId: got non IfaceId: " + -- invariant: 'IfaceId' is always a 'IfaceId' constructor + getIfaceId (AnId identifier) = identifier + getIfaceId _ = error "tcIfaceId: got non Id" + +tc_iface_bindings :: TopIfaceBinding Id -> IfL CoreBind +tc_iface_bindings (TopIfaceNonRec v e) = do + e' <- tcIfaceExpr e + pure $ NonRec v e' +tc_iface_bindings (TopIfaceRec vs) = do + vs' <- traverse (\(v, e) -> (v,) <$> tcIfaceExpr e) vs + pure $ Rec vs' + diff --git a/ghcide/src/Development/IDE/GHC/Error.hs b/ghcide/src/Development/IDE/GHC/Error.hs new file mode 100644 index 0000000000..048987f8ae --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Error.hs @@ -0,0 +1,265 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DisambiguateRecordFields #-} +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +module Development.IDE.GHC.Error + ( + -- * Producing Diagnostic values + diagFromGhcErrorMessages + , diagFromErrMsgs + , diagFromErrMsg + , diagFromSDocErrMsgs + , diagFromSDocErrMsg + , diagFromString + , diagFromStrings + , diagFromGhcException + , catchSrcErrors + + -- * utilities working with spans + , srcSpanToLocation + , srcSpanToRange + , realSrcSpanToRange + , realSrcLocToPosition + , realSrcSpanToLocation + , realSrcSpanToCodePointRange + , realSrcLocToCodePointPosition + , srcSpanToFilename + , rangeToSrcSpan + , rangeToRealSrcSpan + , positionToRealSrcLoc + , zeroSpan + , realSpan + , isInsideSrcSpan + , spanContainsRange + , noSpan + + -- * utilities working with severities + , toDSeverity + ) where + +import Control.Lens +import Data.Maybe +import Data.String (fromString) +import qualified Data.Text as T +import Data.Tuple.Extra (uncurry3) +import Development.IDE.GHC.Compat (GhcMessage, MsgEnvelope, + errMsgDiagnostic, + errMsgSeverity, errMsgSpan, + formatErrorWithQual, + srcErrorMessages) +import qualified Development.IDE.GHC.Compat as Compat +import qualified Development.IDE.GHC.Compat.Util as Compat +import Development.IDE.GHC.Orphans () +import Development.IDE.Types.Diagnostics as D +import Development.IDE.Types.Location +import GHC +import Language.LSP.Protocol.Types (isSubrangeOf) +import Language.LSP.VFS (CodePointPosition (CodePointPosition), + CodePointRange (CodePointRange)) + + +diagFromText :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> T.Text -> Maybe (MsgEnvelope GhcMessage) -> FileDiagnostic +diagFromText diagSource sev loc msg origMsg = + D.ideErrorWithSource + (Just diagSource) (Just sev) + (toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename loc) + msg origMsg + & fdLspDiagnosticL %~ \diag -> diag { D._range = fromMaybe noRange $ srcSpanToRange loc } + +-- | Produce a GHC-style error from a source span and a message. +diagFromErrMsg :: T.Text -> DynFlags -> MsgEnvelope GhcMessage -> [FileDiagnostic] +diagFromErrMsg diagSource dflags origErr = + let err = fmap (\e -> (Compat.renderDiagnosticMessageWithHints e, Just origErr)) origErr + in + diagFromSDocWithOptionalOrigMsg diagSource dflags err + +-- | Compatibility function for creating '[FileDiagnostic]' from +-- a 'Compat.Bag' of GHC error messages. +-- The function signature changes based on the GHC version. 
+-- While this is not desirable, it avoids more CPP statements in code +-- that implements actual logic. +diagFromGhcErrorMessages :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope GhcMessage) -> [FileDiagnostic] +diagFromGhcErrorMessages sourceParser dflags errs = + diagFromErrMsgs sourceParser dflags errs + +diagFromErrMsgs :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope GhcMessage) -> [FileDiagnostic] +diagFromErrMsgs diagSource dflags = concatMap (diagFromErrMsg diagSource dflags) . Compat.bagToList + +diagFromSDocErrMsg :: T.Text -> DynFlags -> MsgEnvelope Compat.DecoratedSDoc -> [FileDiagnostic] +diagFromSDocErrMsg diagSource dflags err = + diagFromSDocWithOptionalOrigMsg diagSource dflags (fmap (,Nothing) err) + +diagFromSDocErrMsgs :: T.Text -> DynFlags -> Compat.Bag (MsgEnvelope Compat.DecoratedSDoc) -> [FileDiagnostic] +diagFromSDocErrMsgs diagSource dflags = concatMap (diagFromSDocErrMsg diagSource dflags) . Compat.bagToList + +diagFromSDocWithOptionalOrigMsg :: T.Text -> DynFlags -> MsgEnvelope (Compat.DecoratedSDoc, Maybe (MsgEnvelope GhcMessage)) -> [FileDiagnostic] +diagFromSDocWithOptionalOrigMsg diagSource dflags err = + [ diagFromText diagSource sev (errMsgSpan err) (T.pack (formatErrorWithQual dflags (fmap fst err))) (snd (errMsgDiagnostic err)) + | Just sev <- [toDSeverity $ errMsgSeverity err]] + +-- | Convert a GHC SrcSpan to a DAML compiler Range +srcSpanToRange :: SrcSpan -> Maybe Range +srcSpanToRange (UnhelpfulSpan _) = Nothing +srcSpanToRange (Compat.RealSrcSpan real _) = Just $ realSrcSpanToRange real +-- srcSpanToRange = fmap realSrcSpanToRange . realSpan + +realSrcSpanToRange :: RealSrcSpan -> Range +realSrcSpanToRange real = + Range (realSrcLocToPosition $ Compat.realSrcSpanStart real) + (realSrcLocToPosition $ Compat.realSrcSpanEnd real) + +realSrcLocToPosition :: RealSrcLoc -> Position +realSrcLocToPosition real = + Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + +-- Note [Unicode support] +-- ~~~~~~~~~~~~~~~~~~~~~~ +-- the current situation is: +-- LSP Positions use UTF-16 code units(Unicode may count as variable columns); +-- GHC use Unicode code points(Unicode count as one column). +-- To support unicode, ideally range should be in lsp standard, +-- and codePoint should be in ghc standard. +-- see https://github.com/haskell/lsp/pull/407 + +-- | Convert a GHC SrcSpan to CodePointRange +-- see Note [Unicode support] +realSrcSpanToCodePointRange :: RealSrcSpan -> CodePointRange +realSrcSpanToCodePointRange real = + CodePointRange + (realSrcLocToCodePointPosition $ Compat.realSrcSpanStart real) + (realSrcLocToCodePointPosition $ Compat.realSrcSpanEnd real) + +-- | Convert a GHC RealSrcLoc to CodePointPosition +-- see Note [Unicode support] +realSrcLocToCodePointPosition :: RealSrcLoc -> CodePointPosition +realSrcLocToCodePointPosition real = + CodePointPosition (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + +-- | Extract a file name from a GHC SrcSpan (use message for unhelpful ones) +-- FIXME This may not be an _absolute_ file name, needs fixing. +srcSpanToFilename :: SrcSpan -> Maybe FilePath +srcSpanToFilename (UnhelpfulSpan _) = Nothing +srcSpanToFilename (Compat.RealSrcSpan real _) = Just $ Compat.unpackFS $ srcSpanFile real +-- srcSpanToFilename = fmap (FS.unpackFS . srcSpanFile) . 
realSpan + +realSrcSpanToLocation :: RealSrcSpan -> Location +realSrcSpanToLocation real = Location file (realSrcSpanToRange real) + where file = fromNormalizedUri $ filePathToUri' $ toNormalizedFilePath' $ Compat.unpackFS $ srcSpanFile real + +srcSpanToLocation :: SrcSpan -> Maybe Location +srcSpanToLocation src = do + fs <- srcSpanToFilename src + rng <- srcSpanToRange src + -- important that the URI's we produce have been properly normalized, otherwise they point at weird places in VS Code + pure $ Location (fromNormalizedUri $ filePathToUri' $ toNormalizedFilePath' fs) rng + +rangeToSrcSpan :: NormalizedFilePath -> Range -> SrcSpan +rangeToSrcSpan = fmap (\x -> Compat.RealSrcSpan x Nothing) . rangeToRealSrcSpan + +rangeToRealSrcSpan + :: NormalizedFilePath -> Range -> RealSrcSpan +rangeToRealSrcSpan nfp = + Compat.mkRealSrcSpan + <$> positionToRealSrcLoc nfp . _start + <*> positionToRealSrcLoc nfp . _end + +positionToRealSrcLoc :: NormalizedFilePath -> Position -> RealSrcLoc +positionToRealSrcLoc nfp (Position l c)= + Compat.mkRealSrcLoc (fromString $ fromNormalizedFilePath nfp) (fromIntegral $ l + 1) (fromIntegral $ c + 1) + +isInsideSrcSpan :: Position -> SrcSpan -> Bool +p `isInsideSrcSpan` r = case srcSpanToRange r of + Just (Range sp ep) -> sp <= p && p <= ep + _ -> False + +-- Returns Nothing if the SrcSpan does not represent a valid range +spanContainsRange :: SrcSpan -> Range -> Maybe Bool +spanContainsRange srcSpan range = (range `isSubrangeOf`) <$> srcSpanToRange srcSpan + +-- | Convert a GHC severity to a DAML compiler Severity. Severities below +-- "Warning" level are dropped (returning Nothing). +toDSeverity :: GHC.Severity -> Maybe D.DiagnosticSeverity +toDSeverity SevIgnore = Nothing +toDSeverity SevWarning = Just DiagnosticSeverity_Warning +toDSeverity SevError = Just DiagnosticSeverity_Error + + +-- | Produce a bag of GHC-style errors (@ErrorMessages@) from the given +-- (optional) locations and message strings. +diagFromStrings :: T.Text -> D.DiagnosticSeverity -> [(SrcSpan, String, Maybe (MsgEnvelope GhcMessage))] -> [FileDiagnostic] +diagFromStrings diagSource sev = concatMap (uncurry3 (diagFromString diagSource sev)) + +-- | Produce a GHC-style error from a source span and a message. +diagFromString :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> String -> Maybe (MsgEnvelope GhcMessage) -> [FileDiagnostic] +diagFromString diagSource sev sp x origMsg = [diagFromText diagSource sev sp (T.pack x) origMsg] + + +-- | Produces an "unhelpful" source span with the given string. +noSpan :: String -> SrcSpan +noSpan = Compat.mkGeneralSrcSpan . Compat.fsLit + + +-- | creates a span with zero length in the filename of the argument passed +zeroSpan :: Compat.FastString -- ^ file path of span + -> RealSrcSpan +zeroSpan file = Compat.realSrcLocSpan (Compat.mkRealSrcLoc file 1 1) + +realSpan :: SrcSpan + -> Maybe RealSrcSpan +realSpan = \case + Compat.RealSrcSpan r _ -> Just r + UnhelpfulSpan _ -> Nothing + + +-- | Catch the errors thrown by GHC (SourceErrors and +-- compiler-internal exceptions like Panic or InstallationError), and turn them into +-- diagnostics +catchSrcErrors :: DynFlags -> T.Text -> IO a -> IO (Either [FileDiagnostic] a) +catchSrcErrors dflags fromWhere ghcM = do + Compat.handleGhcException ghcExceptionToDiagnostics $ + handleSourceError sourceErrorToDiagnostics $ + Right <$> ghcM + where + ghcExceptionToDiagnostics = return . Left . 
diagFromGhcException fromWhere dflags + sourceErrorToDiagnostics diag = pure $ Left $ + diagFromErrMsgs fromWhere dflags (Compat.getMessages (srcErrorMessages diag)) + +diagFromGhcException :: T.Text -> DynFlags -> GhcException -> [FileDiagnostic] +diagFromGhcException diagSource dflags exc = diagFromString diagSource DiagnosticSeverity_Error (noSpan "") (showGHCE dflags exc) Nothing + +showGHCE :: DynFlags -> GhcException -> String +showGHCE dflags exc = case exc of + Signal n + -> "Signal: " <> show n + + Panic s + -> unwords ["Compilation Issue:", s, "\n", requestReport] + PprPanic s sdoc + -> unlines ["Compilation Issue", s,"" + , Compat.showSDoc dflags sdoc + , requestReport ] + + Sorry s + -> "Unsupported feature: " <> s + PprSorry s sdoc + -> unlines ["Unsupported feature: ", s,"" + , Compat.showSDoc dflags sdoc] + + + ---------- errors below should not happen at all -------- + InstallationError str + -> "Installation error: " <> str + + UsageError str -- should never happen + -> unlines ["Unexpected usage error", str] + + CmdLineError str + -> unlines ["Unexpected usage error", str] + + ProgramError str + -> "Program error: " <> str + PprProgramError str sdoc -> + unlines ["Program error:", str,"" + , Compat.showSDoc dflags sdoc] + where + requestReport = "Please report this bug to the compiler authors." diff --git a/ghcide/src/Development/IDE/GHC/Orphans.hs b/ghcide/src/Development/IDE/GHC/Orphans.hs new file mode 100644 index 0000000000..068ca6a78a --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Orphans.hs @@ -0,0 +1,237 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +-- | Orphan instances for GHC. +-- Note that the 'NFData' instances may not be law abiding. +module Development.IDE.GHC.Orphans() where +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Util + +import Control.DeepSeq +import Control.Monad.Trans.Reader (ReaderT (..)) +import Data.Aeson +import Data.Hashable +import Data.String (IsString (fromString)) +import Data.Text (unpack) + +import Data.Bifunctor (Bifunctor (..)) +import GHC.ByteCode.Types +import GHC.Data.Bag +import GHC.Data.FastString +import qualified GHC.Data.StringBuffer as SB +import GHC.Iface.Ext.Types +import GHC.Parser.Annotation +import GHC.Types.PkgQual +import GHC.Types.SrcLoc + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +import GHC.Unit.Home.ModInfo +import GHC.Unit.Module.Location (ModLocation (..)) +import GHC.Unit.Module.WholeCoreBindings + +-- Orphan instance for Shake.hs +-- https://hub.darcs.net/ross/transformers/issue/86 +deriving instance (Semigroup (m a)) => Semigroup (ReaderT r m a) + +-- Orphan instances for types from the GHC API. +instance Show CoreModule where show = unpack . printOutputable +instance NFData CoreModule where rnf = rwhnf +instance Show CgGuts where show = unpack . printOutputable . cg_module +instance NFData CgGuts where rnf = rwhnf +instance Show ModDetails where show = const "" +instance NFData ModDetails where rnf = rwhnf +instance NFData SafeHaskellMode where rnf = rwhnf +instance Show Linkable where show = unpack . 
printOutputable +#if MIN_VERSION_ghc(9,11,0) +instance NFData Linkable where rnf (Linkable a b c) = rnf a `seq` rnf b `seq` rnf c +instance NFData LinkableObjectSort where rnf = rwhnf +instance NFData LinkablePart where + rnf (DotO a b) = rnf a `seq` rnf b + rnf (DotA f) = rnf f + rnf (DotDLL f) = rnf f + rnf (BCOs a) = seqCompiledByteCode a + rnf (CoreBindings wcb) = rnf wcb + rnf (LazyBCOs a b) = seqCompiledByteCode a `seq` rnf b +#else +instance NFData Linkable where rnf (LM a b c) = rnf a `seq` rnf b `seq` rnf c +instance NFData Unlinked where + rnf (DotO f) = rnf f + rnf (DotA f) = rnf f + rnf (DotDLL f) = rnf f + rnf (BCOs a b) = seqCompiledByteCode a `seq` liftRnf rwhnf b + rnf (CoreBindings wcb) = rnf wcb + rnf (LoadedBCOs us) = rnf us +#endif + +instance NFData WholeCoreBindings where +#if MIN_VERSION_ghc(9,11,0) + rnf (WholeCoreBindings bs m ml f) = rnf bs `seq` rnf m `seq` rnf ml `seq` rnf f +#else + rnf (WholeCoreBindings bs m ml) = rnf bs `seq` rnf m `seq` rnf ml +#endif + +instance NFData ModLocation where +#if MIN_VERSION_ghc(9,11,0) + rnf (OsPathModLocation mf f1 f2 f3 f4 f5) = rnf mf `seq` rnf f1 `seq` rnf f2 `seq` rnf f3 `seq` rnf f4 `seq` rnf f5 +#else + rnf (ModLocation mf f1 f2 f3 f4 f5) = rnf mf `seq` rnf f1 `seq` rnf f2 `seq` rnf f3 `seq` rnf f4 `seq` rnf f5 +#endif + +instance Show PackageFlag where show = unpack . printOutputable +instance Show InteractiveImport where show = unpack . printOutputable +instance Show PackageName where show = unpack . printOutputable + +instance Show UnitId where show = unpack . printOutputable +deriving instance Ord SrcSpan +deriving instance Ord UnhelpfulSpanReason + +instance NFData SB.StringBuffer where rnf = rwhnf + +instance Show Module where + show = moduleNameString . moduleName + +instance Show ModSummary where + show = show . ms_mod + +instance Show ParsedModule where + show = show . pm_mod_summary + +instance NFData ModSummary where + rnf = rwhnf + +instance Ord FastString where + compare a b = if a == b then EQ else compare (fs_sbs a) (fs_sbs b) + + +#if MIN_VERSION_ghc(9,9,0) +instance NFData (EpAnn a) where + rnf = rwhnf +#else +instance NFData (SrcSpanAnn' a) where + rnf = rwhnf +deriving instance Functor SrcSpanAnn' +#endif + +instance Bifunctor GenLocated where + bimap f g (L l x) = L (f l) (g x) + +instance NFData ParsedModule where + rnf = rwhnf + +instance Show HieFile where + show = show . hie_module + +instance NFData HieFile where + rnf = rwhnf + + +instance Hashable ModuleName where + hashWithSalt salt = hashWithSalt salt . 
show + + +instance NFData a => NFData (IdentifierDetails a) where + rnf (IdentifierDetails a b) = rnf a `seq` rnf (length b) + +instance NFData RealSrcSpan where + rnf = rwhnf + +srcSpanFileTag, srcSpanStartLineTag, srcSpanStartColTag, + srcSpanEndLineTag, srcSpanEndColTag :: String +srcSpanFileTag = "srcSpanFile" +srcSpanStartLineTag = "srcSpanStartLine" +srcSpanStartColTag = "srcSpanStartCol" +srcSpanEndLineTag = "srcSpanEndLine" +srcSpanEndColTag = "srcSpanEndCol" + +instance ToJSON RealSrcSpan where + toJSON spn = + object + [ fromString srcSpanFileTag .= unpackFS (srcSpanFile spn) + , fromString srcSpanStartLineTag .= srcSpanStartLine spn + , fromString srcSpanStartColTag .= srcSpanStartCol spn + , fromString srcSpanEndLineTag .= srcSpanEndLine spn + , fromString srcSpanEndColTag .= srcSpanEndCol spn + ] + +instance FromJSON RealSrcSpan where + parseJSON = withObject "object" $ \obj -> do + file <- fromString <$> (obj .: fromString srcSpanFileTag) + mkRealSrcSpan + <$> (mkRealSrcLoc file + <$> obj .: fromString srcSpanStartLineTag + <*> obj .: fromString srcSpanStartColTag + ) + <*> (mkRealSrcLoc file + <$> obj .: fromString srcSpanEndLineTag + <*> obj .: fromString srcSpanEndColTag + ) + +instance NFData Type where + rnf = rwhnf + +instance Show a => Show (Bag a) where + show = show . bagToList + +instance Show ModGuts where + show _ = "modguts" +instance NFData ModGuts where + rnf = rwhnf + +instance NFData (ImportDecl GhcPs) where + rnf = rwhnf + +instance (NFData (HsModule a)) where + rnf = rwhnf + +instance Show OccName where show = unpack . printOutputable + + +#if MIN_VERSION_ghc(9,7,0) +instance Hashable OccName where hashWithSalt s n = hashWithSalt s (getKey $ getUnique $ occNameFS n, getKey $ getUnique $ occNameSpace n) +#else +instance Hashable OccName where hashWithSalt s n = hashWithSalt s (getKey $ getUnique n) +#endif + +instance Show HomeModInfo where show = show . mi_module . hm_iface + +instance Show ModuleGraph where show _ = "ModuleGraph {..}" +instance NFData ModuleGraph where rnf = rwhnf + +instance NFData HomeModInfo where + rnf (HomeModInfo iface dets link) = rwhnf iface `seq` rnf dets `seq` rnf link + +instance NFData PkgQual where + rnf NoPkgQual = () + rnf (ThisPkg uid) = rnf uid + rnf (OtherPkg uid) = rnf uid + +instance NFData UnitId where + rnf = rwhnf + +instance NFData NodeKey where + rnf = rwhnf + +instance NFData HomeModLinkable where + rnf = rwhnf + +instance NFData (HsExpr (GhcPass Renamed)) where + rnf = rwhnf + +instance NFData (Pat (GhcPass Renamed)) where + rnf = rwhnf + +instance NFData (HsExpr (GhcPass Typechecked)) where + rnf = rwhnf + +instance NFData (Pat (GhcPass Typechecked)) where + rnf = rwhnf + +instance NFData Extension where + rnf = rwhnf + +instance NFData (UniqFM Name [Name]) where + rnf (ufmToIntMap -> m) = rnf m diff --git a/ghcide/src/Development/IDE/GHC/Util.hs b/ghcide/src/Development/IDE/GHC/Util.hs new file mode 100644 index 0000000000..fb051bda5a --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Util.hs @@ -0,0 +1,327 @@ +{-# LANGUAGE CPP #-} +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +-- | General utility functions, mostly focused around GHC operations. 
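+--
+-- A hedged usage sketch (the rendered result is an assumption, not verified output):
+--
+-- > printOutputable (mkVarOcc "foo")  -- expected: "foo", user style, uniques stripped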
+module Development.IDE.GHC.Util( + modifyDynFlags, + evalGhcEnv, + -- * GHC wrappers + printRdrName, + Development.IDE.GHC.Util.printName, + ParseResult(..), runParser, + lookupPackageConfig, + textToStringBuffer, + bytestringToStringBuffer, + stringBufferToByteString, + moduleImportPath, + cgGutsToCoreModule, + fingerprintToBS, + fingerprintFromByteString, + fingerprintFromStringBuffer, + fingerprintFromPut, + -- * General utilities + readFileUtf8, + hDuplicateTo', + setHieDir, + dontWriteHieFiles, + disableWarningsAsErrors, + printOutputable, + getExtensions, + stripOccNamePrefix, + ) where + +import Control.Concurrent +import Control.Exception as E +import Data.Binary.Put (Put, runPut) +import qualified Data.ByteString as BS +import Data.ByteString.Internal (ByteString (..)) +import qualified Data.ByteString.Internal as BS +import qualified Data.ByteString.Lazy as LBS +import Data.IORef +import Data.List.Extra +import Data.Maybe +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Encoding.Error as T +import Data.Typeable +import Development.IDE.GHC.Compat as GHC hiding (unitState) +import qualified Development.IDE.GHC.Compat.Parser as Compat +import qualified Development.IDE.GHC.Compat.Units as Compat +import Development.IDE.Types.Location +import Foreign.ForeignPtr +import Foreign.Ptr +import Foreign.Storable +import GHC hiding (ParsedModule (..), + parser) +import GHC.IO.BufferedIO (BufferedIO) +import GHC.IO.Device as IODevice +import GHC.IO.Encoding +import GHC.IO.Exception +import GHC.IO.Handle.Internals +import GHC.IO.Handle.Types +import Ide.PluginUtils (unescape) +import System.FilePath + +import Data.Monoid (First (..)) +import GHC.Data.EnumSet +import GHC.Data.FastString +import GHC.Data.StringBuffer +import GHC.Utils.Fingerprint +---------------------------------------------------------------------- +-- GHC setup + +-- | Used to modify dyn flags in preference to calling 'setSessionDynFlags', +-- since that function also reloads packages (which is very slow). +modifyDynFlags :: GhcMonad m => (DynFlags -> DynFlags) -> m () +modifyDynFlags f = do + newFlags <- f <$> getSessionDynFlags + -- We do not use setSessionDynFlags here since we handle package + -- initialization separately. + modifySession $ \h -> + hscSetFlags newFlags h { hsc_IC = (hsc_IC h) {ic_dflags = newFlags} } + +-- | Given a 'Unit' try and find the associated 'PackageConfig' in the environment. +lookupPackageConfig :: Unit -> HscEnv -> Maybe GHC.UnitInfo +lookupPackageConfig unit env = + Compat.lookupUnit' False unitState prClsre unit + where + unitState = Compat.getUnitInfoMap env + prClsre = preloadClosureUs env + + +-- | Convert from the @text@ package to the @GHC@ 'StringBuffer'. +-- Currently implemented somewhat inefficiently (if it ever comes up in a profile). +textToStringBuffer :: T.Text -> StringBuffer +textToStringBuffer = stringToStringBuffer . 
T.unpack + +runParser :: DynFlags -> String -> P a -> ParseResult a +runParser flags str parser = unP parser parseState + where + filename = "" + location = mkRealSrcLoc (mkFastString filename) 1 1 + buffer = stringToStringBuffer str + parseState = Compat.initParserState (Compat.initParserOpts flags) buffer location + +stringBufferToByteString :: StringBuffer -> ByteString +stringBufferToByteString StringBuffer{..} = PS buf cur len + +bytestringToStringBuffer :: ByteString -> StringBuffer +bytestringToStringBuffer (PS buf cur len) = StringBuffer{..} + +-- | Pretty print a 'RdrName' wrapping operators in parens +printRdrName :: RdrName -> String +printRdrName name = T.unpack $ printOutputable $ parenSymOcc rn (ppr rn) + where + rn = rdrNameOcc name + +-- | Pretty print a 'Name' wrapping operators in parens +printName :: Name -> String +printName = printRdrName . nameRdrName + +-- | Run a 'Ghc' monad value using an existing 'HscEnv'. Sets up and tears down all the required +-- pieces, but designed to be more efficient than a standard 'runGhc'. +evalGhcEnv :: HscEnv -> Ghc b -> IO b +evalGhcEnv env act = snd <$> runGhcEnv env act + +-- | Run a 'Ghc' monad value using an existing 'HscEnv'. Sets up and tears down all the required +-- pieces, but designed to be more efficient than a standard 'runGhc'. +runGhcEnv :: HscEnv -> Ghc a -> IO (HscEnv, a) +runGhcEnv env act = do + hsc_env <- initTempFs env + ref <- newIORef hsc_env + res <- unGhc (withCleanupSession act) (Session ref) + (,res) <$> readIORef ref + +-- | Given a module location, and its parse tree, figure out what is the include directory implied by it. +-- For example, given the file @\/usr\/\Test\/Foo\/Bar.hs@ with the module name @Foo.Bar@ the directory +-- @\/usr\/Test@ should be on the include path to find sibling modules. +moduleImportPath :: NormalizedFilePath -> GHC.ModuleName -> Maybe FilePath +-- The call to takeDirectory is required since DAML does not require that +-- the file name matches the module name in the last component. +-- Once that has changed we can get rid of this. +moduleImportPath (takeDirectory . fromNormalizedFilePath -> pathDir) mn + -- This happens for single-component modules since takeDirectory "A" == "." + | modDir == "." = Just pathDir + | otherwise = dropTrailingPathSeparator <$> stripSuffix modDir pathDir + where + -- A for module A.B + modDir = + takeDirectory $ + fromNormalizedFilePath $ toNormalizedFilePath' $ + moduleNameSlashes mn + +-- | Read a UTF8 file, with lenient decoding, so it will never raise a decoding error. +readFileUtf8 :: FilePath -> IO T.Text +readFileUtf8 f = T.decodeUtf8With T.lenientDecode <$> BS.readFile f + +-- | Convert from a 'CgGuts' to a 'CoreModule'. +cgGutsToCoreModule :: SafeHaskellMode -> CgGuts -> ModDetails -> CoreModule +cgGutsToCoreModule safeMode guts modDetails = CoreModule + (cg_module guts) + (md_types modDetails) + (cg_binds guts) + safeMode + +-- | Convert a 'Fingerprint' to a 'ByteString' by copying the byte across. +-- Will produce an 8 byte unreadable ByteString. +fingerprintToBS :: Fingerprint -> BS.ByteString +fingerprintToBS (Fingerprint a b) = BS.unsafeCreate 8 $ \ptr -> do + let ptr' = castPtr ptr + pokeElemOff ptr' 0 a + pokeElemOff ptr' 1 b + +-- | Take the 'Fingerprint' of a 'StringBuffer'. 
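+-- A hedged note: assuming 'textToStringBuffer' yields plain UTF-8 (as
+-- 'stringToStringBuffer' does), this is expected to agree with fingerprinting
+-- the encoded bytes directly, e.g.
+--
+-- > fingerprintFromStringBuffer (textToStringBuffer src)
+-- >   -- expected to equal: fingerprintFromByteString (T.encodeUtf8 src)
+--
+-- where @src@ is a hypothetical in-memory (unsaved) file content.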
+fingerprintFromStringBuffer :: StringBuffer -> IO Fingerprint +fingerprintFromStringBuffer (StringBuffer buf len cur) = + withForeignPtr buf $ \ptr -> fingerprintData (ptr `plusPtr` cur) len + +fingerprintFromByteString :: ByteString -> IO Fingerprint +fingerprintFromByteString bs = do + let (fptr, offset, len) = BS.toForeignPtr bs + withForeignPtr fptr $ \ptr -> + fingerprintData (ptr `plusPtr` offset) len + +fingerprintFromPut :: Put -> IO Fingerprint +fingerprintFromPut = fingerprintFromByteString . LBS.toStrict . runPut + +-- | A slightly modified version of 'hDuplicateTo' from GHC. +-- Importantly, it avoids the bug listed in https://gitlab.haskell.org/ghc/ghc/merge_requests/2318. +hDuplicateTo' :: Handle -> Handle -> IO () +hDuplicateTo' h1@(FileHandle path m1) h2@(FileHandle _ m2) = do + withHandle__' "hDuplicateTo" h2 m2 $ \h2_ -> do + -- The implementation in base has this call to hClose_help. + -- _ <- hClose_help h2_ + -- hClose_help does two things: + -- 1. It flushes the buffer, we replicate this here + _ <- flushWriteBuffer h2_ `E.catch` \(_ :: IOException) -> pure () + -- 2. It closes the handle. This is redundant since dup2 takes care of that + -- but even worse it is actively harmful! Once the handle has been closed + -- another thread is free to reallocate it. This leads to dup2 failing with EBUSY + -- if it happens just in the right moment. + withHandle_' "hDuplicateTo" h1 m1 $ \h1_ -> do + dupHandleTo path h1 Nothing h2_ h1_ (Just handleFinalizer) +hDuplicateTo' h1@(DuplexHandle path r1 w1) h2@(DuplexHandle _ r2 w2) = do + withHandle__' "hDuplicateTo" h2 w2 $ \w2_ -> do + _ <- hClose_help w2_ + withHandle_' "hDuplicateTo" h1 w1 $ \w1_ -> do + dupHandleTo path h1 Nothing w2_ w1_ (Just handleFinalizer) + withHandle__' "hDuplicateTo" h2 r2 $ \r2_ -> do + _ <- hClose_help r2_ + withHandle_' "hDuplicateTo" h1 r1 $ \r1_ -> do + dupHandleTo path h1 (Just w1) r2_ r1_ Nothing +hDuplicateTo' h1 _ = + ioe_dupHandlesNotCompatible h1 + +-- | This is copied unmodified from GHC since it is not exposed. +dupHandleTo :: FilePath + -> Handle + -> Maybe (MVar Handle__) + -> Handle__ + -> Handle__ + -> Maybe HandleFinalizer + -> IO Handle__ +dupHandleTo filepath h other_side + _hto_@Handle__{haDevice=devTo} + h_@Handle__{haDevice=dev} mb_finalizer = do + flushBuffer h_ + case cast devTo of + Nothing -> ioe_dupHandlesNotCompatible h + Just dev' -> do + _ <- IODevice.dup2 dev dev' + FileHandle _ m <- dupHandle_ dev' filepath other_side h_ mb_finalizer + takeMVar m + +-- | This is copied unmodified from GHC since it is not exposed. +-- Note the beautiful inline comment! +dupHandle_ :: (RawIO dev, IODevice dev, BufferedIO dev, Typeable dev) => dev + -> FilePath + -> Maybe (MVar Handle__) + -> Handle__ + -> Maybe HandleFinalizer + -> IO Handle +dupHandle_ new_dev filepath other_side Handle__{..} mb_finalizer = do + -- XXX wrong! + mb_codec <- if isJust haEncoder then fmap Just getLocaleEncoding else return Nothing + mkHandle new_dev filepath haType True{-buffered-} mb_codec + NewlineMode { inputNL = haInputNL, outputNL = haOutputNL } + mb_finalizer other_side + +-- | This is copied unmodified from GHC since it is not exposed. 
+ioe_dupHandlesNotCompatible :: Handle -> IO a +ioe_dupHandlesNotCompatible h = + ioException (IOError (Just h) IllegalOperation "hDuplicateTo" + "handles are incompatible" Nothing Nothing) + +-------------------------------------------------------------------------------- +-- Tracing exactprint terms + +-- | Print a GHC value in `defaultUserStyle` without unique symbols. +-- +-- This is the most common print utility. +-- It will do something additionally compared to what the 'Outputable' instance does. +-- +-- 1. print with a user-friendly style: `a_a4ME` as `a`. +-- 2. unescape escape sequences of printable unicode characters within a pair of double quotes +printOutputable :: Outputable a => a -> T.Text +printOutputable = + -- IfaceTyLit from GHC.Iface.Type implements Outputable with 'show'. + -- Showing a String escapes non-ascii printable characters. We unescape it here. + -- More discussion at https://github.com/haskell/haskell-language-server/issues/3115. + unescape . T.pack . printWithoutUniques +{-# INLINE printOutputable #-} + +getExtensions :: ParsedModule -> [Extension] +getExtensions = toList . extensionFlags . ms_hspp_opts . pm_mod_summary + +-- | When e.g. DuplicateRecordFields is enabled, compiler generates +-- names like "$sel:accessor:One" and "$sel:accessor:Two" to +-- disambiguate record selectors +-- https://ghc.haskell.org/trac/ghc/wiki/Records/OverloadedRecordFields/DuplicateRecordFields#Implementation +stripOccNamePrefix :: T.Text -> T.Text +stripOccNamePrefix name = T.takeWhile (/=':') $ fromMaybe name $ + getFirst $ foldMap (First . (`T.stripPrefix` name)) + occNamePrefixes + +-- | Prefixes that can occur in a GHC OccName +occNamePrefixes :: [T.Text] +occNamePrefixes = + [ + -- long ones + "$con2tag_" + , "$tag2con_" + , "$maxtag_" + + -- four chars + , "$sel:" + , "$tc'" + + -- three chars + , "$dm" + , "$co" + , "$tc" + , "$cp" + , "$fx" + + -- two chars + , "$W" + , "$w" + , "$m" + , "$b" + , "$c" + , "$d" + , "$i" + , "$s" + , "$f" + , "$r" + , "C:" + , "N:" + , "D:" + , "$p" + , "$L" + , "$f" + , "$t" + , "$c" + , "$m" + ] + diff --git a/ghcide/src/Development/IDE/GHC/Warnings.hs b/ghcide/src/Development/IDE/GHC/Warnings.hs new file mode 100644 index 0000000000..fe77ea8456 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Warnings.hs @@ -0,0 +1,61 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} +{-# LANGUAGE ExplicitNamespaces #-} + +module Development.IDE.GHC.Warnings(withWarnings) where + +import Control.Concurrent.Strict +import Control.Lens (over) +import qualified Data.Text as T + +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error +import Development.IDE.Types.Diagnostics + +{- + Note [withWarnings and its dangers] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + withWarnings collects warnings by registering a custom logger which extracts + the SDocs of those warnings. If you receive warnings this way, you will not + get them in a structured form. In the medium term we'd like to remove all + uses of withWarnings to get structured messages everywhere we can. + + For the time being, withWarnings is no longer used for anything in the main + typecheckModule codepath, but it is still used for bytecode/object code + generation, as well as a few other places. + + I suspect some of these functions (e.g. codegen) will need deeper changes to + be able to get diagnostics as a list, though I don't have great evidence for + that atm. 
I haven't taken a look to see if those functions that are wrapped + with this could produce diagnostics another way. + + It would be good for someone to take a look. What we've done so far gives us + diagnostics for renaming and typechecking, and doesn't require us to copy + too much code from GHC or make any deeper changes, and lets us get started + with the bulk of the useful plugin work, but it would be good to have all + diagnostics with structure be collected that way. +-} + +-- | Take a GHC monadic action (e.g. @typecheckModule pm@ for some +-- parsed module 'pm@') and produce a "decorated" action that will +-- harvest any warnings encountered executing the action. The 'phase' +-- argument classifies the context (e.g. "Parser", "Typechecker"). +-- +-- The ModSummary function is required because of +-- https://github.com/ghc/ghc/blob/5f1d949ab9e09b8d95319633854b7959df06eb58/compiler/main/GHC.hs#L623-L640 +-- which basically says that log_action is taken from the ModSummary when GHC feels like it. +-- The given argument lets you refresh a ModSummary log_action +-- +-- Also, See Note [withWarnings and its dangers] for some commentary on this function. +withWarnings :: T.Text -> ((HscEnv -> HscEnv) -> IO a) -> IO ([(Maybe DiagnosticReason, FileDiagnostic)], a) +withWarnings diagSource action = do + warnings <- newVar [] + let newAction :: DynFlags -> LogActionCompat + newAction dynFlags logFlags wr _ loc prUnqual msg = do + let wr_d = map ((wr,) . over fdLspDiagnosticL (attachReason wr)) $ diagFromSDocErrMsg diagSource dynFlags (mkWarnMsg dynFlags wr logFlags loc prUnqual msg) + modifyVar_ warnings $ return . (wr_d:) + newLogger env = pushLogHook (const (logActionCompat (newAction (hsc_dflags env)))) (hsc_logger env) + res <- action $ \env -> putLogHook (newLogger env) env + warns <- readVar warnings + return (reverse $ concat warns, res) diff --git a/ghcide/src/Development/IDE/Import/DependencyInformation.hs b/ghcide/src/Development/IDE/Import/DependencyInformation.hs new file mode 100644 index 0000000000..471cf52eab --- /dev/null +++ b/ghcide/src/Development/IDE/Import/DependencyInformation.hs @@ -0,0 +1,459 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} + +module Development.IDE.Import.DependencyInformation + ( DependencyInformation(..) + , ModuleImports(..) + , RawDependencyInformation(..) + , NodeError(..) + , ModuleParseError(..) + , TransitiveDependencies(..) + , FilePathId(..) + , NamedModuleDep(..) + , ShowableModule(..) + , ShowableModuleEnv(..) + , PathIdMap (..) 
+ , emptyPathIdMap + , getPathId + , lookupPathToId + , insertImport + , pathToId + , idToPath + , idToModLocation + , reachableModules + , processDependencyInformation + , transitiveDeps + , transitiveReverseDependencies + , immediateReverseDependencies + , lookupModuleFile + , BootIdMap + , insertBootId + , lookupFingerprint + ) where + +import Control.DeepSeq +import Data.Bifunctor +import Data.Coerce +import Data.Either +import Data.Graph hiding (edges, path) +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HMS +import Data.IntMap (IntMap) +import qualified Data.IntMap.Lazy as IntMapLazy +import qualified Data.IntMap.Strict as IntMap +import Data.IntSet (IntSet) +import qualified Data.IntSet as IntSet +import Data.List +import Data.List.NonEmpty (NonEmpty (..), nonEmpty) +import qualified Data.List.NonEmpty as NonEmpty +import Data.Maybe +import Data.Tuple.Extra hiding (first, second) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Util (Fingerprint) +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.GHC.Orphans () +import Development.IDE.Import.FindImports (ArtifactsLocation (..)) +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import GHC.Generics (Generic) +import Prelude hiding (mod) + + +-- | The imports for a given module. +newtype ModuleImports = ModuleImports + { moduleImports :: [(Located ModuleName, Maybe FilePathId)] + -- ^ Imports of a module in the current package and the file path of + -- that module on disk (if we found it) + } deriving Show + +-- | For processing dependency information, we need lots of maps and sets of +-- filepaths. Comparing Strings is really slow, so we work with IntMap/IntSet +-- instead and only convert at the edges. +newtype FilePathId = FilePathId { getFilePathId :: Int } + deriving (Show, NFData, Eq, Ord) + +-- | Map from 'FilePathId' +type FilePathIdMap = IntMap + +-- | Set of 'FilePathId's +type FilePathIdSet = IntSet + +data PathIdMap = PathIdMap + { idToPathMap :: !(FilePathIdMap ArtifactsLocation) + , pathToIdMap :: !(HashMap NormalizedFilePath FilePathId) + , nextFreshId :: !Int + } + deriving (Show, Generic) + +instance NFData PathIdMap + +emptyPathIdMap :: PathIdMap +emptyPathIdMap = PathIdMap IntMap.empty HMS.empty 0 + +getPathId :: ArtifactsLocation -> PathIdMap -> (FilePathId, PathIdMap) +getPathId path m@PathIdMap{..} = + case HMS.lookup (artifactFilePath path) pathToIdMap of + Nothing -> + let !newId = FilePathId nextFreshId + in (newId, insertPathId newId ) + Just fileId -> (fileId, m) + where + insertPathId :: FilePathId -> PathIdMap + insertPathId fileId = + PathIdMap + (IntMap.insert (getFilePathId fileId) path idToPathMap) + (HMS.insert (artifactFilePath path) fileId pathToIdMap) + (succ nextFreshId) + +insertImport :: FilePathId -> Either ModuleParseError ModuleImports -> RawDependencyInformation -> RawDependencyInformation +insertImport (FilePathId k) v rawDepInfo = rawDepInfo { rawImports = IntMap.insert k v (rawImports rawDepInfo) } + +pathToId :: PathIdMap -> NormalizedFilePath -> Maybe FilePathId +pathToId PathIdMap{pathToIdMap} path = pathToIdMap HMS.!? 
path + +lookupPathToId :: PathIdMap -> NormalizedFilePath -> Maybe FilePathId +lookupPathToId PathIdMap{pathToIdMap} path = HMS.lookup path pathToIdMap + +idToPath :: PathIdMap -> FilePathId -> NormalizedFilePath +idToPath pathIdMap filePathId = artifactFilePath $ idToModLocation pathIdMap filePathId + +idToModLocation :: PathIdMap -> FilePathId -> ArtifactsLocation +idToModLocation PathIdMap{idToPathMap} (FilePathId i) = idToPathMap IntMap.! i + +type BootIdMap = FilePathIdMap FilePathId + +insertBootId :: FilePathId -> FilePathId -> BootIdMap -> BootIdMap +insertBootId k = IntMap.insert (getFilePathId k) + +-- | Unprocessed results that we find by following imports recursively. +data RawDependencyInformation = RawDependencyInformation + { rawImports :: !(FilePathIdMap (Either ModuleParseError ModuleImports)) + , rawPathIdMap :: !PathIdMap + -- The rawBootMap maps the FilePathId of a hs-boot file to its + -- corresponding hs file. It is used when topologically sorting as we + -- need to add edges between .hs-boot and .hs so that the .hs files + -- appear later in the sort. + , rawModuleMap :: !(FilePathIdMap ShowableModule) + } deriving Show + +data DependencyInformation = + DependencyInformation + { depErrorNodes :: !(FilePathIdMap (NonEmpty NodeError)) + -- ^ Nodes that cannot be processed correctly. + , depModules :: !(FilePathIdMap ShowableModule) + , depModuleDeps :: !(FilePathIdMap FilePathIdSet) + -- ^ For a non-error node, this contains the set of module immediate dependencies + -- in the same package. + , depReverseModuleDeps :: !(IntMap IntSet) + -- ^ Contains a reverse mapping from a module to all those that immediately depend on it. + , depPathIdMap :: !PathIdMap + -- ^ Map from FilePath to FilePathId + , depBootMap :: !BootIdMap + -- ^ Map from hs-boot file to the corresponding hs file + , depModuleFiles :: !(ShowableModuleEnv FilePathId) + -- ^ Map from Module to the corresponding non-boot hs file + , depModuleGraph :: !ModuleGraph + , depTransDepsFingerprints :: !(FilePathIdMap Fingerprint) + -- ^ Map from Module to fingerprint of the transitive dependencies of the module. + , depTransReverseDepsFingerprints :: !(FilePathIdMap Fingerprint) + -- ^ Map from FilePathId to the fingerprint of the transitive reverse dependencies of the module. + , depImmediateReverseDepsFingerprints :: !(FilePathIdMap Fingerprint) + -- ^ Map from FilePathId to the fingerprint of the immediate reverse dependencies of the module. + } deriving (Show, Generic) + +lookupFingerprint :: NormalizedFilePath -> DependencyInformation -> FilePathIdMap Fingerprint -> Maybe Fingerprint +lookupFingerprint fileId DependencyInformation {..} depFingerprintMap = + do + FilePathId cur_id <- lookupPathToId depPathIdMap fileId + IntMap.lookup cur_id depFingerprintMap + +newtype ShowableModule = + ShowableModule {showableModule :: Module} + deriving NFData + +newtype ShowableModuleEnv a = + ShowableModuleEnv {showableModuleEnv :: ModuleEnv a} + +instance Show a => Show (ShowableModuleEnv a) where + show (ShowableModuleEnv x) = show (moduleEnvToList x) +instance NFData a => NFData (ShowableModuleEnv a) where + rnf = rwhnf + +instance Show ShowableModule where show = moduleNameString . moduleName . showableModule + +reachableModules :: DependencyInformation -> [NormalizedFilePath] +reachableModules DependencyInformation{..} = + map (idToPath depPathIdMap . 
FilePathId) $ IntMap.keys depErrorNodes <> IntMap.keys depModuleDeps + +instance NFData DependencyInformation + +-- | This does not contain the actual parse error as that is already reported by GetParsedModule. +data ModuleParseError = ModuleParseError + deriving (Show, Generic) + +instance NFData ModuleParseError + +-- | Error when trying to locate a module. +newtype LocateError = LocateError [Diagnostic] + deriving (Eq, Show, Generic) + +instance NFData LocateError + +-- | An error attached to a node in the dependency graph. +data NodeError + = PartOfCycle (Located ModuleName) [FilePathId] + -- ^ This module is part of an import cycle. The module name corresponds + -- to the import that enters the cycle starting from this module. + -- The list of filepaths represents the elements + -- in the cycle in unspecified order. + | FailedToLocateImport (Located ModuleName) + -- ^ This module has an import that couldn’t be located. + | ParseError ModuleParseError + | ParentOfErrorNode (Located ModuleName) + -- ^ This module is the parent of a module that cannot be + -- processed (either it cannot be parsed, is part of a cycle + -- or the parent of another error node). + deriving (Show, Generic) + +instance NFData NodeError where + rnf (PartOfCycle m fs) = m `seq` rnf fs + rnf (FailedToLocateImport m) = m `seq` () + rnf (ParseError e) = rnf e + rnf (ParentOfErrorNode m) = m `seq` () + +-- | A processed node in the dependency graph. If there was any error +-- during processing the node or any of its dependencies, this is an +-- `ErrorNode`. Otherwise it is a `SuccessNode`. +data NodeResult + = ErrorNode (NonEmpty NodeError) + | SuccessNode [(Located ModuleName, FilePathId)] + deriving Show + +partitionNodeResults + :: [(a, NodeResult)] + -> ([(a, NonEmpty NodeError)], [(a, [(Located ModuleName, FilePathId)])]) +partitionNodeResults = partitionEithers . 
map f + where f (a, ErrorNode errs) = Left (a, errs) + f (a, SuccessNode imps) = Right (a, imps) + +instance Semigroup NodeResult where + ErrorNode errs <> ErrorNode errs' = ErrorNode (errs <> errs') + ErrorNode errs <> SuccessNode _ = ErrorNode errs + SuccessNode _ <> ErrorNode errs = ErrorNode errs + SuccessNode a <> SuccessNode _ = SuccessNode a + +processDependencyInformation :: RawDependencyInformation -> BootIdMap -> ModuleGraph -> FilePathIdMap Fingerprint -> DependencyInformation +processDependencyInformation RawDependencyInformation{..} rawBootMap mg shallowFingerMap = + DependencyInformation + { depErrorNodes = IntMap.fromList errorNodes + , depModuleDeps = moduleDeps + , depReverseModuleDeps = reverseModuleDeps + , depModules = rawModuleMap + , depPathIdMap = rawPathIdMap + , depBootMap = rawBootMap + , depModuleFiles = ShowableModuleEnv reverseModuleMap + , depModuleGraph = mg + , depTransDepsFingerprints = buildTransDepsFingerprintMap moduleDeps shallowFingerMap + , depTransReverseDepsFingerprints = buildTransDepsFingerprintMap reverseModuleDeps shallowFingerMap + , depImmediateReverseDepsFingerprints = buildImmediateDepsFingerprintMap reverseModuleDeps shallowFingerMap + } + where resultGraph = buildResultGraph rawImports + (errorNodes, successNodes) = partitionNodeResults $ IntMap.toList resultGraph + successEdges :: [(FilePathId, [FilePathId])] + successEdges = + map + (bimap FilePathId (map snd)) + successNodes + moduleDeps = + IntMap.fromList $ + map (\(FilePathId v, vs) -> (v, IntSet.fromList $ coerce vs)) + successEdges + reverseModuleDeps = + foldr (\(p, cs) res -> + let new = IntMap.fromList (map (, IntSet.singleton (coerce p)) (coerce cs)) + in IntMap.unionWith IntSet.union new res ) IntMap.empty successEdges + reverseModuleMap = mkModuleEnv $ map (\(i,sm) -> (showableModule sm, FilePathId i)) $ IntMap.toList rawModuleMap + + +-- | Given a dependency graph, buildResultGraph detects and propagates errors in that graph as follows: +-- 1. Mark each node that is part of an import cycle as an error node. +-- 2. Mark each node that has a parse error as an error node. +-- 3. Mark each node whose immediate children could not be located as an error. +-- 4. Recursively propagate errors to parents if they are not already error nodes. 
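+--
+-- A hedged worked example (module names are hypothetical): if A imports B,
+-- B imports C, and C has a parse error, then C becomes @ErrorNode (ParseError ...)@
+-- by rule 2, and B and A become @ErrorNode (ParentOfErrorNode ...)@ by rule 4.
+-- If instead B and C import each other, both are marked @PartOfCycle@ by rule 1,
+-- carrying the import that enters the cycle and the members of the cycle, and A
+-- is again marked @ParentOfErrorNode@.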
+buildResultGraph :: FilePathIdMap (Either ModuleParseError ModuleImports) -> FilePathIdMap NodeResult +buildResultGraph g = propagatedErrors + where + sccs = stronglyConnComp (graphEdges g) + (_, cycles) = partitionSCC sccs + cycleErrors :: IntMap NodeResult + cycleErrors = IntMap.unionsWith (<>) $ map errorsForCycle cycles + errorsForCycle :: [FilePathId] -> IntMap NodeResult + errorsForCycle files = + IntMap.fromListWith (<>) $ coerce $ concatMap (cycleErrorsForFile files) files + cycleErrorsForFile :: [FilePathId] -> FilePathId -> [(FilePathId,NodeResult)] + cycleErrorsForFile cycles' f = + let entryPoints = mapMaybe (findImport f) cycles' + in map (\imp -> (f, ErrorNode (PartOfCycle imp cycles' :| []))) entryPoints + otherErrors = IntMap.map otherErrorsForFile g + otherErrorsForFile :: Either ModuleParseError ModuleImports -> NodeResult + otherErrorsForFile (Left err) = ErrorNode (ParseError err :| []) + otherErrorsForFile (Right ModuleImports{moduleImports}) = + let toEither (imp, Nothing) = Left imp + toEither (imp, Just path) = Right (imp, path) + (errs, imports') = partitionEithers (map toEither moduleImports) + in case nonEmpty errs of + Nothing -> SuccessNode imports' + Just errs' -> ErrorNode (NonEmpty.map FailedToLocateImport errs') + + unpropagatedErrors = IntMap.unionWith (<>) cycleErrors otherErrors + -- The recursion here is fine since we use a lazy map and + -- we only recurse on SuccessNodes. In particular, we do not recurse + -- on nodes that are part of a cycle as they are already marked as + -- error nodes. + propagatedErrors = + IntMapLazy.map propagate unpropagatedErrors + propagate :: NodeResult -> NodeResult + propagate n@(ErrorNode _) = n + propagate n@(SuccessNode imps) = + let results = map (\(imp, FilePathId dep) -> (imp, propagatedErrors IntMap.! dep)) imps + (errs, _) = partitionNodeResults results + in case nonEmpty errs of + Nothing -> n + Just errs' -> ErrorNode (NonEmpty.map (ParentOfErrorNode . fst) errs') + findImport :: FilePathId -> FilePathId -> Maybe (Located ModuleName) + findImport (FilePathId file) importedFile = + case g IntMap.! file of + Left _ -> error "Tried to call findImport on a module with a parse error" + Right ModuleImports{moduleImports} -> + fmap fst $ find (\(_, resolvedImp) -> resolvedImp == Just importedFile) moduleImports + +graphEdges :: FilePathIdMap (Either ModuleParseError ModuleImports) -> [(FilePathId, FilePathId, [FilePathId])] +graphEdges g = + map (\(k, v) -> (FilePathId k, FilePathId k, deps v)) $ IntMap.toList g + where deps :: Either e ModuleImports -> [FilePathId] + deps (Left _) = [] + deps (Right ModuleImports{moduleImports}) = mapMaybe snd moduleImports + +partitionSCC :: [SCC a] -> ([a], [[a]]) +partitionSCC (CyclicSCC xs:rest) = second (xs:) $ partitionSCC rest +partitionSCC (AcyclicSCC x:rest) = first (x:) $ partitionSCC rest +partitionSCC [] = ([], []) + +-- | Transitive reverse dependencies of a file +transitiveReverseDependencies :: NormalizedFilePath -> DependencyInformation -> Maybe [NormalizedFilePath] +transitiveReverseDependencies file DependencyInformation{..} = do + FilePathId cur_id <- lookupPathToId depPathIdMap file + return $ map (idToPath depPathIdMap . 
FilePathId) (IntSet.toList (go cur_id IntSet.empty)) + where + go :: Int -> IntSet -> IntSet + go k i = + let outwards = IntMap.findWithDefault IntSet.empty k depReverseModuleDeps + res = IntSet.union i outwards + new = IntSet.difference i outwards + in IntSet.foldr go res new + +-- | Immediate reverse dependencies of a file +immediateReverseDependencies :: NormalizedFilePath -> DependencyInformation -> Maybe [NormalizedFilePath] +immediateReverseDependencies file DependencyInformation{..} = do + FilePathId cur_id <- lookupPathToId depPathIdMap file + return $ map (idToPath depPathIdMap . FilePathId) (maybe mempty IntSet.toList (IntMap.lookup cur_id depReverseModuleDeps)) + +-- | returns all transitive dependencies in topological order. +transitiveDeps :: DependencyInformation -> NormalizedFilePath -> Maybe TransitiveDependencies +transitiveDeps DependencyInformation{..} file = do + !fileId <- pathToId depPathIdMap file + reachableVs <- + -- Delete the starting node + IntSet.delete (getFilePathId fileId) . + IntSet.fromList . map (fst3 . fromVertex) . + reachable g <$> toVertex (getFilePathId fileId) + let transitiveModuleDepIds = + filter (\v -> v `IntSet.member` reachableVs) $ map (fst3 . fromVertex) vs + let transitiveModuleDeps = + map (idToPath depPathIdMap . FilePathId) transitiveModuleDepIds + pure TransitiveDependencies {..} + where + (g, fromVertex, toVertex) = graphFromEdges edges + edges = map (\(f, fs) -> (f, f, IntSet.toList fs ++ boot_edge f)) $ IntMap.toList depModuleDeps + + -- Need to add an edge between the .hs and .hs-boot file if it exists + -- so the .hs file gets loaded after the .hs-boot file and the right + -- stuff ends up in the HPT. If you don't have this check then GHC will + -- fail to work with ghcide. + boot_edge f = [getFilePathId f' | Just f' <- [IntMap.lookup f depBootMap]] + + vs = topSort g + +lookupModuleFile :: Module -> DependencyInformation -> Maybe NormalizedFilePath +lookupModuleFile mod DependencyInformation{..} + = idToPath depPathIdMap <$> lookupModuleEnv (showableModuleEnv depModuleFiles) mod + +newtype TransitiveDependencies = TransitiveDependencies + { transitiveModuleDeps :: [NormalizedFilePath] + -- ^ Transitive module dependencies in topological order. + -- The module itself is not included. + } deriving (Eq, Show, Generic) + +instance NFData TransitiveDependencies + +data NamedModuleDep = NamedModuleDep { + nmdFilePath :: !NormalizedFilePath, + nmdModuleName :: !ModuleName, + nmdModLocation :: !(Maybe ModLocation) + } + deriving Generic + +instance Eq NamedModuleDep where + a == b = nmdFilePath a == nmdFilePath b + +instance NFData NamedModuleDep where + rnf NamedModuleDep{..} = + rnf nmdFilePath `seq` + rnf nmdModuleName `seq` + -- 'ModLocation' lacks an 'NFData' instance + rwhnf nmdModLocation + +instance Show NamedModuleDep where + show NamedModuleDep{..} = show nmdFilePath + + +buildImmediateDepsFingerprintMap :: FilePathIdMap FilePathIdSet -> FilePathIdMap Fingerprint -> FilePathIdMap Fingerprint +buildImmediateDepsFingerprintMap modulesDeps shallowFingers = + IntMap.fromList + $ map + ( \k -> + ( k, + Util.fingerprintFingerprints $ + map + (shallowFingers IntMap.!) + (k : IntSet.toList (IntMap.findWithDefault IntSet.empty k modulesDeps)) + ) + ) + $ IntMap.keys shallowFingers + +-- | Build a map from file path to its full fingerprint. +-- The fingerprint is depend on both the fingerprints of the file and all its dependencies. +-- This is used to determine if a file has changed and needs to be reloaded. 
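+--
+-- A hedged sketch of the recurrence computed below (A, B, C are hypothetical
+-- modules with A depending on B and B depending on C; @shallow@ stands for the
+-- per-file fingerprints passed in as @shallowFingers@, @full@ for the result):
+--
+-- > full C = Util.fingerprintFingerprints [shallow C]
+-- > full B = Util.fingerprintFingerprints [shallow B, full C]
+-- > full A = Util.fingerprintFingerprints [shallow A, full B]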
+buildTransDepsFingerprintMap :: FilePathIdMap FilePathIdSet -> FilePathIdMap Fingerprint -> FilePathIdMap Fingerprint +buildTransDepsFingerprintMap modulesDeps shallowFingers = go keys IntMap.empty + where + keys = IntMap.keys shallowFingers + go :: [IntSet.Key] -> FilePathIdMap Fingerprint -> FilePathIdMap Fingerprint + go keys acc = + case keys of + [] -> acc + k : ks -> + if IntMap.member k acc + -- already in the map, so we can skip + then go ks acc + -- not in the map, so we need to add it + else + let -- get the dependencies of the current key + deps = IntSet.toList $ IntMap.findWithDefault IntSet.empty k modulesDeps + -- add fingerprints of the dependencies to the accumulator + depFingerprints = go deps acc + -- combine the fingerprints of the dependencies with the current key + combinedFingerprints = Util.fingerprintFingerprints $ shallowFingers IntMap.! k : map (depFingerprints IntMap.!) deps + in -- add the combined fingerprints to the accumulator + go ks (IntMap.insert k combinedFingerprints depFingerprints) diff --git a/ghcide/src/Development/IDE/Import/FindImports.hs b/ghcide/src/Development/IDE/Import/FindImports.hs new file mode 100644 index 0000000000..7c4046a63a --- /dev/null +++ b/ghcide/src/Development/IDE/Import/FindImports.hs @@ -0,0 +1,228 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} + +module Development.IDE.Import.FindImports + ( locateModule + , locateModuleFile + , Import(..) + , ArtifactsLocation(..) + , modSummaryToArtifactsLocation + , isBootLocation + , mkImportDirs + ) where + +import Control.DeepSeq +import Control.Monad.Extra +import Control.Monad.IO.Class +import Data.List (find, isSuffixOf) +import Data.Maybe +import qualified Data.Set as S +import Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Error as ErrUtils +import Development.IDE.GHC.Orphans () +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import GHC.Types.PkgQual +import GHC.Unit.State +import System.FilePath + + +#if MIN_VERSION_ghc(9,11,0) +import GHC.Driver.DynFlags +#endif + +data Import + = FileImport !ArtifactsLocation + | PackageImport + deriving (Show) + +data ArtifactsLocation = ArtifactsLocation + { artifactFilePath :: !NormalizedFilePath + , artifactModLocation :: !(Maybe ModLocation) + , artifactIsSource :: !Bool -- ^ True if a module is a source input + , artifactModule :: !(Maybe Module) + } deriving Show + +instance NFData ArtifactsLocation where + rnf ArtifactsLocation{..} = rnf artifactFilePath `seq` rwhnf artifactModLocation `seq` rnf artifactIsSource `seq` rnf artifactModule + +isBootLocation :: ArtifactsLocation -> Bool +isBootLocation = not . artifactIsSource + +instance NFData Import where + rnf (FileImport x) = rnf x + rnf PackageImport = () + +modSummaryToArtifactsLocation :: NormalizedFilePath -> Maybe ModSummary -> ArtifactsLocation +modSummaryToArtifactsLocation nfp ms = ArtifactsLocation nfp (ms_location <$> ms) source mbMod + where + isSource HsSrcFile = True + isSource _ = False + source = case ms of + Nothing -> "-boot" `isSuffixOf` fromNormalizedFilePath nfp + Just modSum -> isSource (ms_hsc_src modSum) + mbMod = ms_mod <$> ms + +data LocateResult + = LocateNotFound + | LocateFoundReexport UnitId + | LocateFoundFile UnitId NormalizedFilePath + +-- | locate a module in the file system. 
Where we go from *daml to Haskell +locateModuleFile :: MonadIO m + => [(UnitId, [FilePath], S.Set ModuleName)] + -> [String] + -> (ModuleName -> NormalizedFilePath -> m (Maybe NormalizedFilePath)) + -> Bool + -> ModuleName + -> m LocateResult +locateModuleFile import_dirss exts targetFor isSource modName = do + let candidates import_dirs = + [ toNormalizedFilePath' (prefix moduleNameSlashes modName <.> maybeBoot ext) + | prefix <- import_dirs , ext <- exts] + mf <- firstJustM go (concat [map (uid,) (candidates dirs) | (uid, dirs, _) <- import_dirss]) + case mf of + Nothing -> + case find (\(_ , _, reexports) -> S.member modName reexports) import_dirss of + Just (uid,_,_) -> pure $ LocateFoundReexport uid + Nothing -> pure LocateNotFound + Just (uid,file) -> pure $ LocateFoundFile uid file + where + go (uid, candidate) = fmap ((uid,) <$>) $ targetFor modName candidate + maybeBoot ext + | isSource = ext ++ "-boot" + | otherwise = ext + +-- | This function is used to map a package name to a set of import paths. +-- It only returns Just for unit-ids which are possible to import into the +-- current module. In particular, it will return Nothing for 'main' components +-- as they can never be imported into another package. +mkImportDirs :: HscEnv -> (UnitId, DynFlags) -> Maybe (UnitId, ([FilePath], S.Set ModuleName)) +#if MIN_VERSION_ghc(9,11,0) +mkImportDirs _env (i, flags) = Just (i, (importPaths flags, S.fromList $ map reexportTo $ reexportedModules flags)) +#else +mkImportDirs _env (i, flags) = Just (i, (importPaths flags, reexportedModules flags)) +#endif + +-- | locate a module in either the file system or the package database. Where we go from *daml to +-- Haskell +locateModule + :: MonadIO m + => HscEnv + -> [(UnitId, DynFlags)] -- ^ Import directories + -> [String] -- ^ File extensions + -> (ModuleName -> NormalizedFilePath -> m (Maybe NormalizedFilePath)) -- ^ does file exist predicate + -> Located ModuleName -- ^ Module name + -> PkgQual -- ^ Package name + -> Bool -- ^ Is boot module + -> m (Either [FileDiagnostic] Import) +locateModule env comp_info exts targetFor modName mbPkgName isSource = do + case mbPkgName of + -- 'ThisPkg' just means some home module, not the current unit + ThisPkg uid + | Just (dirs, reexports) <- lookup uid import_paths + -> lookupLocal uid dirs reexports + | otherwise -> return $ Left $ notFoundErr env modName $ LookupNotFound [] + -- if a package name is given we only go look for a package + OtherPkg uid + | Just (dirs, reexports) <- lookup uid import_paths + -> lookupLocal uid dirs reexports + | otherwise -> lookupInPackageDB + NoPkgQual -> do + + -- Reexports for current unit have to be empty because they only apply to other units depending on the + -- current unit. If we set the reexports to be the actual reexports then we risk looping forever trying + -- to find the module from the perspective of the current unit. 
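+      -- In short: try the current home unit's import paths and those of the home
+      -- units it depends on; a 'LocateFoundReexport' restarts the search from the
+      -- reexporting unit's perspective; only when nothing is found locally do we
+      -- fall back to 'lookupInPackageDB'.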
+ mbFile <- locateModuleFile ((homeUnitId_ dflags, importPaths dflags, S.empty) : other_imports) exts targetFor isSource $ unLoc modName + case mbFile of + LocateNotFound -> lookupInPackageDB + -- Lookup again with the perspective of the unit reexporting the file + LocateFoundReexport uid -> locateModule (hscSetActiveUnitId uid env) comp_info exts targetFor modName noPkgQual isSource + LocateFoundFile uid file -> toModLocation uid file + where + dflags = hsc_dflags env + import_paths = mapMaybe (mkImportDirs env) comp_info + other_imports = + -- Instead of bringing all the units into scope, only bring into scope the units + -- this one depends on. + -- This way if you have multiple units with the same module names, we won't get confused + -- For example if unit a imports module M from unit B, when there is also a module M in unit C, + -- and unit a only depends on unit b, without this logic there is the potential to get confused + -- about which module unit a imports. + -- Without multi-component support it is hard to recontruct the dependency environment so + -- unit a will have both unit b and unit c in scope. +#if MIN_VERSION_ghc(9,11,0) + map (\uid -> let this_df = homeUnitEnv_dflags (ue_findHomeUnitEnv uid ue) in (uid, importPaths this_df, S.fromList $ map reexportTo $ reexportedModules this_df)) hpt_deps +#else + map (\uid -> let this_df = homeUnitEnv_dflags (ue_findHomeUnitEnv uid ue) in (uid, importPaths this_df, reexportedModules this_df)) hpt_deps +#endif + ue = hsc_unit_env env + units = homeUnitEnv_units $ ue_findHomeUnitEnv (homeUnitId_ dflags) ue + hpt_deps :: [UnitId] + hpt_deps = homeUnitDepends units + + toModLocation uid file = liftIO $ do + loc <- mkHomeModLocation dflags (unLoc modName) (fromNormalizedFilePath file) + let genMod = mkModule (RealUnit $ Definite uid) (unLoc modName) -- TODO support backpack holes + return $ Right $ FileImport $ ArtifactsLocation file (Just loc) (not isSource) (Just genMod) + + lookupLocal uid dirs reexports = do + mbFile <- locateModuleFile [(uid, dirs, reexports)] exts targetFor isSource $ unLoc modName + case mbFile of + LocateNotFound -> return $ Left $ notFoundErr env modName $ LookupNotFound [] + -- Lookup again with the perspective of the unit reexporting the file + LocateFoundReexport uid' -> locateModule (hscSetActiveUnitId uid' env) comp_info exts targetFor modName noPkgQual isSource + LocateFoundFile uid' file -> toModLocation uid' file + + lookupInPackageDB = do + case Compat.lookupModuleWithSuggestions env (unLoc modName) mbPkgName of + LookupFound _m _pkgConfig -> return $ Right PackageImport + reason -> return $ Left $ notFoundErr env modName reason + +-- | Don't call this on a found module. +notFoundErr :: HscEnv -> Located ModuleName -> LookupResult -> [FileDiagnostic] +notFoundErr env modName reason = + mkError' $ ppr' $ cannotFindModule env modName0 $ lookupToFindResult reason + where + dfs = hsc_dflags env + mkError' doc = diagFromString "not found" DiagnosticSeverity_Error (Compat.getLoc modName) doc Nothing + modName0 = unLoc modName + ppr' = showSDoc dfs + -- We convert the lookup result to a find result to reuse GHC's cannotFindModule pretty printer. + lookupToFindResult = + \case + LookupFound _m _pkgConfig -> + pprPanic "Impossible: called lookupToFind on found module." (ppr modName0) + LookupMultiple rs -> FoundMultiple rs + LookupHidden pkg_hiddens mod_hiddens -> + notFound + { fr_pkgs_hidden = map (moduleUnit . fst) pkg_hiddens + , fr_mods_hidden = map (moduleUnit . 
fst) mod_hiddens + } + LookupUnusable unusable -> + let unusables' = map get_unusable unusable +#if MIN_VERSION_ghc(9,6,4) && (!MIN_VERSION_ghc(9,8,1) || MIN_VERSION_ghc(9,8,2)) + get_unusable (_m, ModUnusable r) = r +#else + get_unusable (m, ModUnusable r) = (moduleUnit m, r) +#endif + get_unusable (_, r) = + pprPanic "findLookupResult: unexpected origin" (ppr r) + in notFound {fr_unusables = unusables'} + LookupNotFound suggest -> + notFound {fr_suggestions = suggest} + +notFound :: FindResult +notFound = NotFound + { fr_paths = [] + , fr_pkg = Nothing + , fr_pkgs_hidden = [] + , fr_mods_hidden = [] + , fr_unusables = [] + , fr_suggestions = [] + } + +noPkgQual :: PkgQual +noPkgQual = NoPkgQual diff --git a/ghcide/src/Development/IDE/LSP/HoverDefinition.hs b/ghcide/src/Development/IDE/LSP/HoverDefinition.hs new file mode 100644 index 0000000000..0ba6e22530 --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/HoverDefinition.hs @@ -0,0 +1,94 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE GADTs #-} + +-- | Display information on hover. +module Development.IDE.LSP.HoverDefinition + ( Log(..) + -- * For haskell-language-server + , hover + , foundHover + , gotoDefinition + , gotoTypeDefinition + , gotoImplementation + , documentHighlight + , references + , wsSymbols + ) where + +import Control.Monad.Except (ExceptT) +import Control.Monad.IO.Class +import Data.Maybe (fromMaybe) +import Development.IDE.Core.Actions +import qualified Development.IDE.Core.Rules as Shake +import Development.IDE.Core.Shake (IdeAction, IdeState (..), + runIdeAction) +import Development.IDE.Types.Location +import Ide.Logger +import Ide.Plugin.Error +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +import qualified Data.Text as T + + +data Log + = LogWorkspaceSymbolRequest !T.Text + | LogRequest !T.Text !Position !NormalizedFilePath + deriving (Show) + +instance Pretty Log where + pretty = \case + LogWorkspaceSymbolRequest query -> "Workspace symbols request:" <+> pretty query + LogRequest label pos nfp -> + pretty label <+> "request at position" <+> pretty (showPosition pos) <+> + "in file:" <+> pretty (fromNormalizedFilePath nfp) + +gotoDefinition :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (MessageResult Method_TextDocumentDefinition) +hover :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (Hover |? Null) +gotoTypeDefinition :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (MessageResult Method_TextDocumentTypeDefinition) +gotoImplementation :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) (MessageResult Method_TextDocumentImplementation) +documentHighlight :: Recorder (WithPriority Log) -> IdeState -> TextDocumentPositionParams -> ExceptT PluginError (HandlerM c) ([DocumentHighlight] |? Null) +gotoDefinition = request "Definition" getDefinition (InR $ InR Null) (InL . Definition . InR . map fst) +gotoTypeDefinition = request "TypeDefinition" getTypeDefinition (InR $ InR Null) (InL . Definition . InR . map fst) +gotoImplementation = request "Implementation" getImplementationDefinition (InR $ InR Null) (InL . Definition . 
InR) +hover = request "Hover" getAtPoint (InR Null) foundHover +documentHighlight = request "DocumentHighlight" highlightAtPoint (InR Null) InL + +references :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentReferences +references recorder ide _ (ReferenceParams (TextDocumentIdentifier uri) pos _ _ _) = do + nfp <- getNormalizedFilePathE uri + liftIO $ logWith recorder Debug $ LogRequest "References" pos nfp + InL <$> (liftIO $ Shake.runAction "references" ide $ refsAtPoint nfp pos) + +wsSymbols :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_WorkspaceSymbol +wsSymbols recorder ide _ (WorkspaceSymbolParams _ _ query) = liftIO $ do + logWith recorder Debug $ LogWorkspaceSymbolRequest query + runIdeAction "WorkspaceSymbols" (shakeExtras ide) $ InL . fromMaybe [] <$> workspaceSymbols query + +foundHover :: (Maybe Range, [T.Text]) -> Hover |? Null +foundHover (mbRange, contents) = + InL $ Hover (InL $ MarkupContent MarkupKind_Markdown $ T.intercalate sectionSeparator contents) mbRange + +-- | Respond to and log a hover or go-to-definition request +request + :: T.Text + -> (NormalizedFilePath -> Position -> IdeAction (Maybe a)) + -> b + -> (a -> b) + -> Recorder (WithPriority Log) + -> IdeState + -> TextDocumentPositionParams + -> ExceptT PluginError (HandlerM c) b +request label getResults notFound found recorder ide (TextDocumentPositionParams (TextDocumentIdentifier uri) pos) = liftIO $ do + mbResult <- case uriToFilePath' uri of + Just path -> logAndRunRequest recorder label getResults ide pos path + Nothing -> pure Nothing + pure $ maybe notFound found mbResult + +logAndRunRequest :: Recorder (WithPriority Log) -> T.Text -> (NormalizedFilePath -> Position -> IdeAction b) -> IdeState -> Position -> String -> IO b +logAndRunRequest recorder label getResults ide pos path = do + let filePath = toNormalizedFilePath' path + logWith recorder Debug $ LogRequest label pos filePath + runIdeAction (T.unpack label) (shakeExtras ide) (getResults filePath pos) diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs new file mode 100644 index 0000000000..918e024a4f --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs @@ -0,0 +1,316 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +-- WARNING: A copy of DA.Daml.LanguageServer, try to keep them in sync +-- This version removes the daml: handling +module Development.IDE.LSP.LanguageServer + ( runLanguageServer + , setupLSP + , Log(..) + , ThreadQueue + , runWithWorkerThreads + , Setup (..) 
+ ) where + +import Control.Concurrent.STM +import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Reader +import Data.Aeson (Value) +import Data.Maybe +import qualified Data.Set as Set +import qualified Data.Text as T +import Development.IDE.LSP.Server +import Development.IDE.Session (runWithDb) +import Ide.Types (traceWithSpan) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import qualified Language.LSP.Server as LSP +import System.IO +import UnliftIO.Async +import UnliftIO.Concurrent +import UnliftIO.Directory +import UnliftIO.Exception + +import qualified Colog.Core as Colog +import Control.Monad.IO.Unlift (MonadUnliftIO) +import Control.Monad.Trans.Cont (evalContT) +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.Shake hiding (Log) +import Development.IDE.Core.Tracing +import Development.IDE.Core.WorkerThread (withWorkerQueue) +import qualified Development.IDE.Session as Session +import Development.IDE.Types.Shake (WithHieDb, + WithHieDbShield (..)) +import Ide.Logger +import Language.LSP.Server (LanguageContextEnv, + LspServerLog, + type (<~>)) +data Log + = LogRegisteringIdeConfig !IdeConfiguration + | LogReactorThreadException !SomeException + | LogReactorMessageActionException !SomeException + | LogReactorThreadStopped + | LogCancelledRequest !SomeLspId + | LogSession Session.Log + | LogLspServer LspServerLog + | LogServerShutdownMessage + deriving Show + +instance Pretty Log where + pretty = \case + LogRegisteringIdeConfig ideConfig -> + -- This log is also used to identify if HLS starts successfully in vscode-haskell, + -- don't forget to update the corresponding test in vscode-haskell if the text in + -- the next line has been modified. + "Registering IDE configuration:" <+> viaShow ideConfig + LogReactorThreadException e -> + vcat + [ "ReactorThreadException" + , pretty $ displayException e ] + LogReactorMessageActionException e -> + vcat + [ "ReactorMessageActionException" + , pretty $ displayException e ] + LogReactorThreadStopped -> + "Reactor thread stopped" + LogCancelledRequest requestId -> + "Cancelled request" <+> viaShow requestId + LogSession msg -> pretty msg + LogLspServer msg -> pretty msg + LogServerShutdownMessage -> "Received shutdown message" + +data Setup config m a + = MkSetup + { doInitialize :: LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either (TResponseError Method_Initialize) (LSP.LanguageContextEnv config, a)) + -- ^ the callback invoked when the language server receives the 'Method_Initialize' request + , staticHandlers :: LSP.Handlers m + -- ^ the statically known handlers of the lsp server + , interpretHandler :: (LanguageContextEnv config, a) -> m <~> IO + -- ^ how to interpret @m@ to 'IO' and how to lift 'IO' into @m@ + , onExit :: [IO ()] + -- ^ a list of 'IO' actions that clean up resources and must be run when the server shuts down + } + +runLanguageServer + :: forall config a m. (Show config) + => Recorder (WithPriority Log) + -> LSP.Options + -> Handle -- input + -> Handle -- output + -> config + -> (config -> Value -> Either T.Text config) + -> (config -> m ()) + -> (MVar () -> IO (Setup config m a)) + -> IO () +runLanguageServer recorder options inH outH defaultConfig parseConfig onConfigChange setup = do + -- This MVar becomes full when the server thread exits or we receive exit message from client. + -- LSP server will be canceled when it's full. 
+ clientMsgVar <- newEmptyMVar + + MkSetup + { doInitialize, staticHandlers, interpretHandler, onExit } <- setup clientMsgVar + + let serverDefinition = LSP.ServerDefinition + { LSP.parseConfig = parseConfig + , LSP.onConfigChange = onConfigChange + , LSP.defaultConfig = defaultConfig + -- TODO: magic string + , LSP.configSection = "haskell" + , LSP.doInitialize = doInitialize + , LSP.staticHandlers = const staticHandlers + , LSP.interpretHandler = interpretHandler + , LSP.options = modifyOptions options + } + + let lspCologAction :: forall io. MonadIO io => Colog.LogAction io (Colog.WithSeverity LspServerLog) + lspCologAction = toCologActionWithPrio (cmapWithPrio LogLspServer recorder) + + let runServer = + LSP.runServerWithHandles + lspCologAction + lspCologAction + inH + outH + serverDefinition + + untilMVar clientMsgVar $ + runServer `finally` sequence_ onExit + +setupLSP :: + forall config. + Recorder (WithPriority Log) + -> FilePath -- ^ root directory, see Note [Root Directory] + -> (FilePath -> IO FilePath) -- ^ Map root paths to the location of the hiedb for the project + -> LSP.Handlers (ServerM config) + -> (LSP.LanguageContextEnv config -> FilePath -> WithHieDb -> ThreadQueue -> IO IdeState) + -> MVar () + -> IO (Setup config (ServerM config) IdeState) +setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar = do + -- Send everything over a channel, since you need to wait until after initialise before + -- LspFuncs is available + clientMsgChan :: Chan ReactorMessage <- newChan + + -- An MVar to control the lifetime of the reactor loop. + -- The loop will be stopped and resources freed when it's full + reactorLifetime <- newEmptyMVar + let stopReactorLoop = void $ tryPutMVar reactorLifetime () + + -- Forcefully exit + let exit = void $ tryPutMVar clientMsgVar () + + -- The set of requests ids that we have received but not finished processing + pendingRequests <- newTVarIO Set.empty + -- The set of requests that have been cancelled and are also in pendingRequests + cancelledRequests <- newTVarIO Set.empty + + let cancelRequest reqId = atomically $ do + queued <- readTVar pendingRequests + -- We want to avoid that the list of cancelled requests + -- keeps growing if we receive cancellations for requests + -- that do not exist or have already been processed. + when (reqId `Set.member` queued) $ + modifyTVar cancelledRequests (Set.insert reqId) + let clearReqId reqId = atomically $ do + modifyTVar pendingRequests (Set.delete reqId) + modifyTVar cancelledRequests (Set.delete reqId) + -- We implement request cancellation by racing waitForCancel against + -- the actual request handler. + let waitForCancel reqId = atomically $ do + cancelled <- readTVar cancelledRequests + unless (reqId `Set.member` cancelled) retry + + let staticHandlers = mconcat + [ userHandlers + , cancelHandler cancelRequest + , exitHandler exit + , shutdownHandler recorder stopReactorLoop + ] + -- Cancel requests are special since they need to be handled + -- out of order to be useful. Existing handlers are run afterwards. + + let doInitialize = handleInit recorder defaultRoot getHieDbLoc getIdeState reactorLifetime exit clearReqId waitForCancel clientMsgChan + + let interpretHandler (env, st) = LSP.Iso (LSP.runLspT env . flip (runReaderT . 
unServerM) (clientMsgChan,st)) liftIO + + let onExit = [stopReactorLoop, exit] + + pure MkSetup {doInitialize, staticHandlers, interpretHandler, onExit} + + +handleInit + :: Recorder (WithPriority Log) + -> FilePath -- ^ root directory, see Note [Root Directory] + -> (FilePath -> IO FilePath) + -> (LSP.LanguageContextEnv config -> FilePath -> WithHieDb -> ThreadQueue -> IO IdeState) + -> MVar () + -> IO () + -> (SomeLspId -> IO ()) + -> (SomeLspId -> IO ()) + -> Chan ReactorMessage + -> LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either err (LSP.LanguageContextEnv config, IdeState)) +handleInit recorder defaultRoot getHieDbLoc getIdeState lifetime exitClientMsg clearReqId waitForCancel clientMsgChan env (TRequestMessage _ _ m params) = otTracedHandler "Initialize" (show m) $ \sp -> do + traceWithSpan sp params + -- only shift if lsp root is different from the rootDir + -- see Note [Root Directory] + root <- case LSP.resRootPath env of + Just lspRoot | lspRoot /= defaultRoot -> setCurrentDirectory lspRoot >> return lspRoot + _ -> pure defaultRoot + dbLoc <- getHieDbLoc root + let initConfig = parseConfiguration params + logWith recorder Info $ LogRegisteringIdeConfig initConfig + dbMVar <- newEmptyMVar + + + let handleServerException (Left e) = do + logWith recorder Error $ LogReactorThreadException e + exitClientMsg + handleServerException (Right _) = pure () + + exceptionInHandler e = do + logWith recorder Error $ LogReactorMessageActionException e + + checkCancelled :: forall m . LspId m -> IO () -> (TResponseError m -> IO ()) -> IO () + checkCancelled _id act k = + let sid = SomeLspId _id + in flip finally (clearReqId sid) $ + catch (do + -- We could optimize this by first checking if the id + -- is in the cancelled set. However, this is unlikely to be a + -- bottleneck and the additional check might hide + -- issues with async exceptions that need to be fixed. 
+ cancelOrRes <- race (waitForCancel sid) act + case cancelOrRes of + Left () -> do + logWith recorder Debug $ LogCancelledRequest sid + k $ TResponseError (InL LSPErrorCodes_RequestCancelled) "" Nothing + Right res -> pure res + ) $ \(e :: SomeException) -> do + exceptionInHandler e + k $ TResponseError (InR ErrorCodes_InternalError) (T.pack $ show e) Nothing + _ <- flip forkFinally handleServerException $ do + untilMVar lifetime $ runWithWorkerThreads (cmapWithPrio LogSession recorder) dbLoc $ \withHieDb' threadQueue' -> do + putMVar dbMVar (WithHieDbShield withHieDb',threadQueue') + forever $ do + msg <- readChan clientMsgChan + -- We dispatch notifications synchronously and requests asynchronously + -- This is to ensure that all file edits and config changes are applied before a request is handled + case msg of + ReactorNotification act -> handle exceptionInHandler act + ReactorRequest _id act k -> void $ async $ checkCancelled _id act k + logWith recorder Info LogReactorThreadStopped + + (WithHieDbShield withHieDb, threadQueue) <- takeMVar dbMVar + ide <- getIdeState env root withHieDb threadQueue + registerIdeConfiguration (shakeExtras ide) initConfig + pure $ Right (env,ide) + + +-- | runWithWorkerThreads +-- create several threads to run the session, db and session loader +-- see Note [Serializing runs in separate thread] +runWithWorkerThreads :: Recorder (WithPriority Session.Log) -> FilePath -> (WithHieDb -> ThreadQueue -> IO ()) -> IO () +runWithWorkerThreads recorder dbLoc f = evalContT $ do + sessionRestartTQueue <- withWorkerQueue id + sessionLoaderTQueue <- withWorkerQueue id + (WithHieDbShield hiedb, threadQueue) <- runWithDb recorder dbLoc + liftIO $ f hiedb (ThreadQueue threadQueue sessionRestartTQueue sessionLoaderTQueue) + +-- | Runs the action until it ends or until the given MVar is put. +-- It is important, that the thread that puts the 'MVar' is not dropped before it puts the 'MVar' i.e. it should +-- occur as the final action in a 'finally' or 'bracket', because otherwise this thread will finish early (as soon +-- as the thread receives the BlockedIndefinitelyOnMVar exception) +-- Rethrows any exceptions. +untilMVar :: MonadUnliftIO m => MVar () -> m a -> m () +untilMVar mvar io = race_ (readMVar mvar) io + +cancelHandler :: (SomeLspId -> IO ()) -> LSP.Handlers (ServerM c) +cancelHandler cancelRequest = LSP.notificationHandler SMethod_CancelRequest $ \TNotificationMessage{_params=CancelParams{_id}} -> + liftIO $ cancelRequest (SomeLspId (toLspId _id)) + where toLspId :: (Int32 |? 
T.Text) -> LspId a + toLspId (InL x) = IdInt x + toLspId (InR y) = IdString y + +shutdownHandler :: Recorder (WithPriority Log) -> IO () -> LSP.Handlers (ServerM c) +shutdownHandler recorder stopReactor = LSP.requestHandler SMethod_Shutdown $ \_ resp -> do + (_, ide) <- ask + liftIO $ logWith recorder Debug LogServerShutdownMessage + -- stop the reactor to free up the hiedb connection + liftIO stopReactor + -- flush out the Shake session to record a Shake profile if applicable + liftIO $ shakeShut ide + resp $ Right Null + +exitHandler :: IO () -> LSP.Handlers (ServerM c) +exitHandler exit = LSP.notificationHandler SMethod_Exit $ const $ liftIO exit + +modifyOptions :: LSP.Options -> LSP.Options +modifyOptions x = x{ LSP.optTextDocumentSync = Just $ tweakTDS origTDS + } + where + tweakTDS tds = tds{_openClose=Just True, _change=Just TextDocumentSyncKind_Incremental, _save=Just $ InR $ SaveOptions Nothing} + origTDS = fromMaybe tdsDefault $ LSP.optTextDocumentSync x + tdsDefault = TextDocumentSyncOptions Nothing Nothing Nothing Nothing Nothing + diff --git a/ghcide/src/Development/IDE/LSP/Notifications.hs b/ghcide/src/Development/IDE/LSP/Notifications.hs new file mode 100644 index 0000000000..4f5475442c --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Notifications.hs @@ -0,0 +1,160 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} + +module Development.IDE.LSP.Notifications + ( whenUriFile + , descriptor + , Log(..) + , ghcideNotificationsPluginPriority + ) where + +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types +import qualified Language.LSP.Protocol.Types as LSP + +import Control.Concurrent.STM.Stats (atomically) +import Control.Monad.Extra +import Control.Monad.IO.Class +import qualified Data.HashMap.Strict as HM +import qualified Data.HashSet as S +import qualified Data.Text as Text +import Development.IDE.Core.FileExists (modifyFileExists, + watchedGlobs) +import Development.IDE.Core.FileStore (registerFileWatches, + resetFileStore, + setFileModified, + setSomethingModified) +import qualified Development.IDE.Core.FileStore as FileStore +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.OfInterest hiding (Log, LogShake) +import Development.IDE.Core.Service hiding (Log, LogShake) +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Types.Location +import Ide.Logger +import Ide.Types +import Numeric.Natural + +data Log + = LogShake Shake.Log + | LogFileStore FileStore.Log + | LogOpenedTextDocument !Uri + | LogModifiedTextDocument !Uri + | LogSavedTextDocument !Uri + | LogClosedTextDocument !Uri + | LogWatchedFileEvents !Text.Text + | LogWarnNoWatchedFilesSupport + deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + LogFileStore msg -> pretty msg + LogOpenedTextDocument uri -> "Opened text document:" <+> pretty (getUri uri) + LogModifiedTextDocument uri -> "Modified text document:" <+> pretty (getUri uri) + LogSavedTextDocument uri -> "Saved text document:" <+> pretty (getUri uri) + LogClosedTextDocument uri -> "Closed text document:" <+> pretty (getUri uri) + LogWatchedFileEvents msg -> "Watched file events:" <+> pretty msg + LogWarnNoWatchedFilesSupport -> "Client does not support watched files. 
Falling back to OS polling" + +whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO () +whenUriFile uri act = whenJust (LSP.uriToFilePath uri) $ act . toNormalizedFilePath' + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId desc) { pluginNotificationHandlers = mconcat + [ mkPluginNotificationHandler LSP.SMethod_TextDocumentDidOpen $ + \ide vfs _ (DidOpenTextDocumentParams TextDocumentItem{_uri,_version}) -> liftIO $ do + atomically $ updatePositionMapping ide (VersionedTextDocumentIdentifier _uri _version) [] + whenUriFile _uri $ \file -> do + -- We don't know if the file actually exists, or if the contents match those on disk + -- For example, vscode restores previously unsaved contents on open + setFileModified (cmapWithPrio LogFileStore recorder) (VFSModified vfs) ide False file $ + addFileOfInterest ide file Modified{firstOpen=True} + logWith recorder Debug $ LogOpenedTextDocument _uri + + , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidChange $ + \ide vfs _ (DidChangeTextDocumentParams identifier@VersionedTextDocumentIdentifier{_uri} changes) -> liftIO $ do + atomically $ updatePositionMapping ide identifier changes + whenUriFile _uri $ \file -> do + setFileModified (cmapWithPrio LogFileStore recorder) (VFSModified vfs) ide False file $ + addFileOfInterest ide file Modified{firstOpen=False} + logWith recorder Debug $ LogModifiedTextDocument _uri + + , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidSave $ + \ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do + whenUriFile _uri $ \file -> do + setFileModified (cmapWithPrio LogFileStore recorder) (VFSModified vfs) ide True file $ + addFileOfInterest ide file OnDisk + logWith recorder Debug $ LogSavedTextDocument _uri + + , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidClose $ + \ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do + whenUriFile _uri $ \file -> do + let msg = "Closed text document: " <> getUri _uri + setSomethingModified (VFSModified vfs) ide (Text.unpack msg) $ do + scheduleGarbageCollection ide + deleteFileOfInterest ide file + logWith recorder Debug $ LogClosedTextDocument _uri + + , mkPluginNotificationHandler LSP.SMethod_WorkspaceDidChangeWatchedFiles $ + \ide vfs _ (DidChangeWatchedFilesParams fileEvents) -> liftIO $ do + -- See Note [File existence cache and LSP file watchers] which explains why we get these notifications and + -- what we do with them + -- filter out files of interest, since we already know all about those + -- filter also uris that do not map to filenames, since we cannot handle them + filesOfInterest <- getFilesOfInterest ide + let fileEvents' = + [ (nfp, event) | (FileEvent uri event) <- fileEvents + , Just fp <- [uriToFilePath uri] + , let nfp = toNormalizedFilePath fp + , not $ HM.member nfp filesOfInterest + ] + unless (null fileEvents') $ do + let msg = show fileEvents' + logWith recorder Debug $ LogWatchedFileEvents (Text.pack msg) + setSomethingModified (VFSModified vfs) ide msg $ do + ks1 <- resetFileStore ide fileEvents' + ks2 <- modifyFileExists ide fileEvents' + return (ks1 <> ks2) + + , mkPluginNotificationHandler LSP.SMethod_WorkspaceDidChangeWorkspaceFolders $ + \ide _ _ (DidChangeWorkspaceFoldersParams events) -> liftIO $ do + let add = S.union + substract = flip S.difference + modifyWorkspaceFolders ide + $ add (foldMap (S.singleton . parseWorkspaceFolder) (_added events)) + . 
substract (foldMap (S.singleton . parseWorkspaceFolder) (_removed events)) + + -- Nothing additional to do here beyond what `lsp` does for us, but this prevents + -- complaints about there being no handler defined + , mkPluginNotificationHandler LSP.SMethod_WorkspaceDidChangeConfiguration mempty + + , mkPluginNotificationHandler LSP.SMethod_Initialized $ \ide _ _ _ -> do + --------- Initialize Shake session -------------------------------------------------------------------- + liftIO $ shakeSessionInit (cmapWithPrio LogShake recorder) ide + + --------- Set up file watchers ------------------------------------------------------------------------ + opts <- liftIO $ getIdeOptionsIO $ shakeExtras ide + -- See Note [Which files should we watch?] for an explanation of why the pattern is the way that it is + -- The patterns will be something like "**/.hs", i.e. "any number of directory segments, + -- followed by a file with an extension 'hs'. + -- We use multiple watchers instead of one using '{}' because lsp-test doesn't + -- support that: https://github.com/bubba/lsp-test/issues/77 + let globs = watchedGlobs opts + success <- registerFileWatches globs + unless success $ + liftIO $ logWith recorder Warning LogWarnNoWatchedFilesSupport + ], + + -- The ghcide descriptors should come last'ish so that the notification handlers + -- (which restart the Shake build) run after everything else + pluginPriority = ghcideNotificationsPluginPriority + } + where + desc = "Handles basic notifications for ghcide" + +ghcideNotificationsPluginPriority :: Natural +ghcideNotificationsPluginPriority = defaultPluginPriority - 900 diff --git a/ghcide/src/Development/IDE/LSP/Outline.hs b/ghcide/src/Development/IDE/LSP/Outline.hs new file mode 100644 index 0000000000..af2a0f1c97 --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Outline.hs @@ -0,0 +1,263 @@ +{-# LANGUAGE CPP #-} + +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} + +module Development.IDE.LSP.Outline + ( moduleOutline + ) +where + +import Control.Monad.IO.Class +import Data.Foldable (toList) +import Data.Functor +import Data.Generics hiding (Prefix) +import Data.List.NonEmpty (nonEmpty) +import Data.Maybe +import Development.IDE.Core.Rules +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (rangeToRealSrcSpan, + realSrcSpanToRange) +import Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Types.Location +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types (DocumentSymbol (..), + DocumentSymbolParams (DocumentSymbolParams, _textDocument), + SymbolKind (..), + TextDocumentIdentifier (TextDocumentIdentifier), + type (|?) 
(InL, InR), + uriToFilePath) + + +moduleOutline + :: PluginMethodHandler IdeState Method_TextDocumentDocumentSymbol +moduleOutline ideState _ DocumentSymbolParams{ _textDocument = TextDocumentIdentifier uri } + = liftIO $ case uriToFilePath uri of + Just (toNormalizedFilePath' -> fp) -> do + mb_decls <- fmap fst <$> runAction "Outline" ideState (useWithStale GetParsedModule fp) + pure $ case mb_decls of + Nothing -> InL [] + Just ParsedModule { pm_parsed_source = L _ltop HsModule { hsmodName, hsmodDecls, hsmodImports } } + -> let + declSymbols = mapMaybe documentSymbolForDecl hsmodDecls + moduleSymbol = hsmodName >>= \case + (L (locA -> (RealSrcSpan l _)) m) -> Just $ + (defDocumentSymbol l :: DocumentSymbol) + { _name = printOutputable m + , _kind = SymbolKind_File + , _range = Range (Position 0 0) (Position maxBound 0) -- _ltop is 0 0 0 0 + } + _ -> Nothing + importSymbols = maybe [] pure $ + documentSymbolForImportSummary + (mapMaybe documentSymbolForImport hsmodImports) + allSymbols = case moduleSymbol of + Nothing -> importSymbols <> declSymbols + Just x -> + [ x { _children = Just (importSymbols <> declSymbols) + } + ] + in + InR (InL allSymbols) + + + Nothing -> pure $ InL [] + +documentSymbolForDecl :: LHsDecl GhcPs -> Maybe DocumentSymbol +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (TyClD _ FamDecl { tcdFam = FamilyDecl { fdLName = L _ n, fdInfo, fdTyVars } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = printOutputable n + <> (case printOutputable fdTyVars of + "" -> "" + t -> " " <> t + ) + , _detail = Just $ printOutputable fdInfo + , _kind = SymbolKind_Function + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (TyClD _ ClassDecl { tcdLName = L _ name, tcdSigs, tcdTyVars })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = printOutputable name + <> (case printOutputable tcdTyVars of + "" -> "" + t -> " " <> t + ) + , _kind = SymbolKind_Interface + , _detail = Just "class" + , _children = + Just $ + [ (defDocumentSymbol l' :: DocumentSymbol) + { _name = printOutputable n + , _kind = SymbolKind_Method + , _selectionRange = realSrcSpanToRange l'' + } + | L (locA -> (RealSrcSpan l' _)) (ClassOpSig _ False names _) <- tcdSigs + , L (locA -> (RealSrcSpan l'' _)) n <- names + ] + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (TyClD _ DataDecl { tcdLName = L _ name, tcdDataDefn = HsDataDefn { dd_cons } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = printOutputable name + , _kind = SymbolKind_Struct + , _children = + Just $ + [ (defDocumentSymbol l'' :: DocumentSymbol) + { _name = printOutputable n + , _kind = SymbolKind_Constructor + , _selectionRange = realSrcSpanToRange l' + , _children = toList <$> nonEmpty childs + } + | con <- extract_cons dd_cons + , let (cs, flds) = hsConDeclsBinders con + , let childs = mapMaybe cvtFld flds + , L (locA -> RealSrcSpan l' _) n <- cs + , let l'' = case con of + L (locA -> RealSrcSpan l''' _) _ -> l''' + _ -> l' + ] + } + where + cvtFld :: LFieldOcc GhcPs -> Maybe DocumentSymbol + cvtFld (L (locA -> RealSrcSpan l' _) n) = Just $ (defDocumentSymbol l' :: DocumentSymbol) + { _name = printOutputable (unLoc (foLabel n)) + , _kind = SymbolKind_Field + } + cvtFld _ = Nothing +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (TyClD _ SynDecl { tcdLName = L (locA -> (RealSrcSpan l' _)) n })) = Just + (defDocumentSymbol l :: DocumentSymbol) { _name = printOutputable n + , _kind = SymbolKind_TypeParameter + , _selectionRange = realSrcSpanToRange l' + } +documentSymbolForDecl (L 
(locA -> (RealSrcSpan l _)) (InstD _ ClsInstD { cid_inst = ClsInstDecl { cid_poly_ty } })) + = Just (defDocumentSymbol l :: DocumentSymbol) { _name = printOutputable cid_poly_ty + , _kind = SymbolKind_Interface + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (InstD _ DataFamInstD { dfid_inst = DataFamInstDecl FamEqn { feqn_tycon, feqn_pats } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = + printOutputable $ pprHsArgsApp (unLoc feqn_tycon) Prefix feqn_pats + , _kind = SymbolKind_Interface + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (InstD _ TyFamInstD { tfid_inst = TyFamInstDecl _ FamEqn { feqn_tycon, feqn_pats } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = + printOutputable $ pprHsArgsApp (unLoc feqn_tycon) Prefix feqn_pats + , _kind = SymbolKind_Interface + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (DerivD _ DerivDecl { deriv_type })) = + gfindtype deriv_type <&> \(L (_ :: SrcSpan) name) -> + (defDocumentSymbol l :: DocumentSymbol) { _name = printOutputable @(HsType GhcPs) + name + , _kind = SymbolKind_Interface + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (ValD _ FunBind{fun_id = L _ name})) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = printOutputable name + , _kind = SymbolKind_Function + } +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (ValD _ PatBind{pat_lhs})) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = printOutputable pat_lhs + , _kind = SymbolKind_Function + } + +documentSymbolForDecl (L (locA -> (RealSrcSpan l _)) (ForD _ x)) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = case x of + ForeignImport{} -> name + ForeignExport{} -> name + , _kind = SymbolKind_Object + , _detail = case x of + ForeignImport{} -> Just "import" + ForeignExport{} -> Just "export" + } + where name = printOutputable $ unLoc $ fd_name x + +documentSymbolForDecl _ = Nothing + +-- | Wrap the Document imports into a hierarchical outline for +-- a better overview of symbols in scope. +-- If there are no imports, then no hierarchy will be created. +documentSymbolForImportSummary :: [DocumentSymbol] -> Maybe DocumentSymbol +documentSymbolForImportSummary [] = Nothing +documentSymbolForImportSummary importSymbols = + let + -- safe because if we have no ranges then we don't take this branch + mergeRanges xs = Range (minimum $ map _start xs) (maximum $ map _end xs) + importRange = mergeRanges $ map (\DocumentSymbol{_range} -> _range) importSymbols + in + Just (defDocumentSymbol (rangeToRealSrcSpan "" importRange)) + { _name = "imports" + , _kind = SymbolKind_Module + , _children = Just importSymbols + } + +documentSymbolForImport :: LImportDecl GhcPs -> Maybe DocumentSymbol +documentSymbolForImport (L (locA -> (RealSrcSpan l _)) ImportDecl { ideclName, ideclQualified }) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = "import " <> printOutputable ideclName + , _kind = SymbolKind_Module + , _detail = case ideclQualified of { NotQualified -> Nothing; _ -> Just "qualified" } + } +documentSymbolForImport _ = Nothing + +defDocumentSymbol :: RealSrcSpan -> DocumentSymbol +defDocumentSymbol l = DocumentSymbol { .. 
} where + _detail = Nothing + _deprecated = Nothing + _name = "" + -- This used to be SkUnknown 0, which is invalid, as SymbolKinds start at 1, + -- therefore, I am replacing it with SymbolKind_File, which is the type for 1 + _kind = SymbolKind_File + _range = realSrcSpanToRange l + _selectionRange = realSrcSpanToRange l + _children = Nothing + _tags = Nothing + +-- the version of getConNames for ghc9 is restricted to only the renaming phase +hsConDeclsBinders :: LConDecl GhcPs + -> ([LIdP GhcPs], [LFieldOcc GhcPs]) + -- See hsLTyClDeclBinders for what this does + -- The function is boringly complicated because of the records + -- And since we only have equality, we have to be a little careful +hsConDeclsBinders cons + = go cons + where + go :: LConDecl GhcPs + -> ([LIdP GhcPs], [LFieldOcc GhcPs]) + go r + -- Don't re-mangle the location of field names, because we don't + -- have a record of the full location of the field declaration anyway + = case unLoc r of + -- remove only the first occurrence of any seen field in order to + -- avoid circumventing detection of duplicate fields (#9156) + ConDeclGADT { con_names = names, con_g_args = args } + -> (toList names, flds) + where + flds = get_flds_gadt args + + ConDeclH98 { con_name = name, con_args = args } + -> ([name], flds) + where + flds = get_flds_h98 args + + get_flds_h98 :: HsConDeclH98Details GhcPs + -> [LFieldOcc GhcPs] + get_flds_h98 (RecCon flds) = get_flds (reLoc flds) + get_flds_h98 _ = [] + + get_flds_gadt :: HsConDeclGADTDetails GhcPs + -> [LFieldOcc GhcPs] +#if MIN_VERSION_ghc(9,9,0) + get_flds_gadt (RecConGADT _ flds) = get_flds (reLoc flds) +#else + get_flds_gadt (RecConGADT flds _) = get_flds (reLoc flds) +#endif + get_flds_gadt _ = [] + + get_flds :: Located [LConDeclField GhcPs] + -> [LFieldOcc GhcPs] + get_flds flds = concatMap (cd_fld_names . unLoc) (unLoc flds) + + diff --git a/ghcide/src/Development/IDE/LSP/Server.hs b/ghcide/src/Development/IDE/LSP/Server.hs new file mode 100644 index 0000000000..605250491b --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Server.hs @@ -0,0 +1,61 @@ +{-# LANGUAGE UndecidableInstances #-} +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Development.IDE.LSP.Server + ( ReactorMessage(..) + , ReactorChan + , ServerM(..) + , requestHandler + , notificationHandler + ) where +import Control.Monad.IO.Unlift (MonadUnliftIO) +import Control.Monad.Reader +import Development.IDE.Core.Shake +import Development.IDE.Core.Tracing +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Server (Handlers, LspM) +import qualified Language.LSP.Server as LSP +import Language.LSP.VFS +import UnliftIO.Chan + +data ReactorMessage + = ReactorNotification (IO ()) + | forall m . ReactorRequest (LspId m) (IO ()) (TResponseError m -> IO ()) + +type ReactorChan = Chan ReactorMessage +newtype ServerM c a = ServerM { unServerM :: ReaderT (ReactorChan, IdeState) (LspM c) a } + deriving (Functor, Applicative, Monad, MonadReader (ReactorChan, IdeState), MonadIO, MonadUnliftIO, LSP.MonadLsp c) + +requestHandler + :: forall m c. PluginMethod Request m => + SMethod m + -> (IdeState -> MessageParams m -> LspM c (Either (TResponseError m) (MessageResult m))) + -> Handlers (ServerM c) +requestHandler m k = LSP.requestHandler m $ \TRequestMessage{_method,_id,_params} resp -> do + st@(chan,ide) <- ask + env <- LSP.getLspEnv + let resp' :: Either (TResponseError m) (MessageResult m) -> LspM c () + resp' = flip (runReaderT . unServerM) st . 
resp + trace x = otTracedHandler "Request" (show _method) $ \sp -> do + traceWithSpan sp _params + x + writeChan chan $ ReactorRequest _id (trace $ LSP.runLspT env $ resp' =<< k ide _params) (LSP.runLspT env . resp' . Left) + +notificationHandler + :: forall m c. PluginMethod Notification m => + SMethod m + -> (IdeState -> VFS -> MessageParams m -> LspM c ()) + -> Handlers (ServerM c) +notificationHandler m k = LSP.notificationHandler m $ \TNotificationMessage{_params,_method}-> do + (chan,ide) <- ask + env <- LSP.getLspEnv + -- Take a snapshot of the VFS state on every notification + -- We only need to do this here because the VFS state is only updated + -- on notifications + vfs <- LSP.getVirtualFiles + let trace x = otTracedHandler "Notification" (show _method) $ \sp -> do + traceWithSpan sp _params + x + writeChan chan $ ReactorNotification (trace $ LSP.runLspT env $ k ide vfs _params) diff --git a/ghcide/src/Development/IDE/Main.hs b/ghcide/src/Development/IDE/Main.hs new file mode 100644 index 0000000000..ad4a36327a --- /dev/null +++ b/ghcide/src/Development/IDE/Main.hs @@ -0,0 +1,460 @@ +{-# OPTIONS_GHC -Wno-orphans #-} +module Development.IDE.Main +(Arguments(..) +,defaultArguments +,Command(..) +,IdeCommand(..) +,isLSP +,commandP +,defaultMain +,testing +,Log(..) +) where + +import Control.Concurrent.Extra (withNumCapabilities) +import Control.Concurrent.MVar (MVar, newEmptyMVar, + putMVar, tryReadMVar) +import Control.Concurrent.STM.Stats (dumpSTMStats) +import Control.Monad.Extra (concatMapM, unless, + when) +import Control.Monad.IO.Class (liftIO) +import qualified Data.Aeson as J +import Data.Coerce (coerce) +import Data.Default (Default (def)) +import Data.Hashable (hashed) +import qualified Data.HashMap.Strict as HashMap +import Data.List.Extra (intercalate, + isPrefixOf, nubOrd, + partition) +import Data.Maybe (catMaybes, isJust) +import qualified Data.Text as T +import Development.IDE (Action, + Priority (Debug), + Rules, hDuplicateTo') +import Development.IDE.Core.Debouncer (Debouncer, + newAsyncDebouncer) +import Development.IDE.Core.FileStore (isWatchSupported, + setSomethingModified) +import Development.IDE.Core.IdeConfiguration (IdeConfiguration (..), + modifyClientSettings, + registerIdeConfiguration) +import Development.IDE.Core.OfInterest (FileOfInterestStatus (OnDisk), + kick, + setFilesOfInterest) +import Development.IDE.Core.Rules (mainRule) +import qualified Development.IDE.Core.Rules as Rules +import Development.IDE.Core.RuleTypes (GenerateCore (GenerateCore), + GetHieAst (GetHieAst), + TypeCheck (TypeCheck)) +import Development.IDE.Core.Service (initialise, + runAction) +import qualified Development.IDE.Core.Service as Service +import Development.IDE.Core.Shake (IdeState (shakeExtras), + ThreadQueue (tLoaderQueue), + shakeSessionInit, + uses) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph (action) +import Development.IDE.LSP.LanguageServer (runLanguageServer, + runWithWorkerThreads, + setupLSP) +import qualified Development.IDE.LSP.LanguageServer as LanguageServer +import Development.IDE.Main.HeapStats (withHeapStats) +import qualified Development.IDE.Main.HeapStats as HeapStats +import qualified Development.IDE.Monitoring.OpenTelemetry as OpenTelemetry +import Development.IDE.Plugin (Plugin (pluginHandlers, pluginModifyDynflags, pluginRules)) +import Development.IDE.Plugin.HLS (asGhcIdePlugin) +import qualified Development.IDE.Plugin.HLS as PluginHLS +import qualified Development.IDE.Plugin.HLS.GhcIde as GhcIde +import 
qualified Development.IDE.Plugin.Test as Test +import Development.IDE.Session (SessionLoadingOptions, + getHieDbLoc, + getInitialGhcLibDirDefault, + loadSessionWithOptions, + retryOnSqliteBusy) +import qualified Development.IDE.Session as Session +import Development.IDE.Types.Location (NormalizedUri, + toNormalizedFilePath') +import Development.IDE.Types.Monitoring (Monitoring) +import Development.IDE.Types.Options (IdeGhcSession, + IdeOptions (optCheckParents, optCheckProject, optReportProgress, optRunSubset), + IdeTesting (IdeTesting), + clientSupportsProgress, + defaultIdeOptions, + optModifyDynFlags, + optTesting) +import Development.IDE.Types.Shake (WithHieDb, + toNoFileKey) +import GHC.Conc (getNumProcessors) +import GHC.IO.Encoding (setLocaleEncoding) +import GHC.IO.Handle (hDuplicate) +import HIE.Bios.Cradle (findCradle) +import qualified HieDb.Run as HieDb +import Ide.Logger (Pretty (pretty), + Priority (Info), + Recorder, + WithPriority, + cmapWithPrio, + logWith, nest, vsep, + (<+>)) +import Ide.Plugin.Config (CheckParents (NeverCheck), + Config, checkParents, + checkProject, + getConfigFromNotification) +import Ide.PluginUtils (allLspCmdIds', + getProcessID, + idePluginsToPluginDesc, + pluginDescToIdePlugins) +import Ide.Types (IdeCommand (IdeCommand), + IdePlugins, + PluginDescriptor (PluginDescriptor, pluginCli), + PluginId (PluginId), + ipMap, pluginId) +import qualified Language.LSP.Server as LSP +import Numeric.Natural (Natural) +import Options.Applicative hiding (action) +import qualified System.Directory.Extra as IO +import System.Exit (ExitCode (ExitFailure), + exitWith) +import System.FilePath (takeExtension, + takeFileName) +import System.IO (BufferMode (LineBuffering, NoBuffering), + Handle, hFlush, + hPutStrLn, + hSetBuffering, + hSetEncoding, stderr, + stdin, stdout, utf8) +import System.Random (newStdGen) +import System.Time.Extra (Seconds, offsetTime, + showDuration) + +data Log + = LogHeapStats !HeapStats.Log + | LogLspStart [PluginId] + | LogLspStartDuration !Seconds + | LogShouldRunSubset !Bool + | LogConfigurationChange T.Text + | LogService Service.Log + | LogShake Shake.Log + | LogGhcIde GhcIde.Log + | LogLanguageServer LanguageServer.Log + | LogSession Session.Log + | LogPluginHLS PluginHLS.Log + | LogRules Rules.Log + deriving Show + +instance Pretty Log where + pretty = \case + LogHeapStats msg -> pretty msg + LogLspStart pluginIds -> + nest 2 $ vsep + [ "Starting LSP server..." + , "If you are seeing this in a terminal, you probably should have run WITHOUT the --lsp option!" + , "PluginIds:" <+> pretty (coerce @_ @[T.Text] pluginIds) + ] + LogLspStartDuration duration -> + "Started LSP server in" <+> pretty (showDuration duration) + LogShouldRunSubset shouldRunSubset -> + "shouldRunSubset:" <+> pretty shouldRunSubset + LogConfigurationChange msg -> "Configuration changed:" <+> pretty msg + LogService msg -> pretty msg + LogShake msg -> pretty msg + LogGhcIde msg -> pretty msg + LogLanguageServer msg -> pretty msg + LogSession msg -> pretty msg + LogPluginHLS msg -> pretty msg + LogRules msg -> pretty msg + +data Command + = Check [FilePath] -- ^ Typecheck some paths and print diagnostics. 
Exit code is the number of failures + | Db {hieOptions :: HieDb.Options, hieCommand :: HieDb.Command} + -- ^ Run a command in the hiedb + | LSP -- ^ Run the LSP server + | Custom {ideCommand :: IdeCommand IdeState} -- ^ User defined + deriving Show + +-- TODO move these to hiedb +deriving instance Show HieDb.Command +deriving instance Show HieDb.Options + +isLSP :: Command -> Bool +isLSP LSP = True +isLSP _ = False + +commandP :: IdePlugins IdeState -> Parser Command +commandP plugins = + hsubparser(command "typecheck" (info (Check <$> fileCmd) fileInfo) + <> command "hiedb" (info (Db <$> HieDb.optParser "" True <*> HieDb.cmdParser) hieInfo) + <> command "lsp" (info (pure LSP) lspInfo) + <> pluginCommands + ) + where + fileCmd = many (argument str (metavar "FILES/DIRS...")) + lspInfo = fullDesc <> progDesc "Start talking to an LSP client" + fileInfo = fullDesc <> progDesc "Used as a test bed to check your IDE will work" + hieInfo = fullDesc <> progDesc "Query .hie files" + + pluginCommands = mconcat + [ command (T.unpack pId) (Custom <$> p) + | PluginDescriptor{pluginCli = Just p, pluginId = PluginId pId} <- ipMap plugins + ] + + +data Arguments = Arguments + { argsProjectRoot :: FilePath + , argCommand :: Command + , argsRules :: Rules () + , argsHlsPlugins :: IdePlugins IdeState + , argsGhcidePlugin :: Plugin Config -- ^ Deprecated + , argsSessionLoadingOptions :: SessionLoadingOptions + , argsIdeOptions :: Config -> Action IdeGhcSession -> IdeOptions + , argsLspOptions :: LSP.Options + , argsDefaultHlsConfig :: Config + , argsGetHieDbLoc :: FilePath -> IO FilePath -- ^ Map project roots to the location of the hiedb for the project + , argsDebouncer :: IO (Debouncer NormalizedUri) -- ^ Debouncer used for diagnostics + , argsHandleIn :: IO Handle + , argsHandleOut :: IO Handle + , argsThreads :: Maybe Natural + , argsMonitoring :: IO Monitoring + , argsDisableKick :: Bool -- ^ flag to disable kick used for testing + } + +defaultArguments :: Recorder (WithPriority Log) -> FilePath -> IdePlugins IdeState -> Arguments +defaultArguments recorder projectRoot plugins = Arguments + { argsProjectRoot = projectRoot -- ^ see Note [Root Directory] + , argCommand = LSP + , argsRules = mainRule (cmapWithPrio LogRules recorder) def + , argsGhcidePlugin = mempty + , argsHlsPlugins = pluginDescToIdePlugins (GhcIde.descriptors (cmapWithPrio LogGhcIde recorder)) <> plugins + , argsSessionLoadingOptions = def + , argsIdeOptions = \config ghcSession -> (defaultIdeOptions ghcSession) + { optCheckProject = pure $ checkProject config + , optCheckParents = pure $ checkParents config + } + , argsLspOptions = def + { LSP.optCompletionTriggerCharacters = Just "." + -- Generally people start to notice that something is taking a while at about 1s, so + -- that's when we start reporting progress + , LSP.optProgressStartDelay = 1_000_000 + -- Once progress is being reported, it's nice to see that it's moving reasonably quickly, + -- but not so fast that it's ugly. This number is a bit made up + , LSP.optProgressUpdateDelay = 1_00_000 + } + , argsDefaultHlsConfig = def + , argsGetHieDbLoc = getHieDbLoc + , argsDebouncer = newAsyncDebouncer + , argsThreads = Nothing + , argsHandleIn = pure stdin + , argsHandleOut = do + -- Move stdout to another file descriptor and duplicate stderr + -- to stdout. This guards against stray prints from corrupting the JSON-RPC + -- message stream. 
+ newStdout <- hDuplicate stdout + stderr `hDuplicateTo'` stdout + hSetBuffering stdout NoBuffering + + -- Print out a single space to assert that the above redirection works. + -- This is interleaved with the logger, hence we just print a space here in + -- order not to mess up the output too much. Verified that this breaks + -- the language server tests without the redirection. + putStr " " >> hFlush stdout + return newStdout + , argsMonitoring = OpenTelemetry.monitoring + , argsDisableKick = False + } + + +testing :: Recorder (WithPriority Log) -> FilePath -> IdePlugins IdeState -> Arguments +testing recorder projectRoot plugins = + let + arguments@Arguments{ argsHlsPlugins, argsIdeOptions, argsLspOptions } = + defaultArguments recorder projectRoot plugins + hlsPlugins = pluginDescToIdePlugins $ + idePluginsToPluginDesc argsHlsPlugins + ++ [Test.blockCommandDescriptor "block-command", Test.plugin] + ideOptions config sessionLoader = + let + defOptions = argsIdeOptions config sessionLoader + in + defOptions{ optTesting = IdeTesting True } + lspOptions = argsLspOptions { LSP.optProgressStartDelay = 0, LSP.optProgressUpdateDelay = 0 } + in + arguments + { argsHlsPlugins = hlsPlugins + , argsIdeOptions = ideOptions + , argsLspOptions = lspOptions + } + +defaultMain :: Recorder (WithPriority Log) -> Arguments -> IO () +defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats recorder) fun + where + fun = do + setLocaleEncoding utf8 + pid <- T.pack . show <$> getProcessID + hSetBuffering stderr LineBuffering + + let hlsPlugin = asGhcIdePlugin (cmapWithPrio LogPluginHLS recorder) argsHlsPlugins + hlsCommands = allLspCmdIds' pid argsHlsPlugins + plugins = hlsPlugin <> argsGhcidePlugin + options = argsLspOptions { LSP.optExecuteCommandCommands = LSP.optExecuteCommandCommands argsLspOptions <> Just hlsCommands } + argsParseConfig = getConfigFromNotification argsHlsPlugins + rules = do + argsRules + unless argsDisableKick $ action kick + pluginRules plugins + -- install the main and ghcide-plugin rules + -- install the kick action, which triggers a typecheck on every + -- Shake database restart, i.e. on every user edit. 
+ + debouncer <- argsDebouncer + inH <- argsHandleIn + outH <- argsHandleOut + + numProcessors <- getNumProcessors + let numCapabilities = max 1 $ maybe (numProcessors `div` 2) fromIntegral argsThreads + + case argCommand of + LSP -> withNumCapabilities numCapabilities $ do + ioT <- offsetTime + logWith recorder Info $ LogLspStart (pluginId <$> ipMap argsHlsPlugins) + + let getIdeState :: MVar IdeState -> LSP.LanguageContextEnv Config -> FilePath -> WithHieDb -> Shake.ThreadQueue -> IO IdeState + getIdeState ideStateVar env rootPath withHieDb threadQueue = do + t <- ioT + logWith recorder Info $ LogLspStartDuration t + sessionLoader <- loadSessionWithOptions (cmapWithPrio LogSession recorder) argsSessionLoadingOptions rootPath (tLoaderQueue threadQueue) + config <- LSP.runLspT env LSP.getConfig + let def_options = argsIdeOptions config sessionLoader + + -- disable runSubset if the client doesn't support watched files + runSubset <- (optRunSubset def_options &&) <$> LSP.runLspT env isWatchSupported + logWith recorder Debug $ LogShouldRunSubset runSubset + + let ideOptions = def_options + { optReportProgress = clientSupportsProgress caps + , optModifyDynFlags = optModifyDynFlags def_options <> pluginModifyDynflags plugins + , optRunSubset = runSubset + } + caps = LSP.resClientCapabilities env + monitoring <- argsMonitoring + ide <- initialise + (cmapWithPrio LogService recorder) + argsDefaultHlsConfig + argsHlsPlugins + rules + (Just env) + debouncer + ideOptions + withHieDb + threadQueue + monitoring + rootPath + putMVar ideStateVar ide + pure ide + + let setup ideStateVar = setupLSP (cmapWithPrio LogLanguageServer recorder) argsProjectRoot argsGetHieDbLoc (pluginHandlers plugins) (getIdeState ideStateVar) + -- See Note [Client configuration in Rules] + onConfigChange ideStateVar cfg = do + -- TODO: this is nuts, we're converting back to JSON just to get a fingerprint + let cfgObj = J.toJSON cfg + mide <- liftIO $ tryReadMVar ideStateVar + case mide of + Nothing -> pure () + Just ide -> liftIO $ do + let msg = T.pack $ show cfg + setSomethingModified Shake.VFSUnmodified ide "config change" $ do + logWith recorder Debug $ LogConfigurationChange msg + modifyClientSettings ide (const $ Just cfgObj) + return [toNoFileKey Rules.GetClientSettings] + + do + ideStateVar <- newEmptyMVar + runLanguageServer (cmapWithPrio LogLanguageServer recorder) options inH outH argsDefaultHlsConfig argsParseConfig (onConfigChange ideStateVar) (setup ideStateVar) + dumpSTMStats + Check argFiles -> do + let dir = argsProjectRoot + dbLoc <- getHieDbLoc dir + runWithWorkerThreads (cmapWithPrio LogSession recorder) dbLoc $ \hiedb threadQueue -> do + -- GHC produces messages with UTF8 in them, so make sure the terminal doesn't error + hSetEncoding stdout utf8 + hSetEncoding stderr utf8 + + putStrLn $ "ghcide setup tester in " ++ dir ++ "." + putStrLn "Report bugs at https://github.com/haskell/haskell-language-server/issues" + + putStrLn $ "\nStep 1/4: Finding files to test in " ++ dir + files <- expandFiles (argFiles ++ ["." 
| null argFiles]) + -- LSP works with absolute file paths, so try and behave similarly + absoluteFiles <- nubOrd <$> mapM IO.canonicalizePath files + putStrLn $ "Found " ++ show (length absoluteFiles) ++ " files" + + putStrLn "\nStep 2/4: Looking for hie.yaml files that control setup" + cradles <- mapM findCradle absoluteFiles + let ucradles = nubOrd cradles + let n = length ucradles + putStrLn $ "Found " ++ show n ++ " cradle" ++ ['s' | n /= 1] + when (n > 0) $ putStrLn $ " (" ++ intercalate ", " (catMaybes ucradles) ++ ")" + putStrLn "\nStep 3/4: Initializing the IDE" + sessionLoader <- loadSessionWithOptions (cmapWithPrio LogSession recorder) argsSessionLoadingOptions dir (tLoaderQueue threadQueue) + let def_options = argsIdeOptions argsDefaultHlsConfig sessionLoader + ideOptions = def_options + { optCheckParents = pure NeverCheck + , optCheckProject = pure False + , optModifyDynFlags = optModifyDynFlags def_options <> pluginModifyDynflags plugins + } + ide <- initialise (cmapWithPrio LogService recorder) argsDefaultHlsConfig argsHlsPlugins rules Nothing debouncer ideOptions hiedb threadQueue mempty dir + shakeSessionInit (cmapWithPrio LogShake recorder) ide + registerIdeConfiguration (shakeExtras ide) $ IdeConfiguration mempty (hashed Nothing) + + putStrLn "\nStep 4/4: Type checking the files" + setFilesOfInterest ide $ HashMap.fromList $ map ((,OnDisk) . toNormalizedFilePath') absoluteFiles + results <- runAction "User TypeCheck" ide $ uses TypeCheck (map toNormalizedFilePath' absoluteFiles) + _results <- runAction "GetHie" ide $ uses GetHieAst (map toNormalizedFilePath' absoluteFiles) + _results <- runAction "GenerateCore" ide $ uses GenerateCore (map toNormalizedFilePath' absoluteFiles) + let (worked, failed) = partition fst $ zip (map isJust results) absoluteFiles + when (failed /= []) $ + putStr $ unlines $ "Files that failed:" : map ((++) " * " . snd) failed + + let nfiles xs = let n' = length xs in if n' == 1 then "1 file" else show n' ++ " files" + putStrLn $ "\nCompleted (" ++ nfiles worked ++ " worked, " ++ nfiles failed ++ " failed)" + + unless (null failed) (exitWith $ ExitFailure (length failed)) + Db opts cmd -> do + let root = argsProjectRoot + dbLoc <- getHieDbLoc root + hPutStrLn stderr $ "Using hiedb at: " ++ dbLoc + mlibdir <- getInitialGhcLibDirDefault (cmapWithPrio LogSession recorder) root + rng <- newStdGen + case mlibdir of + Nothing -> exitWith $ ExitFailure 1 + Just libdir -> retryOnSqliteBusy (cmapWithPrio LogSession recorder) rng (HieDb.runCommand libdir opts{HieDb.database = dbLoc} cmd) + + Custom (IdeCommand c) -> do + let root = argsProjectRoot + dbLoc <- getHieDbLoc root + runWithWorkerThreads (cmapWithPrio LogSession recorder) dbLoc $ \hiedb threadQueue -> do + sessionLoader <- loadSessionWithOptions (cmapWithPrio LogSession recorder) argsSessionLoadingOptions "." 
(tLoaderQueue threadQueue) + let def_options = argsIdeOptions argsDefaultHlsConfig sessionLoader + ideOptions = def_options + { optCheckParents = pure NeverCheck + , optCheckProject = pure False + , optModifyDynFlags = optModifyDynFlags def_options <> pluginModifyDynflags plugins + } + ide <- initialise (cmapWithPrio LogService recorder) argsDefaultHlsConfig argsHlsPlugins rules Nothing debouncer ideOptions hiedb threadQueue mempty root + shakeSessionInit (cmapWithPrio LogShake recorder) ide + registerIdeConfiguration (shakeExtras ide) $ IdeConfiguration mempty (hashed Nothing) + c ide + +expandFiles :: [FilePath] -> IO [FilePath] +expandFiles = concatMapM $ \x -> do + b <- IO.doesFileExist x + if b + then return [x] + else do + let recurse "." = True + recurse y | "." `isPrefixOf` takeFileName y = False -- skip .git etc + recurse y = takeFileName y `notElem` ["dist", "dist-newstyle"] -- cabal directories + files <- filter (\y -> takeExtension y `elem` [".hs", ".lhs"]) <$> IO.listFilesInside (return . recurse) x + when (null files) $ + fail $ "Couldn't find any .hs/.lhs files inside directory: " ++ x + return files diff --git a/ghcide/src/Development/IDE/Main/HeapStats.hs b/ghcide/src/Development/IDE/Main/HeapStats.hs new file mode 100644 index 0000000000..a6f685b68c --- /dev/null +++ b/ghcide/src/Development/IDE/Main/HeapStats.hs @@ -0,0 +1,74 @@ +-- | Logging utilities for reporting heap statistics +module Development.IDE.Main.HeapStats ( withHeapStats, Log(..)) where + +import Control.Concurrent +import Control.Concurrent.Async +import Control.Monad +import Data.Word +import GHC.Stats +import Ide.Logger (Pretty (pretty), Priority (Info), + Recorder, WithPriority, hsep, + logWith, (<+>)) +import Text.Printf (printf) + +data Log + = LogHeapStatsPeriod !Int + | LogHeapStatsDisabled + | LogHeapStats !Word64 !Word64 + deriving Show + +instance Pretty Log where + pretty = \case + LogHeapStatsPeriod period -> + "Logging heap statistics every" <+> pretty (toFormattedSeconds period) + LogHeapStatsDisabled -> + "Heap statistics are not enabled (RTS option -T is needed)" + LogHeapStats liveBytes heapSize -> + hsep + [ "Live bytes:" + , pretty (toFormattedMegabytes liveBytes) + , "Heap size:" + , pretty (toFormattedMegabytes heapSize) ] + where + toFormattedSeconds :: Int -> String + toFormattedSeconds s = printf "%.2fs" (fromIntegral @Int @Double s / 1e6) + + toFormattedMegabytes :: Word64 -> String + toFormattedMegabytes b = printf "%.2fMB" (fromIntegral @Word64 @Double b / 1e6) + +-- | Interval at which to report the latest heap statistics. +heapStatsInterval :: Int +heapStatsInterval = 60_000_000 -- 60s + +-- | Report the live bytes and heap size at the last major collection. +logHeapStats :: Recorder (WithPriority Log) -> IO () +logHeapStats l = do + stats <- getRTSStats + -- live_bytes is the total amount of live memory in a program + -- (corresponding to the amount on a heap profile) + let live_bytes = gcdetails_live_bytes (gc stats) + -- heap_size is the total amount of memory the RTS is using + -- this corresponds closer to OS memory usage + heap_size = gcdetails_mem_in_use_bytes (gc stats) + logWith l Info $ LogHeapStats live_bytes heap_size + +-- | An action which logs heap statistics at the 'heapStatsInterval' +heapStatsThread :: Recorder (WithPriority Log) -> IO r +heapStatsThread l = forever $ do + threadDelay heapStatsInterval + logHeapStats l + +-- | A helper function which launches the 'heapStatsThread' and kills it +-- appropriately when the inner action finishes. 
It also checks to see +-- if `-T` is enabled. +withHeapStats :: Recorder (WithPriority Log) -> IO r -> IO r +withHeapStats l k = do + enabled <- getRTSStatsEnabled + if enabled + then do + logWith l Info $ LogHeapStatsPeriod heapStatsInterval + withAsync (heapStatsThread l) (const k) + else do + logWith l Info LogHeapStatsDisabled + k + diff --git a/ghcide/src/Development/IDE/Monitoring/OpenTelemetry.hs b/ghcide/src/Development/IDE/Monitoring/OpenTelemetry.hs new file mode 100644 index 0000000000..184a5c1ba9 --- /dev/null +++ b/ghcide/src/Development/IDE/Monitoring/OpenTelemetry.hs @@ -0,0 +1,31 @@ +module Development.IDE.Monitoring.OpenTelemetry (monitoring) where + +import Control.Concurrent.Async (Async, async, cancel) +import Control.Monad (forever) +import Data.IORef.Extra (atomicModifyIORef'_, + newIORef, readIORef) +import Data.Text.Encoding (encodeUtf8) +import Debug.Trace.Flags (userTracingEnabled) +import Development.IDE.Types.Monitoring (Monitoring (..)) +import OpenTelemetry.Eventlog (mkValueObserver, observe) +import System.Time.Extra (Seconds, sleep) + +-- | Dump monitoring to the eventlog using the Opentelemetry package +monitoring :: IO Monitoring +monitoring + | userTracingEnabled = do + actions <- newIORef [] + let registerCounter name readA = do + observer <- mkValueObserver (encodeUtf8 name) + let update = observe observer . fromIntegral =<< readA + atomicModifyIORef'_ actions (update :) + registerGauge = registerCounter + let start = do + a <- regularly 1 $ sequence_ =<< readIORef actions + return (cancel a) + return Monitoring{..} + | otherwise = mempty + + +regularly :: Seconds -> IO () -> IO (Async ()) +regularly delay act = async $ forever (act >> sleep delay) diff --git a/ghcide/src/Development/IDE/Plugin.hs b/ghcide/src/Development/IDE/Plugin.hs new file mode 100644 index 0000000000..0e682d6c9f --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin.hs @@ -0,0 +1,23 @@ +module Development.IDE.Plugin ( Plugin(..) ) where + +import Data.Default +import Development.IDE.Graph + +import Development.IDE.LSP.Server +import Ide.Types (DynFlagsModifications) +import qualified Language.LSP.Server as LSP + +data Plugin c = Plugin + {pluginRules :: Rules () + ,pluginHandlers :: LSP.Handlers (ServerM c) + ,pluginModifyDynflags :: c -> DynFlagsModifications + } + +instance Default (Plugin c) where + def = Plugin mempty mempty mempty + +instance Semigroup (Plugin c) where + Plugin x1 h1 d1 <> Plugin x2 h2 d2 = Plugin (x1<>x2) (h1 <> h2) (d1 <> d2) + +instance Monoid (Plugin c) where + mempty = def diff --git a/ghcide/src/Development/IDE/Plugin/Completions.hs b/ghcide/src/Development/IDE/Plugin/Completions.hs new file mode 100644 index 0000000000..d92bf1da85 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Completions.hs @@ -0,0 +1,259 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Plugin.Completions + ( descriptor + , Log(..) 
+ , ghcideCompletionsPluginPriority + ) where + +import Control.Concurrent.Async (concurrently) +import Control.Concurrent.STM.Stats (readTVarIO) +import Control.Lens ((&), (.~), (?~)) +import Control.Monad.IO.Class +import Control.Monad.Trans.Except (ExceptT (ExceptT), + withExceptT) +import qualified Data.HashMap.Strict as Map +import qualified Data.HashSet as Set +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Core.Compile +import Development.IDE.Core.FileStore (getUriContents) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service hiding (Log, LogShake) +import Development.IDE.Core.Shake hiding (Log, + knownTargets) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Util +import Development.IDE.Graph +import Development.IDE.Plugin.Completions.Logic +import Development.IDE.Plugin.Completions.Types +import Development.IDE.Spans.Common +import Development.IDE.Spans.Documentation +import Development.IDE.Types.Exports +import Development.IDE.Types.HscEnvEq (HscEnvEq (envPackageExports, envVisibleModuleNames), + hscEnv) +import qualified Development.IDE.Types.KnownTargets as KT +import Development.IDE.Types.Location +import Ide.Logger (Pretty (pretty), + Recorder, + WithPriority, + cmapWithPrio) +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Numeric.Natural +import Prelude hiding (mod) +import Text.Fuzzy.Parallel (Scored (..)) + +import Development.IDE.Core.Rules (usePropertyAction) + +import qualified Ide.Plugin.Config as Config + +import qualified GHC.LanguageExtensions as LangExt + +data Log = LogShake Shake.Log deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + +ghcideCompletionsPluginPriority :: Natural +ghcideCompletionsPluginPriority = defaultPluginPriority + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId desc) + { pluginRules = produceCompletions recorder + , pluginHandlers = mkPluginHandler SMethod_TextDocumentCompletion getCompletionsLSP + <> mkResolveHandler SMethod_CompletionItemResolve resolveCompletion + , pluginConfigDescriptor = defaultConfigDescriptor {configCustomConfig = mkCustomConfig properties} + , pluginPriority = ghcideCompletionsPluginPriority + } + where + desc = "Provides Haskell completions" + + +produceCompletions :: Recorder (WithPriority Log) -> Rules () +produceCompletions recorder = do + define (cmapWithPrio LogShake recorder) $ \LocalCompletions file -> do + let uri = fromNormalizedUri $ normalizedFilePathToUri file + mbPm <- useWithStale GetParsedModule file + case mbPm of + Just (pm, _) -> do + let cdata = localCompletionsForParsedModule uri pm + return ([], Just cdata) + _ -> return ([], Nothing) + define (cmapWithPrio LogShake recorder) $ \NonLocalCompletions file -> do + -- For non local completions we avoid depending on the parsed module, + -- synthesizing a fake module with an empty body from the buffer + -- in the ModSummary, which preserves all the imports + ms <- fmap fst <$> useWithStale GetModSummaryWithoutTimestamps file + mbSess <- fmap fst <$> useWithStale GhcSessionDeps file + + case (ms, mbSess) of + (Just ModSummaryResult{..}, Just sess) -> do + let env = hscEnv sess + -- We do this to 
be able to provide completions of items that are not restricted to the explicit list
+                        (global, inScope) <- liftIO $ tcRnImportDecls env (dropListFromImportDecl <$> msrImports) `concurrently` tcRnImportDecls env msrImports
+                        case (global, inScope) of
+                            ((_, Just globalEnv), (_, Just inScopeEnv)) -> do
+                                visibleMods <- liftIO $ fmap (fromMaybe []) $ envVisibleModuleNames sess
+                                let uri = fromNormalizedUri $ normalizedFilePathToUri file
+                                let cdata = cacheDataProducer uri visibleMods (ms_mod msrModSummary) globalEnv inScopeEnv msrImports
+                                return ([], Just cdata)
+                            (_diag, _) ->
+                                return ([], Nothing)
+                _ -> return ([], Nothing)
+
+-- Drop any explicit imports in ImportDecl if not hidden
+dropListFromImportDecl :: LImportDecl GhcPs -> LImportDecl GhcPs
+dropListFromImportDecl iDecl = let
+    f d@ImportDecl {ideclImportList} = case ideclImportList of
+        Just (Exactly, _) -> d {ideclImportList=Nothing}
+        -- if hiding or Nothing just return d
+        _ -> d
+    f x = x
+    in f <$> iDecl
+
+resolveCompletion :: ResolveFunction IdeState CompletionResolveData Method_CompletionItemResolve
+resolveCompletion ide _pid comp@CompletionItem{_detail,_documentation,_data_} uri (CompletionResolveData _ needType (NameDetails mod occ)) =
+  do
+    file <- getNormalizedFilePathE uri
+    (sess,_) <- withExceptT (const PluginStaleResolve)
+        $ runIdeActionE "CompletionResolve.GhcSessionDeps" (shakeExtras ide)
+        $ useWithStaleFastE GhcSessionDeps file
+    let nc = ideNc $ shakeExtras ide
+    name <- liftIO $ lookupNameCache nc mod occ
+    mdkm <- liftIO $ runIdeAction "CompletionResolve.GetDocMap" (shakeExtras ide) $ useWithStaleFast GetDocMap file
+    let (dm,km) = case mdkm of
+          Just (DKMap docMap tyThingMap, _) -> (docMap,tyThingMap)
+          Nothing -> (mempty, mempty)
+    doc <- case lookupNameEnv dm name of
+      Just doc -> pure $ spanDocToMarkdown doc
+      Nothing -> liftIO $ spanDocToMarkdown <$> getDocumentationTryGhc (hscEnv sess) name
+    typ <- case lookupNameEnv km name of
+      _ | not needType -> pure Nothing
+      Just ty -> pure (safeTyThingType ty)
+      Nothing -> do
+        (safeTyThingType =<<) <$> liftIO (lookupName (hscEnv sess) name)
+    let det1 = case typ of
+          Just ty -> Just (":: " <> printOutputable (stripForall ty) <> "\n")
+          Nothing -> Nothing
+        doc1 = case _documentation of
+          Just (InR (MarkupContent MarkupKind_Markdown old)) ->
+            InR $ MarkupContent MarkupKind_Markdown $ T.intercalate sectionSeparator (old:doc)
+          _ -> InR $ MarkupContent MarkupKind_Markdown $ T.intercalate sectionSeparator doc
+    pure (comp & L.detail .~ (det1 <> _detail)
+               & L.documentation ?~ doc1)
+  where
+    stripForall ty = case splitForAllTyCoVars ty of
+      (_,res) -> res
+
+-- | Generate completions.
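+-- Reads the current buffer contents, combines the cached 'LocalCompletions'
+-- and 'NonLocalCompletions' rule results with the exports map, works out the
+-- prefix under the cursor, and hands everything to 'getCompletions' before
+-- ranking the results with 'orderedCompletions'.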
+getCompletionsLSP :: PluginMethodHandler IdeState Method_TextDocumentCompletion +getCompletionsLSP ide plId + CompletionParams{_textDocument=TextDocumentIdentifier uri + ,_position=position + ,_context=completionContext} = ExceptT $ do + contentsMaybe <- + liftIO $ runAction "Completion" ide $ getUriContents $ toNormalizedUri uri + fmap Right $ case (contentsMaybe, uriToFilePath' uri) of + (Just cnts, Just path) -> do + let npath = toNormalizedFilePath' path + (ideOpts, compls, moduleExports, astres) <- liftIO $ runIdeAction "Completion" (shakeExtras ide) $ do + opts <- liftIO $ getIdeOptionsIO $ shakeExtras ide + localCompls <- useWithStaleFast LocalCompletions npath + nonLocalCompls <- useWithStaleFast NonLocalCompletions npath + pm <- useWithStaleFast GetParsedModule npath + binds <- fromMaybe (mempty, zeroMapping) <$> useWithStaleFast GetBindings npath + knownTargets <- liftIO $ runAction "Completion" ide $ useNoFile GetKnownTargets + let localModules = maybe [] (Map.keys . targetMap) knownTargets + let lModules = mempty{importableModules = map toModueNameText localModules} + -- set up the exports map including both package and project-level identifiers + packageExportsMapIO <- fmap(envPackageExports . fst) <$> useWithStaleFast GhcSession npath + packageExportsMap <- mapM liftIO packageExportsMapIO + projectExportsMap <- liftIO $ readTVarIO (exportsMap $ shakeExtras ide) + let exportsMap = fromMaybe mempty packageExportsMap <> projectExportsMap + + let moduleExports = getModuleExportsMap exportsMap + exportsCompItems = foldMap (map (fromIdentInfo uri) . Set.toList) . nonDetOccEnvElts . getExportsMap $ exportsMap + exportsCompls = mempty{anyQualCompls = exportsCompItems} + let compls = (fst <$> localCompls) <> (fst <$> nonLocalCompls) <> Just exportsCompls <> Just lModules + + -- get HieAst if OverloadedRecordDot is enabled + let uses_overloaded_record_dot (ms_hspp_opts . msrModSummary -> dflags) = xopt LangExt.OverloadedRecordDot dflags + ms <- fmap fst <$> useWithStaleFast GetModSummaryWithoutTimestamps npath + astres <- case ms of + Just ms' | uses_overloaded_record_dot ms' + -> useWithStaleFast GetHieAst npath + _ -> return Nothing + + pure (opts, fmap (,pm,binds) compls, moduleExports, astres) + case compls of + Just (cci', parsedMod, bindMap) -> do + let pfix = getCompletionPrefixFromRope position cnts + case (pfix, completionContext) of + (PosPrefixInfo _ "" _ _, Just CompletionContext { _triggerCharacter = Just "."}) + -> return (InL []) + (_, _) -> do + let clientCaps = clientCapabilities $ shakeExtras ide + plugins = idePlugins $ shakeExtras ide + config <- liftIO $ runAction "" ide $ getCompletionsConfig plId + + let allCompletions = getCompletions plugins ideOpts cci' parsedMod astres bindMap pfix clientCaps config moduleExports uri + pure $ InL (orderedCompletions allCompletions) + _ -> return (InL []) + _ -> return (InL []) + +getCompletionsConfig :: PluginId -> Action CompletionsConfig +getCompletionsConfig pId = + CompletionsConfig + <$> usePropertyAction #snippetsOn pId properties + <*> usePropertyAction #autoExtendOn pId properties + <*> (Config.maxCompletions <$> getClientConfigAction) + +{- COMPLETION SORTING + We return an ordered set of completions (local -> nonlocal -> global). + Ordering is important because local/nonlocal are import aware, whereas + global are not and will always insert import statements, potentially redundant. + + Moreover, the order prioritizes qualifiers, for instance, given: + + import qualified MyModule + foo = MyModule. 
+ + The identifiers defined in MyModule will be listed first, followed by other + identifiers in importable modules. + + According to the LSP specification, if no sortText is provided, the label is used + to sort alphabetically. Alphabetical ordering is almost never what we want, + so we force the LSP client to respect our ordering by using a numbered sequence. +-} + +orderedCompletions :: [Scored CompletionItem] -> [CompletionItem] +orderedCompletions [] = [] +orderedCompletions xx = zipWith addOrder [0..] xx + where + lxx = digits $ Prelude.length xx + digits = Prelude.length . show + + addOrder :: Int -> Scored CompletionItem -> CompletionItem + addOrder n Scored{original = it@CompletionItem{_label,_sortText}} = + it{_sortText = Just $ + T.pack(pad lxx n) + } + + pad n x = let sx = show x in replicate (n - Prelude.length sx) '0' <> sx + +---------------------------------------------------------------------------------------------------- + +toModueNameText :: KT.Target -> T.Text +toModueNameText target = case target of + KT.TargetModule m -> T.pack $ moduleNameString m + _ -> T.empty diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs new file mode 100644 index 0000000000..0a5cecaca8 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs @@ -0,0 +1,890 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE MultiWayIf #-} + +-- Mostly taken from "haskell-ide-engine" +module Development.IDE.Plugin.Completions.Logic ( + CachedCompletions +, cacheDataProducer +, localCompletionsForParsedModule +, getCompletions +, fromIdentInfo +, getCompletionPrefix +, getCompletionPrefixFromRope +) where + +import Control.Applicative +import Control.Lens hiding (Context, + parts) +import Data.Char (isAlphaNum, isUpper) +import Data.Default (def) +import Data.Generics +import Data.List.Extra as List hiding + (stripPrefix) +import qualified Data.Map as Map +import Prelude hiding (mod) + +import Data.Maybe (fromMaybe, isJust, + isNothing, + listToMaybe, + mapMaybe) +import qualified Data.Text as T +import qualified Text.Fuzzy.Parallel as Fuzzy + +import Control.Monad +import Data.Aeson (ToJSON (toJSON)) +import Data.Function (on) + +import qualified Data.HashSet as HashSet +import Data.Ord (Down (Down)) +import qualified Data.Set as Set +import Development.IDE.Core.PositionMapping +import Development.IDE.GHC.Compat hiding (isQual, ppr) +import qualified Development.IDE.GHC.Compat as GHC +import Development.IDE.GHC.Compat.Util +import Development.IDE.GHC.Error +import Development.IDE.GHC.Util +import Development.IDE.Plugin.Completions.Types +import Development.IDE.Spans.LocalBindings +import Development.IDE.Types.Exports +import Development.IDE.Types.Options +import GHC.Iface.Ext.Types (HieAST, + NodeInfo (..)) +import GHC.Iface.Ext.Utils (nodeInfo) +import Ide.PluginUtils (mkLspCommand) +import Ide.Types (CommandId (..), + IdePlugins (..), + PluginId) +import Language.Haskell.Syntax.Basic +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types +import qualified Language.LSP.VFS as VFS +import Text.Fuzzy.Parallel (Scored (score), + original) + +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE hiding (line) + +import Development.IDE.Spans.AtPoint (pointCommand) + + +import GHC.Plugins (Depth (AllTheWay), + mkUserStyle, + neverQualify, + sdocStyle) + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] 
+ + +-- Chunk size used for parallelizing fuzzy matching +chunkSize :: Int +chunkSize = 1000 + +-- From haskell-ide-engine/hie-plugin-api/Haskell/Ide/Engine/Context.hs + +-- | A context of a declaration in the program +-- e.g. is the declaration a type declaration or a value declaration +-- Used for determining which code completions to show +-- TODO: expand this with more contexts like classes or instances for +-- smarter code completion +data Context = TypeContext + | ValueContext + | ModuleContext String -- ^ module context with module name + | ImportContext String -- ^ import context with module name + | ImportListContext String -- ^ import list context with module name + | ImportHidingContext String -- ^ import hiding context with module name + | ExportContext -- ^ List of exported identifiers from the current module + deriving (Show, Eq) + +-- | Generates a map of where the context is a type and where the context is a value +-- i.e. where are the value decls and the type decls +getCContext :: Position -> ParsedModule -> Maybe Context +getCContext pos pm + | Just (L (locA -> r) modName) <- moduleHeader + , pos `isInsideSrcSpan` r + = Just (ModuleContext (moduleNameString modName)) + + | Just (L (locA -> r) _) <- exportList + , pos `isInsideSrcSpan` r + = Just ExportContext + + | Just ctx <- something (Nothing `mkQ` go `extQ` goInline) decl + = Just ctx + + | Just ctx <- something (Nothing `mkQ` importGo) imports + = Just ctx + + | otherwise + = Nothing + + where decl = hsmodDecls $ unLoc $ pm_parsed_source pm + moduleHeader = hsmodName $ unLoc $ pm_parsed_source pm + exportList = hsmodExports $ unLoc $ pm_parsed_source pm + imports = hsmodImports $ unLoc $ pm_parsed_source pm + + go :: LHsDecl GhcPs -> Maybe Context + go (L (locA -> r) SigD {}) + | pos `isInsideSrcSpan` r = Just TypeContext + | otherwise = Nothing + go (L (locA -> r) GHC.ValD {}) + | pos `isInsideSrcSpan` r = Just ValueContext + | otherwise = Nothing + go _ = Nothing + + goInline :: GHC.LHsType GhcPs -> Maybe Context + goInline (GHC.L (locA -> r) _) + | pos `isInsideSrcSpan` r = Just TypeContext + goInline _ = Nothing + + importGo :: GHC.LImportDecl GhcPs -> Maybe Context + importGo (L (locA -> r) impDecl) + | pos `isInsideSrcSpan` r + = importInline importModuleName (fmap (fmap reLoc) $ ideclImportList impDecl) + <|> Just (ImportContext importModuleName) + + | otherwise = Nothing + where importModuleName = moduleNameString $ unLoc $ ideclName impDecl + + -- importInline :: String -> Maybe (Bool, GHC.Located [LIE GhcPs]) -> Maybe Context + importInline modName (Just (EverythingBut, L r _)) + | pos `isInsideSrcSpan` r = Just $ ImportHidingContext modName + | otherwise = Nothing + + importInline modName (Just (Exactly, L r _)) + | pos `isInsideSrcSpan` r = Just $ ImportListContext modName + | otherwise = Nothing + + importInline _ _ = Nothing + +occNameToComKind :: OccName -> CompletionItemKind +occNameToComKind oc + | isVarOcc oc = case occNameString oc of + i:_ | isUpper i -> CompletionItemKind_Constructor + _ -> CompletionItemKind_Function + | isTcOcc oc = CompletionItemKind_Struct + | isDataOcc oc = CompletionItemKind_Constructor + | otherwise = CompletionItemKind_Variable + + +showModName :: ModuleName -> T.Text +showModName = T.pack . 
moduleNameString + +mkCompl :: Maybe PluginId -- ^ Plugin to use for the extend import command + -> IdeOptions -> Uri -> CompItem -> CompletionItem +mkCompl + pId + _ideOptions + uri + CI + { compKind, + isInfix, + insertText, + provenance, + label, + typeText, + additionalTextEdits, + nameDetails + } = do + let mbCommand = mkAdditionalEditsCommand pId =<< additionalTextEdits + let ci = CompletionItem + {_label = label, + _kind = kind, + _tags = Nothing, + _detail = + case (typeText, provenance) of + (Just t,_) | not(T.null t) -> Just $ ":: " <> t + (_, ImportedFrom mod) -> Just $ "from " <> mod + (_, DefinedIn mod) -> Just $ "from " <> mod + _ -> Nothing, + _documentation = documentation, + _deprecated = Nothing, + _preselect = Nothing, + _sortText = Nothing, + _filterText = Nothing, + _insertText = Just insertText, + _insertTextFormat = Just InsertTextFormat_Snippet, + _insertTextMode = Nothing, + _textEdit = Nothing, + _additionalTextEdits = Nothing, + _commitCharacters = Nothing, + _command = mbCommand, + _data_ = toJSON <$> fmap (CompletionResolveData uri (isNothing typeText)) nameDetails, + _labelDetails = Nothing, + _textEditText = Nothing} + removeSnippetsWhen (isJust isInfix) ci + + where kind = Just compKind + docs' = [imported] + imported = case provenance of + Local pos -> "*Defined at " <> pprLineCol (srcSpanStart pos) <> " in this module*\n" + ImportedFrom mod -> "*Imported from '" <> mod <> "'*\n" + DefinedIn mod -> "*Defined in '" <> mod <> "'*\n" + documentation = Just $ InR $ + MarkupContent MarkupKind_Markdown $ + T.intercalate sectionSeparator docs' + pprLineCol :: SrcLoc -> T.Text + pprLineCol (UnhelpfulLoc fs) = T.pack $ unpackFS fs + pprLineCol (RealSrcLoc loc _) = + "line " <> printOutputable (srcLocLine loc) <> ", column " <> printOutputable (srcLocCol loc) + + +mkAdditionalEditsCommand :: Maybe PluginId -> ExtendImport -> Maybe Command +mkAdditionalEditsCommand (Just pId) edits = Just $ mkLspCommand pId (CommandId extendImportCommandId) "extend import" (Just [toJSON edits]) +mkAdditionalEditsCommand _ _ = Nothing + +mkNameCompItem :: Uri -> Maybe T.Text -> OccName -> Provenance -> Maybe Backtick -> Maybe (LImportDecl GhcPs) -> Maybe Module -> CompItem +mkNameCompItem doc thingParent origName provenance isInfix !imp mod = CI {..} + where + isLocalCompletion = True + nameDetails = NameDetails <$> mod <*> pure origName + compKind = occNameToComKind origName + isTypeCompl = isTcOcc origName + typeText = Nothing + label = stripOccNamePrefix $ printOutputable origName + insertText = case isInfix of + Nothing -> label + Just LeftSide -> label <> "`" + + Just Surrounded -> label + additionalTextEdits = + imp <&> \x -> + ExtendImport + { doc, + thingParent, + importName = showModName $ unLoc $ ideclName $ unLoc x, + importQual = getImportQual x, + newThing = printOutputable origName + } + +showForSnippet :: Outputable a => a -> T.Text +showForSnippet x = T.pack $ renderWithContext ctxt $ GHC.ppr x -- FIXme + where + ctxt = defaultSDocContext{sdocStyle = mkUserStyle neverQualify AllTheWay} + +mkModCompl :: T.Text -> CompletionItem +mkModCompl label = + defaultCompletionItemWithLabel label + & L.kind ?~ CompletionItemKind_Module + +mkModuleFunctionImport :: T.Text -> T.Text -> CompletionItem +mkModuleFunctionImport moduleName label = + defaultCompletionItemWithLabel label + & L.kind ?~ CompletionItemKind_Function + & L.detail ?~ moduleName + +mkImportCompl :: T.Text -> T.Text -> CompletionItem +mkImportCompl enteredQual label = + defaultCompletionItemWithLabel m + & L.kind 
?~ CompletionItemKind_Module + & L.detail ?~ label + where + m = fromMaybe "" (T.stripPrefix enteredQual label) + +mkExtCompl :: T.Text -> CompletionItem +mkExtCompl label = + defaultCompletionItemWithLabel label + & L.kind ?~ CompletionItemKind_Keyword + +defaultCompletionItemWithLabel :: T.Text -> CompletionItem +defaultCompletionItemWithLabel label = + CompletionItem label def def def def def def def def def + def def def def def def def def def + +fromIdentInfo :: Uri -> IdentInfo -> Maybe T.Text -> CompItem +fromIdentInfo doc identInfo@IdentInfo{..} q = CI + { compKind= occNameToComKind name + , insertText=rend + , provenance = DefinedIn mod + , label=rend + , typeText = Nothing + , isInfix=Nothing + , isTypeCompl= not (isDatacon identInfo) && isUpper (T.head rend) + , additionalTextEdits= Just $ + ExtendImport + { doc, + thingParent = occNameText <$> parent, + importName = mod, + importQual = q, + newThing = rend + } + , nameDetails = Nothing + , isLocalCompletion = False + } + where rend = rendered identInfo + mod = moduleNameText identInfo + +cacheDataProducer :: Uri -> [ModuleName] -> Module -> GlobalRdrEnv-> GlobalRdrEnv -> [LImportDecl GhcPs] -> CachedCompletions +cacheDataProducer uri visibleMods curMod globalEnv inScopeEnv limports = + let curModName = moduleName curMod + curModNameText = printOutputable curModName + + importMap = Map.fromList [ (l, imp) | imp@(L (locA -> (RealSrcSpan l _)) _) <- limports ] + + iDeclToModName :: ImportDecl GhcPs -> ModuleName + iDeclToModName = unLoc . ideclName + + asNamespace :: ImportDecl GhcPs -> ModuleName + asNamespace imp = maybe (iDeclToModName imp) GHC.unLoc (ideclAs imp) + -- Full canonical names of imported modules + importDeclarations = map unLoc limports + + + -- The given namespaces for the imported modules (ie. full name, or alias if used) + allModNamesAsNS = map (showModName . asNamespace) importDeclarations + + rdrElts = globalRdrEnvElts globalEnv + + -- construct a map from Parents(type) to their fields + fieldMap = Map.fromListWith (++) $ flip mapMaybe rdrElts $ \elt -> do + par <- greParent_maybe elt +#if MIN_VERSION_ghc(9,7,0) + flbl <- greFieldLabel_maybe elt +#else + flbl <- greFieldLabel elt +#endif + Just (par,[flLabel flbl]) + + getCompls :: [GlobalRdrElt] -> ([CompItem],QualCompls) + getCompls = foldMap getComplsForOne + + getComplsForOne :: GlobalRdrElt -> ([CompItem],QualCompls) + getComplsForOne (GRE n par True _) = + (toCompItem par curMod curModNameText n Nothing, mempty) + getComplsForOne (GRE n par False prov) = + flip foldMap (map is_decl prov) $ \spec -> + let originalImportDecl = do + -- we don't want to extend import if it's already in scope + guard . 
null $ lookupGRE_Name inScopeEnv n + -- or if it doesn't have a real location + loc <- realSpan $ is_dloc spec + Map.lookup loc importMap + compItem = toCompItem par curMod (printOutputable $ is_mod spec) n originalImportDecl + unqual + | is_qual spec = [] + | otherwise = compItem + qual + | is_qual spec = Map.singleton asMod compItem + | otherwise = Map.fromList [(asMod,compItem),(origMod,compItem)] + asMod = showModName (is_as spec) +#if MIN_VERSION_ghc(9,8,0) + origMod = showModName (moduleName $ is_mod spec) +#else + origMod = showModName (is_mod spec) +#endif + in (unqual,QualCompls qual) + + toCompItem :: Parent -> Module -> T.Text -> Name -> Maybe (LImportDecl GhcPs) -> [CompItem] + toCompItem par _ mn n imp' = + -- docs <- getDocumentationTryGhc packageState curMod n + let (mbParent, originName) = case par of + NoParent -> (Nothing, nameOccName n) + ParentIs n' -> (Just . T.pack $ printName n', nameOccName n) + recordCompls = case par of + ParentIs parent + | isDataConName n + , Just flds <- Map.lookup parent fieldMap + , not (null flds) -> + [mkRecordSnippetCompItem uri mbParent (printOutputable originName) (map (T.pack . unpackFS . field_label) flds) (ImportedFrom mn) imp'] + _ -> [] + + in mkNameCompItem uri mbParent originName (ImportedFrom mn) Nothing imp' (nameModule_maybe n) + : recordCompls + + (unquals,quals) = getCompls rdrElts + + -- The list of all importable Modules from all packages + moduleNames = map showModName visibleMods + + in CC + { allModNamesAsNS = allModNamesAsNS + , unqualCompls = unquals + , qualCompls = quals + , anyQualCompls = [] + , importableModules = moduleNames + } + +-- | Produces completions from the top level declarations of a module. +localCompletionsForParsedModule :: Uri -> ParsedModule -> CachedCompletions +localCompletionsForParsedModule uri pm@ParsedModule{pm_parsed_source = L _ HsModule{hsmodDecls}} = + CC { allModNamesAsNS = mempty + , unqualCompls = compls + , qualCompls = mempty + , anyQualCompls = [] + , importableModules = mempty + } + where + typeSigIds = Set.fromList + [ identifier + | L _ (SigD _ (TypeSig _ ids _)) <- hsmodDecls + , L _ identifier <- ids + ] + hasTypeSig = (`Set.member` typeSigIds) . unLoc + + compls = concat + [ case decl of + SigD _ (TypeSig _ ids typ) -> + [mkComp identifier CompletionItemKind_Function (Just $ showForSnippet typ) | identifier <- ids] + ValD _ FunBind{fun_id} -> + [ mkComp fun_id CompletionItemKind_Function Nothing + | not (hasTypeSig fun_id) + ] + ValD _ PatBind{pat_lhs} -> + [mkComp identifier CompletionItemKind_Variable Nothing + | VarPat _ identifier <- listify (\(_ :: Pat GhcPs) -> True) pat_lhs] + TyClD _ ClassDecl{tcdLName, tcdSigs, tcdATs} -> + mkComp tcdLName CompletionItemKind_Interface (Just $ showForSnippet tcdLName) : + [ mkComp identifier CompletionItemKind_Function (Just $ showForSnippet typ) + | L _ (ClassOpSig _ _ ids typ) <- tcdSigs + , identifier <- ids] ++ + [ mkComp fdLName CompletionItemKind_Struct (Just $ showForSnippet fdLName) + | L _ (FamilyDecl{fdLName}) <- tcdATs] + TyClD _ x -> + let generalCompls = [mkComp identifier cl (Just $ showForSnippet $ tyClDeclLName x) + | identifier <- listify (\(_ :: LIdP GhcPs) -> True) x + , let cl = occNameToComKind (rdrNameOcc $ unLoc identifier)] + -- here we only have to look at the outermost type + recordCompls = findRecordCompl uri (Local pos) x + in + -- the constructors and snippets will be duplicated here giving the user 2 choices. 
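+                   -- For instance, with an illustrative declaration (not from this module)
+                   --
+                   -- > data Foo = MkFoo { bar :: Int }
+                   --
+                   -- MkFoo is offered both as a plain constructor completion and as a
+                   -- record snippet completion of the form @MkFoo {bar=${1:_bar}}@.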
+ generalCompls ++ recordCompls + ForD _ ForeignImport{fd_name,fd_sig_ty} -> + [mkComp fd_name CompletionItemKind_Variable (Just $ showForSnippet fd_sig_ty)] + ForD _ ForeignExport{fd_name,fd_sig_ty} -> + [mkComp fd_name CompletionItemKind_Variable (Just $ showForSnippet fd_sig_ty)] + _ -> [] + | L (locA -> pos) decl <- hsmodDecls, + let mkComp = mkLocalComp pos + ] + + mkLocalComp pos n ctyp ty = + CI ctyp pn (Local pos) pn ty Nothing (ctyp `elem` [CompletionItemKind_Struct, CompletionItemKind_Interface]) Nothing (Just $ NameDetails (ms_mod $ pm_mod_summary pm) occ) True + where + occ = rdrNameOcc $ unLoc n + pn = showForSnippet n + +findRecordCompl :: Uri -> Provenance -> TyClDecl GhcPs -> [CompItem] +findRecordCompl uri mn DataDecl {tcdLName, tcdDataDefn} = result + where + result = [mkRecordSnippetCompItem uri (Just $ printOutputable $ unLoc tcdLName) + (printOutputable . unLoc $ con_name) field_labels mn Nothing + | ConDeclH98{..} <- unLoc <$> (extract_cons $ dd_cons tcdDataDefn) + , Just con_details <- [getFlds con_args] + , let field_names = concatMap extract con_details + , let field_labels = printOutputable <$> field_names + , (not . List.null) field_labels + ] + + getFlds conArg = case conArg of + RecCon rec -> Just $ unLoc <$> unLoc rec + PrefixCon{} -> Just [] + _ -> Nothing + + -- NOTE: 'cd_fld_names' is grouped so that the fields + -- sharing the same type declaration to fit in the same group; e.g. + -- + -- @ + -- data Foo = Foo {arg1, arg2 :: Int, arg3 :: Int, arg4 :: Bool} + -- @ + -- + -- is encoded as @[[arg1, arg2], [arg3], [arg4]]@ + -- Hence, we must concat nested arguments into one to get all the fields. + extract ConDeclField{..} + = map (foLabel . unLoc) cd_fld_names + -- XConDeclField + extract _ = [] +findRecordCompl _ _ _ = [] + +toggleSnippets :: ClientCapabilities -> CompletionsConfig -> CompletionItem -> CompletionItem +toggleSnippets ClientCapabilities {_textDocument} CompletionsConfig{..} = + removeSnippetsWhen (not $ enableSnippets && supported) + where + supported = + Just True == (_textDocument >>= _completion >>= view L.completionItem >>= view L.snippetSupport) + +toggleAutoExtend :: CompletionsConfig -> CompItem -> CompItem +toggleAutoExtend CompletionsConfig{enableAutoExtend=False} x = x {additionalTextEdits = Nothing} +toggleAutoExtend _ x = x + +removeSnippetsWhen :: Bool -> CompletionItem -> CompletionItem +removeSnippetsWhen condition x = + if condition + then + x + { _insertTextFormat = Just InsertTextFormat_PlainText, + _insertText = Nothing + } + else x + +-- | Returns the cached completions for the given module and position. 
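+--
+-- Roughly, the pipeline below is: work out the syntactic context from the
+-- parsed module (import list, type or value position), fuzzy-filter module
+-- names, keywords and the cached completion items against the typed prefix,
+-- then merge the groups with 'mergeListsBy' and rank them lexicographically
+-- (qualified matches first, then fuzzy score).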
+getCompletions + :: IdePlugins a + -> IdeOptions + -> CachedCompletions + -> Maybe (ParsedModule, PositionMapping) + -> Maybe (HieAstResult, PositionMapping) + -> (Bindings, PositionMapping) + -> PosPrefixInfo + -> ClientCapabilities + -> CompletionsConfig + -> ModuleNameEnv (HashSet.HashSet IdentInfo) + -> Uri + -> [Scored CompletionItem] +getCompletions + plugins + ideOpts + CC {allModNamesAsNS, anyQualCompls, unqualCompls, qualCompls, importableModules} + maybe_parsed + maybe_ast_res + (localBindings, bmapping) + prefixInfo@(PosPrefixInfo { fullLine, prefixScope, prefixText }) + caps + config + moduleExportsMap + uri + -- ------------------------------------------------------------------------ + -- IMPORT MODULENAME (NAM|) + | Just (ImportListContext moduleName) <- maybeContext + = moduleImportListCompletions moduleName + + | Just (ImportHidingContext moduleName) <- maybeContext + = moduleImportListCompletions moduleName + + -- ------------------------------------------------------------------------ + -- IMPORT MODULENAM| + | Just (ImportContext _moduleName) <- maybeContext + = filtImportCompls + + -- ------------------------------------------------------------------------ + -- {-# LA| #-} + -- we leave this condition here to avoid duplications and return empty list + -- since HLS implements these completions (#haskell-language-server/pull/662) + | "{-# " `T.isPrefixOf` fullLine + = [] + + -- ------------------------------------------------------------------------ + | otherwise = + -- assumes that nubOrdBy is stable + let uniqueFiltCompls = nubOrdBy (uniqueCompl `on` snd . Fuzzy.original) filtCompls + compls = (fmap.fmap.fmap) (mkCompl pId ideOpts uri) uniqueFiltCompls + pId = lookupCommandProvider plugins (CommandId extendImportCommandId) + in + (fmap.fmap) snd $ + sortBy (compare `on` lexicographicOrdering) $ + mergeListsBy (flip compare `on` score) + [ (fmap.fmap) (notQual,) filtModNameCompls + , (fmap.fmap) (notQual,) filtKeywordCompls + , (fmap.fmap.fmap) (toggleSnippets caps config) compls + ] + where + enteredQual = if T.null prefixScope then "" else prefixScope <> "." + fullPrefix = enteredQual <> prefixText + + -- Boolean labels to tag suggestions as qualified (or not) + qual = not(T.null prefixScope) + notQual = False + + {- correct the position by moving 'foo :: Int -> String -> ' + ^ + to 'foo :: Int -> String -> ' + ^ + -} + pos = cursorPos prefixInfo + + maxC = maxCompletions config + + filtModNameCompls :: [Scored CompletionItem] + filtModNameCompls = + (fmap.fmap) mkModCompl + $ Fuzzy.simpleFilter chunkSize maxC fullPrefix + $ (if T.null enteredQual then id else mapMaybe (T.stripPrefix enteredQual)) + allModNamesAsNS + -- If we have a parsed module, use it to determine which completion to show. + maybeContext :: Maybe Context + maybeContext = case maybe_parsed of + Nothing -> Nothing + Just (pm, pmapping) -> + let PositionMapping pDelta = pmapping + position' = fromDelta pDelta pos + lpos = lowerRange position' + hpos = upperRange position' + in getCContext lpos pm <|> getCContext hpos pm + + filtCompls :: [Scored (Bool, CompItem)] + filtCompls = Fuzzy.filter chunkSize maxC prefixText ctxCompls (label . snd) + where + -- We need the hieast to be "fresh". We can't get types from "stale" hie files, so hasfield won't work, + -- since it gets the record fields from the types. + -- Perhaps this could be fixed with a refactor to GHC's IfaceTyCon, to have it also contain record fields. 
+ -- Requiring fresh hieast is fine for normal workflows, because it is generated while the user edits. + recordDotSyntaxCompls :: [(Bool, CompItem)] + recordDotSyntaxCompls = case maybe_ast_res of + Just (HAR {hieAst = hieast, hieKind = HieFresh},_) -> concat $ pointCommand hieast (completionPrefixPos prefixInfo) nodeCompletions + _ -> [] + where + nodeCompletions :: HieAST Type -> [(Bool, CompItem)] + nodeCompletions node = concatMap g (nodeType $ nodeInfo node) + g :: Type -> [(Bool, CompItem)] + g (TyConApp theTyCon _) = map (dotFieldSelectorToCompl (printOutputable $ GHC.tyConName theTyCon)) $ getSels theTyCon + g _ = [] + getSels :: GHC.TyCon -> [T.Text] + getSels tycon = let f fieldLabel = printOutputable fieldLabel + in map f $ tyConFieldLabels tycon + -- Completions can return more information that just the completion itself, but it will + -- require more than what GHC currently gives us in the HieAST, since it only gives the Type + -- of the fields, not where they are defined, etc. So for now the extra fields remain empty. + -- Also: additionalTextEdits is a todo, since we may want to import the record. It requires a way + -- to get the record's module, which isn't included in the type information used to get the fields. + dotFieldSelectorToCompl :: T.Text -> T.Text -> (Bool, CompItem) + dotFieldSelectorToCompl recname label = (True, CI + { compKind = CompletionItemKind_Field + , insertText = label + , provenance = DefinedIn recname + , label = label + , typeText = Nothing + , isInfix = Nothing + , isTypeCompl = False + , additionalTextEdits = Nothing + , nameDetails = Nothing + , isLocalCompletion = False + }) + + -- completions specific to the current context + ctxCompls' = case maybeContext of + Nothing -> compls + Just TypeContext -> filter ( isTypeCompl . snd) compls + Just ValueContext -> filter (not . isTypeCompl . snd) compls + Just _ -> filter (not . isTypeCompl . snd) compls + -- Add whether the text to insert has backticks + ctxCompls = (fmap.fmap) (\comp -> toggleAutoExtend config $ comp { isInfix = infixCompls }) ctxCompls' + + infixCompls :: Maybe Backtick + infixCompls = isUsedAsInfix fullLine prefixScope prefixText pos + + PositionMapping bDelta = bmapping + oldPos = fromDelta bDelta $ cursorPos prefixInfo + startLoc = lowerRange oldPos + endLoc = upperRange oldPos + localCompls = map (uncurry localBindsToCompItem) $ getFuzzyScope localBindings startLoc endLoc + localBindsToCompItem :: Name -> Maybe Type -> CompItem + localBindsToCompItem name typ = CI ctyp pn thisModName pn ty Nothing (not $ isValOcc occ) Nothing dets True + where + occ = nameOccName name + ctyp = occNameToComKind occ + pn = showForSnippet name + ty = showForSnippet <$> typ + thisModName = Local $ nameSrcSpan name + dets = NameDetails <$> nameModule_maybe name <*> pure (nameOccName name) + + -- When record-dot-syntax completions are available, we return them exclusively. + -- They are only available when we write i.e. `myrecord.` with OverloadedRecordDot enabled. + -- Anything that isn't a field is invalid, so those completion don't make sense. 
+ compls + | T.null prefixScope = map (notQual,) localCompls ++ map (qual,) unqualCompls ++ map (\compl -> (notQual, compl Nothing)) anyQualCompls + | not $ null recordDotSyntaxCompls = recordDotSyntaxCompls + | otherwise = ((qual,) <$> Map.findWithDefault [] prefixScope (getQualCompls qualCompls)) + ++ map (\compl -> (notQual, compl (Just prefixScope))) anyQualCompls + + filtListWith f xs = + [ fmap f label + | label <- Fuzzy.simpleFilter chunkSize maxC fullPrefix xs + , enteredQual `T.isPrefixOf` original label + ] + + moduleImportListCompletions :: String -> [Scored CompletionItem] + moduleImportListCompletions moduleNameS = + let moduleName = T.pack moduleNameS + funcs = lookupWithDefaultUFM moduleExportsMap HashSet.empty $ mkModuleName moduleNameS + funs = map (show . name) $ HashSet.toList funcs + in filterModuleExports moduleName $ map T.pack funs + + filtImportCompls :: [Scored CompletionItem] + filtImportCompls = filtListWith (mkImportCompl enteredQual) importableModules + + filterModuleExports :: T.Text -> [T.Text] -> [Scored CompletionItem] + filterModuleExports moduleName = filtListWith $ mkModuleFunctionImport moduleName + + filtKeywordCompls :: [Scored CompletionItem] + filtKeywordCompls + | T.null prefixScope = filtListWith mkExtCompl (optKeywords ideOpts) + | otherwise = [] + + -- We use this ordering to alphabetically sort suggestions while respecting + -- all the previously applied ordering sources. These are: + -- 1. Qualified suggestions go first + -- 2. Fuzzy score ranks next + -- 3. In-scope completions rank next + -- 4. label alphabetical ordering next + -- 4. detail alphabetical ordering (proxy for module) + lexicographicOrdering Fuzzy.Scored{score, original} = + case original of + (isQual, CompletionItem{_label,_detail}) -> do + let isLocal = maybe False (":" `T.isPrefixOf`) _detail + (Down isQual, Down score, Down isLocal, _label, _detail) + + + + +uniqueCompl :: CompItem -> CompItem -> Ordering +uniqueCompl candidate unique = + case compare (label candidate, compKind candidate) + (label unique, compKind unique) of + EQ -> + -- preserve completions for duplicate record fields where the only difference is in the type + -- remove redundant completions with less type info than the previous + if isLocalCompletion unique + -- filter global completions when we already have a local one + || not(isLocalCompletion candidate) && isLocalCompletion unique + then EQ + else compare (importedFrom candidate, insertText candidate) (importedFrom unique, insertText unique) + other -> other + where + importedFrom :: CompItem -> T.Text + importedFrom (provenance -> ImportedFrom m) = m + importedFrom (provenance -> DefinedIn m) = m + importedFrom (provenance -> Local _) = "local" + +-- --------------------------------------------------------------------- +-- helper functions for infix backticks +-- --------------------------------------------------------------------- + +hasTrailingBacktick :: T.Text -> Position -> Bool +hasTrailingBacktick line Position { _character=(fromIntegral -> c) } + | T.length line > c = (line `T.index` c) == '`' + | otherwise = False + +isUsedAsInfix :: T.Text -> T.Text -> T.Text -> Position -> Maybe Backtick +isUsedAsInfix line prefixMod prefixText pos + | hasClosingBacktick && hasOpeningBacktick = Just Surrounded + | hasOpeningBacktick = Just LeftSide + | otherwise = Nothing + where + hasOpeningBacktick = openingBacktick line prefixMod prefixText pos + hasClosingBacktick = hasTrailingBacktick line pos + +openingBacktick :: T.Text -> T.Text -> T.Text -> 
Position -> Bool +openingBacktick line prefixModule prefixText Position { _character=(fromIntegral -> c) } + | backtickIndex < 0 || backtickIndex >= T.length line = False + | otherwise = (line `T.index` backtickIndex) == '`' + where + backtickIndex :: Int + backtickIndex = + let + prefixLength = T.length prefixText + moduleLength = if prefixModule == "" + then 0 + else T.length prefixModule + 1 {- Because of "." -} + in + -- Points to the first letter of either the module or prefix text + c - (prefixLength + moduleLength) - 1 + + +-- --------------------------------------------------------------------- + +mkRecordSnippetCompItem :: Uri -> Maybe T.Text -> T.Text -> [T.Text] -> Provenance -> Maybe (LImportDecl GhcPs) -> CompItem +mkRecordSnippetCompItem uri parent ctxStr compl importedFrom imp = r + where + r = CI { + compKind = CompletionItemKind_Snippet + , insertText = buildSnippet + , provenance = importedFrom + , typeText = Nothing + , label = ctxStr + , isInfix = Nothing + , isTypeCompl = False + , additionalTextEdits = imp <&> \x -> + ExtendImport + { doc = uri, + thingParent = parent, + importName = showModName $ unLoc $ ideclName $ unLoc x, + importQual = getImportQual x, + newThing = ctxStr + } + , nameDetails = Nothing + , isLocalCompletion = True + } + + placeholder_pairs = zip compl ([1..]::[Int]) + snippet_parts = map (\(x, i) -> x <> "=${" <> T.pack (show i) <> ":_" <> x <> "}") placeholder_pairs + snippet = T.intercalate (T.pack ", ") snippet_parts + buildSnippet = ctxStr <> " {" <> snippet <> "}" + +getImportQual :: LImportDecl GhcPs -> Maybe T.Text +getImportQual (L _ imp) + | isQualifiedImport imp = Just $ T.pack $ moduleNameString $ maybe (unLoc $ ideclName imp) unLoc (ideclAs imp) + | otherwise = Nothing + +-------------------------------------------------------------------------------- + +-- This comes from the GHC.Utils.Misc module (not exported) +-- | Merge an unsorted list of sorted lists, for example: +-- +-- > mergeListsBy compare [ [2,5,15], [1,10,100] ] = [1,2,5,10,15,100] +-- +-- \( O(n \log{} k) \) +mergeListsBy :: forall a. (a -> a -> Ordering) -> [[a]] -> [a] +mergeListsBy cmp all_lists = merge_lists all_lists + where + -- Implements "Iterative 2-Way merge" described at + -- https://en.wikipedia.org/wiki/K-way_merge_algorithm + + -- Merge two sorted lists into one in O(n). + merge2 :: [a] -> [a] -> [a] + merge2 [] ys = ys + merge2 xs [] = xs + merge2 (x:xs) (y:ys) = + case cmp x y of + Prelude.GT -> y : merge2 (x:xs) ys + _ -> x : merge2 xs (y:ys) + + -- Merge the first list with the second, the third with the fourth, and so + -- on. The output has half as much lists as the input. + merge_neighbours :: [[a]] -> [[a]] + merge_neighbours [] = [] + merge_neighbours [xs] = [xs] + merge_neighbours (xs : ys : lists) = + merge2 xs ys : merge_neighbours lists + + -- Since 'merge_neighbours' halves the amount of lists in each iteration, + -- we perform O(log k) iteration. Each iteration is O(n). The total running + -- time is therefore O(n log k). 
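+    -- A small worked example of the passes: starting from
+    -- [[2,5,15],[1,10,100],[4]], one round of 'merge_neighbours' yields
+    -- [[1,2,5,10,15,100],[4]], and the next round leaves the single sorted
+    -- list [1,2,4,5,10,15,100].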
+ merge_lists :: [[a]] -> [a] + merge_lists lists = + case merge_neighbours lists of + [] -> [] + [xs] -> xs + lists' -> merge_lists lists' + +-- |From the given cursor position, gets the prefix module or record for autocompletion +getCompletionPrefix :: Position -> VFS.VirtualFile -> PosPrefixInfo +getCompletionPrefix pos (VFS.VirtualFile _ _ ropetext) = getCompletionPrefixFromRope pos ropetext + +getCompletionPrefixFromRope :: Position -> Rope.Rope -> PosPrefixInfo +getCompletionPrefixFromRope pos@(Position l c) ropetext = + fromMaybe (PosPrefixInfo "" "" "" pos) $ do -- Maybe monad + let headMaybe = listToMaybe + lastMaybe = headMaybe . reverse + + -- grab the entire line the cursor is at + curLine <- headMaybe $ Rope.lines + $ fst $ Rope.splitAtLine 1 $ snd $ Rope.splitAtLine (fromIntegral l) ropetext + let beforePos = T.take (fromIntegral c) curLine + -- the word getting typed, after previous space and before cursor + curWord <- + if | T.null beforePos -> Just "" + | T.last beforePos == ' ' -> Just "" -- don't count abc as the curword in 'abc ' + | otherwise -> lastMaybe (T.words beforePos) + + let parts = T.split (=='.') + $ T.takeWhileEnd (\x -> isAlphaNum x || x `elem` ("._'"::String)) curWord + case reverse parts of + [] -> Nothing + (x:xs) -> do + let modParts = reverse $ filter (not .T.null) xs + -- Must check the prefix is a valid module name, else record dot accesses treat + -- the record name as a qualName for search and generated imports + modName = if all (isUpper . T.head) modParts then T.intercalate "." modParts else "" + return $ PosPrefixInfo { fullLine = curLine, prefixScope = modName, prefixText = x, cursorPos = pos } + +completionPrefixPos :: PosPrefixInfo -> Position +completionPrefixPos PosPrefixInfo { cursorPos = Position ln co, prefixText = str} = Position ln (co - (fromInteger . toInteger . 
T.length $ str) - 1) diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Types.hs b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs new file mode 100644 index 0000000000..338b969bab --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs @@ -0,0 +1,207 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE TypeFamilies #-} +module Development.IDE.Plugin.Completions.Types ( + module Development.IDE.Plugin.Completions.Types +) where + +import Control.DeepSeq +import qualified Data.Map as Map +import qualified Data.Text as T + +import Data.Aeson +import Data.Aeson.Types +import Data.Hashable (Hashable) +import Data.Text (Text) +import Development.IDE.GHC.Compat +import Development.IDE.Graph (RuleResult) +import Development.IDE.Spans.Common () +import GHC.Generics (Generic) +import qualified GHC.Types.Name.Occurrence as Occ +import Ide.Plugin.Properties +import Language.LSP.Protocol.Types (CompletionItemKind (..), Uri) +import qualified Language.LSP.Protocol.Types as J + +-- | Produce completions info for a file +type instance RuleResult LocalCompletions = CachedCompletions +type instance RuleResult NonLocalCompletions = CachedCompletions + +data LocalCompletions = LocalCompletions + deriving (Eq, Show, Generic) +instance Hashable LocalCompletions +instance NFData LocalCompletions + +data NonLocalCompletions = NonLocalCompletions + deriving (Eq, Show, Generic) +instance Hashable NonLocalCompletions +instance NFData NonLocalCompletions + +-- From haskell-ide-engine/src/Haskell/Ide/Engine/LSP/Completions.hs + +data Backtick = Surrounded | LeftSide + deriving (Eq, Ord, Show) + +extendImportCommandId :: Text +extendImportCommandId = "extendImport" + +properties :: Properties + '[ 'PropertyKey "autoExtendOn" TBoolean, + 'PropertyKey "snippetsOn" TBoolean] +properties = emptyProperties + & defineBooleanProperty #snippetsOn + "Inserts snippets when using code completions" + True + & defineBooleanProperty #autoExtendOn + "Extends the import list automatically when completing a out-of-scope identifier" + True + + +data CompletionsConfig = CompletionsConfig { + enableSnippets :: Bool, + enableAutoExtend :: Bool, + maxCompletions :: Int +} + +data ExtendImport = ExtendImport + { doc :: !Uri, + newThing :: !T.Text, + thingParent :: !(Maybe T.Text), + importName :: !T.Text, + importQual :: !(Maybe T.Text) + } + deriving (Eq, Show, Generic) + deriving anyclass (FromJSON, ToJSON) + +data Provenance + = ImportedFrom Text + | DefinedIn Text + | Local SrcSpan + deriving (Eq, Ord, Show) + +data CompItem = CI + { compKind :: CompletionItemKind + , insertText :: T.Text -- ^ Snippet for the completion + , provenance :: Provenance -- ^ From where this item is imported from. + , label :: T.Text -- ^ Label to display to the user. + , typeText :: Maybe T.Text + , isInfix :: Maybe Backtick -- ^ Did the completion happen + -- in the context of an infix notation. + , isTypeCompl :: Bool + , additionalTextEdits :: Maybe ExtendImport + , nameDetails :: Maybe NameDetails -- ^ For resolving purposes + , isLocalCompletion :: Bool -- ^ Is it from this module? 
+ } + deriving (Eq, Show) + +-- Associates a module's qualifier with its members +newtype QualCompls + = QualCompls { getQualCompls :: Map.Map T.Text [CompItem] } + deriving Show +instance Semigroup QualCompls where + (QualCompls a) <> (QualCompls b) = QualCompls $ Map.unionWith (++) a b +instance Monoid QualCompls where + mempty = QualCompls Map.empty + mappend = (Prelude.<>) + +-- | End result of the completions +data CachedCompletions = CC + { allModNamesAsNS :: [T.Text] -- ^ All module names in scope. + -- Prelude is a single module + , unqualCompls :: [CompItem] -- ^ Unqualified completion items + , qualCompls :: QualCompls -- ^ Completion items associated to + -- to a specific module name. + , anyQualCompls :: [Maybe T.Text -> CompItem] -- ^ Items associated to any qualifier + , importableModules :: [T.Text] -- ^ All modules that may be imported. + } + +instance Show CachedCompletions where show _ = "" + +instance NFData CachedCompletions where + rnf = rwhnf + +instance Monoid CachedCompletions where + mempty = CC mempty mempty mempty mempty mempty + +instance Semigroup CachedCompletions where + CC a b c d e <> CC a' b' c' d' e' = + CC (a<>a') (b<>b') (c<>c') (d<>d') (e<>e') + + +-- | Describes the line at the current cursor position +data PosPrefixInfo = PosPrefixInfo + { fullLine :: !T.Text + -- ^ The full contents of the line the cursor is at + + , prefixScope :: !T.Text + -- ^ If any, the module name that was typed right before the cursor position. + -- For example, if the user has typed "Data.Maybe.from", then this property + -- will be "Data.Maybe" + -- If OverloadedRecordDot is enabled, "Shape.rect.width" will be + -- "Shape.rect" + + , prefixText :: !T.Text + -- ^ The word right before the cursor position, after removing the module part. + -- For example if the user has typed "Data.Maybe.from", + -- then this property will be "from" + , cursorPos :: !J.Position + -- ^ The cursor position + } deriving (Show,Eq) + + +-- | This is a JSON serialisable representation of a GHC Name that we include in +-- completion responses so that we can recover the original name corresponding +-- to the completion item. This is used to resolve additional details on demand +-- about the item like its type and documentation. +data NameDetails + = NameDetails Module OccName + deriving (Eq) + +-- NameSpace is abstract so need these +nsJSON :: NameSpace -> Value +nsJSON ns + | isVarNameSpace ns = String "v" + | isDataConNameSpace ns = String "c" + | isTcClsNameSpace ns = String "t" + | isTvNameSpace ns = String "z" + | otherwise = error "namespace not recognized" + +parseNs :: Value -> Parser NameSpace +parseNs (String "v") = pure Occ.varName +parseNs (String "c") = pure dataName +parseNs (String "t") = pure tcClsName +parseNs (String "z") = pure tvName +parseNs _ = mempty + +instance FromJSON NameDetails where + parseJSON v@(Array _) + = do + [modname,modid,namesp,occname] <- parseJSON v + mn <- parseJSON modname + mid <- parseJSON modid + ns <- parseNs namesp + occn <- parseJSON occname + pure $ NameDetails (mkModule (stringToUnit mid) (mkModuleName mn)) (mkOccName ns occn) + parseJSON _ = mempty +instance ToJSON NameDetails where + toJSON (NameDetails mdl occ) = toJSON [toJSON mname,toJSON mid,nsJSON ns,toJSON occs] + where + mname = moduleNameString $ moduleName mdl + mid = unitIdString $ moduleUnitId mdl + ns = occNameSpace occ + occs = occNameString occ +instance Show NameDetails where + show = show . 
toJSON + +-- | The data that is actually sent for resolve support +-- We need the URI to be able to reconstruct the GHC environment +-- in the file the completion was triggered in. +data CompletionResolveData = CompletionResolveData + { itemFile :: Uri + , itemNeedsType :: Bool -- ^ Do we need to lookup a type for this item? + , itemName :: NameDetails + } + deriving stock Generic + deriving anyclass (FromJSON, ToJSON) diff --git a/ghcide/src/Development/IDE/Plugin/HLS.hs b/ghcide/src/Development/IDE/Plugin/HLS.hs new file mode 100644 index 0000000000..f5190e9274 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/HLS.hs @@ -0,0 +1,456 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} + +module Development.IDE.Plugin.HLS + ( + asGhcIdePlugin + , toResponseError + , Log(..) + ) where + +import Control.Exception (SomeException) +import Control.Lens ((^.)) +import Control.Monad +import qualified Control.Monad.Extra as Extra +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Except (runExceptT) +import qualified Data.Aeson as A +import Data.Bifunctor (first) +import Data.Dependent.Map (DMap) +import qualified Data.Dependent.Map as DMap +import Data.Dependent.Sum +import Data.Either +import qualified Data.List as List +import Data.List.NonEmpty (NonEmpty, nonEmpty, toList) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as Map +import Data.Maybe (isNothing, mapMaybe) +import Data.Some +import Data.String +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE.Core.Shake hiding (Log) +import Development.IDE.Core.Tracing +import Development.IDE.Graph (Rules) +import Development.IDE.LSP.Server +import Development.IDE.Plugin +import qualified Development.IDE.Plugin as P +import Ide.Logger +import Ide.Plugin.Config +import Ide.Plugin.Error +import Ide.Plugin.HandleRequestTypes +import Ide.PluginUtils (getClientConfig) +import Ide.Types as HLS +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import qualified Language.LSP.Server as LSP +import Language.LSP.VFS +import Prettyprinter.Render.String (renderString) +import Text.Regex.TDFA.Text () +import UnliftIO (MonadUnliftIO, liftIO) +import UnliftIO.Async (forConcurrently) +import UnliftIO.Exception (catchAny) + +-- --------------------------------------------------------------------- +-- + +data Log + = LogPluginError PluginId PluginError + | forall m . A.ToJSON (ErrorData m) => LogResponseError PluginId (TResponseError m) + | LogNoPluginForMethod (Some SMethod) + | LogInvalidCommandIdentifier + | ExceptionInPlugin PluginId (Some SMethod) SomeException + | LogResolveDefaultHandler (Some SMethod) + +instance Pretty Log where + pretty = \case + LogPluginError (PluginId pId) err -> + pretty pId <> ":" <+> pretty err + LogResponseError (PluginId pId) err -> + pretty pId <> ":" <+> pretty err + LogNoPluginForMethod (Some method) -> + "No plugin handles this " <> pretty method <> " request." + LogInvalidCommandIdentifier-> "Invalid command identifier" + ExceptionInPlugin plId (Some method) exception -> + "Exception in plugin " <> viaShow plId <> " while processing " + <> pretty method <> ": " <> viaShow exception + LogResolveDefaultHandler (Some method) -> + "No plugin can handle" <+> pretty method <+> "request. Return object unchanged." +instance Show Log where show = renderString . layoutCompact . 
pretty + +noPluginHandles :: Recorder (WithPriority Log) -> SMethod m -> [(PluginId, HandleRequestResult)] -> IO (Either (TResponseError m) c) +noPluginHandles recorder m fs' = do + logWith recorder Warning (LogNoPluginForMethod $ Some m) + let err = TResponseError (InR ErrorCodes_MethodNotFound) msg Nothing + msg = noPluginHandlesMsg m fs' + return $ Left err + where noPluginHandlesMsg :: SMethod m -> [(PluginId, HandleRequestResult)] -> Text + noPluginHandlesMsg method [] = "No plugins are available to handle this " <> T.pack (show method) <> " request." + noPluginHandlesMsg method availPlugins = + "No plugins are available to handle this " <> T.pack (show method) <> " request.\n Plugins installed for this method, but not available to handle this request are:\n" + <> (T.intercalate "\n" $ + map (\(PluginId plid, pluginStatus) -> + plid + <> " " + <> (renderStrict . layoutCompact . pretty) pluginStatus) + availPlugins) + +pluginDoesntExist :: PluginId -> Text +pluginDoesntExist (PluginId pid) = "Plugin " <> pid <> " doesn't exist" + +commandDoesntExist :: CommandId -> PluginId -> [PluginCommand ideState] -> Text +commandDoesntExist (CommandId com) (PluginId pid) legalCmds = + "Command " <> com <> " isn't defined for plugin " <> pid <> ". Legal commands are: " + <> (T.intercalate ", " $ map (\(PluginCommand{commandId = CommandId cid}) -> cid) legalCmds) + +failedToParseArgs :: CommandId -- ^ command that failed to parse + -> PluginId -- ^ Plugin that created the command + -> String -- ^ The JSON Error message + -> A.Value -- ^ The Argument Values + -> Text +failedToParseArgs (CommandId com) (PluginId pid) err arg = + "Error while parsing args for " <> com <> " in plugin " <> pid <> ": " + <> T.pack err <> ", arg = " <> T.pack (show arg) + +exceptionInPlugin :: PluginId -> SMethod m -> SomeException -> Text +exceptionInPlugin plId method exception = + "Exception in plugin " <> T.pack (show plId) <> " while processing "<> T.pack (show method) <> ": " <> T.pack (show exception) + +-- | Build a ResponseError and log it before returning to the caller +logAndReturnError :: A.ToJSON (ErrorData m) => Recorder (WithPriority Log) -> PluginId -> (LSPErrorCodes |? ErrorCodes) -> Text -> LSP.LspT Config IO (Either (TResponseError m) a) +logAndReturnError recorder p errCode msg = do + let err = TResponseError errCode msg Nothing + logWith recorder Warning $ LogResponseError p err + pure $ Left err + +-- | Map a set of plugins to the underlying ghcide engine. +asGhcIdePlugin :: Recorder (WithPriority Log) -> IdePlugins IdeState -> Plugin Config +asGhcIdePlugin recorder (IdePlugins ls) = + mkPlugin rulesPlugins HLS.pluginRules <> + mkPlugin (executeCommandPlugins recorder) HLS.pluginCommands <> + mkPlugin (extensiblePlugins recorder) id <> + mkPlugin (extensibleNotificationPlugins recorder) id <> + mkPluginFromDescriptor dynFlagsPlugins HLS.pluginModifyDynflags + where + mkPlugin f = mkPluginFromDescriptor (f . 
map (first pluginId)) + + mkPluginFromDescriptor + :: ([(PluginDescriptor IdeState, b)] + -> Plugin Config) + -> (PluginDescriptor IdeState -> b) + -> Plugin Config + mkPluginFromDescriptor maker selector = + case map (\p -> (p, selector p)) ls of + -- If there are no plugins that provide a descriptor, use mempty to + -- create the plugin – otherwise we end up declaring handlers for + -- capabilities that there are no plugins for + [] -> mempty + xs -> maker xs + +-- --------------------------------------------------------------------- + +rulesPlugins :: [(PluginId, Rules ())] -> Plugin Config +rulesPlugins rs = mempty { P.pluginRules = rules } + where + rules = foldMap snd rs + +dynFlagsPlugins :: [(PluginDescriptor c, DynFlagsModifications)] -> Plugin Config +dynFlagsPlugins rs = mempty + { P.pluginModifyDynflags = + flip foldMap rs $ \(plId, dflag_mods) cfg -> + let plg_cfg = configForPlugin cfg plId + in if plcGlobalOn plg_cfg + then dflag_mods + else mempty + } + +-- --------------------------------------------------------------------- + +executeCommandPlugins :: Recorder (WithPriority Log) -> [(PluginId, [PluginCommand IdeState])] -> Plugin Config +executeCommandPlugins recorder ecs = mempty { P.pluginHandlers = executeCommandHandlers recorder ecs } + +executeCommandHandlers :: Recorder (WithPriority Log) -> [(PluginId, [PluginCommand IdeState])] -> LSP.Handlers (ServerM Config) +executeCommandHandlers recorder ecs = requestHandler SMethod_WorkspaceExecuteCommand execCmd + where + pluginMap = Map.fromListWith (++) ecs + + parseCmdId :: T.Text -> Maybe (PluginId, CommandId) + parseCmdId x = case T.splitOn ":" x of + [plugin, command] -> Just (PluginId plugin, CommandId command) + [_, plugin, command] -> Just (PluginId plugin, CommandId command) + _ -> Nothing + + -- The parameters to the HLS command are always the first element + execCmd :: IdeState -> ExecuteCommandParams -> LSP.LspT Config IO (Either (TResponseError Method_WorkspaceExecuteCommand) (A.Value |? Null)) + execCmd ide (ExecuteCommandParams mtoken cmdId args) = do + let cmdParams :: A.Value + cmdParams = case args of + Just ((x:_)) -> x + _ -> A.Null + case parseCmdId cmdId of + -- Shortcut for immediately applying a WorkspaceEdit as a fallback for v3.8 code actions + Just ("hls", "fallbackCodeAction") -> + case A.fromJSON cmdParams of + A.Success (FallbackCodeActionParams mEdit mCmd) -> do + + -- Send off the workspace request if it has one + forM_ mEdit $ \edit -> + LSP.sendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + + case mCmd of + -- If we have a command, continue to execute it + Just (Command _ innerCmdId innerArgs) + -> execCmd ide (ExecuteCommandParams Nothing innerCmdId innerArgs) + -- TODO: This should be a response error? + Nothing -> return $ Right $ InR Null + + -- TODO: This should be a response error? + A.Error _str -> return $ Right $ InR Null + + -- Just an ordinary HIE command + Just (plugin, cmd) -> runPluginCommand ide plugin cmd mtoken cmdParams + + -- Couldn't parse the command identifier + _ -> do + logWith recorder Warning LogInvalidCommandIdentifier + return $ Left $ TResponseError (InR ErrorCodes_InvalidParams) "Invalid command identifier" Nothing + + runPluginCommand :: IdeState -> PluginId -> CommandId -> Maybe ProgressToken -> A.Value -> LSP.LspT Config IO (Either (TResponseError Method_WorkspaceExecuteCommand) (A.Value |? 
Null)) + runPluginCommand ide p com mtoken arg = + case Map.lookup p pluginMap of + Nothing -> logAndReturnError recorder p (InR ErrorCodes_InvalidRequest) (pluginDoesntExist p) + Just xs -> case List.find ((com ==) . commandId) xs of + Nothing -> logAndReturnError recorder p (InR ErrorCodes_InvalidRequest) (commandDoesntExist com p xs) + Just (PluginCommand _ _ f) -> case A.fromJSON arg of + A.Error err -> logAndReturnError recorder p (InR ErrorCodes_InvalidParams) (failedToParseArgs com p err arg) + A.Success a -> do + res <- runHandlerM (runExceptT (f ide mtoken a)) `catchAny` -- See Note [Exception handling in plugins] + (\e -> pure $ Left $ PluginInternalError (exceptionInPlugin p SMethod_WorkspaceExecuteCommand e)) + case res of + (Left (PluginRequestRefused r)) -> + liftIO $ noPluginHandles recorder SMethod_WorkspaceExecuteCommand [(p,DoesNotHandleRequest r)] + (Left pluginErr) -> do + liftIO $ logErrors recorder [(p, pluginErr)] + pure $ Left $ toResponseError (p, pluginErr) + (Right result) -> pure $ Right result + +-- --------------------------------------------------------------------- + +extensiblePlugins :: Recorder (WithPriority Log) -> [(PluginId, PluginDescriptor IdeState)] -> Plugin Config +extensiblePlugins recorder plugins = mempty { P.pluginHandlers = handlers } + where + IdeHandlers handlers' = foldMap bakePluginId plugins + bakePluginId :: (PluginId, PluginDescriptor IdeState) -> IdeHandlers + bakePluginId (pid,pluginDesc) = IdeHandlers $ DMap.map + (\(PluginHandler f) -> IdeHandler [(pid,pluginDesc,f pid)]) + hs + where + PluginHandlers hs = HLS.pluginHandlers pluginDesc + handlers = mconcat $ do + (IdeMethod m :=> IdeHandler fs') <- DMap.assocs handlers' + pure $ requestHandler m $ \ide params -> do + config <- Ide.PluginUtils.getClientConfig + -- Only run plugins that are allowed to run on this request; save the + -- list of disabled plugins in case that's all we have + let (fs, dfs) = List.partition (\(_, desc, _) -> handlesRequest m params desc config == HandlesRequest) fs' + let disabledPluginsReason = (\(x, desc, _) -> (x, handlesRequest m params desc config)) <$> dfs + -- Clients generally don't display ResponseErrors, so instead we log any that we come across + -- However, some clients do display ResponseErrors! 
See for example the issues: + -- https://github.com/haskell/haskell-language-server/issues/4467 + -- https://github.com/haskell/haskell-language-server/issues/4451 + case nonEmpty fs of + Nothing -> do + liftIO (fallbackResolveHandler recorder m params) >>= \case + Nothing -> + liftIO $ noPluginHandles recorder m disabledPluginsReason + Just result -> + pure $ Right result + Just neFs -> do + let plidsAndHandlers = fmap (\(plid,_,handler) -> (plid,handler)) neFs + es <- runHandlerM $ runConcurrently exceptionInPlugin m plidsAndHandlers ide params + caps <- LSP.getClientCapabilities + let (errs,succs) = partitionEithers $ toList $ join $ NE.zipWith (\(pId,_) -> fmap (first (pId,))) plidsAndHandlers es + liftIO $ unless (null errs) $ logErrors recorder errs + case nonEmpty succs of + Nothing -> do + let noRefused (_, PluginRequestRefused _) = False + noRefused (_, _) = True + (asErrors, asRefused) = List.partition noRefused errs + convertPRR (pId, PluginRequestRefused r) = Just (pId, DoesNotHandleRequest r) + convertPRR _ = Nothing + asRefusedReason = mapMaybe convertPRR asRefused + case nonEmpty asErrors of + Nothing -> liftIO $ noPluginHandles recorder m (disabledPluginsReason <> asRefusedReason) + Just xs -> pure $ Left $ combineErrors xs + Just xs -> do + pure $ Right $ combineResponses m config caps params xs + +-- | Fallback Handler for resolve requests. +-- For all kinds of `*/resolve` requests, if they don't have a 'data_' value, +-- produce the original item, since no other plugin has any resolve data. +-- +-- This is an internal handler, so it cannot be turned off and should be opaque +-- to the end-user. +-- This function does not take the ServerCapabilities into account, and assumes +-- clients will only send these requests, if and only if the Language Server +-- advertised support for it. +-- +-- See Note [Fallback Handler for LSP resolve requests] for justification and reasoning. +fallbackResolveHandler :: MonadIO m => Recorder (WithPriority Log) -> SMethod s -> MessageParams s -> m (Maybe (MessageResult s)) +fallbackResolveHandler recorder m params = do + let result = case m of + SMethod_InlayHintResolve + | noResolveData params -> Just params + SMethod_CompletionItemResolve + | noResolveData params -> Just params + SMethod_CodeActionResolve + | noResolveData params -> Just params + SMethod_WorkspaceSymbolResolve + | noResolveData params -> Just params + SMethod_CodeLensResolve + | noResolveData params -> Just params + SMethod_DocumentLinkResolve + | noResolveData params -> Just params + _ -> Nothing + logResolveHandling result + pure result + where + noResolveData :: JL.HasData_ p (Maybe a) => p -> Bool + noResolveData p = isNothing $ p ^. JL.data_ + + -- We only log if we are handling the request. + -- If we don't handle this request, this should be logged + -- on call-site. + logResolveHandling p = Extra.whenJust p $ \_ -> do + logWith recorder Debug $ LogResolveDefaultHandler (Some m) + +{- Note [Fallback Handler for LSP resolve requests] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We have a special fallback for `*/resolve` requests. + +We had multiple reports, where `resolve` requests (such as +`completion/resolve` and `codeAction/resolve`) are rejected +by HLS since the `_data_` field of the respective LSP feature has not been +populated by HLS. 
+This makes sense, as we only support `resolve` for certain kinds of +`CodeAction`/`Completions`, when they contain particularly expensive +properties, such as documentation or non-local type signatures. + +So what to do? We can see two options: + +1. Be dumb and permissive: if no plugin wants to resolve a request, then + just respond positively with the original item! Potentially this masks + real issues, but may not be too bad. If a plugin thinks it can + handle the request but it then fails to resolve it, we should still return a failure. +2. Try and be smart: we try to figure out requests that we're "supposed" to + resolve (e.g. those with a data field), and fail if no plugin wants to handle those. + This is possible since we set data. + So as long as we maintain the invariant that only things which need resolving get + data, then it could be okay. + +In 'fallbackResolveHandler', we implement the option (2). +-} + +-- --------------------------------------------------------------------- + +extensibleNotificationPlugins :: Recorder (WithPriority Log) -> [(PluginId, PluginDescriptor IdeState)] -> Plugin Config +extensibleNotificationPlugins recorder xs = mempty { P.pluginHandlers = handlers } + where + IdeNotificationHandlers handlers' = foldMap bakePluginId xs + bakePluginId :: (PluginId, PluginDescriptor IdeState) -> IdeNotificationHandlers + bakePluginId (pid,pluginDesc) = IdeNotificationHandlers $ DMap.map + (\(PluginNotificationHandler f) -> IdeNotificationHandler [(pid,pluginDesc,f pid)]) + hs + where PluginNotificationHandlers hs = HLS.pluginNotificationHandlers pluginDesc + handlers = mconcat $ do + (IdeNotification m :=> IdeNotificationHandler fs') <- DMap.assocs handlers' + pure $ notificationHandler m $ \ide vfs params -> do + config <- Ide.PluginUtils.getClientConfig + -- Only run plugins that are enabled for this request + let fs = filter (\(_, desc, _) -> handlesRequest m params desc config == HandlesRequest) fs' + case nonEmpty fs of + Nothing -> do + logWith recorder Warning (LogNoPluginForMethod $ Some m) + Just neFs -> do + -- We run the notifications in order, so the core ghcide provider + -- (which restarts the shake process) hopefully comes last + mapM_ (\(pid,_,f) -> otTracedProvider pid (fromString $ show m) $ f ide vfs params + `catchAny` -- See Note [Exception handling in plugins] + (\e -> logWith recorder Warning (ExceptionInPlugin pid (Some m) e))) neFs + + +-- --------------------------------------------------------------------- + +runConcurrently + :: MonadUnliftIO m + => (PluginId -> SMethod method -> SomeException -> T.Text) + -> SMethod method -- ^ Method (used for errors and tracing) + -> NonEmpty (PluginId, a -> b -> m (NonEmpty (Either PluginError d))) + -- ^ Enabled plugin actions that we are allowed to run + -> a + -> b + -> m (NonEmpty(NonEmpty (Either PluginError d))) +runConcurrently msg method fs a b = forConcurrently fs $ \(pid,f) -> otTracedProvider pid (fromString (show method)) $ do + f a b -- See Note [Exception handling in plugins] + `catchAny` (\e -> pure $ pure $ Left $ PluginInternalError (msg pid method e)) + +combineErrors :: NonEmpty (PluginId, PluginError) -> TResponseError m +combineErrors (x NE.:| []) = toResponseError x +combineErrors xs = toResponseError $ NE.last $ NE.sortWith (toPriority . snd) xs + +toResponseError :: (PluginId, PluginError) -> TResponseError m +toResponseError (PluginId plId, err) = + TResponseError (toErrorCode err) (plId <> ": " <> tPretty err) Nothing + where tPretty = T.pack . show . 
pretty + +logErrors :: Recorder (WithPriority Log) -> [(PluginId, PluginError)] -> IO () +logErrors recorder errs = do + forM_ errs $ \(pId, err) -> + logIndividualErrors pId err + where logIndividualErrors plId err = + logWith recorder (toPriority err) $ LogPluginError plId err + + +-- | Combine the 'PluginHandler' for all plugins +newtype IdeHandler (m :: Method ClientToServer Request) + = IdeHandler [(PluginId, PluginDescriptor IdeState, IdeState -> MessageParams m -> HandlerM Config (NonEmpty (Either PluginError (MessageResult m))))] + +-- | Combine the 'PluginHandler' for all plugins +newtype IdeNotificationHandler (m :: Method ClientToServer Notification) + = IdeNotificationHandler [(PluginId, PluginDescriptor IdeState, IdeState -> VFS -> MessageParams m -> LSP.LspM Config ())] +-- type NotificationHandler (m :: Method ClientToServer Notification) = MessageParams m -> IO ()` + +-- | Combine the 'PluginHandlers' for all plugins +newtype IdeHandlers = IdeHandlers (DMap IdeMethod IdeHandler) +newtype IdeNotificationHandlers = IdeNotificationHandlers (DMap IdeNotification IdeNotificationHandler) + +instance Semigroup IdeHandlers where + (IdeHandlers a) <> (IdeHandlers b) = IdeHandlers $ DMap.unionWithKey go a b + where + go _ (IdeHandler c) (IdeHandler d) = IdeHandler (c <> d) +instance Monoid IdeHandlers where + mempty = IdeHandlers mempty + +instance Semigroup IdeNotificationHandlers where + (IdeNotificationHandlers a) <> (IdeNotificationHandlers b) = IdeNotificationHandlers $ DMap.unionWithKey go a b + where + go _ (IdeNotificationHandler c) (IdeNotificationHandler d) = IdeNotificationHandler (c <> d) +instance Monoid IdeNotificationHandlers where + mempty = IdeNotificationHandlers mempty + +{- Note [Exception handling in plugins] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Plugins run in LspM, and so have access to IO. This means they are likely to +throw exceptions, even if only by accident or through calling libraries that +throw exceptions. Ultimately, we're running a bunch of less-trusted IO code, +so we should be robust to it throwing. + +We don't want these to bring down HLS. So we catch and log exceptions wherever +we run a handler defined in a plugin. + +The flip side of this is that it's okay for plugins to throw exceptions as a +way of signalling failure! +-} diff --git a/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs b/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs new file mode 100644 index 0000000000..ada0f9e682 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +-- | Exposes the ghcide features as an HLS plugin +module Development.IDE.Plugin.HLS.GhcIde + ( + descriptors + , Log(..) 
+ ) where + +import Development.IDE +import qualified Development.IDE.LSP.HoverDefinition as Hover +import qualified Development.IDE.LSP.Notifications as Notifications +import Development.IDE.LSP.Outline +import qualified Development.IDE.Plugin.Completions as Completions +import qualified Development.IDE.Plugin.TypeLenses as TypeLenses +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Text.Regex.TDFA.Text () + +data Log + = LogNotifications Notifications.Log + | LogCompletions Completions.Log + | LogTypeLenses TypeLenses.Log + | LogHover Hover.Log + deriving Show + +instance Pretty Log where + pretty = \case + LogNotifications msg -> pretty msg + LogCompletions msg -> pretty msg + LogTypeLenses msg -> pretty msg + LogHover msg -> pretty msg + +descriptors :: Recorder (WithPriority Log) -> [PluginDescriptor IdeState] +descriptors recorder = + [ descriptor (cmapWithPrio LogHover recorder) "ghcide-hover-and-symbols", + Completions.descriptor (cmapWithPrio LogCompletions recorder) "ghcide-completions", + TypeLenses.descriptor (cmapWithPrio LogTypeLenses recorder) "ghcide-type-lenses", + Notifications.descriptor (cmapWithPrio LogNotifications recorder) "ghcide-core" + ] + +-- --------------------------------------------------------------------- + +descriptor :: Recorder (WithPriority Hover.Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId desc) + { pluginHandlers = mkPluginHandler SMethod_TextDocumentHover (hover' recorder) + <> mkPluginHandler SMethod_TextDocumentDocumentSymbol moduleOutline + <> mkPluginHandler SMethod_TextDocumentDefinition (\ide _ DefinitionParams{..} -> + Hover.gotoDefinition recorder ide TextDocumentPositionParams{..}) + <> mkPluginHandler SMethod_TextDocumentTypeDefinition (\ide _ TypeDefinitionParams{..} -> + Hover.gotoTypeDefinition recorder ide TextDocumentPositionParams{..}) + <> mkPluginHandler SMethod_TextDocumentImplementation (\ide _ ImplementationParams{..} -> + Hover.gotoImplementation recorder ide TextDocumentPositionParams{..}) + <> mkPluginHandler SMethod_TextDocumentDocumentHighlight (\ide _ DocumentHighlightParams{..} -> + Hover.documentHighlight recorder ide TextDocumentPositionParams{..}) + <> mkPluginHandler SMethod_TextDocumentReferences (Hover.references recorder) + <> mkPluginHandler SMethod_WorkspaceSymbol (Hover.wsSymbols recorder), + + pluginConfigDescriptor = defaultConfigDescriptor + } + where + desc = "Provides core IDE features for Haskell" + +-- --------------------------------------------------------------------- + +hover' :: Recorder (WithPriority Hover.Log) -> PluginMethodHandler IdeState Method_TextDocumentHover +hover' recorder ideState _ HoverParams{..} = + Hover.hover recorder ideState TextDocumentPositionParams{..} diff --git a/ghcide/src/Development/IDE/Plugin/Test.hs b/ghcide/src/Development/IDE/Plugin/Test.hs new file mode 100644 index 0000000000..e24bcfeee9 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Test.hs @@ -0,0 +1,175 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE PackageImports #-} +-- | A plugin that adds custom messages for use in tests +module Development.IDE.Plugin.Test + ( TestRequest(..) + , WaitForIdeRuleResult(..) 
+ , plugin + , blockCommandDescriptor + , blockCommandId + ) where + +import Control.Concurrent (threadDelay) +import Control.Monad +import Control.Monad.Except (ExceptT (..), throwError) +import Control.Monad.IO.Class +import Control.Monad.STM +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Data.Aeson (FromJSON (parseJSON), + ToJSON (toJSON), Value) +import qualified Data.Aeson.Types as A +import Data.Bifunctor +import Data.CaseInsensitive (CI, original) +import qualified Data.HashMap.Strict as HM +import Data.Maybe (isJust) +import Data.Proxy +import Data.String +import Data.Text (Text, pack) +import Development.IDE.Core.OfInterest (getFilesOfInterest) +import Development.IDE.Core.Rules +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat +import Development.IDE.Graph (Action) +import qualified Development.IDE.Graph as Graph +import Development.IDE.Graph.Database (ShakeDatabase, + shakeGetBuildEdges, + shakeGetBuildStep, + shakeGetCleanKeys) +import Development.IDE.Graph.Internal.Types (Result (resultBuilt, resultChanged, resultVisited), + Step (Step)) +import qualified Development.IDE.Graph.Internal.Types as Graph +import Development.IDE.Types.Action +import Development.IDE.Types.HscEnvEq (HscEnvEq (hscEnv)) +import Development.IDE.Types.Location (fromUri) +import GHC.Generics (Generic) +import Ide.Plugin.Error +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import qualified "list-t" ListT +import qualified StmContainers.Map as STM +import System.Time.Extra + +type Age = Int +data TestRequest + = BlockSeconds Seconds -- ^ :: Null + | GetInterfaceFilesDir Uri -- ^ :: String + | GetShakeSessionQueueCount -- ^ :: Number + | WaitForShakeQueue -- ^ Block until the Shake queue is empty. 
Returns Null + | WaitForIdeRule String Uri -- ^ :: WaitForIdeRuleResult + | GetBuildKeysVisited -- ^ :: [String] + | GetBuildKeysBuilt -- ^ :: [String] + | GetBuildKeysChanged -- ^ :: [String] + | GetBuildEdgesCount -- ^ :: Int + | GarbageCollectDirtyKeys CheckParents Age -- ^ :: [String] (list of keys collected) + | GetStoredKeys -- ^ :: [String] (list of keys in store) + | GetFilesOfInterest -- ^ :: [FilePath] + | GetRebuildsCount -- ^ :: Int (number of times we recompiled with GHC) + deriving Generic + deriving anyclass (FromJSON, ToJSON) + +newtype WaitForIdeRuleResult = WaitForIdeRuleResult { ideResultSuccess::Bool} + deriving newtype (FromJSON, ToJSON) + +plugin :: PluginDescriptor IdeState +plugin = (defaultPluginDescriptor "test" "") { + pluginHandlers = mkPluginHandler (SMethod_CustomMethod (Proxy @"test")) $ \st _ -> + testRequestHandler' st + } + where + testRequestHandler' ide req + | Just customReq <- A.parseMaybe parseJSON req + = ExceptT $ testRequestHandler ide customReq + | otherwise + = throwError + $ PluginInvalidParams "Cannot parse request" + + +testRequestHandler :: IdeState + -> TestRequest + -> HandlerM config (Either PluginError Value) +testRequestHandler _ (BlockSeconds secs) = do + pluginSendNotification (SMethod_CustomMethod (Proxy @"ghcide/blocking/request")) $ + toJSON secs + liftIO $ sleep secs + return (Right A.Null) +testRequestHandler s (GetInterfaceFilesDir file) = liftIO $ do + let nfp = fromUri $ toNormalizedUri file + sess <- runAction "Test - GhcSession" s $ use_ GhcSession nfp + let hiPath = hiDir $ hsc_dflags $ hscEnv sess + return $ Right (toJSON hiPath) +testRequestHandler s GetShakeSessionQueueCount = liftIO $ do + n <- atomically $ countQueue $ actionQueue $ shakeExtras s + return $ Right (toJSON n) +testRequestHandler s WaitForShakeQueue = liftIO $ do + atomically $ do + n <- countQueue $ actionQueue $ shakeExtras s + when (n>0) retry + return $ Right A.Null +testRequestHandler s (WaitForIdeRule k file) = liftIO $ do + let nfp = fromUri $ toNormalizedUri file + success <- runAction ("WaitForIdeRule " <> k <> " " <> show file) s $ parseAction (fromString k) nfp + let res = WaitForIdeRuleResult <$> success + return $ bimap PluginInvalidParams toJSON res +testRequestHandler s GetBuildKeysBuilt = liftIO $ do + keys <- getDatabaseKeys resultBuilt $ shakeDb s + return $ Right $ toJSON $ map show keys +testRequestHandler s GetBuildKeysChanged = liftIO $ do + keys <- getDatabaseKeys resultChanged $ shakeDb s + return $ Right $ toJSON $ map show keys +testRequestHandler s GetBuildKeysVisited = liftIO $ do + keys <- getDatabaseKeys resultVisited $ shakeDb s + return $ Right $ toJSON $ map show keys +testRequestHandler s GetBuildEdgesCount = liftIO $ do + count <- shakeGetBuildEdges $ shakeDb s + return $ Right $ toJSON count +testRequestHandler s (GarbageCollectDirtyKeys parents age) = do + res <- liftIO $ runAction "garbage collect dirty" s $ garbageCollectDirtyKeysOlderThan age parents + return $ Right $ toJSON $ map show res +testRequestHandler s GetStoredKeys = do + keys <- liftIO $ atomically $ map fst <$> ListT.toList (STM.listT $ state $ shakeExtras s) + return $ Right $ toJSON $ map show keys +testRequestHandler s GetFilesOfInterest = do + ff <- liftIO $ getFilesOfInterest s + return $ Right $ toJSON $ map fromNormalizedFilePath $ HM.keys ff +testRequestHandler s GetRebuildsCount = do + count <- liftIO $ runAction "get build count" s getRebuildCount + return $ Right $ toJSON count + +getDatabaseKeys :: (Graph.Result -> Step) + -> ShakeDatabase 
+ -> IO [Graph.Key] +getDatabaseKeys field db = do + keys <- shakeGetCleanKeys db + step <- shakeGetBuildStep db + return [ k | (k, res) <- keys, field res == Step step] + +parseAction :: CI String -> NormalizedFilePath -> Action (Either Text Bool) +parseAction "typecheck" fp = Right . isJust <$> use TypeCheck fp +parseAction "getLocatedImports" fp = Right . isJust <$> use GetLocatedImports fp +parseAction "getmodsummary" fp = Right . isJust <$> use GetModSummary fp +parseAction "getmodsummarywithouttimestamps" fp = Right . isJust <$> use GetModSummaryWithoutTimestamps fp +parseAction "getparsedmodule" fp = Right . isJust <$> use GetParsedModule fp +parseAction "ghcsession" fp = Right . isJust <$> use GhcSession fp +parseAction "ghcsessiondeps" fp = Right . isJust <$> use GhcSessionDeps fp +parseAction "gethieast" fp = Right . isJust <$> use GetHieAst fp +parseAction "getFileContents" fp = Right . isJust <$> use GetFileContents fp +parseAction other _ = return $ Left $ "Cannot parse ide rule: " <> pack (original other) + +-- | a command that blocks forever. Used for testing +blockCommandId :: Text +blockCommandId = "ghcide.command.block" + +blockCommandDescriptor :: PluginId -> PluginDescriptor state +blockCommandDescriptor plId = (defaultPluginDescriptor plId "") { + pluginCommands = [PluginCommand (CommandId blockCommandId) "blocks forever" blockCommandHandler] +} + +blockCommandHandler :: CommandFunction state ExecuteCommandParams +blockCommandHandler _ideState _ _params = do + lift $ pluginSendNotification (SMethod_CustomMethod (Proxy @"ghcide/blocking/command")) A.Null + liftIO $ threadDelay maxBound + pure $ InR Null diff --git a/ghcide/src/Development/IDE/Plugin/TypeLenses.hs b/ghcide/src/Development/IDE/Plugin/TypeLenses.hs new file mode 100644 index 0000000000..c596d1fb82 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/TypeLenses.hs @@ -0,0 +1,362 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE TypeFamilies #-} + +-- | An HLS plugin to provide code lenses for type signatures +module Development.IDE.Plugin.TypeLenses ( + descriptor, + suggestSignature, + typeLensCommandId, + GlobalBindingTypeSig (..), + GetGlobalBindingTypeSigs (..), + GlobalBindingTypeSigsResult (..), + Log(..) 
+ ) where + +import Control.Concurrent.STM.Stats (atomically) +import Control.DeepSeq (rwhnf) +import Control.Lens (to, (?~), (^?)) +import Control.Monad (mzero) +import Control.Monad.Extra (whenMaybe) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Data.Aeson.Types (toJSON) +import qualified Data.Aeson.Types as A +import Data.List (find) +import qualified Data.Map as Map +import Data.Maybe (catMaybes, isJust, + maybeToList) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic (..), + GhcSession (..), + HscEnvEq (hscEnv), + RuleResult, Rules, Uri, + _SomeStructuredMessage, + define, + fdStructuredMessageL, + srcSpanToRange, + usePropertyAction) +import Development.IDE.Core.Compile (TcModuleResult (..)) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (PositionMapping, + fromCurrentRange, + toCurrentRange) +import Development.IDE.Core.Rules (IdeState, runAction) +import Development.IDE.Core.RuleTypes (TypeCheck (TypeCheck)) +import Development.IDE.Core.Service (getDiagnostics) +import Development.IDE.Core.Shake (getHiddenDiagnostics, + use) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Error (_TcRnMessage, + _TcRnMissingSignature, + msgEnvelopeErrorL, + stripTcRnMessageContext) +import Development.IDE.GHC.Util (printName) +import Development.IDE.Graph.Classes +import Development.IDE.Types.Location (Position (Position, _line), + Range (Range, _end, _start)) +import GHC.Generics (Generic) +import Ide.Logger (Pretty (pretty), + Recorder, WithPriority, + cmapWithPrio) +import Ide.Plugin.Error +import Ide.Plugin.Properties +import Ide.PluginUtils (mkLspCommand) +import Ide.Types (CommandFunction, + CommandId (CommandId), + PluginCommand (PluginCommand), + PluginDescriptor (..), + PluginId, + PluginMethodHandler, + ResolveFunction, + configCustomConfig, + defaultConfigDescriptor, + defaultPluginDescriptor, + mkCustomConfig, + mkPluginHandler, + mkResolveHandler, + pluginSendRequest) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (Method (Method_CodeLensResolve, Method_TextDocumentCodeLens), + SMethod (..)) +import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), + CodeLens (..), + CodeLensParams (CodeLensParams, _textDocument), + Command, Diagnostic (..), + Null (Null), + TextDocumentIdentifier (TextDocumentIdentifier), + TextEdit (TextEdit), + WorkspaceEdit (WorkspaceEdit), + type (|?) 
(..)) +import Text.Regex.TDFA ((=~)) + +data Log = LogShake Shake.Log deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + + +typeLensCommandId :: T.Text +typeLensCommandId = "typesignature.add" + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId desc) + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeLens codeLensProvider + <> mkResolveHandler SMethod_CodeLensResolve codeLensResolveProvider + , pluginCommands = [PluginCommand (CommandId typeLensCommandId) "adds a signature" commandHandler] + , pluginRules = rules recorder + , pluginConfigDescriptor = defaultConfigDescriptor {configCustomConfig = mkCustomConfig properties} + } + where + desc = "Provides code lenses type signatures" + +properties :: Properties '[ 'PropertyKey "mode" (TEnum Mode)] +properties = emptyProperties + & defineEnumProperty #mode "Control how type lenses are shown" + [ (Always, "Always displays type lenses of global bindings") + , (Exported, "Only display type lenses of exported global bindings") + , (Diagnostics, "Follows error messages produced by GHC about missing signatures") + ] Always + +codeLensProvider :: PluginMethodHandler IdeState Method_TextDocumentCodeLens +codeLensProvider ideState pId CodeLensParams{_textDocument = TextDocumentIdentifier uri} = do + mode <- liftIO $ runAction "codeLens.config" ideState $ usePropertyAction #mode pId properties + nfp <- getNormalizedFilePathE uri + -- We have two ways we can possibly generate code lenses for type lenses. + -- Different options are with different "modes" of the type-lenses plugin. + -- (Remember here, as the code lens is not resolved yet, we only really need + -- the range and any data that will help us resolve it later) + let -- The first option is to generate lens from diagnostics about + -- top level bindings. + generateLensFromGlobalDiags diags = + -- We don't actually pass any data to resolve, however we need this + -- dummy type to make sure HLS resolves our lens + [ CodeLens _range Nothing (Just $ toJSON TypeLensesResolve) + | diag <- diags + , let Diagnostic {_range} = fdLspDiagnostic diag + , fdFilePath diag == nfp + , isGlobalDiagnostic diag] + -- The second option is to generate lenses from the GlobalBindingTypeSig + -- rule. This is the only type that needs to have the range adjusted + -- with PositionMapping. + -- PositionMapping for diagnostics doesn't make sense, because we always + -- have fresh diagnostics even if current module parsed failed (the + -- diagnostic would then be parse failed). See + -- https://github.com/haskell/haskell-language-server/pull/3558 for this + -- discussion. + generateLensFromGlobal sigs mp = do + [ CodeLens newRange Nothing (Just $ toJSON TypeLensesResolve) + | sig <- sigs + , Just range <- [srcSpanToRange (gbSrcSpan sig)] + , Just newRange <- [toCurrentRange mp range]] + if mode == Always || mode == Exported + then do + -- In this mode we get the global bindings from the + -- GlobalBindingTypeSigs rule. 
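+ -- Using the stale result also gives us the PositionMapping that + -- generateLensFromGlobal needs to adjust each lens range below.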
+ (GlobalBindingTypeSigsResult gblSigs, gblSigsMp) <- + runActionE "codeLens.GetGlobalBindingTypeSigs" ideState + $ useWithStaleE GetGlobalBindingTypeSigs nfp + -- Depending on whether we only want exported or not we filter our list + -- of signatures to get what we want + let relevantGlobalSigs = + if mode == Exported + then filter gbExported gblSigs + else gblSigs + pure $ InL $ generateLensFromGlobal relevantGlobalSigs gblSigsMp + else do + -- For this mode we exclusively use diagnostics to create the lenses. + -- However we will still use the GlobalBindingTypeSigs to resolve them. + diags <- liftIO $ atomically $ getDiagnostics ideState + hDiags <- liftIO $ atomically $ getHiddenDiagnostics ideState + let allDiags = diags <> hDiags + pure $ InL $ generateLensFromGlobalDiags allDiags + +codeLensResolveProvider :: ResolveFunction IdeState TypeLensesResolve Method_CodeLensResolve +codeLensResolveProvider ideState pId lens@CodeLens{_range} uri TypeLensesResolve = do + nfp <- getNormalizedFilePathE uri + (gblSigs@(GlobalBindingTypeSigsResult _), pm) <- + runActionE "codeLens.GetGlobalBindingTypeSigs" ideState + $ useWithStaleE GetGlobalBindingTypeSigs nfp + -- regardless of how the original lens was generated, we want to get the range + -- that the global bindings rule would expect here, hence the need to reverse + -- position map the range, regardless of whether it was position mapped in the + -- beginning or freshly taken from diagnostics. + newRange <- handleMaybe PluginStaleResolve (fromCurrentRange pm _range) + -- We also pass on the PositionMapping so that the generated text edit can + -- have the range adjusted. + (title, edit) <- + handleMaybe PluginStaleResolve $ suggestGlobalSignature' False (Just gblSigs) (Just pm) newRange + pure $ lens & L.command ?~ generateLensCommand pId uri title edit + +generateLensCommand :: PluginId -> Uri -> T.Text -> TextEdit -> Command +generateLensCommand pId uri title edit = + let wEdit = WorkspaceEdit (Just $ Map.singleton uri [edit]) Nothing Nothing + in mkLspCommand pId (CommandId typeLensCommandId) title (Just [toJSON wEdit]) + +-- Since the lenses are created with diagnostics, and since the globalTypeSig +-- rule can't be changed as it is also used by the hls-refactor plugin, we can't +-- rely on actions. Because we can't rely on actions it doesn't make sense to +-- recompute the edit upon command. Hence the command here just takes a edit +-- and applies it. +commandHandler :: CommandFunction IdeState WorkspaceEdit +commandHandler _ideState _ wedit = do + _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wedit) (\_ -> pure ()) + pure $ InR Null + +-------------------------------------------------------------------------------- +suggestSignature :: Bool -> Maybe GlobalBindingTypeSigsResult -> FileDiagnostic -> [(T.Text, TextEdit)] +suggestSignature isQuickFix mGblSigs diag = + maybeToList (suggestGlobalSignature isQuickFix mGblSigs diag) + +-- The suggestGlobalSignature is separated into two functions. The main function +-- works with a diagnostic, which then calls the secondary function with +-- whatever pieces of the diagnostic it needs. This allows the resolve function, +-- which no longer has the Diagnostic, to still call the secondary functions. 
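+-- For example, the code lens resolve handler above ends up calling the primed variant directly, passing the PositionMapping and the reverse-mapped range it computed: +-- +-- > (title, edit) <- handleMaybe PluginStaleResolve $ suggestGlobalSignature' False (Just gblSigs) (Just pm) newRange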
+suggestGlobalSignature :: Bool -> Maybe GlobalBindingTypeSigsResult -> FileDiagnostic -> Maybe (T.Text, TextEdit) +suggestGlobalSignature isQuickFix mGblSigs diag@FileDiagnostic {fdLspDiagnostic = Diagnostic {_range}} + | isGlobalDiagnostic diag = + suggestGlobalSignature' isQuickFix mGblSigs Nothing _range + | otherwise = Nothing + +isGlobalDiagnostic :: FileDiagnostic -> Bool +isGlobalDiagnostic diag = diag ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessage + . _TcRnMissingSignature + & isJust + +-- If a PositionMapping is supplied, this function will call +-- gblBindingTypeSigToEdit with it to create a TextEdit in the right location. +suggestGlobalSignature' :: Bool -> Maybe GlobalBindingTypeSigsResult -> Maybe PositionMapping -> Range -> Maybe (T.Text, TextEdit) +suggestGlobalSignature' isQuickFix mGblSigs pm range + | Just (GlobalBindingTypeSigsResult sigs) <- mGblSigs + , Just sig <- find (\x -> sameThing (gbSrcSpan x) range) sigs + , signature <- T.pack $ gbRendered sig + , title <- if isQuickFix then "add signature: " <> signature else signature + , Just action <- gblBindingTypeSigToEdit sig pm = + Just (title, action) + | otherwise = Nothing + +sameThing :: SrcSpan -> Range -> Bool +sameThing s1 s2 = (_start <$> srcSpanToRange s1) == (_start <$> Just s2) + +gblBindingTypeSigToEdit :: GlobalBindingTypeSig -> Maybe PositionMapping -> Maybe TextEdit +gblBindingTypeSigToEdit GlobalBindingTypeSig{..} mmp + | Just Range{..} <- srcSpanToRange $ getSrcSpan gbName + , startOfLine <- Position (_line _start) 0 + , beforeLine <- Range startOfLine startOfLine + -- If `mmp` is `Nothing`, return the original range, + -- otherwise we apply `toCurrentRange`, and the guard should fail if `toCurrentRange` failed. + , Just range <- maybe (Just beforeLine) (flip toCurrentRange beforeLine) mmp + -- We need to flatten the signature, as otherwise long signatures are + -- rendered on multiple lines with invalid formatting. 
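+ -- For example, a signature that GHC renders across several lines is collapsed into a single line here before the edit appends its trailing newline.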
+ , renderedFlat <- unwords $ lines gbRendered + = Just $ TextEdit range $ T.pack renderedFlat <> "\n" + | otherwise = Nothing + +-- |We don't need anything to resolve our lens, but a data field is mandatory +-- to get types resolved in HLS +data TypeLensesResolve = TypeLensesResolve + deriving (Generic, A.FromJSON, A.ToJSON) + +data Mode + = -- | always displays type lenses of global bindings, no matter what GHC flags are set + Always + | -- | similar to 'Always', but only displays for exported global bindings + Exported + | -- | follows error messages produced by GHC + Diagnostics + deriving (Eq, Ord, Show, Read, Enum) + +instance A.ToJSON Mode where + toJSON Always = "always" + toJSON Exported = "exported" + toJSON Diagnostics = "diagnostics" + +instance A.FromJSON Mode where + parseJSON = A.withText "Mode" $ \case + "always" -> pure Always + "exported" -> pure Exported + "diagnostics" -> pure Diagnostics + _ -> mzero + +-------------------------------------------------------------------------------- + +showDocRdrEnv :: HscEnv -> GlobalRdrEnv -> SDoc -> String +showDocRdrEnv env rdrEnv = showSDocForUser' env (mkPrintUnqualifiedDefault env rdrEnv) + +data GetGlobalBindingTypeSigs = GetGlobalBindingTypeSigs + deriving (Generic, Show, Eq, Ord, Hashable, NFData) + +data GlobalBindingTypeSig = GlobalBindingTypeSig + { gbName :: Name + , gbRendered :: String + , gbExported :: Bool + } + +gbSrcSpan :: GlobalBindingTypeSig -> SrcSpan +gbSrcSpan GlobalBindingTypeSig{gbName} = getSrcSpan gbName + +newtype GlobalBindingTypeSigsResult = GlobalBindingTypeSigsResult [GlobalBindingTypeSig] + +instance Show GlobalBindingTypeSigsResult where + show _ = "" + +instance NFData GlobalBindingTypeSigsResult where + rnf = rwhnf + +type instance RuleResult GetGlobalBindingTypeSigs = GlobalBindingTypeSigsResult + +rules :: Recorder (WithPriority Log) -> Rules () +rules recorder = do + define (cmapWithPrio LogShake recorder) $ \GetGlobalBindingTypeSigs nfp -> do + tmr <- use TypeCheck nfp + -- we need session here for tidying types + hsc <- use GhcSession nfp + result <- liftIO $ gblBindingType (hscEnv <$> hsc) (tmrTypechecked <$> tmr) + pure ([], result) + +gblBindingType :: Maybe HscEnv -> Maybe TcGblEnv -> IO (Maybe GlobalBindingTypeSigsResult) +gblBindingType (Just hsc) (Just gblEnv) = do + let exports = availsToNameSet $ tcg_exports gblEnv + sigs = tcg_sigs gblEnv + binds = collectHsBindsBinders $ tcg_binds gblEnv + patSyns = tcg_patsyns gblEnv + rdrEnv = tcg_rdr_env gblEnv + showDoc = showDocRdrEnv hsc rdrEnv + hasSig :: (Monad m) => Name -> m a -> m (Maybe a) + hasSig name f = whenMaybe (name `elemNameSet` sigs) f + bindToSig identifier = liftZonkM $ do + let name = idName identifier + hasSig name $ do + env <- tcInitTidyEnv +#if MIN_VERSION_ghc(9,11,0) + let ty = tidyOpenType env (idType identifier) +#else + let (_, ty) = tidyOpenType env (idType identifier) +#endif + pure $ GlobalBindingTypeSig name (printName name <> " :: " <> showDoc (pprSigmaType ty)) (name `elemNameSet` exports) + patToSig p = do + let name = patSynName p + hasSig name $ pure $ GlobalBindingTypeSig name ("pattern " <> printName name <> " :: " <> showDoc (pprPatSynTypeWithoutForalls p)) (name `elemNameSet` exports) + (_, maybe [] catMaybes -> bindings) <- initTcWithGbl hsc gblEnv (realSrcLocSpan $ mkRealSrcLoc "" 1 1) $ mapM bindToSig binds + patterns <- catMaybes <$> mapM patToSig patSyns + pure . Just . 
GlobalBindingTypeSigsResult $ bindings <> patterns +gblBindingType _ _ = pure Nothing + +pprPatSynTypeWithoutForalls :: PatSyn -> SDoc +pprPatSynTypeWithoutForalls p = pprPatSynType pWithoutTypeVariables + where + pWithoutTypeVariables = mkPatSyn name declared_infix ([], req_theta) ([], prov_theta) orig_args' orig_res_ty matcher builder field_labels + (_univ_tvs, req_theta, _ex_tvs, prov_theta, orig_args, orig_res_ty) = patSynSig p + name = patSynName p + declared_infix = patSynIsInfix p + matcher = patSynMatcher p + builder = patSynBuilder p + field_labels = patSynFieldLabels p + orig_args' = map scaledThing orig_args diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs new file mode 100644 index 0000000000..50df0f5ba5 --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -0,0 +1,607 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs #-} + +-- | Gives information about symbols at a given point in DAML files. +-- These are all pure functions that should execute quickly. +module Development.IDE.Spans.AtPoint ( + atPoint + , gotoDefinition + , gotoTypeDefinition + , gotoImplementation + , documentHighlight + , pointCommand + , referencesAtPoint + , computeTypeReferences + , FOIReferences(..) + , defRowToSymbolInfo + , getNamesAtPoint + , toCurrentLocation + , rowToLoc + , nameToLocation + , LookupModule + ) where + + +import GHC.Data.FastString (lengthFS) +import qualified GHC.Utils.Outputable as O + +import Development.IDE.GHC.Error +import Development.IDE.GHC.Orphans () +import Development.IDE.Types.Location +import Language.LSP.Protocol.Types hiding + (SemanticTokenAbsolute (..)) +import Prelude hiding (mod) + +-- compiler and infrastructure +import Development.IDE.Core.Compile (setNonHomeFCHook) +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.RuleTypes +import Development.IDE.GHC.Compat +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Spans.Common +import Development.IDE.Types.Options + +import Control.Applicative +import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Trans.Class +import Control.Monad.Trans.Maybe +import Data.Coerce (coerce) +import qualified Data.HashMap.Strict as HM +import qualified Data.Map.Strict as M +import Data.Maybe +import qualified Data.Text as T + +import qualified Data.Array as A +import Data.Either +import Data.List.Extra (dropEnd1, nubOrd) + + +import Data.Either.Extra (eitherToMaybe) +import Data.List (isSuffixOf, sortOn) +import Data.Tree +import qualified Data.Tree as T +import Data.Version (showVersion) +import Development.IDE.Types.Shake (WithHieDb) +import GHC.Iface.Ext.Types (EvVarSource (..), + HieAST (..), + HieASTs (..), + HieArgs (..), + HieType (..), Identifier, + IdentifierDetails (..), + NodeInfo (..), Scope, + Span) +import GHC.Iface.Ext.Utils (EvidenceInfo (..), + RefMap, getEvidenceTree, + getScopeFromContext, + hieTypeToIface, + isEvidenceContext, + isEvidenceUse, + isOccurrence, nodeInfo, + recoverFullType, + selectSmallestContaining) +import HieDb hiding (pointCommand, + withHieDb) +import System.Directory (doesFileExist) + +-- | Gives a Uri for the module, given the .hie file location and the the module info +-- The Bool denotes if it is a boot module +type LookupModule m = FilePath -> ModuleName -> Unit -> Bool -> MaybeT m Uri + +-- | 
HieFileResult for files of interest, along with the position mappings +newtype FOIReferences = FOIReferences (HM.HashMap NormalizedFilePath (HieAstResult, PositionMapping)) + +computeTypeReferences :: Foldable f => f (HieAST Type) -> M.Map Name [Span] +computeTypeReferences = foldr (\ast m -> M.unionWith (++) (go ast) m) M.empty + where + go ast = M.unionsWith (++) (this : map go (nodeChildren ast)) + where + this = M.fromListWith (++) + $ map (, [nodeSpan ast]) + $ concatMap namesInType + $ mapMaybe (\x -> guard (not $ all isOccurrence $ identInfo x) *> identType x) + $ M.elems + $ nodeIdentifiers $ nodeInfo ast + +-- | Given a file and position, return the names at a point, the references for +-- those names in the FOIs, and a list of file paths we already searched through +foiReferencesAtPoint + :: NormalizedFilePath + -> Position + -> FOIReferences + -> ([Name],[Location],[FilePath]) +foiReferencesAtPoint file pos (FOIReferences asts) = + case HM.lookup file asts of + Nothing -> ([],[],[]) + Just (HAR _ hf _ _ _,mapping) -> + let names = getNamesAtPoint hf pos mapping + adjustedLocs = HM.foldr go [] asts + go (HAR _ _ rf tr _, goMapping) xs = refs ++ typerefs ++ xs + where + refs = concatMap (mapMaybe (toCurrentLocation goMapping . realSrcSpanToLocation . fst)) + (mapMaybe (\n -> M.lookup (Right n) rf) names) + typerefs = concatMap (mapMaybe (toCurrentLocation goMapping . realSrcSpanToLocation)) + (mapMaybe (`M.lookup` tr) names) + in (names, adjustedLocs,map fromNormalizedFilePath $ HM.keys asts) + +getNamesAtPoint :: HieASTs a -> Position -> PositionMapping -> [Name] +getNamesAtPoint hf pos mapping = + concat $ pointCommand hf posFile (rights . M.keys . getSourceNodeIds) + where + posFile = fromMaybe pos $ fromCurrentPosition mapping pos + +toCurrentLocation :: PositionMapping -> Location -> Maybe Location +toCurrentLocation mapping (Location uri range) = + Location uri <$> toCurrentRange mapping range + +referencesAtPoint + :: MonadIO m + => WithHieDb + -> NormalizedFilePath -- ^ The file the cursor is in + -> Position -- ^ position in the file + -> FOIReferences -- ^ references data for FOIs + -> m [Location] +referencesAtPoint withHieDb nfp pos refs = do + -- The database doesn't have up2date references data for the FOIs so we must collect those + -- from the Shake graph. 
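+ -- foiReferencesAtPoint also returns the paths of the files of interest, so the HieDb queries below can exclude them and avoid duplicating the results already in foiRefs.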
+ let (names, foiRefs, exclude) = foiReferencesAtPoint nfp pos refs + nonFOIRefs <- forM names $ \name -> + case nameModule_maybe name of + Nothing -> pure [] + Just mod -> do + -- Look for references (strictly in project files, not dependencies), + -- excluding the files in the FOIs (since those are in foiRefs) + rows <- liftIO $ withHieDb (\hieDb -> findReferences hieDb True (nameOccName name) (Just $ moduleName mod) (Just $ moduleUnit mod) exclude) + pure $ mapMaybe rowToLoc rows + typeRefs <- forM names $ \name -> + case nameModule_maybe name of + Just mod | isTcClsNameSpace (occNameSpace $ nameOccName name) -> do + refs' <- liftIO $ withHieDb (\hieDb -> findTypeRefs hieDb True (nameOccName name) (Just $ moduleName mod) (Just $ moduleUnit mod) exclude) + pure $ mapMaybe typeRowToLoc refs' + _ -> pure [] + pure $ nubOrd $ foiRefs ++ concat nonFOIRefs ++ concat typeRefs + +rowToLoc :: Res RefRow -> Maybe Location +rowToLoc (row:.info) = flip Location range <$> mfile + where + range = Range start end + start = Position (fromIntegral $ refSLine row - 1) (fromIntegral $ refSCol row -1) + end = Position (fromIntegral $ refELine row - 1) (fromIntegral $ refECol row -1) + mfile = case modInfoSrcFile info of + Just f -> Just $ toUri f + Nothing -> Nothing + +typeRowToLoc :: Res TypeRef -> Maybe Location +typeRowToLoc (row:.info) = do + file <- modInfoSrcFile info + pure $ Location (toUri file) range + where + range = Range start end + start = Position (fromIntegral $ typeRefSLine row - 1) (fromIntegral $ typeRefSCol row -1) + end = Position (fromIntegral $ typeRefELine row - 1) (fromIntegral $ typeRefECol row -1) + +documentHighlight + :: Monad m + => HieASTs a + -> RefMap a + -> Position + -> MaybeT m [DocumentHighlight] +documentHighlight hf rf pos = pure highlights + where + -- We don't want to show document highlights for evidence variables, which are supposed to be invisible + notEvidence = not . any isEvidenceContext . identInfo + ns = concat $ pointCommand hf pos (rights . M.keys . M.filter notEvidence . getSourceNodeIds) + highlights = do + n <- ns + ref <- fromMaybe [] (M.lookup (Right n) rf) + maybeToList (makeHighlight n ref) + makeHighlight n (sp,dets) + | isTvNameSpace (nameNameSpace n) && isBadSpan n sp = Nothing + | otherwise = Just $ DocumentHighlight (realSrcSpanToRange sp) (Just $ highlightType $ identInfo dets) + highlightType s = + if any (isJust . getScopeFromContext) s + then DocumentHighlightKind_Write + else DocumentHighlightKind_Read + + isBadSpan :: Name -> RealSrcSpan -> Bool + isBadSpan n sp = srcSpanStartLine sp /= srcSpanEndLine sp || (srcSpanEndCol sp - srcSpanStartCol sp > lengthFS (occNameFS $ nameOccName n)) + +-- | Locate the type definition of the name at a given position. +gotoTypeDefinition + :: MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> HieAstResult + -> Position + -> MaybeT m [(Location, Identifier)] +gotoTypeDefinition withHieDb lookupModule ideOpts srcSpans pos + = lift $ typeLocationsAtPoint withHieDb lookupModule ideOpts pos srcSpans + +-- | Locate the definition of the name at a given position. +gotoDefinition + :: MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> M.Map ModuleName NormalizedFilePath + -> HieAstResult + -> Position + -> MaybeT m [(Location, Identifier)] +gotoDefinition withHieDb getHieFile ideOpts imports srcSpans pos + = lift $ locationsAtPoint withHieDb getHieFile ideOpts imports pos srcSpans + +-- | Locate the implementation definition of the name at a given position. 
+-- Goto Implementation for an overloaded function. +gotoImplementation + :: MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> HieAstResult + -> Position + -> MaybeT m [Location] +gotoImplementation withHieDb getHieFile ideOpts srcSpans pos + = lift $ instanceLocationsAtPoint withHieDb getHieFile ideOpts pos srcSpans + +-- | Synopsis for the name at a given position. +atPoint + :: IdeOptions + -> HieAstResult + -> DocAndTyThingMap + -> HscEnv + -> Position + -> IO (Maybe (Maybe Range, [T.Text])) +atPoint IdeOptions{} (HAR _ (hf :: HieASTs a) rf _ (kind :: HieKind hietype)) (DKMap dm km) env pos = + listToMaybe <$> sequence (pointCommand hf pos hoverInfo) + where + -- Hover info for values/data + hoverInfo :: HieAST hietype -> IO (Maybe Range, [T.Text]) + hoverInfo ast = do + prettyNames <- mapM prettyName names + pure (Just range, prettyNames ++ pTypes) + where + pTypes :: [T.Text] + pTypes + | Prelude.length names == 1 = dropEnd1 $ map wrapHaskell prettyTypes + | otherwise = map wrapHaskell prettyTypes + + range :: Range + range = realSrcSpanToRange $ nodeSpan ast + + wrapHaskell :: T.Text -> T.Text + wrapHaskell x = "\n```haskell\n"<>x<>"\n```\n" + + info :: NodeInfo hietype + info = nodeInfoH kind ast + + -- We want evidence variables to be displayed last. + -- Evidence trees contain information of secondary relevance. + names :: [(Identifier, IdentifierDetails hietype)] + names = sortOn (any isEvidenceUse . identInfo . snd) $ M.assocs $ nodeIdentifiers info + + prettyName :: (Either ModuleName Name, IdentifierDetails hietype) -> IO T.Text + prettyName (Right n, dets) + -- We want to print evidence variable using a readable tree structure. + -- Evidence variables contain information why a particular instance or + -- type equality was chosen, paired with location information. + | any isEvidenceUse (identInfo dets) = + let + -- The evidence tree may not be present for some reason, e.g., the 'Name' is not + -- present in the tree. + -- Thus, we need to handle it here, but in practice, this should never be 'Nothing'. + evidenceTree = maybe "" (printOutputable . renderEvidenceTree) (getEvidenceTree rf n) + in + pure $ evidenceTree <> "\n" + -- Identifier details that are not evidence variables are used to display type information and + -- documentation of that name. + | otherwise = + let + typeSig = wrapHaskell (printOutputable n <> maybe "" (" :: " <>) ((prettyType <$> identType dets) <|> maybeKind)) + definitionLoc = maybeToList (pretty (definedAt n) (prettyPackageName n)) + docs = maybeToList (T.unlines . spanDocToMarkdown <$> lookupNameEnv dm n) + in + pure $ T.unlines $ + [typeSig] ++ definitionLoc ++ docs + where maybeKind = fmap printOutputable $ safeTyThingType =<< lookupNameEnv km n + pretty Nothing Nothing = Nothing + pretty (Just define) Nothing = Just $ define <> "\n" + pretty Nothing (Just pkgName) = Just $ pkgName <> "\n" + pretty (Just define) (Just pkgName) = Just $ define <> " " <> pkgName <> "\n" + prettyName (Left m,_) = packageNameForImportStatement m + + prettyPackageName :: Name -> Maybe T.Text + prettyPackageName n = do + m <- nameModule_maybe n + pkgTxt <- packageNameWithVersion m + pure $ "*(" <> pkgTxt <> ")*" + + -- Return the module text itself and + -- the package(with version) this `ModuleName` belongs to. 
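+ -- For example, hovering over an import of Data.List would show "Data.List" followed by its package, e.g. something like base-4.18 (version illustrative).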
+ packageNameForImportStatement :: ModuleName -> IO T.Text + packageNameForImportStatement mod = do + mpkg <- findImportedModule (setNonHomeFCHook env) mod :: IO (Maybe Module) + let moduleName = printOutputable mod + case mpkg >>= packageNameWithVersion of + Nothing -> pure moduleName + Just pkgWithVersion -> pure $ moduleName <> "\n\n" <> pkgWithVersion + + -- Return the package name and version of a module. + -- For example, given module `Data.List`, it should return something like `base-4.x`. + packageNameWithVersion :: Module -> Maybe T.Text + packageNameWithVersion m = do + let pid = moduleUnit m + conf <- lookupUnit env pid + let pkgName = T.pack $ unitPackageNameString conf + version = T.pack $ showVersion (unitPackageVersion conf) + pure $ pkgName <> "-" <> version + + -- Type info for the current node, it may contain several symbols + -- for one range, like wildcard + types :: [hietype] + types = nodeType info + + prettyTypes :: [T.Text] + prettyTypes = map (("_ :: "<>) . prettyType) types + + prettyType :: hietype -> T.Text + prettyType = printOutputable . expandType + + expandType :: a -> SDoc + expandType t = case kind of + HieFresh -> ppr t + HieFromDisk full_file -> ppr $ hieTypeToIface $ recoverFullType t (hie_types full_file) + + definedAt :: Name -> Maybe T.Text + definedAt name = + -- do not show "at " and similar messages + -- see the code of 'pprNameDefnLoc' for more information + case nameSrcLoc name of + UnhelpfulLoc {} | isInternalName name || isSystemName name -> Nothing + _ -> Just $ "*Defined " <> printOutputable (pprNameDefnLoc name) <> "*" + + -- We want to render the root constraint even if it is a let, + -- but we don't want to render any subsequent lets + renderEvidenceTree :: Tree (EvidenceInfo a) -> SDoc + -- However, if the root constraint is simply a (Show (,), Show [], Show Int, Show Bool)@ + -- + -- It is also quite helpful to look at the @.hie@ file directly to see how the + -- evidence information is presented on disk. @hiedb dump @ + renderEvidenceTree (T.Node (EvidenceInfo{evidenceDetails=Just (EvLetBind _,_,_)}) [x]) + = renderEvidenceTree x + renderEvidenceTree (T.Node (EvidenceInfo{evidenceDetails=Just (EvLetBind _,_,_), ..}) xs) + = hang (text "Evidence of constraint `" O.<> expandType evidenceType O.<> "`") 2 $ + vcat $ text "constructed using:" : map renderEvidenceTree' xs + renderEvidenceTree (T.Node (EvidenceInfo{..}) _) + = hang (text "Evidence of constraint `" O.<> expandType evidenceType O.<> "`") 2 $ + vcat $ printDets evidenceSpan evidenceDetails : map (text . T.unpack) (maybeToList $ definedAt evidenceVar) + + -- renderEvidenceTree' skips let bound evidence variables and prints the children directly + renderEvidenceTree' (T.Node (EvidenceInfo{evidenceDetails=Just (EvLetBind _,_,_)}) xs) + = vcat (map renderEvidenceTree' xs) + renderEvidenceTree' (T.Node (EvidenceInfo{..}) _) + = hang (text "- `" O.<> expandType evidenceType O.<> "`") 2 $ + vcat $ + printDets evidenceSpan evidenceDetails : map (text . 
T.unpack) (maybeToList $ definedAt evidenceVar) + + printDets :: RealSrcSpan -> Maybe (EvVarSource, Scope, Maybe Span) -> SDoc + printDets _ Nothing = text "using an external instance" + printDets ospn (Just (src,_,mspn)) = pprSrc + $$ text "at" <+> text (T.unpack $ srcSpanToMdLink location) + where + location = realSrcSpanToLocation spn + -- Use the bind span if we have one, else use the occurrence span + spn = fromMaybe ospn mspn + pprSrc = case src of + -- Users don't know what HsWrappers are + EvWrapperBind -> "bound by type signature or pattern" + _ -> ppr src + +-- | Find 'Location's of type definition at a specific point and return them along with their 'Identifier's. +typeLocationsAtPoint + :: forall m + . MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> Position + -> HieAstResult + -> m [(Location, Identifier)] +typeLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _ _ hieKind) = + case hieKind of + HieFromDisk hf -> + let arr = hie_types hf + ts = concat $ pointCommand ast pos getts + unfold = map (arr A.!) + getts x = nodeType ni ++ mapMaybe identType (M.elems $ nodeIdentifiers ni) + where ni = nodeInfo' x + getTypes' ts' = flip concatMap (unfold ts') $ \case + HTyVarTy n -> [n] + HAppTy a (HieArgs xs) -> getTypes' (a : map snd xs) + HTyConApp tc (HieArgs xs) -> ifaceTyConName tc : getTypes' (map snd xs) + HForAllTy _ a -> getTypes' [a] + HFunTy a b c -> getTypes' [a,b,c] + HQualTy a b -> getTypes' [a,b] + HCastTy a -> getTypes' [a] + _ -> [] + in fmap nubOrd $ concatMapM (\n -> fmap (maybe [] (fmap (,Right n))) (nameToLocation withHieDb lookupModule n)) (getTypes' ts) + HieFresh -> + let ts = concat $ pointCommand ast pos getts + getts x = nodeType ni ++ mapMaybe identType (M.elems $ nodeIdentifiers ni) + where ni = nodeInfo x + in fmap nubOrd $ concatMapM (\n -> fmap (maybe [] (fmap (,Right n))) (nameToLocation withHieDb lookupModule n)) (getTypes ts) + +namesInType :: Type -> [Name] +namesInType (TyVarTy n) = [varName n] +namesInType (AppTy a b) = getTypes [a,b] +namesInType (TyConApp tc ts) = tyConName tc : getTypes ts +namesInType (ForAllTy b t) = varName (binderVar b) : namesInType t +namesInType (FunTy _ a b) = getTypes [a,b] +namesInType (CastTy t _) = namesInType t +namesInType (LitTy _) = [] +namesInType _ = [] + +getTypes :: [Type] -> [Name] +getTypes = concatMap namesInType + +-- | Find 'Location's of definition at a specific point and return them along with their 'Identifier's. +locationsAtPoint + :: forall m + . MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> M.Map ModuleName NormalizedFilePath + -> Position + -> HieAstResult + -> m [(Location, Identifier)] +locationsAtPoint withHieDb lookupModule _ideOptions imports pos (HAR _ ast _rm _ _) = + let ns = concat $ pointCommand ast pos (M.keys . getNodeIds) + zeroPos = Position 0 0 + zeroRange = Range zeroPos zeroPos + modToLocation m = fmap (\fs -> pure (Location (fromNormalizedUri $ filePathToUri' fs) zeroRange)) $ M.lookup m imports + in fmap (nubOrd . concat) $ mapMaybeM + (either (\m -> pure ((fmap $ fmap (,Left m)) (modToLocation m))) + (\n -> fmap (fmap $ fmap (,Right n)) (nameToLocation withHieDb lookupModule n))) + ns + +-- | Find 'Location's of a implementation definition at a specific point. +instanceLocationsAtPoint + :: forall m + . 
MonadIO m + => WithHieDb + -> LookupModule m + -> IdeOptions + -> Position + -> HieAstResult + -> m [Location] +instanceLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _rm _ _) = + let ns = concat $ pointCommand ast pos (M.keys . getNodeIds) + evTrees = mapMaybe (eitherToMaybe >=> getEvidenceTree _rm) ns + evNs = concatMap (map evidenceVar . T.flatten) evTrees + in fmap (nubOrd . concat) $ mapMaybeM + (nameToLocation withHieDb lookupModule) + evNs + +-- | Given a 'Name' attempt to find the location where it is defined. +nameToLocation :: MonadIO m => WithHieDb -> LookupModule m -> Name -> m (Maybe [Location]) +nameToLocation withHieDb lookupModule name = runMaybeT $ + case nameSrcSpan name of + sp@(RealSrcSpan rsp _) + -- Lookup in the db if we got a location in a boot file + | fs <- Util.unpackFS (srcSpanFile rsp) + , not $ "boot" `isSuffixOf` fs + -> do + itExists <- liftIO $ doesFileExist fs + if itExists + then MaybeT $ pure $ fmap pure $ srcSpanToLocation sp + -- When reusing .hie files from a cloud cache, + -- the paths may not match the local file system. + -- Let's fall back to the hiedb in case it contains local paths + else fallbackToDb sp + sp -> fallbackToDb sp + where + fallbackToDb sp = do + guard (sp /= wiredInSrcSpan) + -- This case usually arises when the definition is in an external package. + -- In this case the interface files contain garbage source spans + -- so we instead read the .hie files to get useful source spans. + mod <- MaybeT $ return $ nameModule_maybe name + erow <- liftIO $ withHieDb (\hieDb -> findDef hieDb (nameOccName name) (Just $ moduleName mod) (Just $ moduleUnit mod)) + case erow of + [] -> do + -- If the lookup failed, try again without specifying a unit-id. + -- This is a hack to make find definition work better with ghcide's nascent multi-component support, + -- where names from a component that has been indexed in a previous session but not loaded in this + -- session may end up with different unit ids + erow' <- liftIO $ withHieDb (\hieDb -> findDef hieDb (nameOccName name) (Just $ moduleName mod) Nothing) + case erow' of + [] -> MaybeT $ pure Nothing + xs -> lift $ mapMaybeM (runMaybeT . defRowToLocation lookupModule) xs + xs -> lift $ mapMaybeM (runMaybeT . defRowToLocation lookupModule) xs + +defRowToLocation :: Monad m => LookupModule m -> Res DefRow -> MaybeT m Location +defRowToLocation lookupModule (row:.info) = do + let start = Position (fromIntegral $ defSLine row - 1) (fromIntegral $ defSCol row - 1) + end = Position (fromIntegral $ defELine row - 1) (fromIntegral $ defECol row - 1) + range = Range start end + file <- case modInfoSrcFile info of + Just src -> pure $ toUri src + Nothing -> lookupModule (defSrc row) (modInfoName info) (modInfoUnit info) (modInfoIsBoot info) + pure $ Location file range + +toUri :: FilePath -> Uri +toUri = fromNormalizedUri . filePathToUri' . toNormalizedFilePath' + +defRowToSymbolInfo :: Res DefRow -> Maybe SymbolInformation +defRowToSymbolInfo (DefRow{..}:.(modInfoSrcFile -> Just srcFile)) + = Just $ SymbolInformation (printOutputable defNameOcc) kind Nothing Nothing Nothing loc + where + kind + | isVarOcc defNameOcc = SymbolKind_Variable + | isDataOcc defNameOcc = SymbolKind_Constructor + | isTcOcc defNameOcc = SymbolKind_Struct + -- This used to be (SkUnknown 1), buth there is no SymbolKind_Unknown. + -- Changing this to File, as that is enum representation of 1 + | otherwise = SymbolKind_File + loc = Location file range + file = fromNormalizedUri . filePathToUri' . 
toNormalizedFilePath' $ srcFile + range = Range start end + start = Position (fromIntegral $ defSLine - 1) (fromIntegral $ defSCol - 1) + end = Position (fromIntegral $ defELine - 1) (fromIntegral $ defECol - 1) +defRowToSymbolInfo _ = Nothing + +pointCommand :: HieASTs t -> Position -> (HieAST t -> a) -> [a] +pointCommand hf pos k = + M.elems $ flip M.mapMaybeWithKey (getAsts hf) $ \fs ast -> + -- Since GHC 9.2: + -- getAsts :: Map HiePath (HieAst a) + -- type HiePath = LexicalFastString + -- + -- but before: + -- getAsts :: Map HiePath (HieAst a) + -- type HiePath = FastString + -- + -- 'coerce' here to avoid an additional function for maintaining + -- backwards compatibility. + case selectSmallestContaining (sp $ coerce fs) ast of + Nothing -> Nothing + Just ast' -> Just $ k ast' + where + sloc fs = mkRealSrcLoc fs (fromIntegral $ line+1) (fromIntegral $ cha+1) + sp fs = mkRealSrcSpan (sloc fs) (sloc fs) + line :: UInt + line = _line pos + cha = _character pos + +-- In ghc9, nodeInfo is monomorphic, so we need a case split here +nodeInfoH :: HieKind a -> HieAST a -> NodeInfo a +nodeInfoH (HieFromDisk _) = nodeInfo' +nodeInfoH HieFresh = nodeInfo diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs new file mode 100644 index 0000000000..996e55ef1a --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/Common.hs @@ -0,0 +1,254 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} + +module Development.IDE.Spans.Common ( + unqualIEWrapName +, safeTyThingId +, safeTyThingType +, SpanDoc(..) +, SpanDocUris(..) +, emptySpanDoc +, spanDocToMarkdown +, spanDocToMarkdownForTest +, DocMap +, TyThingMap +, srcSpanToMdLink +) where + +import Control.DeepSeq +import Data.Bifunctor (second) +import Data.List.Extra +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.GHC.Util +import qualified Documentation.Haddock.Parser as H +import qualified Documentation.Haddock.Types as H +import GHC +import GHC.Generics +import System.FilePath + +import Control.Lens +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Orphans () +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Types + +type DocMap = NameEnv SpanDoc +type TyThingMap = NameEnv TyThing + +-- | Shows IEWrappedName, without any modifier, qualifier or unique identifier. +unqualIEWrapName :: IEWrappedName GhcPs -> T.Text +unqualIEWrapName = printOutputable . rdrNameOcc . 
ieWrappedName + +-- From haskell-ide-engine/src/Haskell/Ide/Engine/Support/HieExtras.hs +safeTyThingType :: TyThing -> Maybe Type +safeTyThingType thing + | Just i <- safeTyThingId thing = Just (varType i) +safeTyThingType (ATyCon tycon) = Just (tyConKind tycon) +safeTyThingType _ = Nothing + +safeTyThingId :: TyThing -> Maybe Id +safeTyThingId (AnId i) = Just i +safeTyThingId (AConLike (RealDataCon dataCon)) = Just (dataConWrapId dataCon) +safeTyThingId _ = Nothing + +-- Possible documentation for an element in the code +data SpanDoc + = SpanDocString [HsDocString] SpanDocUris + | SpanDocText [T.Text] SpanDocUris + deriving stock (Eq, Show, Generic) + deriving anyclass NFData + +data SpanDocUris = + SpanDocUris + { spanDocUriDoc :: Maybe T.Text -- ^ The haddock html page + , spanDocUriSrc :: Maybe T.Text -- ^ The hyperlinked source html page + } deriving stock (Eq, Show, Generic) + deriving anyclass NFData + +emptySpanDoc :: SpanDoc +emptySpanDoc = SpanDocText [] (SpanDocUris Nothing Nothing) + +-- | Convert `SpanDoc` to Markdown format. +-- +-- Return a list `Text` includes haddock, document uri and source code uri, +-- each item can be empty and must end with '\\n' if exist. This is to prevent +-- subsequent render problem caused by the missing newline. +-- +-- Example: +-- +-- For return value ["xxxx","yyyy"], if we concat the list with inserting +-- a separate line(note by "---\n"), +-- it will result "xxxx---\nyyyy" and can't be rendered as a normal doc. +-- Therefore we check every item in the value to make sure they all end with '\\n', +-- this makes "xxxx\n---\nyyy\n" and can be rendered correctly. +-- +-- Notes: +-- +-- To insert a new line in Markdown, we need two '\\n', like ("\\n\\n"), __or__ a section +-- symbol with one '\\n', like ("***\\n"). +spanDocToMarkdown :: SpanDoc -> [T.Text] +spanDocToMarkdown = \case + (SpanDocString docs uris) -> + let doc = T.pack $ haddockToMarkdown $ H.toRegular $ H._doc $ H.parseParas Nothing $ + renderHsDocStrings docs + in go [doc] uris + (SpanDocText txt uris) -> go txt uris + where + go [] uris = render <$> spanDocUrisToMarkdown uris + go txt uris = init txt <> [render (last txt)] <> (render <$> spanDocUrisToMarkdown uris) + -- If the doc is not end with an '\n', we append it. + render txt + | T.null txt = txt + | T.last txt == '\n' = txt + | otherwise = txt <> T.pack "\n" + +spanDocUrisToMarkdown :: SpanDocUris -> [T.Text] +spanDocUrisToMarkdown (SpanDocUris mdoc msrc) = catMaybes + [ linkify "Documentation" <$> mdoc + , linkify "Source" <$> msrc + ] + +-- | Generate a markdown link. +-- +-- >>> linkify "Title" "uri" +-- "[Title](Uri)" +linkify :: T.Text -> T.Text -> T.Text +linkify title uri = "[" <> title <> "](" <> uri <> ")" + +spanDocToMarkdownForTest :: String -> String +spanDocToMarkdownForTest + = haddockToMarkdown . H.toRegular . H._doc . 
H.parseParas Nothing + +-- Simple (and a bit hacky) conversion from Haddock markup to Markdown +haddockToMarkdown + :: H.DocH String String -> String + +haddockToMarkdown H.DocEmpty + = "" +haddockToMarkdown (H.DocAppend d1 d2) + = haddockToMarkdown d1 ++ " " ++ haddockToMarkdown d2 +haddockToMarkdown (H.DocString s) + = escapeBackticks s +haddockToMarkdown (H.DocParagraph p) + = "\n\n" ++ haddockToMarkdown p +haddockToMarkdown (H.DocIdentifier i) + = "`" ++ i ++ "`" +haddockToMarkdown (H.DocIdentifierUnchecked i) + = "`" ++ i ++ "`" +#if MIN_VERSION_haddock_library(1,10,0) +haddockToMarkdown (H.DocModule (H.ModLink i Nothing)) + = "`" ++ escapeBackticks i ++ "`" +-- See https://github.com/haskell/haddock/pull/1315 +-- Module references can be labeled in markdown style, e.g. [some label]("Some.Module") +-- However, we don't want to use the link markup here, since the module name would be covered +-- up by the label. Thus, we keep both the label and module name in the following style: +-- some label ( `Some.Module` ) +haddockToMarkdown (H.DocModule (H.ModLink i (Just label))) + = haddockToMarkdown label ++ " ( `" ++ escapeBackticks i ++ "` )" +#else +haddockToMarkdown (H.DocModule i) + = "`" ++ escapeBackticks i ++ "`" +#endif +haddockToMarkdown (H.DocWarning w) + = haddockToMarkdown w +haddockToMarkdown (H.DocEmphasis d) + = "*" ++ haddockToMarkdown d ++ "*" +haddockToMarkdown (H.DocBold d) + = "**" ++ haddockToMarkdown d ++ "**" +haddockToMarkdown (H.DocMonospaced d) + = "`" ++ removeUnescapedBackticks (haddockToMarkdown d) ++ "`" +haddockToMarkdown (H.DocCodeBlock d) + = "\n```haskell\n" ++ haddockToMarkdown d ++ "\n```\n" +haddockToMarkdown (H.DocExamples es) + = "\n```haskell\n" ++ unlines (map exampleToMarkdown es) ++ "\n```\n" + where + exampleToMarkdown (H.Example expr result) + = ">>> " ++ expr ++ "\n" ++ unlines result +haddockToMarkdown (H.DocHyperlink (H.Hyperlink url Nothing)) + = "<" ++ url ++ ">" +haddockToMarkdown (H.DocHyperlink (H.Hyperlink url (Just label))) + = "[" ++ haddockToMarkdown label ++ "](" ++ url ++ ")" +haddockToMarkdown (H.DocPic (H.Picture url Nothing)) + = "![](" ++ url ++ ")" +haddockToMarkdown (H.DocPic (H.Picture url (Just label))) + = "![" ++ label ++ "](" ++ url ++ ")" +haddockToMarkdown (H.DocAName aname) + = "[" ++ escapeBackticks aname ++ "]:" +haddockToMarkdown (H.DocHeader (H.Header level title)) + = replicate level '#' ++ " " ++ haddockToMarkdown title + +haddockToMarkdown (H.DocUnorderedList things) + = '\n' : (unlines $ map (("+ " ++) . trimStart . splitForList . haddockToMarkdown) things) +haddockToMarkdown (H.DocOrderedList things) = +#if MIN_VERSION_haddock_library(1,11,0) + '\n' : (unlines $ map ((\(num, str) -> show num ++ ". " ++ str) . second (trimStart . splitForList . haddockToMarkdown)) things) +#else + '\n' : (unlines $ map (("1. " ++) . trimStart . splitForList . 
haddockToMarkdown) things) +#endif +haddockToMarkdown (H.DocDefList things) + = '\n' : (unlines $ map (\(term, defn) -> "+ **" ++ haddockToMarkdown term ++ "**: " ++ haddockToMarkdown defn) things) + +haddockToMarkdown (H.DocMathInline s) + = "`" ++ s ++ "`" +haddockToMarkdown (H.DocMathDisplay s) + = "\n```latex\n" ++ s ++ "\n```\n" + +-- TODO: render tables +haddockToMarkdown (H.DocTable _t) + = "\n\n*tables are not yet supported*\n\n" + +-- things I don't really know how to handle +haddockToMarkdown (H.DocProperty _) + = "" -- don't really know what to do + +escapeBackticks :: String -> String +escapeBackticks "" = "" +escapeBackticks ('`':ss) = '\\':'`':escapeBackticks ss +escapeBackticks (s :ss) = s:escapeBackticks ss + +removeUnescapedBackticks :: String -> String +removeUnescapedBackticks = \case + '\\' : '`' : ss -> '\\' : '`' : removeUnescapedBackticks ss + '`' : ss -> removeUnescapedBackticks ss + "" -> "" + s : ss -> s : removeUnescapedBackticks ss + +splitForList :: String -> String +splitForList s + = case lines s of + [] -> "" + (first:rest) -> unlines $ first : map ((" " ++) . trimStart) rest + +-- | Generate a source link for the 'Location' according to VSCode's supported form: +-- https://github.com/microsoft/vscode/blob/b3ec8181fc49f5462b5128f38e0723ae85e295c2/src/vs/platform/opener/common/opener.ts#L151-L160 +-- +srcSpanToMdLink :: Location -> T.Text +srcSpanToMdLink location = + let + uri = location ^. JL.uri + range = location ^. JL.range + -- LSP 'Range' starts at '0', but link locations start at '1'. + intText n = T.pack $ show (n + 1) + srcRangeText = + T.concat + [ "L" + , intText (range ^. JL.start . JL.line) + , "," + , intText (range ^. JL.start . JL.character) + , "-L" + , intText (range ^. JL.end . JL.line) + , "," + , intText (range ^. JL.end . JL.character) + ] + + -- If the 'Location' is a 'FilePath', display it in shortened form. + -- This avoids some redundancy and better readability for the user. + title = case uriToFilePath uri of + Just fp -> T.pack (takeFileName fp) <> ":" <> intText (range ^. JL.start . JL.line) + Nothing -> getUri uri + + srcLink = getUri uri <> "#" <> srcRangeText + in + linkify title srcLink diff --git a/ghcide/src/Development/IDE/Spans/Documentation.hs b/ghcide/src/Development/IDE/Spans/Documentation.hs new file mode 100644 index 0000000000..dcf7778de3 --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/Documentation.hs @@ -0,0 +1,150 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} + +module Development.IDE.Spans.Documentation ( + getDocumentation + , getDocumentationTryGhc + , getDocumentationsTryGhc + , DocMap + , mkDocMap + ) where + +import Control.Monad +import Control.Monad.Extra (findM) +import Control.Monad.IO.Class +import Data.Either +import Data.Foldable +import Data.List.Extra +import qualified Data.Map as M +import Data.Maybe +import qualified Data.Set as S +import qualified Data.Text as T +import Development.IDE.Core.Compile +import Development.IDE.Core.RuleTypes +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Util +import Development.IDE.GHC.Error +import Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Spans.Common +import GHC.Iface.Ext.Utils (RefMap) +import Language.LSP.Protocol.Types (filePathToUri, getUri) +import Prelude hiding (mod) +import System.Directory +import System.FilePath + + +mkDocMap + :: HscEnv + -> RefMap a + -> TcGblEnv + -> IO DocAndTyThingMap +mkDocMap env rm this_mod = + do + (Just Docs{docs_decls = UniqMap this_docs}) <- extractDocs (hsc_dflags env) this_mod + d <- foldrM getDocs (fmap (\(_, x) -> (map hsDocString x) `SpanDocString` SpanDocUris Nothing Nothing) this_docs) names + k <- foldrM getType (tcg_type_env this_mod) names + pure $ DKMap d k + where + getDocs n nameMap + | maybe True (mod ==) $ nameModule_maybe n = pure nameMap -- we already have the docs in this_docs, or they do not exist + | otherwise = do + doc <- getDocumentationTryGhc env n + pure $ extendNameEnv nameMap n doc + getType n nameMap + | Nothing <- lookupNameEnv nameMap n + = do kind <- lookupKind env n + pure $ maybe nameMap (extendNameEnv nameMap n) kind + | otherwise = pure nameMap + names = rights $ S.toList idents + idents = M.keysSet rm + mod = tcg_mod this_mod + +lookupKind :: HscEnv -> Name -> IO (Maybe TyThing) +lookupKind env = + fmap (fromRight Nothing) . catchSrcErrors (hsc_dflags env) "span" . lookupName env + +getDocumentationTryGhc :: HscEnv -> Name -> IO SpanDoc +getDocumentationTryGhc env n = + (fromMaybe emptySpanDoc . listToMaybe <$> getDocumentationsTryGhc env [n]) + `catch` (\(_ :: IOEnvFailure) -> pure emptySpanDoc) + +getDocumentationsTryGhc :: HscEnv -> [Name] -> IO [SpanDoc] +getDocumentationsTryGhc env names = do + resOr <- catchSrcErrors (hsc_dflags env) "docs" $ getDocsBatch env names + case resOr of + Left _ -> return [] + Right res -> zipWithM unwrap res names + where + unwrap (Right (Just docs, _)) n = SpanDocString (map hsDocString docs) <$> getUris n + unwrap _ n = mkSpanDocText n + + mkSpanDocText name = + SpanDocText [] <$> getUris name + + -- Get the uris to the documentation and source html pages if they exist + getUris name = do + (docFu, srcFu) <- + case nameModule_maybe name of + Just mod -> liftIO $ do + doc <- toFileUriText $ lookupDocHtmlForModule env mod + src <- toFileUriText $ lookupSrcHtmlForModule env mod + return (doc, src) + Nothing -> pure (Nothing, Nothing) + let docUri = (<> "#" <> selector <> printOutputable name) <$> docFu + srcUri = (<> "#" <> printOutputable name) <$> srcFu + selector + | isValName name = "v:" + | otherwise = "t:" + return $ SpanDocUris docUri srcUri + + toFileUriText = (fmap . fmap) (getUri . filePathToUri) + +getDocumentation + :: HasSrcSpan name + => [ParsedModule] -- ^ All of the possible modules it could be defined in. + -> name -- ^ The name you want documentation for. 
+ -> [T.Text]
+getDocumentation _sources _targetName = []
+
+-- These are taken from haskell-ide-engine's Haddock plugin
+
+-- | Given a module, finds the local @doc/html/Foo-Bar-Baz.html@ page.
+-- An example for a cabal installed module:
+-- @~/.cabal/store/ghc-8.10.1/vctr-0.12.1.2-98e2e861/share/doc/html/Data-Vector-Primitive.html@
+lookupDocHtmlForModule :: HscEnv -> Module -> IO (Maybe FilePath)
+lookupDocHtmlForModule =
+ lookupHtmlForModule (\pkgDocDir modDocName -> pkgDocDir </> modDocName <.> "html")
+
+-- | Given a module, finds the hyperlinked source @doc/html/src/Foo.Bar.Baz.html@ page.
+-- An example for a cabal installed module:
+-- @~/.cabal/store/ghc-8.10.1/vctr-0.12.1.2-98e2e861/share/doc/html/src/Data.Vector.Primitive.html@
+lookupSrcHtmlForModule :: HscEnv -> Module -> IO (Maybe FilePath)
+lookupSrcHtmlForModule =
+ lookupHtmlForModule (\pkgDocDir modDocName -> pkgDocDir </> "src" </> modDocName <.> "html")
+
+lookupHtmlForModule :: (FilePath -> FilePath -> FilePath) -> HscEnv -> Module -> IO (Maybe FilePath)
+lookupHtmlForModule mkDocPath hscEnv m = do
+ -- try all directories
+ let mfs = fmap (concatMap go) (lookupHtmls hscEnv ui)
+ html <- findM doesFileExist (concat . maybeToList $ mfs)
+ -- canonicalize located html to remove /../ indirection which can break some clients
+ -- (vscode on Windows at least)
+ traverse canonicalizePath html
+ where
+ go pkgDocDir = map (mkDocPath pkgDocDir) mns
+ ui = moduleUnit m
+ -- try to locate html file from most to least specific name e.g.
+ -- first Language.LSP.Types.Uri.html and Language-Haskell-LSP-Types-Uri.html
+ -- then Language.LSP.Types.html and Language-Haskell-LSP-Types.html etc.
+ mns = do
+ chunks <- (reverse . drop1 . inits . splitOn ".") $ (moduleNameString . moduleName) m
+ -- The file might use "." or "-" as separator
+ map (`intercalate` chunks) [".", "-"]
+
+lookupHtmls :: HscEnv -> Unit -> Maybe [FilePath]
+lookupHtmls df ui =
+ -- use haddockInterfaces instead of haddockHTMLs: GHC treats haddockHTMLs as URL not path
+ -- and therefore doesn't expand $topdir on Windows
+ map takeDirectory . unitHaddockInterfaces <$> lookupUnit df ui
diff --git a/ghcide/src/Development/IDE/Spans/LocalBindings.hs b/ghcide/src/Development/IDE/Spans/LocalBindings.hs
new file mode 100644
index 0000000000..8806ed8ab3
--- /dev/null
+++ b/ghcide/src/Development/IDE/Spans/LocalBindings.hs
@@ -0,0 +1,142 @@
+{-# LANGUAGE DerivingStrategies #-}
+
+module Development.IDE.Spans.LocalBindings
+ ( Bindings
+ , getLocalScope
+ , getFuzzyScope
+ , getDefiningBindings
+ , getFuzzyDefiningBindings
+ , bindings
+ ) where
+
+import Control.DeepSeq
+import Control.Monad
+import Data.Bifunctor
+import Data.IntervalMap.FingerTree (Interval (..), IntervalMap)
+import qualified Data.IntervalMap.FingerTree as IM
+import qualified Data.List as L
+import qualified Data.Map as M
+import qualified Data.Set as S
+import GHC.Iface.Ext.Types (IdentifierDetails (..),
+ Scope (..))
+import GHC.Iface.Ext.Utils (RefMap, getBindSiteFromContext,
+ getScopeFromContext)
+
+import Development.IDE.GHC.Compat (Name, NameEnv, RealSrcSpan,
+ Type, isSystemName,
+ nonDetNameEnvElts,
+ realSrcSpanEnd,
+ realSrcSpanStart, unitNameEnv)
+import Development.IDE.GHC.Error
+import Development.IDE.Types.Location
+
+------------------------------------------------------------------------------
+-- | Turn a 'RealSrcSpan' into an 'Interval'. 
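+-- A rough illustration (assuming the usual 1-based-to-0-based conversion done
+-- by 'realSrcLocToPosition', which is not shown in this change): a GHC span
+-- covering line 3, columns 1 to 5, would map to
+-- @Interval (Position 2 0) (Position 2 4)@.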
+realSrcSpanToInterval :: RealSrcSpan -> Interval Position +realSrcSpanToInterval rss = + Interval + (realSrcLocToPosition $ realSrcSpanStart rss) + (realSrcLocToPosition $ realSrcSpanEnd rss) + +bindings :: RefMap Type -> Bindings +bindings = uncurry Bindings . localBindings + +------------------------------------------------------------------------------ +-- | Compute which identifiers are in scope at every point in the AST. Use +-- 'getLocalScope' to find the results. +localBindings + :: RefMap Type + -> ( IntervalMap Position (NameEnv (Name, Maybe Type)) + , IntervalMap Position (NameEnv (Name, Maybe Type)) + ) +localBindings refmap = bimap mk mk $ unzip $ do + (ident, refs) <- M.toList refmap + Right name <- pure ident + (_, ident_details) <- refs + let ty = identType ident_details + info <- S.toList $ identInfo ident_details + pure + ( do + Just scopes <- pure $ getScopeFromContext info + scope <- scopes >>= \case + LocalScope scope -> pure $ realSrcSpanToInterval scope + _ -> [] + pure ( scope + , unitNameEnv name (name,ty) + ) + , do + Just scope <- pure $ getBindSiteFromContext info + pure ( realSrcSpanToInterval scope + , unitNameEnv name (name,ty) + ) + ) + where + mk = L.foldl' (flip (uncurry IM.insert)) mempty . join + +------------------------------------------------------------------------------ +-- | The available bindings at every point in a Haskell tree. +data Bindings = Bindings + { getLocalBindings + :: IntervalMap Position (NameEnv (Name, Maybe Type)) + , getBindingSites + :: IntervalMap Position (NameEnv (Name, Maybe Type)) + } + +instance Semigroup Bindings where + Bindings a1 b1 <> Bindings a2 b2 + = Bindings (a1 <> a2) (b1 <> b2) + +instance Monoid Bindings where + mempty = Bindings mempty mempty + +instance NFData Bindings where + rnf = rwhnf + +instance Show Bindings where + show _ = "" + + +------------------------------------------------------------------------------ +-- | Given a 'Bindings' get every identifier in scope at the given +-- 'RealSrcSpan', +getLocalScope :: Bindings -> RealSrcSpan -> [(Name, Maybe Type)] +getLocalScope bs rss + = nonDetNameEnvElts + $ foldMap snd + $ IM.dominators (realSrcSpanToInterval rss) + $ getLocalBindings bs + +------------------------------------------------------------------------------ +-- | Given a 'Bindings', get every binding currently active at a given +-- 'RealSrcSpan', +getDefiningBindings :: Bindings -> RealSrcSpan -> [(Name, Maybe Type)] +getDefiningBindings bs rss + = nonDetNameEnvElts + $ foldMap snd + $ IM.dominators (realSrcSpanToInterval rss) + $ getBindingSites bs + + +-- | Lookup all names in scope in any span that intersects the interval +-- defined by the two positions. +-- This is meant for use with the fuzzy `PositionRange` returned by `PositionMapping` +getFuzzyScope :: Bindings -> Position -> Position -> [(Name, Maybe Type)] +getFuzzyScope bs a b + = filter (not . isSystemName . fst) + $ nonDetNameEnvElts + $ foldMap snd + $ IM.intersections (Interval a b) + $ getLocalBindings bs + +------------------------------------------------------------------------------ +-- | Given a 'Bindings', get every binding that intersects the interval defined +-- by the two positions. 
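+-- Unlike 'getFuzzyScope', this does not filter out system names.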
+-- This is meant for use with the fuzzy `PositionRange` returned by +-- `PositionMapping` +getFuzzyDefiningBindings :: Bindings -> Position -> Position -> [(Name, Maybe Type)] +getFuzzyDefiningBindings bs a b + = nonDetNameEnvElts + $ foldMap snd + $ IM.intersections (Interval a b) + $ getBindingSites bs + diff --git a/ghcide/src/Development/IDE/Spans/Pragmas.hs b/ghcide/src/Development/IDE/Spans/Pragmas.hs new file mode 100644 index 0000000000..4df16c6704 --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/Pragmas.hs @@ -0,0 +1,412 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE MultiWayIf #-} + +module Development.IDE.Spans.Pragmas + ( NextPragmaInfo(..) + , LineSplitTextEdits(..) + , getNextPragmaInfo + , insertNewPragma + , getFirstPragma ) where + +import Control.Lens ((&), (.~)) +import Data.Bits (Bits (setBit)) +import qualified Data.List as List +import qualified Data.Maybe as Maybe +import Data.Text (Text, pack) +import qualified Data.Text as Text +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE (srcSpanToRange, IdeState, NormalizedFilePath, GhcSession (..), getFileContents, hscEnv, runAction) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Util +import qualified Language.LSP.Protocol.Types as LSP +import Control.Monad.IO.Class (MonadIO (..)) +import Control.Monad.Trans.Except (ExceptT) +import Ide.Plugin.Error (PluginError) +import Ide.Types (PluginId(..)) +import qualified Data.Text as T +import Development.IDE.Core.PluginUtils +import qualified Language.LSP.Protocol.Lens as L + +getNextPragmaInfo :: DynFlags -> Maybe Rope -> NextPragmaInfo +getNextPragmaInfo dynFlags mbSource = + if | Just source <- mbSource + , let sourceStringBuffer = stringToStringBuffer (Text.unpack (Rope.toText source)) + , POk _ parserState <- parsePreDecl dynFlags sourceStringBuffer + -> case parserState of + ParserStateNotDone{ nextPragma } -> nextPragma + ParserStateDone{ nextPragma } -> nextPragma + | otherwise + -> NextPragmaInfo 0 Nothing + +-- NOTE(ozkutuk): `RecordPuns` extension is renamed to `NamedFieldPuns` +-- in GHC 9.4, but we still want to insert `NamedFieldPuns` in pre-9.4 +-- GHC as well, hence the replacement. 
+-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/6156 +showExtension :: Extension -> Text +showExtension NamedFieldPuns = "NamedFieldPuns" +showExtension ext = pack (show ext) + +insertNewPragma :: NextPragmaInfo -> Extension -> LSP.TextEdit +insertNewPragma (NextPragmaInfo _ (Just (LineSplitTextEdits ins _))) newPragma = ins & L.newText .~ "{-# LANGUAGE " <> showExtension newPragma <> " #-}\n" :: LSP.TextEdit +insertNewPragma (NextPragmaInfo nextPragmaLine _) newPragma = LSP.TextEdit pragmaInsertRange $ "{-# LANGUAGE " <> showExtension newPragma <> " #-}\n" + where + pragmaInsertPosition = LSP.Position (fromIntegral nextPragmaLine) 0 + pragmaInsertRange = LSP.Range pragmaInsertPosition pragmaInsertPosition + +getFirstPragma :: MonadIO m => PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError m NextPragmaInfo +getFirstPragma (PluginId pId) state nfp = do + (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- runActionE (T.unpack pId <> ".GhcSession") state $ useWithStaleE GhcSession nfp + fileContents <- liftIO $ runAction (T.unpack pId <> ".GetFileContents") state $ getFileContents nfp + pure $ getNextPragmaInfo sessionDynFlags fileContents + +-- Pre-declaration comments parser ----------------------------------------------------- + +-- | Each mode represents the "strongest" thing we've seen so far. +-- From strongest to weakest: +-- ModePragma, ModeHaddock, ModeComment, ModeInitial +data Mode = ModePragma | ModeHaddock | ModeComment | ModeInitial deriving Show + +data LineSplitTextEdits = LineSplitTextEdits { + lineSplitInsertTextEdit :: !LSP.TextEdit, + lineSplitDeleteTextEdit :: !LSP.TextEdit +} deriving Show + +data NextPragmaInfo = NextPragmaInfo { + nextPragmaLine :: !Int, + lineSplitTextEdits :: !(Maybe LineSplitTextEdits) +} deriving Show + +data ParserState + = ParserStateNotDone + { nextPragma :: !NextPragmaInfo + , mode :: !Mode + , lastBlockCommentLine :: !Int + , lastPragmaLine :: !Int + , isLastTokenHash :: !Bool + } + | ParserStateDone { nextPragma :: NextPragmaInfo } + deriving Show + +isPragma :: String -> Bool +isPragma = List.isPrefixOf "{-#" + +isDownwardBlockHaddock :: String -> Bool +isDownwardBlockHaddock = List.isPrefixOf "{-|" + +isDownwardLineHaddock :: String -> Bool +isDownwardLineHaddock = List.isPrefixOf "-- |" + +-- need to merge tokens that are deleted/inserted into one TextEdit each +-- to work around some weird TextEdits applied in reversed order issue +updateLineSplitTextEdits :: LSP.Range -> String -> Maybe LineSplitTextEdits -> LineSplitTextEdits +updateLineSplitTextEdits tokenRange tokenString mbPrevLineSplitTextEdits + | Just prevLineSplitTextEdits <- mbPrevLineSplitTextEdits + , let LineSplitTextEdits + { lineSplitInsertTextEdit = prevInsertTextEdit + , lineSplitDeleteTextEdit = prevDeleteTextEdit } = prevLineSplitTextEdits + , let LSP.TextEdit prevInsertRange prevInsertText = prevInsertTextEdit + , let LSP.TextEdit prevDeleteRange _prevDeleteText = prevDeleteTextEdit + , let LSP.Range prevInsertStartPos prevInsertEndPos = prevInsertRange + , let LSP.Position _prevInsertStartLine _prevInsertStartCol = prevInsertStartPos + , let LSP.Position _prevInsertEndLine _prevInsertEndCol = prevInsertEndPos + , let LSP.Range prevDeleteStartPos prevDeleteEndPos = prevDeleteRange + , let LSP.Position _prevDeleteStartLine _prevDeleteStartCol = prevDeleteStartPos + , let LSP.Position _prevDeleteEndLine prevDeleteEndCol = prevDeleteEndPos + , let currInsertRange = prevInsertRange + , let currInsertText = + Text.init 
prevInsertText + <> Text.replicate (fromIntegral $ startCol - prevDeleteEndCol) " " + <> Text.pack (List.take newLineCol tokenString) + <> "\n" + , let currInsertTextEdit = LSP.TextEdit currInsertRange currInsertText + , let currDeleteStartPos = prevDeleteStartPos + , let currDeleteEndPos = LSP.Position endLine endCol + , let currDeleteRange = LSP.Range currDeleteStartPos currDeleteEndPos + , let currDeleteTextEdit = LSP.TextEdit currDeleteRange "" + = LineSplitTextEdits currInsertTextEdit currDeleteTextEdit + | otherwise + , let LSP.Range startPos _ = tokenRange + , let deleteTextEdit = LSP.TextEdit (LSP.Range startPos startPos{ LSP._character = startCol + fromIntegral newLineCol }) "" + , let insertPosition = LSP.Position (startLine + 1) 0 + , let insertRange = LSP.Range insertPosition insertPosition + , let insertText = Text.pack (List.take newLineCol tokenString) <> "\n" + , let insertTextEdit = LSP.TextEdit insertRange insertText + = LineSplitTextEdits insertTextEdit deleteTextEdit + where + LSP.Range (LSP.Position startLine startCol) (LSP.Position endLine endCol) = tokenRange + + newLineCol = Maybe.fromMaybe (length tokenString) (List.elemIndex '\n' tokenString) + +-- ITvarsym "#" after a block comment is a parse error so we don't need to worry about it +updateParserState :: Token -> LSP.Range -> ParserState -> ParserState +updateParserState token range prevParserState + | ParserStateNotDone + { nextPragma = prevNextPragma@NextPragmaInfo{ lineSplitTextEdits = prevLineSplitTextEdits } + , mode = prevMode + , lastBlockCommentLine + , lastPragmaLine + } <- prevParserState + , let defaultParserState = prevParserState { isLastTokenHash = False } + , let LSP.Range (LSP.Position (fromIntegral -> startLine) _) (LSP.Position (fromIntegral -> endLine) _) = range + = case prevMode of + ModeInitial -> + case token of + ITvarsym "#" -> defaultParserState{ isLastTokenHash = True } + ITlineComment s _ + | isDownwardLineHaddock s -> defaultParserState{ mode = ModeHaddock } + | otherwise -> + defaultParserState + { nextPragma = NextPragmaInfo (endLine + 1) Nothing + , mode = ModeComment } + ITblockComment s _ + | isPragma s -> + defaultParserState + { nextPragma = NextPragmaInfo (endLine + 1) Nothing + , mode = ModePragma + , lastPragmaLine = endLine } + | isDownwardBlockHaddock s -> defaultParserState{ mode = ModeHaddock } + | otherwise -> + defaultParserState + { nextPragma = NextPragmaInfo (endLine + 1) Nothing + , mode = ModeComment + , lastBlockCommentLine = endLine } + _ -> ParserStateDone prevNextPragma + ModeComment -> + case token of + ITvarsym "#" -> defaultParserState{ isLastTokenHash = True } + ITlineComment s _ + | hasDeleteStartedOnSameLine startLine prevLineSplitTextEdits + , let currLineSplitTextEdits = updateLineSplitTextEdits range s prevLineSplitTextEdits -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | isDownwardLineHaddock s + , lastBlockCommentLine == startLine + , let currLineSplitTextEdits = updateLineSplitTextEdits range s Nothing -> + defaultParserState + { nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } + , mode = ModeHaddock } + | otherwise -> + defaultParserState { nextPragma = NextPragmaInfo (endLine + 1) Nothing } + ITblockComment s _ + | isPragma s -> + defaultParserState + { nextPragma = NextPragmaInfo (endLine + 1) Nothing + , mode = ModePragma + , lastPragmaLine = endLine } + | hasDeleteStartedOnSameLine startLine prevLineSplitTextEdits + , let currLineSplitTextEdits = 
updateLineSplitTextEdits range s prevLineSplitTextEdits -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | isDownwardBlockHaddock s + , lastBlockCommentLine == startLine + , let currLineSplitTextEdits = updateLineSplitTextEdits range s Nothing -> + defaultParserState{ + nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits }, + mode = ModeHaddock } + | otherwise -> + defaultParserState{ + nextPragma = NextPragmaInfo (endLine + 1) Nothing, + lastBlockCommentLine = endLine } + _ -> ParserStateDone prevNextPragma + ModeHaddock -> + case token of + ITvarsym "#" -> + defaultParserState{ isLastTokenHash = True } + ITlineComment s _ + | hasDeleteStartedOnSameLine startLine prevLineSplitTextEdits + , let currLineSplitTextEdits = updateLineSplitTextEdits range s prevLineSplitTextEdits -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | otherwise -> + defaultParserState + ITblockComment s _ + | isPragma s -> + defaultParserState{ + nextPragma = NextPragmaInfo (endLine + 1) Nothing, + mode = ModePragma, + lastPragmaLine = endLine } + | hasDeleteStartedOnSameLine startLine prevLineSplitTextEdits + , let currLineSplitTextEdits = updateLineSplitTextEdits range s prevLineSplitTextEdits -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | otherwise -> defaultParserState{ lastBlockCommentLine = endLine } + _ -> ParserStateDone prevNextPragma + ModePragma -> + case token of + ITvarsym "#" -> defaultParserState{ isLastTokenHash = True } + ITlineComment s _ + | hasDeleteStartedOnSameLine startLine prevLineSplitTextEdits + , let currLineSplitTextEdits = updateLineSplitTextEdits range s prevLineSplitTextEdits -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | isDownwardLineHaddock s + , lastPragmaLine == startLine + , let currLineSplitTextEdits = updateLineSplitTextEdits range s Nothing -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | otherwise -> + defaultParserState + ITblockComment s _ + | isPragma s -> + defaultParserState{ nextPragma = NextPragmaInfo (endLine + 1) Nothing, lastPragmaLine = endLine } + | hasDeleteStartedOnSameLine startLine prevLineSplitTextEdits + , let currLineSplitTextEdits = updateLineSplitTextEdits range s prevLineSplitTextEdits -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | isDownwardBlockHaddock s + , lastPragmaLine == startLine + , let currLineSplitTextEdits = updateLineSplitTextEdits range s Nothing -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | lastPragmaLine == startLine && startLine < endLine + , let currLineSplitTextEdits = updateLineSplitTextEdits range s Nothing -> + defaultParserState{ nextPragma = prevNextPragma{ lineSplitTextEdits = Just currLineSplitTextEdits } } + | otherwise -> + defaultParserState{ lastBlockCommentLine = endLine } + _ -> ParserStateDone prevNextPragma + | otherwise = prevParserState + where + hasDeleteStartedOnSameLine :: Int -> Maybe LineSplitTextEdits -> Bool + hasDeleteStartedOnSameLine line mbLineSplitTextEdits + | Just lineSplitTextEdits <- mbLineSplitTextEdits + , let LineSplitTextEdits{ lineSplitDeleteTextEdit } = lineSplitTextEdits + , let LSP.TextEdit deleteRange _ = lineSplitDeleteTextEdit + , let 
LSP.Range _ deleteEndPosition = deleteRange + , let LSP.Position deleteEndLine _ = deleteEndPosition + = fromIntegral deleteEndLine == line + | otherwise = False + +lexUntilNextLineIncl :: P (Located Token) +lexUntilNextLineIncl = do + PState{ last_loc } <- getPState + let PsSpan{ psRealSpan = lastRealSrcSpan } = last_loc + let prevEndLine = lastRealSrcSpan & realSrcSpanEnd & srcLocLine + locatedToken@(L srcSpan _token) <- lexer False pure + if | RealSrcLoc currEndRealSrcLoc _ <- srcSpan & srcSpanEnd + , let currEndLine = currEndRealSrcLoc & srcLocLine + -> if prevEndLine < currEndLine then + pure locatedToken + else lexUntilNextLineIncl + | otherwise -> pure locatedToken + +dropWhileStringBuffer :: (Char -> Bool) -> StringBuffer -> StringBuffer +dropWhileStringBuffer predicate buffer + | atEnd buffer = buffer + | let (c, remainingBuffer) = nextChar buffer + = if predicate c then + dropWhileStringBuffer predicate remainingBuffer + else + buffer + +isHorizontalSpace :: Char -> Bool +isHorizontalSpace c = c == ' ' || c == '\t' + +data ShebangParserState = ShebangParserState { + nextPragmaLine :: !Int, + newlineCount :: !Int, + prevCharIsHash :: !Bool, + buffer :: !StringBuffer +} + +-- lexer seems to ignore shebangs completely hence this function +parseShebangs :: ShebangParserState -> ShebangParserState +parseShebangs prev@ShebangParserState{ newlineCount = prevNewlineCount, prevCharIsHash, buffer = prevBuffer } + | atEnd prevBuffer + = prev + | let (c, currBuffer) = nextChar (dropWhileStringBuffer isHorizontalSpace prevBuffer) + = if c == '#' then + parseShebangs prev{ prevCharIsHash = True, buffer = currBuffer } + else if c == '!' && prevCharIsHash then + parseShebangs prev{ nextPragmaLine = prevNewlineCount + 1, buffer = dropWhileStringBuffer (/= '\n') currBuffer } + else if c == '\n' then + parseShebangs prev{ newlineCount = prevNewlineCount + 1, buffer = currBuffer } + else + prev + + +-- | Parses blank lines, comments, haddock comments ("-- |"), lines that start +-- with "#!", lines that start with "#", pragma lines using the GHC API lexer. +-- When it doesn't find one of these things then it's assumed that we've found +-- a declaration, end-of-file, or a ghc parse error, and the parser stops. +-- Shebangs are parsed separately than the rest becaues the lexer ignores them. +-- +-- The reason for custom parsing instead of using annotations, or turning on/off +-- extensions in the dynflags is because there are a number of extensions that +-- while removing parse errors, can also introduce them. Hence, there are +-- cases where the file cannot be parsed without error when we want to insert +-- extension (and other) pragmas. The compiler (8.10.7) doesn't include +-- annotations in its failure state. So if the compiler someday returns +-- annotation or equivalent information when it fails then we can replace this +-- with that. +-- +-- The reason for using the compiler lexer is to reduce duplicated +-- implementation, particularly nested comments, but in retrospect this comes +-- with the disadvantage of the logic feeling more complex, and not being able +-- to handle whitespace directly. +-- +-- The parser keeps track of state in order to place the next pragma line +-- according to some rules: +-- +-- - Ignore lines starting with '#' except for shebangs. 
+-- - If pragmas exist place after last pragma +-- - else if haddock comments exist: +-- - If comments exist place after last comment +-- - else if shebangs exist place after last shebang +-- - else place at first line +-- - else if comments exist place after last comment +-- - else if shebangs exist place after last shebang +-- - else place at first line +-- +-- Additionally the parser keeps track of information to be able to insert +-- pragmas inbetween lines. +-- +-- For example the parser keeps track of information so that +-- +-- > {- block comment -} -- | haddock +-- +-- can become +-- +-- > {- block comment -} +-- > {-# pragma #-} +-- > -- | haddock +-- +-- This information does not respect the type of whitespace, because the lexer +-- strips whitespace and gives locations. +-- +-- In this example the tabs are converted to spaces in the TextEdits: +-- +-- > {- block comment -}-- | haddock +-- +parsePreDecl :: DynFlags -> StringBuffer -> ParseResult ParserState +parsePreDecl dynFlags buffer = unP (go initialParserState) pState + where + initialShebangParserState = ShebangParserState{ + nextPragmaLine = 0, + newlineCount = 0, + prevCharIsHash = False, + buffer = buffer } + ShebangParserState{ nextPragmaLine } = parseShebangs initialShebangParserState + pState = mkLexerPState dynFlags buffer + initialParserState = ParserStateNotDone (NextPragmaInfo nextPragmaLine Nothing) ModeInitial (-1) (-1) False + + go :: ParserState -> P ParserState + go prevParserState = + case prevParserState of + ParserStateDone _ -> pure prevParserState + ParserStateNotDone{..} -> do + L srcSpan token <- + if isLastTokenHash then + lexUntilNextLineIncl + else + lexer False pure + case srcSpanToRange srcSpan of + Just range -> go (updateParserState token range prevParserState) + Nothing -> pure prevParserState + +mkLexerPState :: DynFlags -> StringBuffer -> PState +mkLexerPState dynFlags stringBuffer = + let + startRealSrcLoc = mkRealSrcLoc "asdf" 1 1 + updateDynFlags = flip gopt_unset Opt_Haddock . 
flip gopt_set Opt_KeepRawTokenStream + finalDynFlags = updateDynFlags dynFlags + pState = initParserState (initParserOpts finalDynFlags) stringBuffer startRealSrcLoc + PState{ options = pStateOptions } = pState + finalExtBitsMap = setBit (pExtsBitmap pStateOptions) (fromEnum UsePosPragsBit) + finalPStateOptions = pStateOptions{ pExtsBitmap = finalExtBitsMap } + finalPState = pState{ options = finalPStateOptions } + in + finalPState diff --git a/ghcide/src/Development/IDE/Types/Action.hs b/ghcide/src/Development/IDE/Types/Action.hs new file mode 100644 index 0000000000..0aedd1d0da --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Action.hs @@ -0,0 +1,88 @@ +module Development.IDE.Types.Action + ( DelayedAction (..), + DelayedActionInternal, + ActionQueue, + newQueue, + pushQueue, + popQueue, + doneQueue, + peekInProgress, + abortQueue,countQueue) +where + +import Control.Concurrent.STM +import Data.Hashable (Hashable (..)) +import Data.HashSet (HashSet) +import qualified Data.HashSet as Set +import Data.Unique (Unique) +import Development.IDE.Graph (Action) +import Ide.Logger +import Numeric.Natural + +data DelayedAction a = DelayedAction + { uniqueID :: Maybe Unique, + -- | Name we use for debugging + actionName :: String, + -- | Priority with which to log the action + actionPriority :: Priority, + -- | The payload + getAction :: Action a + } + deriving (Functor) + +type DelayedActionInternal = DelayedAction () + +instance Eq (DelayedAction a) where + a == b = uniqueID a == uniqueID b + +instance Hashable (DelayedAction a) where + hashWithSalt s = hashWithSalt s . uniqueID + +instance Show (DelayedAction a) where + show d = "DelayedAction: " ++ actionName d + +------------------------------------------------------------------------------ + +data ActionQueue = ActionQueue + { newActions :: TQueue DelayedActionInternal, + inProgress :: TVar (HashSet DelayedActionInternal) + } + +newQueue :: IO ActionQueue +newQueue = atomically $ do + newActions <- newTQueue + inProgress <- newTVar mempty + return ActionQueue {..} + +pushQueue :: DelayedActionInternal -> ActionQueue -> STM () +pushQueue act ActionQueue {..} = writeTQueue newActions act + +-- | You must call 'doneQueue' to signal completion +popQueue :: ActionQueue -> STM DelayedActionInternal +popQueue ActionQueue {..} = do + x <- readTQueue newActions + modifyTVar inProgress (Set.insert x) + return x + +-- | Completely remove an action from the queue +abortQueue :: DelayedActionInternal -> ActionQueue -> STM () +abortQueue x ActionQueue {..} = do + qq <- flushTQueue newActions + mapM_ (writeTQueue newActions) (filter (/= x) qq) + modifyTVar' inProgress (Set.delete x) + +-- | Mark an action as complete when called after 'popQueue'. 
+-- Has no effect otherwise +doneQueue :: DelayedActionInternal -> ActionQueue -> STM () +doneQueue x ActionQueue {..} = do + modifyTVar' inProgress (Set.delete x) + +countQueue :: ActionQueue -> STM Natural +countQueue ActionQueue{..} = do + backlog <- flushTQueue newActions + mapM_ (writeTQueue newActions) backlog + m <- Set.size <$> readTVar inProgress + return $ fromIntegral $ length backlog + m + +peekInProgress :: ActionQueue -> STM [DelayedActionInternal] +peekInProgress ActionQueue {..} = Set.toList <$> readTVar inProgress diff --git a/ghcide/src/Development/IDE/Types/Diagnostics.hs b/ghcide/src/Development/IDE/Types/Diagnostics.hs new file mode 100644 index 0000000000..5072fa7ffa --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Diagnostics.hs @@ -0,0 +1,327 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE TemplateHaskell #-} + +module Development.IDE.Types.Diagnostics ( + LSP.Diagnostic(..), + ShowDiagnostic(..), + FileDiagnostic(..), + fdFilePathL, + fdLspDiagnosticL, + fdShouldShowDiagnosticL, + fdStructuredMessageL, + StructuredMessage(..), + _NoStructuredMessage, + _SomeStructuredMessage, + IdeResult, + LSP.DiagnosticSeverity(..), + DiagnosticStore, + ideErrorText, + ideErrorWithSource, + ideErrorFromLspDiag, + showDiagnostics, + showDiagnosticsColored, + showGhcCode, + IdeResultNoDiagnosticsEarlyCutoff, + attachReason, + attachedReason) where + +import Control.Applicative ((<|>)) +import Control.DeepSeq +import Control.Lens +import qualified Data.Aeson as JSON +import qualified Data.Aeson.Lens as JSON +import Data.ByteString (ByteString) +import Data.Foldable +import Data.Maybe as Maybe +import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcMessage, MsgEnvelope, + WarningFlag, flagSpecFlag, + flagSpecName, wWarningFlags) +import Development.IDE.Types.Location +import GHC.Generics +import GHC.Types.Error (DiagnosticCode (..), + DiagnosticReason (..), + diagnosticCode, + diagnosticReason, + errMsgDiagnostic) +import Language.LSP.Diagnostics +import Language.LSP.Protocol.Lens (data_) +import Language.LSP.Protocol.Types as LSP +import Prettyprinter +import Prettyprinter.Render.Terminal (Color (..), color) +import qualified Prettyprinter.Render.Terminal as Terminal +import Prettyprinter.Render.Text +import Text.Printf (printf) + + +-- | The result of an IDE operation. Warnings and errors are in the Diagnostic, +-- and a value is in the Maybe. For operations that throw an error you +-- expect a non-empty list of diagnostics, at least one of which is an error, +-- and a Nothing. For operations that succeed you expect perhaps some warnings +-- and a Just. For operations that depend on other failing operations you may +-- get empty diagnostics and a Nothing, to indicate this phase throws no fresh +-- errors but still failed. +-- +-- A rule on a file should only return diagnostics for that given file. It should +-- not propagate diagnostic errors through multiple phases. +type IdeResult v = ([FileDiagnostic], Maybe v) + +-- | an IdeResult with a fingerprint +type IdeResultNoDiagnosticsEarlyCutoff v = (Maybe ByteString, Maybe v) + +-- | Produce a 'FileDiagnostic' for the given 'NormalizedFilePath' +-- with an error message. 
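+--
+-- A minimal usage sketch (the path and message below are made-up examples):
+--
+-- > ideErrorText (toNormalizedFilePath' "src/Main.hs") "example failure"
+--
+-- produces a diagnostic with source "compiler", severity
+-- 'DiagnosticSeverity_Error', and no structured GHC message attached.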
+ideErrorText :: NormalizedFilePath -> T.Text -> FileDiagnostic +ideErrorText nfp msg = + ideErrorWithSource (Just "compiler") (Just DiagnosticSeverity_Error) nfp msg Nothing + +-- | Create a 'FileDiagnostic' from an existing 'LSP.Diagnostic' for a +-- specific 'NormalizedFilePath'. +-- The optional 'MsgEnvelope GhcMessage' is the original error message +-- that was used for creating the 'LSP.Diagnostic'. +-- It is included here, to allow downstream consumers, such as HLS plugins, +-- to provide LSP features based on the structured error messages. +-- Additionally, if available, we insert the ghc error code into the +-- 'LSP.Diagnostic'. These error codes are used in https://errors.haskell.org/ +-- to provide documentation and explanations for error messages. +ideErrorFromLspDiag + :: LSP.Diagnostic + -> NormalizedFilePath + -> Maybe (MsgEnvelope GhcMessage) + -> FileDiagnostic +ideErrorFromLspDiag lspDiag fdFilePath mbOrigMsg = + let fdShouldShowDiagnostic = ShowDiag + fdStructuredMessage = + case mbOrigMsg of + Nothing -> NoStructuredMessage + Just msg -> SomeStructuredMessage msg + fdLspDiagnostic = + lspDiag + & attachReason (fmap (diagnosticReason . errMsgDiagnostic) mbOrigMsg) + & attachDiagnosticCode ((diagnosticCode . errMsgDiagnostic) =<< mbOrigMsg) + in + FileDiagnostic {..} + +-- | Set the code of the 'LSP.Diagnostic' to the GHC diagnostic code, and include the link +-- to https://errors.haskell.org/. +attachDiagnosticCode :: Maybe DiagnosticCode -> LSP.Diagnostic -> LSP.Diagnostic +attachDiagnosticCode Nothing diag = diag +attachDiagnosticCode (Just code) diag = + let + textualCode = showGhcCode code + codeDesc = LSP.CodeDescription{ _href = Uri $ "https://errors.haskell.org/messages/" <> textualCode } + in diag { _code = Just (InR textualCode), _codeDescription = Just codeDesc} + +#if MIN_VERSION_ghc(9,9,0) +-- DiagnosticCode only got a show instance in 9.10.1 +showGhcCode :: DiagnosticCode -> T.Text +showGhcCode = T.pack . show +#else +showGhcCode :: DiagnosticCode -> T.Text +showGhcCode (DiagnosticCode prefix c) = T.pack $ prefix ++ "-" ++ printf "%05d" c +#endif + +attachedReason :: Traversal' Diagnostic (Maybe JSON.Value) +attachedReason = data_ . non (JSON.object []) . JSON.atKey "attachedReason" + +attachReason :: Maybe DiagnosticReason -> Diagnostic -> Diagnostic +attachReason Nothing = id +attachReason (Just wr) = attachedReason .~ fmap JSON.toJSON (showReason wr) + where + showReason = \case + WarningWithFlag flag -> Just $ catMaybes [showFlag flag] +#if MIN_VERSION_ghc(9,7,0) + WarningWithFlags flags -> Just $ catMaybes (fmap showFlag $ toList flags) +#endif + _ -> Nothing + +showFlag :: WarningFlag -> Maybe T.Text +showFlag flag = ("-W" <>) . T.pack . flagSpecName <$> find ((== flag) . flagSpecFlag) wWarningFlags + +ideErrorWithSource + :: Maybe T.Text + -> Maybe DiagnosticSeverity + -> NormalizedFilePath + -> T.Text + -> Maybe (MsgEnvelope GhcMessage) + -> FileDiagnostic +ideErrorWithSource source sev fdFilePath msg origMsg = + let lspDiagnostic = + LSP.Diagnostic { + _range = noRange, + _severity = sev, + _code = Nothing, + _source = source, + _message = msg, + _relatedInformation = Nothing, + _tags = Nothing, + _codeDescription = Nothing, + _data_ = Nothing + } + in + ideErrorFromLspDiag lspDiagnostic fdFilePath origMsg + +-- | Defines whether a particular diagnostic should be reported +-- back to the user. 
+-- +-- One important use case is "missing signature" code lenses, +-- for which we need to enable the corresponding warning during +-- type checking. However, we do not want to show the warning +-- unless the programmer asks for it (#261). +data ShowDiagnostic + = ShowDiag -- ^ Report back to the user + | HideDiag -- ^ Hide from user + deriving (Eq, Ord, Show) + +instance NFData ShowDiagnostic where + rnf = rwhnf + +-- | A Maybe-like wrapper for a GhcMessage that doesn't try to compare, show, or +-- force the GhcMessage inside, so that we can derive Show, Eq, Ord, NFData on +-- FileDiagnostic. FileDiagnostic only uses this as metadata so we can safely +-- ignore it in fields. +-- +-- Instead of pattern matching on these constructors directly, consider 'Prism' from +-- the 'lens' package. This allows to conveniently pattern match deeply into the 'MsgEnvelope GhcMessage' +-- constructor. +-- The module 'Development.IDE.GHC.Compat.Error' implements additional 'Lens's and 'Prism's, +-- allowing you to avoid importing GHC modules directly. +-- +-- For example, to pattern match on a 'TcRnMessage' you can use the lens: +-- +-- @ +-- message ^? _SomeStructuredMessage . msgEnvelopeErrorL . _TcRnMessage +-- @ +-- +-- This produces a value of type `Maybe TcRnMessage`. +-- +-- Further, consider utility functions such as 'stripTcRnMessageContext', which strip +-- context from error messages which may be more convenient in certain situations. +data StructuredMessage + = NoStructuredMessage + | SomeStructuredMessage (MsgEnvelope GhcMessage) + deriving (Generic) + +instance Show StructuredMessage where + show NoStructuredMessage = "NoStructuredMessage" + show SomeStructuredMessage {} = "SomeStructuredMessage" + +instance Eq StructuredMessage where + (==) NoStructuredMessage NoStructuredMessage = True + (==) SomeStructuredMessage {} SomeStructuredMessage {} = True + (==) _ _ = False + +instance Ord StructuredMessage where + compare NoStructuredMessage NoStructuredMessage = EQ + compare SomeStructuredMessage {} SomeStructuredMessage {} = EQ + compare NoStructuredMessage SomeStructuredMessage {} = GT + compare SomeStructuredMessage {} NoStructuredMessage = LT + +instance NFData StructuredMessage where + rnf NoStructuredMessage = () + rnf SomeStructuredMessage {} = () + +-- | Human readable diagnostics for a specific file. +-- +-- This type packages a pretty printed, human readable error message +-- along with the related source location so that we can display the error +-- on either the console or in the IDE at the right source location. +-- +-- It also optionally keeps a structured diagnostic message GhcMessage in +-- StructuredMessage. +-- +data FileDiagnostic = FileDiagnostic + { fdFilePath :: NormalizedFilePath + , fdShouldShowDiagnostic :: ShowDiagnostic + , fdLspDiagnostic :: Diagnostic + -- | The original diagnostic that was used to produce 'fdLspDiagnostic'. + -- We keep it here, so downstream consumers, e.g. HLS plugins, can use the + -- the structured error messages and don't have to resort to parsing + -- error messages via regexes or similar. + -- + -- The optional GhcMessage inside of this StructuredMessage is ignored for + -- Eq, Ord, Show, and NFData instances. This is fine because this field + -- should only ever be metadata and should never be used to distinguish + -- between FileDiagnostics. 
+ , fdStructuredMessage :: StructuredMessage + } + deriving (Eq, Ord, Show, Generic) + +instance NFData FileDiagnostic + +prettyRange :: Range -> Doc Terminal.AnsiStyle +prettyRange Range{..} = f _start <> "-" <> f _end + where f Position{..} = pretty (show $ _line+1) <> colon <> pretty (show $ _character+1) + +stringParagraphs :: T.Text -> Doc a +stringParagraphs = vcat . map (fillSep . map pretty . T.words) . T.lines + +showDiagnostics :: [FileDiagnostic] -> T.Text +showDiagnostics = srenderPlain . prettyDiagnostics + +showDiagnosticsColored :: [FileDiagnostic] -> T.Text +showDiagnosticsColored = srenderColored . prettyDiagnostics + + +prettyDiagnostics :: [FileDiagnostic] -> Doc Terminal.AnsiStyle +prettyDiagnostics = vcat . map prettyDiagnostic + +prettyDiagnostic :: FileDiagnostic -> Doc Terminal.AnsiStyle +prettyDiagnostic FileDiagnostic { fdFilePath, fdShouldShowDiagnostic, fdLspDiagnostic = LSP.Diagnostic{..} } = + vcat + [ slabel_ "File: " $ pretty (fromNormalizedFilePath fdFilePath) + , slabel_ "Hidden: " $ if fdShouldShowDiagnostic == ShowDiag then "no" else "yes" + , slabel_ "Range: " $ prettyRange _range + , slabel_ "Source: " $ pretty _source + , slabel_ "Severity:" $ pretty $ show sev + , slabel_ "Code: " $ case _code of + Just (InR text) -> pretty text + Just (InL i) -> pretty i + Nothing -> "" + , slabel_ "Message: " + $ case sev of + LSP.DiagnosticSeverity_Error -> annotate $ color Red + LSP.DiagnosticSeverity_Warning -> annotate $ color Yellow + LSP.DiagnosticSeverity_Information -> annotate $ color Blue + LSP.DiagnosticSeverity_Hint -> annotate $ color Magenta + $ stringParagraphs _message + ] + where + sev = fromMaybe LSP.DiagnosticSeverity_Error _severity + + +-- | Label a document. +slabel_ :: String -> Doc a -> Doc a +slabel_ t d = nest 2 $ sep [pretty t, d] + +-- | The layout options used for the SDK assistant. +cliLayout :: + Int + -- ^ Rendering width of the pretty printer. + -> LayoutOptions +cliLayout renderWidth = LayoutOptions + { layoutPageWidth = AvailablePerLine renderWidth 0.9 + } + +-- | Render without any syntax annotations +srenderPlain :: Doc ann -> T.Text +srenderPlain = renderStrict . layoutSmart (cliLayout defaultTermWidth) + +-- | Render a 'Document' as an ANSII colored string. +srenderColored :: Doc Terminal.AnsiStyle -> T.Text +srenderColored = + Terminal.renderStrict . + layoutSmart defaultLayoutOptions { layoutPageWidth = AvailablePerLine 100 1.0 } + +defaultTermWidth :: Int +defaultTermWidth = 80 + +makePrisms ''StructuredMessage + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . 
(++ "L"))) + ''FileDiagnostic diff --git a/ghcide/src/Development/IDE/Types/Exports.hs b/ghcide/src/Development/IDE/Types/Exports.hs new file mode 100644 index 0000000000..3b40ce1653 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Exports.hs @@ -0,0 +1,226 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +module Development.IDE.Types.Exports +( + IdentInfo(..), + ExportsMap(..), + rendered, + moduleNameText, + occNameText, + renderOcc, + mkTypeOcc, + mkVarOrDataOcc, + isDatacon, + createExportsMap, + createExportsMapMg, + buildModuleExportMapFrom, + createExportsMapHieDb, + size, + exportsMapSize, + updateExportsMapMg + ) where + +import Control.DeepSeq (NFData (..), force, ($!!)) +import Control.Monad +import Data.Char (isUpper) +import Data.Hashable (Hashable) +import Data.HashSet (HashSet) +import qualified Data.HashSet as Set +import Data.List (isSuffixOf) +import Data.Text (Text, uncons) +import Data.Text.Encoding (decodeUtf8, encodeUtf8) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Orphans () +import GHC.Generics (Generic) +import HieDb hiding (withHieDb) +import Prelude hiding (mod) + + +data ExportsMap = ExportsMap + { getExportsMap :: !(OccEnv (HashSet IdentInfo)) + , getModuleExportsMap :: !(ModuleNameEnv (HashSet IdentInfo)) + } + +instance NFData ExportsMap where + rnf (ExportsMap a b) = nonDetFoldOccEnv (\c d -> rnf c `seq` d) (seqEltsUFM rnf b) a + +instance Show ExportsMap where + show (ExportsMap occs mods) = + unwords [ "ExportsMap { getExportsMap =" + , printWithoutUniques $ mapOccEnv (textDoc . show) occs + , "getModuleExportsMap =" + , printWithoutUniques $ mapUFM (textDoc . show) mods + , "}" + ] + +-- | `updateExportsMap old new` results in an export map containing +-- the union of old and new, but with all the module entries new overriding +-- those in old. +updateExportsMap :: ExportsMap -> ExportsMap -> ExportsMap +updateExportsMap old new = ExportsMap + { getExportsMap = delListFromOccEnv (getExportsMap old) old_occs `plusOccEnv` getExportsMap new -- plusOccEnv is right biased + , getModuleExportsMap = getModuleExportsMap old `plusUFM` getModuleExportsMap new -- plusUFM is right biased + } + where old_occs = concat [map name $ Set.toList (lookupWithDefaultUFM_Directly (getModuleExportsMap old) mempty m_uniq) + | m_uniq <- nonDetKeysUFM (getModuleExportsMap new)] + +size :: ExportsMap -> Int +size = sum . map Set.size . nonDetOccEnvElts . getExportsMap + +mkVarOrDataOcc :: Text -> OccName +mkVarOrDataOcc t = mkOcc $ mkFastStringByteString $ encodeUtf8 t + where + mkOcc + | Just (c,_) <- uncons t + , c == ':' || isUpper c = mkDataOccFS + | otherwise = mkVarOccFS + +mkTypeOcc :: Text -> OccName +mkTypeOcc t = mkTcOccFS $ mkFastStringByteString $ encodeUtf8 t + +exportsMapSize :: ExportsMap -> Int +exportsMapSize = nonDetFoldOccEnv (\_ x -> x+1) 0 . getExportsMap + +instance Semigroup ExportsMap where + ExportsMap a b <> ExportsMap c d = ExportsMap (plusOccEnv_C (<>) a c) (plusUFM_C (<>) b d) + +instance Monoid ExportsMap where + mempty = ExportsMap emptyOccEnv emptyUFM + +rendered :: IdentInfo -> Text +rendered = occNameText . name + +-- | Render an identifier as imported or exported style. 
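+--
+-- Illustrative examples, using the helpers defined above (documentation only,
+-- not checked by a doctest):
+--
+-- @
+-- occNameText (mkVarOrDataOcc "foo") == "foo"
+-- occNameText (mkVarOrDataOcc "<>")  == "(<>)"
+-- occNameText (mkTypeOcc "Maybe")    == "Maybe"
+-- @
+--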
+-- TODO: pattern synonym
+occNameText :: OccName -> Text
+occNameText name
+  | isSymOcc name = "(" <> renderedOcc <> ")"
+  | isTcOcc name && isSymOcc name = "type (" <> renderedOcc <> ")"
+  | otherwise = renderedOcc
+  where
+    renderedOcc = renderOcc name
+
+renderOcc :: OccName -> Text
+renderOcc = decodeUtf8 . bytesFS . occNameFS
+
+moduleNameText :: IdentInfo -> Text
+moduleNameText = moduleNameText' . identModuleName
+
+moduleNameText' :: ModuleName -> Text
+moduleNameText' = decodeUtf8 . bytesFS . moduleNameFS
+
+data IdentInfo = IdentInfo
+  { name :: !OccName
+  , parent :: !(Maybe OccName)
+  , identModuleName :: !ModuleName
+  }
+  deriving (Generic, Show)
+  deriving anyclass Hashable
+
+isDatacon :: IdentInfo -> Bool
+isDatacon = isDataOcc . name
+
+instance Eq IdentInfo where
+  a == b = name a == name b
+        && parent a == parent b
+        && identModuleName a == identModuleName b
+
+instance NFData IdentInfo where
+  rnf IdentInfo{..} =
+    -- deliberately skip the rendered field
+    rnf name `seq` rnf parent `seq` rnf identModuleName
+
+mkIdentInfos :: ModuleName -> AvailInfo -> [IdentInfo]
+mkIdentInfos mod (AvailName n) =
+    [IdentInfo (nameOccName n) Nothing mod]
+mkIdentInfos mod (AvailFL fl) =
+    [IdentInfo (nameOccName n) Nothing mod]
+    where
+      n = flSelector fl
+mkIdentInfos mod (AvailTC parent (n:nn) flds)
+    -- Following the GHC convention that parent == n if parent is exported
+    | n == parent
+    = [ IdentInfo (nameOccName name) (Just $! nameOccName parent) mod
+      | name <- nn ++ map flSelector flds
+      ] ++
+      [ IdentInfo (nameOccName n) Nothing mod]
+
+mkIdentInfos mod (AvailTC _ nn flds)
+  = [ IdentInfo (nameOccName n) Nothing mod
+    | n <- nn ++ map flSelector flds
+    ]
+
+createExportsMap :: [ModIface] -> ExportsMap
+createExportsMap modIface = do
+  let exportList = concatMap doOne modIface
+  let exportsMap = mkOccEnv_C (<>) $ map (\(a,_,c) -> (a, c)) exportList
+  force $ ExportsMap exportsMap $ buildModuleExportMap $ map (\(_,b,c) -> (b, c)) exportList -- UFM is lazy, so need to seq
+  where
+    doOne modIFace = do
+      let getModDetails = unpackAvail $ moduleName $ mi_module modIFace
+      concatMap getModDetails (mi_exports modIFace)
+
+createExportsMapMg :: [ModGuts] -> ExportsMap
+createExportsMapMg modGuts = do
+  let exportList = concatMap doOne modGuts
+  let exportsMap = mkOccEnv_C (<>) $ map (\(a,_,c) -> (a, c)) exportList
+  force $ ExportsMap exportsMap $ buildModuleExportMap $ map (\(_,b,c) -> (b, c)) exportList -- UFM is lazy, so need to seq
+  where
+    doOne mi = do
+      let getModuleName = moduleName $ mg_module mi
+      concatMap (unpackAvail getModuleName) (mg_exports mi)
+
+updateExportsMapMg :: [ModGuts] -> ExportsMap -> ExportsMap
+updateExportsMapMg modGuts old = updateExportsMap old new
+  where
+    new = createExportsMapMg modGuts
+
+nonInternalModules :: ModuleName -> Bool
+nonInternalModules = not . (".Internal" `isSuffixOf`) . moduleNameString
+
+type WithHieDb = forall a. (HieDb -> IO a) -> IO a
+
+createExportsMapHieDb :: WithHieDb -> IO ExportsMap
+createExportsMapHieDb withHieDb = do
+  mods <- withHieDb getAllIndexedMods
+  idents' <- forM (filter (nonInternalModules . modInfoName . hieModInfo) mods) $ \m -> do
+    let mn = modInfoName $ hieModInfo m
+    fmap (unwrap mn) <$> withHieDb (\hieDb -> getExportsForModule hieDb mn)
+  let idents = concat idents'
+  let exportsMap = mkOccEnv_C (<>) (keyWith name idents)
+  return $!! 
ExportsMap exportsMap $ buildModuleExportMap (keyWith identModuleName idents) -- UFM is lazy so need to seq + where + unwrap m ExportRow{..} = IdentInfo exportName exportParent m + keyWith f xs = [(f x, Set.singleton x) | x <- xs] + +unpackAvail :: ModuleName -> IfaceExport -> [(OccName, ModuleName, HashSet IdentInfo)] +unpackAvail mn + | nonInternalModules mn = map f . mkIdentInfos mn + | otherwise = const [] + where + f identInfo@IdentInfo {..} = (name, mn, Set.singleton identInfo) + + +identInfoToKeyVal :: IdentInfo -> (ModuleName, IdentInfo) +identInfoToKeyVal identInfo = + (identModuleName identInfo, identInfo) + +buildModuleExportMap:: [(ModuleName, HashSet IdentInfo)] -> ModuleNameEnv (HashSet IdentInfo) +buildModuleExportMap exportsMap = do + let lst = concatMap (Set.toList . snd) exportsMap + let lstThree = map identInfoToKeyVal lst + sortAndGroup lstThree + +buildModuleExportMapFrom:: [ModIface] -> ModuleNameEnv (HashSet IdentInfo) +buildModuleExportMapFrom modIfaces = do + let exports = map extractModuleExports modIfaces + listToUFM_C (<>) exports + +extractModuleExports :: ModIface -> (ModuleName, HashSet IdentInfo) +extractModuleExports modIFace = do + let modName = moduleName $ mi_module modIFace + let functionSet = Set.fromList $ concatMap (mkIdentInfos modName) $ mi_exports modIFace + (modName, functionSet) + +sortAndGroup :: [(ModuleName, IdentInfo)] -> ModuleNameEnv (HashSet IdentInfo) +sortAndGroup assocs = listToUFM_C (<>) [(k, Set.singleton v) | (k, v) <- assocs] diff --git a/ghcide/src/Development/IDE/Types/HscEnvEq.hs b/ghcide/src/Development/IDE/Types/HscEnvEq.hs new file mode 100644 index 0000000000..1c2ed1732f --- /dev/null +++ b/ghcide/src/Development/IDE/Types/HscEnvEq.hs @@ -0,0 +1,147 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Types.HscEnvEq +( HscEnvEq, + hscEnv, newHscEnvEq, + updateHscEnvEq, + envPackageExports, + envVisibleModuleNames, +) where + + +import Control.Concurrent.Async (Async, async, waitCatch) +import Control.Concurrent.Strict (modifyVar, newVar) +import Control.DeepSeq (force, rwhnf) +import Control.Exception (evaluate, mask, throwIO) +import Control.Monad.Extra (eitherM, join, mapMaybeM) +import Data.Either (fromRight) +import Data.IORef +import qualified Data.Map as M +import Data.Unique (Unique) +import qualified Data.Unique as Unique +import Development.IDE.GHC.Compat hiding (newUnique) +import qualified Development.IDE.GHC.Compat.Util as Maybes +import Development.IDE.GHC.Error (catchSrcErrors) +import Development.IDE.GHC.Util (lookupPackageConfig) +import Development.IDE.Graph.Classes +import Development.IDE.Types.Exports (ExportsMap, createExportsMap) +import GHC.Driver.Env (hsc_all_home_unit_ids) +import Ide.PluginUtils (toAbsolute) +import OpenTelemetry.Eventlog (withSpan) +import System.Directory (makeAbsolute) + + +-- | An 'HscEnv' with equality. Two values are considered equal +-- if they are created with the same call to 'newHscEnvEq' or +-- 'updateHscEnvEq'. 
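+--
+-- One consequence worth spelling out (illustrative sketch, where @env@ stands
+-- for any 'HscEnv'): wrapping the same environment twice yields values that
+-- compare unequal, because equality is based solely on the 'Unique' allocated
+-- per call:
+--
+-- @
+-- do a <- newHscEnvEq env
+--    b <- newHscEnvEq env
+--    pure (a == b)  -- False
+-- @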
+data HscEnvEq = HscEnvEq + { envUnique :: !Unique + , hscEnv :: !HscEnv + , envPackageExports :: IO ExportsMap + , envVisibleModuleNames :: IO (Maybe [ModuleName]) + -- ^ 'listVisibleModuleNames' is a pure function, + -- but it could panic due to a ghc bug: https://github.com/haskell/haskell-language-server/issues/1365 + -- So it's wrapped in IO here for error handling + -- If Nothing, 'listVisibleModuleNames' panic + } + +updateHscEnvEq :: HscEnvEq -> HscEnv -> IO HscEnvEq +updateHscEnvEq oldHscEnvEq newHscEnv = do + let update newUnique = oldHscEnvEq { envUnique = newUnique, hscEnv = newHscEnv } + update <$> Unique.newUnique + +-- | Wrap an 'HscEnv' into an 'HscEnvEq'. +newHscEnvEq :: HscEnv -> IO HscEnvEq +newHscEnvEq hscEnv' = do + + mod_cache <- newIORef emptyInstalledModuleEnv + file_cache <- newIORef M.empty + -- This finder cache is for things which are outside of things which are tracked + -- by HLS. For example, non-home modules, dependent object files etc +#if MIN_VERSION_ghc(9,11,0) + let hscEnv = hscEnv' + { hsc_FC = FinderCache + { flushFinderCaches = \_ -> error "GHC should never call flushFinderCaches outside the driver" + , addToFinderCache = \(GWIB im _) val -> do + if moduleUnit im `elem` hsc_all_home_unit_ids hscEnv' + then error "tried to add home module to FC" + else atomicModifyIORef' mod_cache $ \c -> (extendInstalledModuleEnv c im val, ()) + , lookupFinderCache = \(GWIB im _) -> do + if moduleUnit im `elem` hsc_all_home_unit_ids hscEnv' + then error ("tried to lookup home module from FC" ++ showSDocUnsafe (ppr (im, hsc_all_home_unit_ids hscEnv'))) + else lookupInstalledModuleEnv <$> readIORef mod_cache <*> pure im + , lookupFileCache = \fp -> error ("not used by HLS" ++ fp) + } + } + +#else + let hscEnv = hscEnv' +#endif + + let dflags = hsc_dflags hscEnv + + envUnique <- Unique.newUnique + + -- it's very important to delay the package exports computation + envPackageExports <- onceAsync $ withSpan "Package Exports" $ \_sp -> do + -- compute the package imports + let pkgst = unitState hscEnv + depends = explicitUnits pkgst + modules = + [ m + | d <- depends + , Just pkg <- [lookupPackageConfig d hscEnv] + , (modName, maybeOtherPkgMod) <- unitExposedModules pkg + , let m = case maybeOtherPkgMod of + -- When module is re-exported from another package, + -- the origin module is represented by value in Just + Just otherPkgMod -> otherPkgMod + Nothing -> mkModule (mkUnit pkg) modName + ] + + doOne m = do + modIface <- initIfaceLoad hscEnv $ + loadInterface "" m (ImportByUser NotBoot) + return $ case modIface of + Maybes.Failed _r -> Nothing + Maybes.Succeeded mi -> Just mi + modIfaces <- mapMaybeM doOne modules + return $ createExportsMap modIfaces + + -- similar to envPackageExports, evaluated lazily + envVisibleModuleNames <- onceAsync $ + fromRight Nothing + <$> catchSrcErrors + dflags + "listVisibleModuleNames" + (evaluate . force . Just $ listVisibleModuleNames hscEnv) + + return HscEnvEq{..} + +instance Show HscEnvEq where + show HscEnvEq{envUnique} = "HscEnvEq " ++ show (Unique.hashUnique envUnique) + +instance Eq HscEnvEq where + a == b = envUnique a == envUnique b + +instance NFData HscEnvEq where + rnf (HscEnvEq a b _ _) = + -- deliberately skip the package exports map and visible module names + rnf (Unique.hashUnique a) `seq` rwhnf b + +instance Hashable HscEnvEq where + hashWithSalt s = hashWithSalt s . envUnique + +-- | Given an action, produce a wrapped action that runs at most once. 
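+--
+-- Illustrative usage (a sketch; @expensiveQuery@ is a placeholder, not a
+-- function defined here), mirroring how 'envPackageExports' is set up above:
+--
+-- @
+-- getExports <- onceAsync expensiveQuery
+-- r1 <- getExports  -- runs 'expensiveQuery'
+-- r2 <- getExports  -- reuses the first result
+-- @
+--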
+-- The action is run in an async so it won't be killed by async exceptions +-- If the function raises an exception, the same exception will be reraised each time. +onceAsync :: IO a -> IO (IO a) +onceAsync act = do + var <- newVar OncePending + let run as = eitherM throwIO pure (waitCatch as) + pure $ mask $ \unmask -> join $ modifyVar var $ \v -> case v of + OnceRunning x -> pure (v, unmask $ run x) + OncePending -> do + x <- async (unmask act) + pure (OnceRunning x, unmask $ run x) + +data Once a = OncePending | OnceRunning (Async a) diff --git a/ghcide/src/Development/IDE/Types/KnownTargets.hs b/ghcide/src/Development/IDE/Types/KnownTargets.hs new file mode 100644 index 0000000000..6ae6d52ba3 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/KnownTargets.hs @@ -0,0 +1,71 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +module Development.IDE.Types.KnownTargets ( KnownTargets(..) + , emptyKnownTargets + , mkKnownTargets + , unionKnownTargets + , Target(..) + , toKnownFiles) where + +import Control.DeepSeq +import Data.Hashable +import Data.HashMap.Strict +import qualified Data.HashMap.Strict as HMap +import Data.HashSet +import qualified Data.HashSet as HSet +import Development.IDE.GHC.Compat (ModuleName) +import Development.IDE.GHC.Orphans () +import Development.IDE.Types.Location +import GHC.Generics + +-- | A mapping of module name to known files +data KnownTargets = KnownTargets + { targetMap :: !(HashMap Target (HashSet NormalizedFilePath)) + -- | 'normalisingMap' is a cached copy of `HMap.mapKey const targetMap` + -- + -- At startup 'GetLocatedImports' is called on all known files. Say you have 10000 + -- modules in your project then this leads to 10000 calls to 'GetLocatedImports' + -- running concurrently. + -- + -- In `GetLocatedImports` the known targets are consulted and the targetsMap + -- is created by mapping the known targets. This map is used for introducing + -- sharing amongst filepaths. This operation copies a local copy of the `target` + -- map which is local to the rule. + -- + -- @ + -- let targetsMap = HMap.mapWithKey const targets + -- @ + -- + -- So now each rule has a 'HashMap' of size 10000 held locally to it and depending + -- on how the threads are scheduled there will be 10000^2 elements in total + -- allocated in 'HashMap's. This used a lot of memory. + -- + -- Solution: Return the 'normalisingMap' in the result of the `GetKnownTargets` rule so it is shared across threads. + , normalisingMap :: !(HashMap Target Target) } deriving Show + + +unionKnownTargets :: KnownTargets -> KnownTargets -> KnownTargets +unionKnownTargets (KnownTargets tm nm) (KnownTargets tm' nm') = + KnownTargets (HMap.unionWith (<>) tm tm') (HMap.union nm nm') + +mkKnownTargets :: [(Target, HashSet NormalizedFilePath)] -> KnownTargets +mkKnownTargets vs = KnownTargets (HMap.fromList vs) (HMap.fromList [(k,k) | (k,_) <- vs ]) + +instance NFData KnownTargets where + rnf (KnownTargets tm nm) = rnf tm `seq` rnf nm `seq` () + +instance Eq KnownTargets where + k1 == k2 = targetMap k1 == targetMap k2 + +instance Hashable KnownTargets where + hashWithSalt s (KnownTargets hm _) = hashWithSalt s hm + +emptyKnownTargets :: KnownTargets +emptyKnownTargets = KnownTargets HMap.empty HMap.empty + +data Target = TargetModule ModuleName | TargetFile NormalizedFilePath + deriving ( Eq, Ord, Generic, Show ) + deriving anyclass (Hashable, NFData) + +toKnownFiles :: KnownTargets -> HashSet NormalizedFilePath +toKnownFiles = HSet.unions . HMap.elems . 
targetMap diff --git a/ghcide/src/Development/IDE/Types/Location.hs b/ghcide/src/Development/IDE/Types/Location.hs new file mode 100644 index 0000000000..06ca9cbeca --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Location.hs @@ -0,0 +1,114 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} + +-- | Types and functions for working with source code locations. +module Development.IDE.Types.Location + ( Location(..) + , noFilePath + , noRange + , Position(..) + , showPosition + , Range(..) + , LSP.Uri(..) + , LSP.NormalizedUri + , LSP.toNormalizedUri + , LSP.fromNormalizedUri + , LSP.NormalizedFilePath + , fromUri + , emptyFilePath + , emptyPathUri + , toNormalizedFilePath' + , LSP.fromNormalizedFilePath + , filePathToUri' + , uriToFilePath' + , readSrcSpan + ) where + +import Control.Applicative +import Control.Monad +import Data.Hashable (Hashable (hash)) +import Data.Maybe (fromMaybe) +import Data.String +import Language.LSP.Protocol.Types (Location (..), Position (..), + Range (..)) +import qualified Language.LSP.Protocol.Types as LSP +import Text.ParserCombinators.ReadP as ReadP + +import GHC.Data.FastString +import GHC.Types.SrcLoc as GHC + +toNormalizedFilePath' :: FilePath -> LSP.NormalizedFilePath +-- We want to keep empty paths instead of normalising them to "." +toNormalizedFilePath' "" = emptyFilePath +toNormalizedFilePath' fp = LSP.toNormalizedFilePath fp + +emptyFilePath :: LSP.NormalizedFilePath +emptyFilePath = LSP.emptyNormalizedFilePath + +-- | We use an empty string as a filepath when we don’t have a file. +-- However, haskell-lsp doesn’t support that in uriToFilePath and given +-- that it is not a valid filepath it does not make sense to upstream a fix. +-- So we have our own wrapper here that supports empty filepaths. +uriToFilePath' :: LSP.Uri -> Maybe FilePath +uriToFilePath' uri + | uri == LSP.fromNormalizedUri emptyPathUri = Just "" + | otherwise = LSP.uriToFilePath uri + +emptyPathUri :: LSP.NormalizedUri +emptyPathUri = + let s = "file://" + in LSP.NormalizedUri (hash s) s + +filePathToUri' :: LSP.NormalizedFilePath -> LSP.NormalizedUri +filePathToUri' = LSP.normalizedFilePathToUri + +fromUri :: LSP.NormalizedUri -> LSP.NormalizedFilePath +fromUri = fromMaybe (toNormalizedFilePath' noFilePath) . 
LSP.uriToNormalizedFilePath + +noFilePath :: FilePath +noFilePath = "" + +-- A dummy range to use when range is unknown +noRange :: Range +noRange = Range (Position 0 0) (Position 1 0) + +showPosition :: Position -> String +showPosition Position{..} = show (_line + 1) ++ ":" ++ show (_character + 1) + +-- | Parser for the GHC output format +readSrcSpan :: ReadS RealSrcSpan +readSrcSpan = readP_to_S (singleLineSrcSpanP <|> multiLineSrcSpanP) + where + singleLineSrcSpanP, multiLineSrcSpanP :: ReadP RealSrcSpan + singleLineSrcSpanP = do + fp <- filePathP + l <- readS_to_P reads <* char ':' + c0 <- readS_to_P reads + c1 <- (char '-' *> readS_to_P reads) <|> pure c0 + let from = mkRealSrcLoc fp l c0 + to = mkRealSrcLoc fp l c1 + return $ mkRealSrcSpan from to + + multiLineSrcSpanP = do + fp <- filePathP + s <- parensP (srcLocP fp) + void $ char '-' + e <- parensP (srcLocP fp) + return $ mkRealSrcSpan s e + + parensP :: ReadP a -> ReadP a + parensP = between (char '(') (char ')') + + filePathP :: ReadP FastString + filePathP = fromString <$> (readFilePath <* char ':') <|> pure "" + + srcLocP :: FastString -> ReadP RealSrcLoc + srcLocP fp = do + l <- readS_to_P reads + void $ char ',' + c <- readS_to_P reads + return $ mkRealSrcLoc fp l c + + readFilePath :: ReadP FilePath + readFilePath = some ReadP.get diff --git a/ghcide/src/Development/IDE/Types/Monitoring.hs b/ghcide/src/Development/IDE/Types/Monitoring.hs new file mode 100644 index 0000000000..41e92ed7dd --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Monitoring.hs @@ -0,0 +1,32 @@ +module Development.IDE.Types.Monitoring +(Monitoring(..) +) where + +import Data.Int +import Data.Text (Text) + +-- | An abstraction for runtime monitoring inspired by the 'ekg' package +data Monitoring = Monitoring { + -- | Register an integer-valued metric. + registerGauge :: Text -> IO Int64 -> IO (), + -- | Register a non-negative, monotonically increasing, integer-valued metric. + registerCounter :: Text -> IO Int64 -> IO (), + start :: IO (IO ()) -- ^ Start the monitoring system, returning an action which will stop the system. + } + +instance Semigroup Monitoring where + a <> b = Monitoring { + registerGauge = \n v -> registerGauge a n v >> registerGauge b n v, + registerCounter = \n v -> registerCounter a n v >> registerCounter b n v, + start = do + a' <- start a + b' <- start b + return $ a' >> b' + } + +instance Monoid Monitoring where + mempty = Monitoring { + registerGauge = \_ _ -> return (), + registerCounter = \_ _ -> return (), + start = return $ return () + } diff --git a/ghcide/src/Development/IDE/Types/Options.hs b/ghcide/src/Development/IDE/Types/Options.hs new file mode 100644 index 0000000000..8d4d91e166 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Options.hs @@ -0,0 +1,189 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +-- | Options +module Development.IDE.Types.Options + ( IdeOptions(..) + , IdePreprocessedSource(..) + , IdeReportProgress(..) + , IdeDefer(..) + , IdeTesting(..) + , IdeOTMemoryProfiling(..) + , clientSupportsProgress + , IdePkgLocationOptions(..) + , defaultIdeOptions + , IdeResult + , IdeGhcSession(..) + , OptHaddockParse(..) + , ProgressReportingStyle(..) 
+ ) where + +import Control.Lens +import qualified Data.Text as T +import Data.Typeable +import Development.IDE.Core.RuleTypes +import Development.IDE.GHC.Compat as GHC +import Development.IDE.Graph +import Development.IDE.Types.Diagnostics +import Ide.Plugin.Config +import Ide.Types (DynFlagsModifications) +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Types as LSP + +data IdeOptions = IdeOptions + { optPreprocessor :: GHC.ParsedSource -> IdePreprocessedSource + -- ^ Preprocessor to run over all parsed source trees, generating a list of warnings + -- and a list of errors, along with a new parse tree. + , optGhcSession :: Action IdeGhcSession + -- ^ Setup a GHC session for a given file, e.g. @Foo.hs@. + -- For the same 'ComponentOptions' from hie-bios, the resulting function will be applied once per file. + -- It is desirable that many files get the same 'HscEnvEq', so that more IDE features work. + , optPkgLocationOpts :: IdePkgLocationOptions + -- ^ How to locate source and @.hie@ files given a module name. + , optExtensions :: [String] + -- ^ File extensions to search for code, defaults to Haskell sources (including @.hs@) + , optShakeProfiling :: Maybe FilePath + -- ^ Set to 'Just' to create a directory of profiling reports. + , optTesting :: IdeTesting + -- ^ Whether to enable additional lsp messages used by the test suite for checking invariants + , optReportProgress :: IdeReportProgress + -- ^ Whether to report progress during long operations. + , optMaxDirtyAge :: Int + -- ^ Age (in # builds) at which we collect dirty keys + , optLanguageSyntax :: String + -- ^ the ```language to use + , optNewColonConvention :: Bool + -- ^ whether to use new colon convention + , optKeywords :: [T.Text] + -- ^ keywords used for completions. These are customizable + -- since DAML has a different set of keywords than Haskell. + , optDefer :: IdeDefer + -- ^ Whether to defer type errors, typed holes and out of scope + -- variables. Deferral allows the IDE to continue to provide + -- features such as diagnostics and go-to-definition, in + -- situations in which they would become unavailable because of + -- the presence of type errors, holes or unbound variables. + , optCheckProject :: IO Bool + -- ^ Whether to typecheck the entire project on load + , optCheckParents :: IO CheckParents + -- ^ When to typecheck reverse dependencies of a file + , optHaddockParse :: OptHaddockParse + -- ^ Whether to parse modules with '-haddock' by default. + -- If 'HaddockParse' is given, we parse local haskell modules with the + -- '-haddock' flag enables. + -- If a plugin requires the parsed sources *without* '-haddock', it needs + -- to use rules that explicitly disable the '-haddock' flag. + -- See call sites of 'withoutOptHaddock' for rules that parse without '-haddock'. + , optModifyDynFlags :: Config -> DynFlagsModifications + -- ^ Will be called right after setting up a new cradle, + -- allowing to customize the Ghc options used + , optShakeOptions :: ShakeOptions + , optSkipProgress :: forall a. Typeable a => a -> Bool + -- ^ Predicate to select which rule keys to exclude from progress reporting. 
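+    --
+    -- An illustrative predicate (not the default; see 'defaultSkipProgress'
+    -- below for the one actually used):
+    --
+    -- @
+    -- skipFileContents key = isJust (cast key :: Maybe GetFileContents)
+    -- @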
+ , optProgressStyle :: ProgressReportingStyle + , optRunSubset :: Bool + -- ^ Experimental feature to re-run only the subset of the Shake graph that has changed + , optVerifyCoreFile :: Bool + -- ^ Verify core files after serialization + } + +data OptHaddockParse = HaddockParse | NoHaddockParse + deriving (Eq,Ord,Show,Enum) + +data IdePreprocessedSource = IdePreprocessedSource + { preprocWarnings :: [(GHC.SrcSpan, String)] -- TODO: Future work could we make these warnings structured as well? + -- ^ Warnings emitted by the preprocessor. + , preprocErrors :: [(GHC.SrcSpan, String)] -- TODO: Future work could we make these errors structured as well? + -- ^ Errors emitted by the preprocessor. + , preprocSource :: GHC.ParsedSource + -- ^ New parse tree emitted by the preprocessor. + } + +newtype IdeReportProgress = IdeReportProgress Bool +newtype IdeDefer = IdeDefer Bool +newtype IdeTesting = IdeTesting Bool +newtype IdeOTMemoryProfiling = IdeOTMemoryProfiling Bool + +data ProgressReportingStyle + = Percentage -- ^ Report using the LSP @_percentage@ field + | Explicit -- ^ Report using explicit 123/456 text + | NoProgress -- ^ Do not report any percentage + deriving Eq + + +clientSupportsProgress :: LSP.ClientCapabilities -> IdeReportProgress +clientSupportsProgress caps = IdeReportProgress $ Just True == + ((\x -> x ^. L.workDoneProgress) =<< LSP._window (caps :: LSP.ClientCapabilities)) + +defaultIdeOptions :: Action IdeGhcSession -> IdeOptions +defaultIdeOptions session = IdeOptions + {optPreprocessor = IdePreprocessedSource [] [] + ,optGhcSession = session + ,optExtensions = ["hs", "lhs"] + ,optPkgLocationOpts = defaultIdePkgLocationOptions + ,optShakeOptions = shakeOptions + ,optShakeProfiling = Nothing + ,optReportProgress = IdeReportProgress False + ,optLanguageSyntax = "haskell" + ,optNewColonConvention = False + ,optKeywords = haskellKeywords + ,optDefer = IdeDefer True + ,optTesting = IdeTesting False + ,optCheckProject = pure True + ,optCheckParents = pure CheckOnSave + ,optHaddockParse = HaddockParse + ,optModifyDynFlags = mempty + ,optSkipProgress = defaultSkipProgress + ,optProgressStyle = Explicit + ,optRunSubset = True + ,optVerifyCoreFile = False + ,optMaxDirtyAge = 100 + } + +defaultSkipProgress :: Typeable a => a -> Bool +defaultSkipProgress key = case () of + -- don't do progress for GetFileContents as it's cheap + _ | Just GetFileContents <- cast key -> True + -- don't do progress for GetFileExists, as there are lots of redundant nodes + -- (normally there is one node per file, but this is not the case for GetFileExists) + _ | Just GetFileExists <- cast key -> True + -- don't do progress for GetModificationTime as there are lot of redundant nodes + -- (for the interface files) + _ | Just GetModificationTime_{} <- cast key -> True + _ -> False + + +-- | The set of options used to locate files belonging to external packages. +data IdePkgLocationOptions = IdePkgLocationOptions + { optLocateHieFile :: UnitState -> Module -> IO (Maybe FilePath) + -- ^ Locate the HIE file for the given module. The PackageConfig can be + -- used to lookup settings like importDirs. + , optLocateSrcFile :: UnitState -> Module -> IO (Maybe FilePath) + -- ^ Locate the source file for the given module. The PackageConfig can be + -- used to lookup settings like importDirs. For DAML, we place them in the package DB. + -- For cabal this could point somewhere in ~/.cabal/packages. 
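+    --
+    -- A hypothetical sketch of a locator that guesses a path from the module
+    -- name ('moduleNameSlashes' comes from the GHC API; this is not what the
+    -- default options do):
+    --
+    -- @
+    -- locateBySlashes _unitState m =
+    --   pure (Just ("src/" ++ moduleNameSlashes (moduleName m) ++ ".hs"))
+    -- @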
+ } + +defaultIdePkgLocationOptions :: IdePkgLocationOptions +defaultIdePkgLocationOptions = IdePkgLocationOptions f f + where f _ _ = return Nothing + +-- | From https://wiki.haskell.org/Keywords +haskellKeywords :: [T.Text] +haskellKeywords = + [ "as" + , "case", "of" + , "class", "instance", "type" + , "data", "family", "newtype" + , "default" + , "deriving" + , "do", "mdo", "proc", "rec" + , "forall" + , "foreign" + , "hiding" + , "if", "then", "else" + , "import", "qualified", "hiding" + , "infix", "infixl", "infixr" + , "let", "in", "where" + , "module" + ] diff --git a/ghcide/src/Development/IDE/Types/Shake.hs b/ghcide/src/Development/IDE/Types/Shake.hs new file mode 100644 index 0000000000..cc8f84e3b6 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Shake.hs @@ -0,0 +1,141 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE TypeFamilies #-} +module Development.IDE.Types.Shake + ( Q (..), + A (..), + Value (..), + ValueWithDiagnostics (..), + Values, + Key, + BadDependency (..), + ShakeValue(..), + currentValue, + isBadDependency, + toShakeValue,encodeShakeValue,decodeShakeValue,toKey,toNoFileKey,fromKey,fromKeyType,WithHieDb,WithHieDbShield(..)) +where + +import Control.DeepSeq +import Control.Exception +import qualified Data.ByteString.Char8 as BS +import Data.Dynamic +import Data.Hashable +import Data.Typeable (cast) +import Data.Vector (Vector) +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.RuleTypes (FileVersion) +import Development.IDE.Graph (Key, RuleResult, newKey, + pattern Key) +import qualified Development.IDE.Graph as Shake +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import GHC.Generics +import HieDb.Types (HieDb) +import qualified StmContainers.Map as STM +import Type.Reflection (SomeTypeRep (SomeTypeRep), + eqTypeRep, pattern App, + type (:~~:) (HRefl), + typeOf, typeRep) + +-- | Intended to represent HieDb calls wrapped with (currently) retry +-- functionality +type WithHieDb = forall a. (HieDb -> IO a) -> IO a + +-- used to smuggle RankNType WithHieDb through dbMVar +newtype WithHieDbShield = WithHieDbShield WithHieDb + +data Value v + = Succeeded (Maybe FileVersion) v + | Stale (Maybe PositionDelta) (Maybe FileVersion) v + | Failed Bool -- True if we already tried the persistent rule + deriving (Functor, Generic, Show) + +instance NFData v => NFData (Value v) + +-- | Convert a Value to a Maybe. This will only return `Just` for +-- up2date results not for stale values. +currentValue :: Value v -> Maybe v +currentValue (Succeeded _ v) = Just v +currentValue (Stale _ _ _) = Nothing +currentValue Failed{} = Nothing + +data ValueWithDiagnostics + = ValueWithDiagnostics !(Value Dynamic) !(Vector FileDiagnostic) + +-- | The state of the all values and diagnostics +type Values = STM.Map Key ValueWithDiagnostics + +-- | When we depend on something that reported an error, and we fail as a direct result, throw BadDependency +-- which short-circuits the rest of the action +newtype BadDependency = BadDependency String deriving Show +instance Exception BadDependency + +isBadDependency :: SomeException -> Bool +isBadDependency x + | Just (_ :: BadDependency) <- fromException x = True + | otherwise = False + +toKey :: Shake.ShakeValue k => k -> NormalizedFilePath -> Key +toKey = (newKey.) . 
curry Q + +fromKey :: Typeable k => Key -> Maybe (k, NormalizedFilePath) +fromKey (Key k) + | Just (Q (k', f)) <- cast k = Just (k', f) + | otherwise = Nothing + +-- | fromKeyType (Q (k,f)) = (typeOf k, f) +fromKeyType :: Key -> Maybe (SomeTypeRep, NormalizedFilePath) +fromKeyType (Key k) + | App tc a <- typeOf k + , Just HRefl <- tc `eqTypeRep` (typeRep @Q) + , Q (_, f) <- k + = Just (SomeTypeRep a, f) + | otherwise = Nothing + +toNoFileKey :: (Show k, Typeable k, Eq k, Hashable k) => k -> Key +toNoFileKey k = newKey $ Q (k, emptyFilePath) + +newtype Q k = Q (k, NormalizedFilePath) + deriving newtype (Eq, Hashable, NFData) + +instance Show k => Show (Q k) where + show (Q (k, file)) = show k ++ "; " ++ fromNormalizedFilePath file + +-- | Invariant: the @v@ must be in normal form (fully evaluated). +-- Otherwise we keep repeatedly 'rnf'ing values taken from the Shake database +newtype A v = A (Value v) + deriving Show + +-- In the Shake database we only store one type of key/result pairs, +-- namely Q (question) / A (answer). +type instance RuleResult (Q k) = A (RuleResult k) + + +toShakeValue :: (BS.ByteString -> ShakeValue) -> Maybe BS.ByteString -> ShakeValue +toShakeValue = maybe ShakeNoCutoff + +data ShakeValue + = -- | This is what we use when we get Nothing from + -- a rule. + ShakeNoCutoff + | -- | This is used both for `Failed` + -- as well as `Succeeded`. + ShakeResult !BS.ByteString + | ShakeStale !BS.ByteString + deriving (Generic, Show) + +instance NFData ShakeValue + +encodeShakeValue :: ShakeValue -> BS.ByteString +encodeShakeValue = \case + ShakeNoCutoff -> BS.empty + ShakeResult r -> BS.cons 'r' r + ShakeStale r -> BS.cons 's' r + +decodeShakeValue :: BS.ByteString -> ShakeValue +decodeShakeValue bs = case BS.uncons bs of + Nothing -> ShakeNoCutoff + Just (x, xs) + | x == 'r' -> ShakeResult xs + | x == 's' -> ShakeStale xs + | otherwise -> error $ "Failed to parse shake value " <> show bs diff --git a/ghcide/src/Generics/SYB/GHC.hs b/ghcide/src/Generics/SYB/GHC.hs new file mode 100644 index 0000000000..10ab699633 --- /dev/null +++ b/ghcide/src/Generics/SYB/GHC.hs @@ -0,0 +1,123 @@ +{-# LANGUAGE DerivingVia #-} + +-- | Custom SYB traversals explicitly designed for operating over the GHC AST. +module Generics.SYB.GHC + ( genericIsSubspan, + mkBindListT, + everywhereM', + smallestM, + largestM + ) where + +import Control.Monad +import Data.Functor.Compose (Compose (Compose)) +import Data.Monoid (Any (Any)) +import Development.IDE.GHC.Compat +import Development.IDE.Graph.Classes +import Generics.SYB + + +-- | A generic query intended to be used for calling 'smallestM' and +-- 'largestM'. If the current node is a 'Located', returns whether or not the +-- given 'SrcSpan' is a subspan. For all other nodes, returns 'Nothing', which +-- indicates uncertainty. The search strategy in 'smallestM' et al. will +-- continue searching uncertain nodes. +genericIsSubspan :: + forall ast. + Typeable ast => + -- | The type of nodes we'd like to consider. + Proxy (Located ast) -> + SrcSpan -> + GenericQ (Maybe (Bool, ast)) +genericIsSubspan _ dst = mkQ Nothing $ \case + (L srcSpan ast :: Located ast) -> Just (dst `isSubspanOf` srcSpan, ast) + + +-- | Lift a function that replaces a value with several values into a generic +-- function. The result doesn't perform any searching, so should be driven via +-- 'everywhereM' or friends. +-- +-- The 'Int' argument is the index in the list being bound. +mkBindListT :: forall b m. 
(Data b, Monad m) => (Int -> b -> m [b]) -> GenericM m +mkBindListT f = mkM $ fmap join . traverse (uncurry f) . zip [0..] + + +-- | Apply a monadic transformation everywhere in a top-down manner. +everywhereM' :: forall m. Monad m => GenericM m -> GenericM m +everywhereM' f = go + where + go :: GenericM m + go = gmapM go <=< f + + +------------------------------------------------------------------------------ +-- Custom SYB machinery +------------------------------------------------------------------------------ + +-- | Generic monadic transformations that return side-channel data. +type GenericMQ r m = forall a. Data a => a -> m (r, a) + +------------------------------------------------------------------------------ +-- | Apply the given 'GenericM' at all every node whose children fail the +-- 'GenericQ', but which passes the query itself. +-- +-- The query must be a monotonic function when it returns 'Just'. That is, if +-- @s@ is a subtree of @t@, @q t@ should return @Just True@ if @q s@ does. It +-- is the True-to-false edge of the query that triggers the transformation. +-- +-- Why is the query a @Maybe Bool@? The GHC AST intersperses 'Located' nodes +-- with data nodes, so for any given node we can only definitely return an +-- answer if it's a 'Located'. See 'genericIsSubspan' for how this parameter is +-- used. +smallestM :: forall m a. Monad m => GenericQ (Maybe (Bool, a)) -> (a -> GenericM m) -> GenericM m +smallestM q f = fmap snd . go + where + go :: GenericMQ Any m + go x = do + case q x of + Nothing -> gmapMQ go x + Just (True, a) -> do + it@(r, x') <- gmapMQ go x + case r of + Any True -> pure it + Any False -> fmap (Any True,) $ f a x' + Just (False, _) -> pure (mempty, x) + +------------------------------------------------------------------------------ +-- | Apply the given 'GenericM' at every node that passes the 'GenericQ', but +-- don't descend into children if the query matches. Because this traversal is +-- root-first, this policy will find the largest subtrees for which the query +-- holds true. +-- +-- Why is the query a @Maybe Bool@? The GHC AST intersperses 'Located' nodes +-- with data nodes, so for any given node we can only definitely return an +-- answer if it's a 'Located'. See 'genericIsSubspan' for how this parameter is +-- used. +largestM :: forall m a. Monad m => GenericQ (Maybe (Bool, a)) -> (a -> GenericM m) -> GenericM m +largestM q f = go + where + go :: GenericM m + go x = do + case q x of + Just (True, a) -> f a x + Just (False, _) -> pure x + Nothing -> gmapM go x + +newtype MonadicQuery r m a = MonadicQuery + { runMonadicQuery :: m (r, a) + } + deriving stock (Functor) + deriving Applicative via Compose m ((,) r) + + +------------------------------------------------------------------------------ +-- | Like 'gmapM', but also returns side-channel data. +gmapMQ :: + forall f r a. (Monoid r, Data a, Applicative f) => + (forall d. Data d => d -> f (r, d)) -> + a -> + f (r, a) +gmapMQ f = runMonadicQuery . gfoldl k pure + where + k :: Data d => MonadicQuery r f (d -> b) -> d -> MonadicQuery r f b + k c x = c <*> MonadicQuery (f x) diff --git a/ghcide/src/Text/Fuzzy/Parallel.hs b/ghcide/src/Text/Fuzzy/Parallel.hs new file mode 100644 index 0000000000..4d7a1d67e0 --- /dev/null +++ b/ghcide/src/Text/Fuzzy/Parallel.hs @@ -0,0 +1,176 @@ +-- | Parallel versions of 'filter' and 'simpleFilter' + +module Text.Fuzzy.Parallel +( filter, filter', + simpleFilter, simpleFilter', + match, defChunkSize, defMaxResults, + Scored(..) 
+) where + +import Control.Parallel.Strategies (evalList, parList, rseq, using) +import Data.Bits ((.|.)) +import Data.Maybe (fromMaybe, mapMaybe) +import qualified Data.Text as T +import qualified Data.Text.Array as TA +import qualified Data.Text.Internal as T +import Prelude hiding (filter) + +data Scored a = Scored {score :: !Int, original:: !a} + deriving (Functor, Show) + +-- | Returns the rendered output and the +-- matching score for a pattern and a text. +-- Two examples are given below: +-- +-- >>> match "fnt" "infinite" +-- Just 3 +-- +-- >>> match "hsk" "Haskell" +-- Just 5 +-- +{-# INLINABLE match #-} +match :: T.Text -- ^ Pattern in lowercase except for first character + -> T.Text -- ^ The text to search in. + -> Maybe Int -- ^ The score +match (T.Text pArr pOff pLen) (T.Text sArr sOff sLen) = go 0 1 pOff sOff + where + pTotal = pOff + pLen + sDelta = sOff + sLen - pTotal + + go !totalScore !currScore !currPOff !currSOff + -- If pattern has been matched in full + | currPOff >= pTotal + = Just totalScore + -- If there is not enough left to match the rest of the pattern, equivalent to + -- (sOff + sLen - currSOff) < (pOff + pLen - currPOff) + | currSOff > currPOff + sDelta + = Nothing + -- This is slightly broken for non-ASCII: + -- 1. If code units, consisting a single pattern code point, are found as parts + -- of different code points, it counts as a match. Unless you use a ton of emojis + -- as identifiers, such false positives should not be be a big deal, + -- and anyways HLS does not currently support such use cases, because it uses + -- code point and UTF-16 code unit positions interchangeably. + -- 2. Case conversions is not applied to non-ASCII code points, because one has + -- to call T.toLower (not T.map toLower), reallocating the string in full, which + -- is too much of performance penalty for fuzzy search. Again, anyway HLS does not + -- attempt to do justice to Unicode: proper Unicode text matching requires + -- `unicode-transforms` and friends. + -- Altogether we sacrifice correctness for the sake of performance, which + -- is a right trade-off for fuzzy search. + | pByte <- TA.unsafeIndex pArr currPOff + , sByte <- TA.unsafeIndex sArr currSOff + -- First byte (currPOff == pOff) should match exactly, otherwise - up to case. + , pByte == sByte || (currPOff /= pOff && pByte == toLowerAscii sByte) + = let curr = currScore * 2 + 1 in + go (totalScore + curr) curr (currPOff + 1) (currSOff + 1) + | otherwise + = go totalScore 0 currPOff (currSOff + 1) + + toLowerAscii w = if (w - 65) < 26 then w .|. 0x20 else w + +-- | Sensible default value for chunk size to use when calling simple filter. +defChunkSize :: Int +defChunkSize = 1000 + +-- | Sensible default value for the number of max results to use when calling simple filter. +defMaxResults :: Int +defMaxResults = 10 + +-- | Return all elements of the list that have a fuzzy +-- match against the pattern. Runs with default settings where +-- nothing is added around the matches, as case insensitive. +-- +-- >>> simpleFilter 1000 10 "vm" ["vim", "emacs", "virtual machine"] +-- [Scored {score = 4, original = "vim"},Scored {score = 4, original = "virtual machine"}] +{-# INLINABLE simpleFilter #-} +simpleFilter :: Int -- ^ Chunk size. 1000 works well. + -> Int -- ^ Max. number of results wanted + -> T.Text -- ^ Pattern to look for. + -> [T.Text] -- ^ List of texts to check. + -> [Scored T.Text] -- ^ The ones that match. 
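+-- For example, with the defaults defined above (illustrative; equivalent to the
+-- doctest in the haddock for this function):
+--
+-- >>> simpleFilter defChunkSize defMaxResults "vm" ["vim", "emacs", "virtual machine"]
+-- [Scored {score = 4, original = "vim"},Scored {score = 4, original = "virtual machine"}]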
+simpleFilter chunk maxRes pat xs = filter chunk maxRes pat xs id + + +-- | The function to filter a list of values by fuzzy search on the text extracted from them, +-- using a custom matching function which determines how close words are. +filter' :: Int -- ^ Chunk size. 1000 works well. + -> Int -- ^ Max. number of results wanted + -> T.Text -- ^ Pattern. + -> [t] -- ^ The list of values containing the text to search in. + -> (t -> T.Text) -- ^ The function to extract the text from the container. + -> (T.Text -> T.Text -> Maybe Int) + -- ^ Custom scoring function to use for calculating how close words are + -- When the function returns Nothing, this means the values are incomparable. + -> [Scored t] -- ^ The list of results, sorted, highest score first. +filter' chunkSize maxRes pat ts extract match' = partialSortByAscScore maxRes perfectScore (concat vss) + where + -- Preserve case for the first character, make all others lowercase + pat' = case T.uncons pat of + Just (c, rest) -> T.cons c (T.toLower rest) + _ -> pat + vss = map (mapMaybe (\t -> flip Scored t <$> match' pat' (extract t))) (chunkList chunkSize ts) + `using` parList (evalList rseq) + perfectScore = fromMaybe (error $ T.unpack pat) $ match' pat' pat' + +-- | The function to filter a list of values by fuzzy search on the text extracted from them, +-- using a custom matching function which determines how close words are. +filter :: Int -- ^ Chunk size. 1000 works well. + -> Int -- ^ Max. number of results wanted + -> T.Text -- ^ Pattern. + -> [t] -- ^ The list of values containing the text to search in. + -> (t -> T.Text) -- ^ The function to extract the text from the container. + -> [Scored t] -- ^ The list of results, sorted, highest score first. +filter chunkSize maxRes pat ts extract = + filter' chunkSize maxRes pat ts extract match + +-- | Return all elements of the list that have a fuzzy match against the pattern, +-- the closeness of the match is determined using the custom scoring match function that is passed. +-- Runs with default settings where nothing is added around the matches, as case insensitive. +{-# INLINABLE simpleFilter' #-} +simpleFilter' :: Int -- ^ Chunk size. 1000 works well. + -> Int -- ^ Max. number of results wanted + -> T.Text -- ^ Pattern to look for. + -> [T.Text] -- ^ List of texts to check. + -> (T.Text -> T.Text -> Maybe Int) + -- ^ Custom scoring function to use for calculating how close words are + -> [Scored T.Text] -- ^ The ones that match. +simpleFilter' chunk maxRes pat xs match' = + filter' chunk maxRes pat xs id match' +-------------------------------------------------------------------------------- + +chunkList :: Int -> [a] -> [[a]] +chunkList chunkSize = go + where + go [] = [] + go xs = ys : go zs + where + (ys, zs) = splitAt chunkSize xs + +-- | A stable partial sort ascending by score. 
O(N) best case, O(wanted*N) worst case +partialSortByAscScore + :: Int -- ^ Number of items needed + -> Int -- ^ Value of a perfect score + -> [Scored t] + -> [Scored t] +partialSortByAscScore wantedCount perfectScore orig = loop orig (SortState minBound perfectScore 0) [] where + loop [] st@SortState{..} acc + | foundCount == wantedCount = reverse acc + | otherwise = if bestScoreSeen < scoreWanted + then loop orig st{scoreWanted = bestScoreSeen, bestScoreSeen = minBound} acc + else reverse acc + loop (x : xs) st@SortState{..} acc + | foundCount == wantedCount = reverse acc + | score x == scoreWanted + = loop xs st{foundCount = foundCount+1} (x:acc) + | score x < scoreWanted && score x > bestScoreSeen + = loop xs st{bestScoreSeen = score x} acc + | otherwise + = loop xs st acc + +data SortState a = SortState + { bestScoreSeen :: !Int + , scoreWanted :: !Int + , foundCount :: !Int + } + deriving Show diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index 872b3a12ae..50d4b869ba 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1,295 +1,2309 @@ -cabal-version: 2.2 -category: Development -name: haskell-language-server -version: 0.2.1.0 -synopsis: LSP server for GHC -description: Please see the README on GitHub at -homepage: https://github.com/haskell/haskell-language-server#readme -bug-reports: https://github.com/haskell/haskell-language-server/issues -author: Alan Zimmerman -maintainer: alan.zimm@gmail.com -copyright: Alan Zimmerman -license: Apache-2.0 -license-file: LICENSE -build-type: Simple +cabal-version: 3.4 +category: Development +name: haskell-language-server +version: 2.11.0.0 +synopsis: LSP server for GHC +description: + Please see the README on GitHub at + +homepage: https://github.com/haskell/haskell-language-server#readme +bug-reports: https://github.com/haskell/haskell-language-server/issues +author: The Haskell IDE Team +maintainer: alan.zimm@gmail.com +copyright: The Haskell IDE Team +license: Apache-2.0 +license-file: LICENSE +build-type: Simple +tested-with: GHC == {9.12.2, 9.10.2, 9.8.4, 9.6.7} extra-source-files: - README.md - ChangeLog.md + README.md + ChangeLog.md + test/testdata/**/*.project + test/testdata/**/*.cabal + test/testdata/**/*.yaml + test/testdata/**/*.hs + test/testdata/**/*.json + + -- These globs should only match test/testdata + plugins/**/*.project + plugins/**/*.expected + plugins/**/*.cabal + plugins/**/*.yaml + plugins/**/*.txt + plugins/**/*.hs + + ghcide-test/data/**/*.cabal + ghcide-test/data/**/*.hs + ghcide-test/data/**/*.hs-boot + ghcide-test/data/**/*.project + ghcide-test/data/**/*.yaml + + bindist/wrapper.in + +source-repository head + type: git + location: https://github.com/haskell/haskell-language-server + +common defaults + default-language: GHC2021 + -- Should have been in GHC2021, an oversight + default-extensions: ExplicitNamespaces + build-depends: + , base >=4.12 && <5 + +common test-defaults + ghc-options: -threaded -rtsopts -with-rtsopts=-N + if impl(ghc >= 9.8) + -- We allow using partial functions in tests + ghc-options: -Wno-x-partial + +-- Default warnings in HLS +common warnings + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + +flag pedantic + description: Enable -Werror + default: False + manual: True + +-- Allow compiling in pedantic mode +common pedantic + if flag(pedantic) + 
ghc-options: + -Werror + -- Note [unused-packages] + -- ~~~~~~~~~~~~~~~~~~~~~~ + -- Some packages need CPP conditioned on MIN_VERSION_ghc(x,y,z). + -- MIN_VERSION_ is CPP macro that cabal defines only when is declared as a dependency. + -- But -Wunused-packages still reports it as unused dependency if it's not imported. + -- For packages with such "unused" dependencies we demote -Wunused-packages error + -- (enabled by --flag=pedantic) to warning via -Wwarn=unused-packages. + -Wwarn=unused-packages + +-- Plugin flags are designed for 'cabal install haskell-language-server': +-- - Bulk flags should be default:False +-- - Individual flags should be default:True + +-- The intent of this flag is being able to keep the ghc condition for hackage +-- but skip it via flags in cabal.project as plugins for new ghcs usually +-- are buildable using cabal.project tweaks +flag ignore-plugins-ghc-bounds + description: Force the inclusion of plugins even if they are not buildable by default with a specific ghc version + default: False + manual: True + +flag dynamic + description: Build with the dyn rts + default: True + manual: True + +---------------------------- +---------------------------- +-- PLUGINS +---------------------------- +---------------------------- + +----------------------------- +-- cabal-fmt plugin +----------------------------- + +flag cabalfmt + description: Enable cabal-fmt plugin + default: True + manual: True + +common cabalfmt + if flag(cabalfmt) && flag(cabal) + build-depends: haskell-language-server:hls-cabal-fmt-plugin + cpp-options: -Dhls_cabalfmt + +flag isolateCabalfmtTests + description: Should tests search for 'cabal-fmt' on the $PATH or shall we install it via build-tool-depends? + -- By default, search on the PATH + default: False + manual: True + +library hls-cabal-fmt-plugin + import: defaults, pedantic, warnings + if !flag(cabalfmt) || !flag(cabal) + buildable: False + exposed-modules: Ide.Plugin.CabalFmt + hs-source-dirs: plugins/hls-cabal-fmt-plugin/src + build-depends: + , directory + , filepath + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp-types + , mtl + , process-extras + , text + +-- The `hls-cabal-plugin` is needed for tests, as we need to install notification handlers +test-suite hls-cabal-fmt-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(cabalfmt) || !flag(cabal) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-cabal-fmt-plugin/test + main-is: Main.hs + build-depends: + , directory + , filepath + , haskell-language-server:hls-cabal-plugin + , haskell-language-server:hls-cabal-fmt-plugin + , hls-plugin-api == 2.11.0.0 + , hls-test-utils == 2.11.0.0 + + if flag(isolateCabalfmtTests) + build-tool-depends: cabal-fmt:cabal-fmt ^>=0.1.12 + cpp-options: -Dhls_isolate_cabalfmt_tests + +----------------------------- +-- cabal-gild plugin +----------------------------- + +flag cabalgild + description: Enable cabal-gild plugin + default: True + manual: True + +common cabalgild + if flag(cabalgild) && flag(cabal) + build-depends: haskell-language-server:hls-cabal-gild-plugin + cpp-options: -Dhls_cabalgild + +flag isolateCabalGildTests + description: Should tests search for 'cabal-gild' on the $PATH or shall we install it via build-tool-depends? 
+ -- By default, search on the PATH + default: False + manual: True + +library hls-cabal-gild-plugin + import: defaults, pedantic, warnings + if !flag(cabalgild) || !flag(cabal) + buildable: False + exposed-modules: Ide.Plugin.CabalGild + hs-source-dirs: plugins/hls-cabal-gild-plugin/src + build-depends: + , directory + , filepath + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp-types + , text + , mtl + , process-extras + +-- The `hls-cabal-plugin` is needed for tests, as we need to install notification handlers +test-suite hls-cabal-gild-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(cabalgild) || !flag(cabal) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-cabal-gild-plugin/test + main-is: Main.hs + build-depends: + , directory + , filepath + , haskell-language-server:hls-cabal-plugin + , haskell-language-server:hls-cabal-gild-plugin + , hls-plugin-api == 2.11.0.0 + , hls-test-utils == 2.11.0.0 + + if flag(isolateCabalGildTests) + -- https://github.com/tfausak/cabal-gild/issues/89 + build-tool-depends: cabal-gild:cabal-gild >= 1.3 && < 1.3.2 + cpp-options: -Dhls_isolate_cabalgild_tests + +----------------------------- +-- cabal plugin +----------------------------- + +flag cabal + description: Enable cabal plugin + default: True + manual: True + +common cabal + if flag(cabal) + build-depends: haskell-language-server:hls-cabal-plugin + cpp-options: -Dhls_cabal + +library hls-cabal-plugin + import: defaults, pedantic, warnings + if !flag(cabal) + buildable: False + exposed-modules: + Ide.Plugin.Cabal + Ide.Plugin.Cabal.Diagnostics + Ide.Plugin.Cabal.Completion.CabalFields + Ide.Plugin.Cabal.Completion.Completer.FilePath + Ide.Plugin.Cabal.Completion.Completer.Module + Ide.Plugin.Cabal.Completion.Completer.Paths + Ide.Plugin.Cabal.Completion.Completer.Simple + Ide.Plugin.Cabal.Completion.Completer.Snippet + Ide.Plugin.Cabal.Completion.Completer.Types + Ide.Plugin.Cabal.Completion.Completions + Ide.Plugin.Cabal.Completion.Data + Ide.Plugin.Cabal.Completion.Types + Ide.Plugin.Cabal.Definition + Ide.Plugin.Cabal.FieldSuggest + Ide.Plugin.Cabal.Files + Ide.Plugin.Cabal.OfInterest + Ide.Plugin.Cabal.LicenseSuggest + Ide.Plugin.Cabal.Rules + Ide.Plugin.Cabal.CabalAdd.Command + Ide.Plugin.Cabal.CabalAdd.CodeAction + Ide.Plugin.Cabal.CabalAdd.Types + Ide.Plugin.Cabal.Orphans + Ide.Plugin.Cabal.Outline + Ide.Plugin.Cabal.Parse + + + build-depends: + , bytestring + , Cabal-syntax >= 3.7 + , containers + , deepseq + , directory + , filepath + , extra >=1.7.4 + , ghcide == 2.11.0.0 + , hashable + , hls-plugin-api == 2.11.0.0 + , hls-graph == 2.11.0.0 + , lens + , lsp ^>=2.7 + , lsp-types ^>=2.3 + , mtl + , regex-tdfa ^>=1.3.1 + , text + , text-rope + , transformers + , unordered-containers >=0.2.10.0 + , containers + , cabal-add ^>=0.2 + , aeson + , Cabal + , pretty + + hs-source-dirs: plugins/hls-cabal-plugin/src + +test-suite hls-cabal-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(cabal) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-cabal-plugin/test + main-is: Main.hs + other-modules: + CabalAdd + Completer + Context + Definition + Outline + Utils + build-depends: + , bytestring + , Cabal-syntax >= 3.7 + , extra + , filepath + , ghcide + , haskell-language-server:hls-cabal-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp + , lsp-types + , text + +----------------------------- +-- class plugin +----------------------------- + +flag class + 
description: Enable class plugin + default: True + manual: True + +common class + if flag(class) + build-depends: haskell-language-server:hls-class-plugin + cpp-options: -Dhls_class + +library hls-class-plugin + import: defaults, pedantic, warnings + if !flag(class) + buildable: False + exposed-modules: Ide.Plugin.Class + other-modules: Ide.Plugin.Class.CodeAction + , Ide.Plugin.Class.CodeLens + , Ide.Plugin.Class.ExactPrint + , Ide.Plugin.Class.Types + , Ide.Plugin.Class.Utils + hs-source-dirs: plugins/hls-class-plugin/src + build-depends: + , aeson + , containers + , deepseq + , extra + , ghc + , ghc-exactprint >= 1.5 && < 1.13.0.0 + , ghcide == 2.11.0.0 + , hls-graph + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , mtl + , text + , transformers + + default-extensions: + DataKinds + OverloadedStrings + +test-suite hls-class-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(class) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-class-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-class-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + +----------------------------- +-- call-hierarchy plugin +----------------------------- + +flag callHierarchy + description: Enable call hierarchy plugin + default: True + manual: True + +common callHierarchy + if flag(callHierarchy) + build-depends: haskell-language-server:hls-call-hierarchy-plugin + cpp-options: -Dhls_callHierarchy + +library hls-call-hierarchy-plugin + import: defaults, pedantic, warnings + if !flag(callHierarchy) + buildable: False + exposed-modules: Ide.Plugin.CallHierarchy + other-modules: + Ide.Plugin.CallHierarchy.Internal + Ide.Plugin.CallHierarchy.Query + Ide.Plugin.CallHierarchy.Types + + hs-source-dirs: plugins/hls-call-hierarchy-plugin/src + build-depends: + , aeson + , containers + , extra + , ghc + , ghcide == 2.11.0.0 + , hiedb ^>= 0.7.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp >=2.7 + , sqlite-simple + , text + + default-extensions: DataKinds + +test-suite hls-call-hierarchy-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(callHierarchy) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-call-hierarchy-plugin/test + main-is: Main.hs + build-depends: + , aeson + , containers + , extra + , filepath + , haskell-language-server:hls-call-hierarchy-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp + , lsp-test + , text + +----------------------------- +-- eval plugin +----------------------------- + +flag eval + description: Enable eval plugin + default: True + manual: True + +common eval + if flag(eval) + build-depends: haskell-language-server:hls-eval-plugin + cpp-options: -Dhls_eval + +library hls-eval-plugin + import: defaults, pedantic, warnings + if !flag(eval) + buildable: False + exposed-modules: + Ide.Plugin.Eval + Ide.Plugin.Eval.Types + + hs-source-dirs: plugins/hls-eval-plugin/src + other-modules: + Ide.Plugin.Eval.Code + Ide.Plugin.Eval.Config + Ide.Plugin.Eval.GHC + Ide.Plugin.Eval.Handlers + Ide.Plugin.Eval.Parse.Comments + Ide.Plugin.Eval.Parse.Option + Ide.Plugin.Eval.Rules + Ide.Plugin.Eval.Util + + build-depends: + , aeson + , bytestring + , containers + , deepseq + , Diff ^>=0.5 || ^>=1.0.0 + , dlist + , extra + , filepath + , ghc + , ghc-boot-th + , ghcide == 2.11.0.0 + , hls-graph + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , lsp-types + , megaparsec >=9.0 + , mtl + , parser-combinators >=1.2 + , text + , 
text-rope + , transformers + , unliftio + , unordered-containers + + default-extensions: + DataKinds + +test-suite hls-eval-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(eval) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-eval-plugin/test + main-is: Main.hs + ghc-options: -fno-ignore-asserts + build-depends: + , aeson + , containers + , extra + , filepath + , haskell-language-server:hls-eval-plugin + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + +----------------------------- +-- import lens plugin +----------------------------- + +flag importLens + description: Enable importLens plugin + default: True + manual: False + +common importLens + if flag(importLens) + build-depends: haskell-language-server:hls-explicit-imports-plugin + cpp-options: -Dhls_importLens + +library hls-explicit-imports-plugin + import: defaults, pedantic, warnings + if !flag(importlens) + buildable: False + exposed-modules: Ide.Plugin.ExplicitImports + hs-source-dirs: plugins/hls-explicit-imports-plugin/src + build-depends: + , aeson + , containers + , deepseq + , ghc + , ghcide == 2.11.0.0 + , hls-graph + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , mtl + , text + , transformers + + default-extensions: + DataKinds + +test-suite hls-explicit-imports-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(importlens) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-explicit-imports-plugin/test + main-is: Main.hs + build-depends: + , extra + , filepath + , haskell-language-server:hls-explicit-imports-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + +----------------------------- +-- rename plugin +----------------------------- + +flag rename + description: Enable rename plugin + default: True + manual: True + +common rename + if flag(rename) + build-depends: haskell-language-server:hls-rename-plugin + cpp-options: -Dhls_rename + +library hls-rename-plugin + import: defaults, pedantic, warnings + if !flag(rename) + buildable: False + exposed-modules: Ide.Plugin.Rename + hs-source-dirs: plugins/hls-rename-plugin/src + build-depends: + , containers + , ghc + , ghcide == 2.11.0.0 + , hashable + , hiedb ^>= 0.7.0.0 + , hls-plugin-api == 2.11.0.0 + , haskell-language-server:hls-refactor-plugin + , lens + , lsp-types + , mtl + , mod + , syb + , text + , transformers + , unordered-containers + + +test-suite hls-rename-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(rename) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-rename-plugin/test + main-is: Main.hs + build-depends: + , aeson + , containers + , filepath + , hls-plugin-api + , haskell-language-server:hls-rename-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + +----------------------------- +-- retrie plugin +----------------------------- + +flag retrie + description: Enable retrie plugin + default: True + manual: True + +common retrie + if flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) + build-depends: haskell-language-server:hls-retrie-plugin + cpp-options: -Dhls_retrie + +library hls-retrie-plugin + import: defaults, pedantic, warnings + if !(flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) + buildable: False + exposed-modules: Ide.Plugin.Retrie + hs-source-dirs: plugins/hls-retrie-plugin/src + build-depends: + , aeson + , bytestring + , containers + , extra + , ghc + , 
ghcide == 2.11.0.0 + , hashable + , hls-plugin-api == 2.11.0.0 + , haskell-language-server:hls-refactor-plugin + , lens + , lsp + , lsp-types + , mtl + , retrie >=0.1.1.0 + , safe-exceptions + , stm + , text + , text-rope + , transformers + , unordered-containers + + default-extensions: + DataKinds + +test-suite hls-retrie-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !(flag(retrie) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-retrie-plugin/test + main-is: Main.hs + build-depends: + , containers + , filepath + , hls-plugin-api + , haskell-language-server:{hls-refactor-plugin, hls-retrie-plugin} + , hls-test-utils == 2.11.0.0 + , text + +----------------------------- +-- hlint plugin +----------------------------- + +flag ghc-lib + description: + Use ghc-lib-parser rather than the ghc library (requires hlint and + ghc-lib-parser-ex to also be built with it) + default: True + manual: True + +flag hlint + description: Enable hlint plugin + default: True + manual: True + +common hlint + if flag(hlint) + build-depends: haskell-language-server:hls-hlint-plugin + cpp-options: -Dhls_hlint + +library hls-hlint-plugin + import: defaults, pedantic, warnings + -- https://github.com/ndmitchell/hlint/pull/1594 + if !flag(hlint) + buildable: False + exposed-modules: Ide.Plugin.Hlint + hs-source-dirs: plugins/hls-hlint-plugin/src + build-depends: + , aeson + , bytestring + , containers + , deepseq + , filepath + , ghcide == 2.11.0.0 + , hashable + , hlint >= 3.5 && < 3.11 + , hls-plugin-api == 2.11.0.0 + , lens + , mtl + , refact + , regex-tdfa + , stm + , temporary + , text + , text-rope + , transformers + , unordered-containers + , ghc-lib-parser-ex + , lsp-types + + -- apply-refact doesn't work on 9.10, or even have a buildable + -- configuration + if impl(ghc >= 9.11) || impl(ghc < 9.10) + cpp-options: -DAPPLY_REFACT + build-depends: apply-refact + + if flag(ghc-lib) + cpp-options: -DGHC_LIB + build-depends: + ghc-lib-parser + else + build-depends: + ghc + , ghc-boot + + default-extensions: + DataKinds + +test-suite hls-hlint-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(hlint) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-hlint-plugin/test + main-is: Main.hs + -- Work around https://gitlab.haskell.org/ghc/ghc/-/issues/24648 + if os(darwin) + ghc-options: -optl-Wl,-ld_classic + + build-depends: + aeson + , containers + , filepath + , haskell-language-server:hls-hlint-plugin + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + +----------------------------- +-- stan plugin +----------------------------- + +flag stan + description: Enable stan plugin + default: True + manual: True + +common stan + if flag(stan) && (impl(ghc < 9.11) || flag(ignore-plugins-ghc-bounds)) + build-depends: haskell-language-server:hls-stan-plugin + cpp-options: -Dhls_stan + +library hls-stan-plugin + import: defaults, pedantic, warnings + if !flag(stan) || (impl(ghc > 9.11) && !flag(ignore-plugins-ghc-bounds)) + buildable: False + exposed-modules: Ide.Plugin.Stan + hs-source-dirs: plugins/hls-stan-plugin/src + build-depends: + , deepseq + , hashable + , hls-plugin-api + , ghcide + , lsp-types + , text + , unordered-containers + , stan >= 0.2.1.0 + , trial + , directory + + default-extensions: + LambdaCase + TypeFamilies + DuplicateRecordFields + OverloadedStrings + +test-suite 
hls-stan-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(stan) || (impl(ghc > 9.11) && !flag(ignore-plugins-ghc-bounds)) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-stan-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-stan-plugin + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + default-extensions: + OverloadedStrings + +----------------------------- +-- module name plugin +----------------------------- + +flag moduleName + description: Enable moduleName plugin + default: True + manual: True + +common moduleName + if flag(moduleName) + build-depends: haskell-language-server:hls-module-name-plugin + cpp-options: -Dhls_moduleName + +library hls-module-name-plugin + import: defaults, pedantic, warnings + if !flag(modulename) + buildable: False + exposed-modules: Ide.Plugin.ModuleName + hs-source-dirs: plugins/hls-module-name-plugin/src + build-depends: + , aeson + , containers + , filepath + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp + , text + , text-rope + , transformers + + +test-suite hls-module-name-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(modulename) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-module-name-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-module-name-plugin + , hls-test-utils == 2.11.0.0 + +----------------------------- +-- pragmas plugin +----------------------------- + +flag pragmas + description: Enable pragmas plugin + default: True + manual: True + +common pragmas + if flag(pragmas) + build-depends: haskell-language-server:hls-pragmas-plugin + cpp-options: -Dhls_pragmas + +library hls-pragmas-plugin + import: defaults, pedantic, warnings + if !flag(pragmas) + buildable: False + exposed-modules: Ide.Plugin.Pragmas + hs-source-dirs: plugins/hls-pragmas-plugin/src + build-depends: + , aeson + , extra + , fuzzy + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lens-aeson + , lsp + , text + , transformers + , containers + +test-suite hls-pragmas-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(pragmas) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-pragmas-plugin/test + main-is: Main.hs + build-depends: + , aeson + , filepath + , haskell-language-server:hls-pragmas-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp-types + , text + +----------------------------- +-- splice plugin +----------------------------- + +flag splice + description: Enable splice plugin + default: True + manual: True + +common splice + if flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) + build-depends: haskell-language-server:hls-splice-plugin + cpp-options: -Dhls_splice + +library hls-splice-plugin + import: defaults, pedantic, warnings + if !(flag(splice) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) + buildable: False + exposed-modules: + Ide.Plugin.Splice + Ide.Plugin.Splice.Types + + hs-source-dirs: plugins/hls-splice-plugin/src + build-depends: + , aeson + , extra + , foldl + , ghc + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , haskell-language-server:hls-refactor-plugin + , lens + , lsp + , mtl + , syb + , text + , transformers + , unliftio-core + + default-extensions: + DataKinds + +test-suite hls-splice-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !(flag(splice) && (impl(ghc 
< 9.10) || flag(ignore-plugins-ghc-bounds))) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-splice-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-splice-plugin + , hls-test-utils == 2.11.0.0 + , text + +----------------------------- +-- alternate number format plugin +----------------------------- + +flag alternateNumberFormat + description: Enable Alternate Number Format plugin + default: True + manual: True + +common alternateNumberFormat + if flag(alternateNumberFormat) + build-depends: haskell-language-server:hls-alternate-number-format-plugin + cpp-options: -Dhls_alternateNumberFormat + +library hls-alternate-number-format-plugin + import: defaults, pedantic, warnings + if !flag(alternateNumberFormat) + buildable: False + exposed-modules: Ide.Plugin.AlternateNumberFormat, Ide.Plugin.Conversion + other-modules: Ide.Plugin.Literals + hs-source-dirs: plugins/hls-alternate-number-format-plugin/src + build-depends: + , containers + , extra + , ghcide == 2.11.0.0 + , ghc-boot-th + , hls-graph + , hls-plugin-api == 2.11.0.0 + , lens + , lsp ^>=2.7 + , mtl + , regex-tdfa + , syb + , text + + default-extensions: + LambdaCase + OverloadedStrings + RecordWildCards + +test-suite hls-alternate-number-format-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(alternateNumberFormat) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-alternate-number-format-plugin/test + other-modules: Properties.Conversion + main-is: Main.hs + ghc-options: -fno-ignore-asserts + build-depends: + , filepath + , haskell-language-server:hls-alternate-number-format-plugin + , hls-test-utils == 2.11.0.0 + , regex-tdfa + , tasty-quickcheck + , text + + default-extensions: + LambdaCase + OverloadedStrings + RecordWildCards + +----------------------------- +-- qualify imported names plugin +----------------------------- + +flag qualifyImportedNames + description: Enable qualifyImportedNames plugin + default: True + manual: True + +common qualifyImportedNames + if flag(qualifyImportedNames) + build-depends: haskell-language-server:hls-qualify-imported-names-plugin + cpp-options: -Dhls_qualifyImportedNames + +library hls-qualify-imported-names-plugin + import: defaults, pedantic, warnings + if !flag(qualifyImportedNames) + buildable: False + exposed-modules: Ide.Plugin.QualifyImportedNames + hs-source-dirs: plugins/hls-qualify-imported-names-plugin/src + build-depends: + , containers + , ghc + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , text + , text-rope + , dlist + , transformers + + default-extensions: + DataKinds + +test-suite hls-qualify-imported-names-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(qualifyImportedNames) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-qualify-imported-names-plugin/test + main-is: Main.hs + build-depends: + , text + , filepath + , haskell-language-server:hls-qualify-imported-names-plugin + , hls-test-utils == 2.11.0.0 + +----------------------------- +-- code range plugin +----------------------------- + +flag codeRange + description: Enable Code Range plugin + default: True + manual: True + +common codeRange + if flag(codeRange) + build-depends: haskell-language-server:hls-code-range-plugin + cpp-options: -Dhls_codeRange + +library hls-code-range-plugin + import: defaults, pedantic, warnings + if !flag(codeRange) + buildable: False + exposed-modules: + Ide.Plugin.CodeRange + 
Ide.Plugin.CodeRange.Rules + other-modules: + Ide.Plugin.CodeRange.ASTPreProcess + hs-source-dirs: plugins/hls-code-range-plugin/src + build-depends: + , containers + , deepseq + , extra + , ghc + , ghcide == 2.11.0.0 + , hashable + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , mtl + , semigroupoids + , transformers + , vector + +test-suite hls-code-range-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(codeRange) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-code-range-plugin/test + main-is: Main.hs + other-modules: + Ide.Plugin.CodeRangeTest + Ide.Plugin.CodeRange.RulesTest + build-depends: + , bytestring + , filepath + , haskell-language-server:hls-code-range-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp + , lsp-test + , transformers + , vector + +----------------------------- +-- change type signature plugin +----------------------------- + +flag changeTypeSignature + description: Enable changeTypeSignature plugin + default: True + manual: True + +common changeTypeSignature + if flag(changeTypeSignature) + build-depends: haskell-language-server:hls-change-type-signature-plugin + cpp-options: -Dhls_changeTypeSignature + +library hls-change-type-signature-plugin + import: defaults, pedantic, warnings + if !flag(changeTypeSignature) + buildable: False + exposed-modules: Ide.Plugin.ChangeTypeSignature + hs-source-dirs: plugins/hls-change-type-signature-plugin/src + build-depends: + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp-types + , regex-tdfa + , syb + , text + , transformers + , containers + , ghc + default-extensions: + DataKinds + ExplicitNamespaces + OverloadedStrings + RecordWildCards + + +test-suite hls-change-type-signature-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(changeTypeSignature) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-change-type-signature-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-change-type-signature-plugin + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , regex-tdfa + , text + default-extensions: + OverloadedStrings + ViewPatterns + +----------------------------- +-- gadt plugin +----------------------------- + +flag gadt + description: Enable gadt plugin + default: True + manual: True + +common gadt + if flag(gadt) + build-depends: haskell-language-server:hls-gadt-plugin + cpp-options: -Dhls_gadt + +library hls-gadt-plugin + import: defaults, pedantic, warnings + if !flag(gadt) + buildable: False + exposed-modules: Ide.Plugin.GADT + other-modules: Ide.Plugin.GHC + hs-source-dirs: plugins/hls-gadt-plugin/src + build-depends: + , aeson + , containers + , extra + , ghc + , ghcide == 2.11.0.0 + , ghc-exactprint + , hls-plugin-api == 2.11.0.0 + , haskell-language-server:hls-refactor-plugin + , lens + , lsp >=2.7 + , mtl + , text + , transformers + + default-extensions: DataKinds + +test-suite hls-gadt-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(gadt) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-gadt-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-gadt-plugin + , hls-test-utils == 2.11.0.0 + , text + +----------------------------- +-- explicit fixity plugin +----------------------------- + +flag explicitFixity + description: Enable explicitFixity plugin + default: True + manual: True + +common explicitFixity + if flag(explicitFixity) + 
build-depends: haskell-language-server:hls-explicit-fixity-plugin + cpp-options: -DexplicitFixity + +library hls-explicit-fixity-plugin + import: defaults, pedantic, warnings + if !flag(explicitFixity) + buildable: False + exposed-modules: Ide.Plugin.ExplicitFixity + hs-source-dirs: plugins/hls-explicit-fixity-plugin/src + build-depends: + , containers + , deepseq + , extra + , ghcide == 2.11.0.0 + , hashable + , hls-plugin-api == 2.11.0.0 + , lsp >=2.7 + , text -flag agpl - Description: Enable AGPL dependencies - Default: True - Manual: False + default-extensions: DataKinds -flag pedantic - Description: Enable -Werror - Default: False - Manual: True +test-suite hls-explicit-fixity-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(explicitFixity) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-explicit-fixity-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-explicit-fixity-plugin + , hls-test-utils == 2.11.0.0 + , text -source-repository head - type: git - location: https://github.com/haskell/haskell-language-server +----------------------------- +-- explicit fields plugin +----------------------------- -common agpl - if flag(agpl) - cpp-options: - -DAGPL +flag explicitFields + description: Enable explicitFields plugin + default: True + manual: True -library - import: agpl - exposed-modules: - Ide.Logger - Ide.Plugin - Ide.Plugin.Config - Ide.Plugin.Eval - Ide.Plugin.Example - Ide.Plugin.Example2 - Ide.Plugin.GhcIde - Ide.Plugin.Ormolu - Ide.Plugin.Pragmas - Ide.Plugin.Floskell - Ide.Plugin.Formatter - Ide.Plugin.StylishHaskell - Ide.PluginUtils - Ide.Types - Ide.Version - other-modules: - Paths_haskell_language_server - hs-source-dirs: - src +common explicitFields + if flag(explicitFields) + build-depends: haskell-language-server:hls-explicit-record-fields-plugin + cpp-options: -DexplicitFields + +library hls-explicit-record-fields-plugin + import: defaults, pedantic, warnings + if !flag(explicitFields) + buildable: False + exposed-modules: Ide.Plugin.ExplicitFields + build-depends: + , ghc + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp + , lens + , hls-graph + , text + , syb + , transformers + , containers + , aeson + hs-source-dirs: plugins/hls-explicit-record-fields-plugin/src + + if flag(pedantic) + ghc-options: -Wwarn=incomplete-record-updates + +test-suite hls-explicit-record-fields-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(explicitFields) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-explicit-record-fields-plugin/test + main-is: Main.hs + build-depends: + , filepath + , text + , ghcide + , haskell-language-server:hls-explicit-record-fields-plugin + , hls-test-utils == 2.11.0.0 + +----------------------------- +-- overloaded record dot plugin +----------------------------- + +flag overloadedRecordDot + description: Enable overloadedRecordDot plugin + default: True + manual: True + +common overloadedRecordDot + if flag(overloadedRecordDot) + build-depends: haskell-language-server:hls-overloaded-record-dot-plugin + cpp-options: -Dhls_overloaded_record_dot + +library hls-overloaded-record-dot-plugin + import: defaults, pedantic, warnings + if !flag(overloadedRecordDot) + buildable: False + exposed-modules: Ide.Plugin.OverloadedRecordDot + build-depends: + , aeson + , ghcide + , hls-plugin-api + , lsp + , lens + , hls-graph + , text + , syb + , transformers + , containers + , deepseq + 
hs-source-dirs: plugins/hls-overloaded-record-dot-plugin/src + +test-suite hls-overloaded-record-dot-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(overloadedRecordDot) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-overloaded-record-dot-plugin/test + main-is: Main.hs + build-depends: + , filepath + , text + , haskell-language-server:hls-overloaded-record-dot-plugin + , hls-test-utils == 2.11.0.0 + + +----------------------------- +-- floskell plugin +----------------------------- + +flag floskell + description: Enable floskell plugin + default: True + manual: True + +common floskell + if flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) + build-depends: haskell-language-server:hls-floskell-plugin + cpp-options: -Dhls_floskell + +library hls-floskell-plugin + import: defaults, pedantic, warnings + -- https://github.com/ennocramer/floskell/pull/82 + if !(flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) + buildable: False + exposed-modules: Ide.Plugin.Floskell + hs-source-dirs: plugins/hls-floskell-plugin/src + build-depends: + , floskell ^>=0.11.0 + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp-types ^>=2.3 + , mtl + , text + + +test-suite hls-floskell-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !(flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds))) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-floskell-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-floskell-plugin + , hls-test-utils == 2.11.0.0 + +----------------------------- +-- fourmolu plugin +----------------------------- + +flag fourmolu + description: Enable fourmolu plugin + default: True + manual: True + +common fourmolu + if flag(fourmolu) + build-depends: haskell-language-server:hls-fourmolu-plugin + cpp-options: -Dhls_fourmolu + +library hls-fourmolu-plugin + import: defaults, pedantic, warnings + if !flag(fourmolu) + buildable: False + exposed-modules: Ide.Plugin.Fourmolu + hs-source-dirs: plugins/hls-fourmolu-plugin/src + build-depends: + , filepath + , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 || ^>=0.17 || ^>=0.18 + , ghc-boot-th + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , mtl + , process-extras >= 0.7.1 + , text + , transformers + , yaml + +test-suite hls-fourmolu-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(fourmolu) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-fourmolu-plugin/test + main-is: Main.hs + -- Work around https://gitlab.haskell.org/ghc/ghc/-/issues/24648 + if os(darwin) + ghc-options: -optl-Wl,-ld_classic + build-tool-depends: + fourmolu:fourmolu + build-depends: + , aeson + , filepath + , haskell-language-server:hls-fourmolu-plugin + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , lsp-test + +----------------------------- +-- ormolu plugin +----------------------------- + +flag ormolu + description: Enable ormolu plugin + default: True + manual: True + +common ormolu + if flag(ormolu) + build-depends: haskell-language-server:hls-ormolu-plugin + cpp-options: -Dhls_ormolu + +library hls-ormolu-plugin + import: defaults, pedantic, warnings + if !flag(ormolu) + buildable: False + exposed-modules: Ide.Plugin.Ormolu + hs-source-dirs: plugins/hls-ormolu-plugin/src + build-depends: + , extra + , filepath + , ghc-boot-th + , ghcide == 
2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp + , mtl + , process-extras >= 0.7.1 + , ormolu ^>=0.5.3 || ^>= 0.6 || ^>= 0.7 || ^>=0.8 + , text + , transformers + + +test-suite hls-ormolu-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(ormolu) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-ormolu-plugin/test + main-is: Main.hs + -- Work around https://gitlab.haskell.org/ghc/ghc/-/issues/24648 + if os(darwin) + ghc-options: -optl-Wl,-ld_classic + build-tool-depends: + ormolu:ormolu build-depends: - base >=4.12 && <5 , aeson - , binary + , filepath + , haskell-language-server:hls-ormolu-plugin + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , lsp-types + , ormolu + +----------------------------- +-- stylish-haskell plugin +----------------------------- + +flag stylishHaskell + description: Enable stylishHaskell plugin + default: True + manual: True + +common stylishHaskell + if flag(stylishHaskell) + build-depends: haskell-language-server:hls-stylish-haskell-plugin + cpp-options: -Dhls_stylishHaskell + +library hls-stylish-haskell-plugin + import: defaults, pedantic, warnings + -- https://github.com/haskell/stylish-haskell/issues/479 + if !flag(stylishHaskell) + buildable: False + exposed-modules: Ide.Plugin.StylishHaskell + hs-source-dirs: plugins/hls-stylish-haskell-plugin/src + build-depends: + , directory + , filepath + , ghc-boot-th + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp-types + , mtl + , stylish-haskell >=0.12 && <0.16 + , text + + +test-suite hls-stylish-haskell-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(stylishHaskell) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-stylish-haskell-plugin/test + main-is: Main.hs + build-depends: + , filepath + , haskell-language-server:hls-stylish-haskell-plugin + , hls-test-utils == 2.11.0.0 + +----------------------------- +-- refactor plugin +----------------------------- + +flag refactor + description: Enable refactor plugin + default: True + manual: True + +common refactor + if flag(refactor) + build-depends: haskell-language-server:hls-refactor-plugin + cpp-options: -Dhls_refactor + +library hls-refactor-plugin + import: defaults, pedantic, warnings + if !flag(refactor) + buildable: False + exposed-modules: Development.IDE.GHC.ExactPrint + Development.IDE.GHC.Compat.ExactPrint + Development.IDE.Plugin.CodeAction + Development.IDE.Plugin.CodeAction.Util + Development.IDE.GHC.Dump + other-modules: Development.IDE.Plugin.CodeAction.Args + Development.IDE.Plugin.CodeAction.ExactPrint + Development.IDE.Plugin.CodeAction.PositionIndexed + Development.IDE.Plugin.Plugins.AddArgument + Development.IDE.Plugin.Plugins.Diagnostic + Development.IDE.Plugin.Plugins.FillHole + Development.IDE.Plugin.Plugins.FillTypeWildcard + Development.IDE.Plugin.Plugins.ImportUtils + default-extensions: + CPP + DataKinds + DerivingStrategies + DerivingVia + DuplicateRecordFields + ExplicitNamespaces + FunctionalDependencies + LambdaCase + OverloadedStrings + PatternSynonyms + RecordWildCards + ViewPatterns + hs-source-dirs: plugins/hls-refactor-plugin/src + build-depends: + , ghc , bytestring + , ghc-boot + , regex-tdfa + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lsp + , text + , text-rope + , transformers + , unordered-containers , containers - , data-default + , ghc-exactprint < 1 || >= 1.4 + , extra + , syb + , hls-graph + , dlist , deepseq - , Diff + , mtl + , lens 
+ , time + -- FIXME: Only needed to workaround for qualified imports in GHC 9.4 + , regex-applicative + , parser-combinators + if impl(ghc < 9.10) + build-depends: data-default + +test-suite hls-refactor-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(refactor) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-refactor-plugin/test + main-is: Main.hs + other-modules: Test.AddArgument + ghc-options: -O0 + build-depends: + , data-default , directory , extra , filepath - , floskell == 0.10.* + , ghcide:ghcide + , haskell-language-server:hls-refactor-plugin + , hls-test-utils == 2.11.0.0 + , lens + , lsp-test + , lsp-types + , parser-combinators + , regex-tdfa + , shake + , tasty + , tasty-expected-failure + , tasty-hunit + , text + +----------------------------- +-- semantic tokens plugin +----------------------------- + +flag semanticTokens + description: Enable semantic tokens plugin + default: True + manual: True + +common semanticTokens + if flag(semanticTokens) + build-depends: haskell-language-server:hls-semantic-tokens-plugin + cpp-options: -Dhls_semanticTokens + +library hls-semantic-tokens-plugin + import: defaults, pedantic, warnings + if !flag(semanticTokens) + buildable: False + exposed-modules: + Ide.Plugin.SemanticTokens + Ide.Plugin.SemanticTokens.Types + Ide.Plugin.SemanticTokens.Mappings + other-modules: + Ide.Plugin.SemanticTokens.Query + Ide.Plugin.SemanticTokens.SemanticConfig + Ide.Plugin.SemanticTokens.Utils + Ide.Plugin.SemanticTokens.Tokenize + Ide.Plugin.SemanticTokens.Internal + + hs-source-dirs: plugins/hls-semantic-tokens-plugin/src + build-depends: + , containers + , extra + , text-rope + , mtl >= 2.2 , ghc - , ghcide >= 0.1 - , gitrev - , hashable - , haskell-lsp == 0.22.* - , hie-bios ^>= 0.6.1 - , hslogger + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 , lens - , ormolu ^>= 0.1.2 - , optparse-simple - , process - , regex-tdfa >= 1.3.1.0 - , shake >= 0.17.5 - , stylish-haskell == 0.11.* - , temporary + , lsp >=2.6 , text - , time , transformers + , bytestring + , syb + , array + , deepseq + , dlist + , hls-graph == 2.11.0.0 + , template-haskell + , data-default + , stm + , stm-containers + + default-extensions: DataKinds + +test-suite hls-semantic-tokens-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(semanticTokens) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-semantic-tokens-plugin/test + main-is: SemanticTokensTest.hs + + build-depends: + , aeson + , containers + , data-default + , filepath + , ghcide == 2.11.0.0 + , haskell-language-server:hls-semantic-tokens-plugin + , hls-plugin-api == 2.11.0.0 + , hls-test-utils == 2.11.0.0 + , lens + , lsp + , lsp-test + , text + , text-rope + +----------------------------- +-- notes plugin +----------------------------- + +flag notes + description: Enable notes plugin + default: True + manual: True + +common notes + if flag(notes) + build-depends: haskell-language-server:hls-notes-plugin + cpp-options: -Dhls_notes + +library hls-notes-plugin + import: defaults, pedantic, warnings + if !flag(notes) + buildable: False + exposed-modules: + Ide.Plugin.Notes + hs-source-dirs: plugins/hls-notes-plugin/src + build-depends: + , array + , ghcide == 2.11.0.0 + , hls-graph == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp >=2.7 + , mtl >= 2.2 + , regex-tdfa >= 1.3.1 + , text + , text-rope , unordered-containers - if os(windows) - build-depends: Win32 - else - build-depends: unix - if flag(agpl) - 
build-depends: - brittany - exposed-modules: - Ide.Plugin.Brittany + default-extensions: + DataKinds + , DeriveAnyClass + , DerivingStrategies + , OverloadedStrings + , LambdaCase + , TypeFamilies - ghc-options: - -Wall - -Wredundant-constraints - -Wno-name-shadowing - if flag(pedantic) - ghc-options: -Werror +test-suite hls-notes-plugin-tests + import: defaults, pedantic, test-defaults, warnings + if !flag(notes) + buildable: False + type: exitcode-stdio-1.0 + hs-source-dirs: plugins/hls-notes-plugin/test + main-is: NotesTest.hs + build-depends: + , filepath + , haskell-language-server:hls-notes-plugin + , hls-test-utils == 2.11.0.0 + default-extensions: OverloadedStrings - default-language: Haskell2010 +---------------------------- +---------------------------- +-- HLS +---------------------------- +---------------------------- -executable haskell-language-server - import: agpl - main-is: Main.hs - hs-source-dirs: - exe - other-modules: - Arguments - Paths_haskell_language_server - autogen-modules: - Paths_haskell_language_server - ghc-options: - -threaded - -Wall - -Wno-name-shadowing - -Wredundant-constraints - -- allow user RTS overrides - -rtsopts - -- disable idle GC - -- disable parallel GC - -- increase nursery size - "-with-rtsopts=-I0 -qg -A128M" - if flag(pedantic) - ghc-options: -Werror +library + import: defaults + , warnings + , pedantic + -- plugins + , cabal + , callHierarchy + , cabalfmt + , cabalgild + , changeTypeSignature + , class + , eval + , importLens + , rename + , retrie + , hlint + , stan + , moduleName + , pragmas + , splice + , alternateNumberFormat + , qualifyImportedNames + , codeRange + , gadt + , explicitFixity + , explicitFields + , floskell + , fourmolu + , ormolu + , stylishHaskell + , refactor + , overloadedRecordDot + , semanticTokens + , notes + exposed-modules: + Ide.Arguments + Ide.Main + Ide.Version + HlsPlugins + + other-modules: Paths_haskell_language_server + autogen-modules: Paths_haskell_language_server + hs-source-dirs: src build-depends: - base >=4.7 && <5 - , binary - , containers + , aeson-pretty , data-default , directory , extra , filepath - , process , ghc - -------------------------------------------------------------- - , ghcide - , gitrev - , hashable - , haskell-lsp + , ghcide == 2.11.0.0 + , githash >=0.1.6.1 , hie-bios - , haskell-language-server - , hslogger + , hls-plugin-api == 2.11.0.0 , optparse-applicative - , safe-exceptions - , shake >= 0.17.5 + , optparse-simple + , prettyprinter >= 1.7 + , process , text - , time - , unordered-containers - default-language: Haskell2010 -executable haskell-language-server-wrapper - import: agpl - main-is: Wrapper.hs - hs-source-dirs: - exe - other-modules: - Arguments - Paths_haskell_language_server - autogen-modules: - Paths_haskell_language_server + default-extensions: DataKinds + +executable haskell-language-server + import: defaults + , warnings + , pedantic + main-is: Main.hs + hs-source-dirs: exe + ghc-options: - -threaded - -Wall - -Wno-name-shadowing - -Wredundant-constraints - -- allow user RTS overrides - -rtsopts - -- disable idle GC - -- disable parallel GC - -- increase nursery size - "-with-rtsopts=-I0 -qg -A128M" + -threaded + -- allow user RTS overrides + -rtsopts + -- disable idle GC + -- increase nursery size + -- Enable collection of heap statistics + "-with-rtsopts=-I0 -A128M -T" if flag(pedantic) - ghc-options: -Werror + ghc-options: -Werror + if !os(windows) && flag(dynamic) + -- We want to link against the dyn rts just like official GHC binaries do; + -- the linked 
rts determines how external libs are loaded dynamically by TH. + -- The standard way of doing this is via the --enable-dynamic-executables Cabal option + -- Unfortunately it doesn't work, see https://github.com/haskell/haskell-language-server/issues/2659 + -- One can use --ghc-options=-dynamic but this gets applied to the dependencies as well, + -- which results in massive rebuilds and incompatibilities with profiling. + -- So instead we set the -dynamic flag directly here. + ghc-options: -dynamic + + build-depends: + , haskell-language-server + , hls-plugin-api + , lsp + , prettyprinter >= 1.7 + , text + + default-extensions: DataKinds + +executable haskell-language-server-wrapper + import: defaults + , warnings + , pedantic + main-is: Wrapper.hs + hs-source-dirs: exe + other-modules: Paths_haskell_language_server + autogen-modules: Paths_haskell_language_server + ghc-options: + -threaded + -- allow user RTS overrides + -rtsopts + -- disable idle GC + -- increase nursery size + "-with-rtsopts=-I0 -A128M" + build-depends: - base + , data-default , directory , extra , filepath - , gitrev - , ghc - , ghc-paths - , hie-bios + , ghcide , haskell-language-server - , optparse-applicative - , process - default-language: Haskell2010 - --- This common stanza simulates a previous private lib --- We removed it due to issues with stack when loading the project using a stack based hie.yaml --- See https://github.com/haskell/haskell-language-server/issues/114 -common hls-test-utils - import: agpl - hs-source-dirs: test/utils - other-modules: Test.Hls.Util - build-depends: base - , haskell-language-server - , haskell-lsp - , hie-bios - , aeson - , blaze-markup - , containers - , data-default - , directory - , filepath - , hslogger - , hspec - , hspec-core - , lsp-test - , stm - , tasty-hunit - , temporary - , text - , transformers - , unordered-containers - , yaml - ghc-options: -Wall -Wredundant-constraints - if flag(pedantic) - ghc-options: -Werror - default-language: Haskell2010 + , hie-bios + , hls-plugin-api + , lsp + , lsp-types + , text + , transformers + , unliftio-core + if !os(windows) + build-depends: + , unix + , containers + else + build-depends: + , process test-suite func-test - import: agpl, hls-test-utils - type: exitcode-stdio-1.0 - default-language: Haskell2010 - build-tool-depends: haskell-language-server:haskell-language-server - , ghcide:ghcide-test-preprocessor - build-depends: base >=4.7 && <5 - , aeson - , bytestring - , data-default - , directory - , filepath - , haskell-language-server - , haskell-lsp - , haskell-lsp-types - , hspec-expectations - , lens - , lsp-test >= 0.10.0.0 - , tasty - , tasty-ant-xml >= 1.1.6 - , tasty-expected-failure - , tasty-golden - , tasty-hunit - , tasty-rerun - , text - , unordered-containers - hs-source-dirs: test/functional - main-is: Main.hs - other-modules: Command - , Completion - , Deferred - , Definition - , Diagnostic - , Eval - , Format - , FunctionalBadProject - , FunctionalCodeAction - , FunctionalLiquid - , HieBios - , Highlight - , Progress - , Reference - , Rename - , Symbol - , TypeDefinition - ghc-options: -Wall - -Wno-name-shadowing - -threaded -rtsopts -with-rtsopts=-N - if flag(pedantic) - ghc-options: -Werror -Wredundant-constraints + import: defaults + , test-defaults + , warnings + , pedantic + , refactor + type: exitcode-stdio-1.0 + build-tool-depends: + haskell-language-server:haskell-language-server, + + build-depends: + , aeson + , bytestring + , containers + , deepseq + , extra
+ , filepath + , ghcide:ghcide + , hashable + , hls-plugin-api + , hls-test-utils == 2.11.0.0 + , lens + , lsp-test + , lsp-types + , text + , unordered-containers + + hs-source-dirs: test/functional test/utils + + main-is: Main.hs + other-modules: + Config + ConfigSchema + Format + FunctionalBadProject + HieBios + Progress + Test.Hls.Command + Test.Hls.Flags + + default-extensions: OverloadedStrings + +-- Duplicating inclusion plugin conditions until tests are moved to their own packages + if flag(eval) + cpp-options: -Dhls_eval +-- formatters + if flag(floskell) && (impl(ghc < 9.10) || flag(ignore-plugins-ghc-bounds)) + cpp-options: -Dhls_floskell + if flag(fourmolu) + cpp-options: -Dhls_fourmolu + if flag(ormolu) + cpp-options: -Dhls_ormolu test-suite wrapper-test - import: agpl, hls-test-utils - type: exitcode-stdio-1.0 - build-tool-depends: haskell-language-server:haskell-language-server-wrapper - default-language: Haskell2010 - build-depends: base == 4.* - , directory - , process - , tasty - , tasty-hunit - , tasty-ant-xml >= 1.1.6 - hs-source-dirs: test/wrapper - main-is: Main.hs - ghc-options: -Wall + import: defaults + , warnings + , pedantic + type: exitcode-stdio-1.0 + build-tool-depends: + haskell-language-server:haskell-language-server-wrapper, + haskell-language-server:haskell-language-server + + build-depends: + , extra + , hls-test-utils == 2.11.0.0 + , process + + hs-source-dirs: test/wrapper + main-is: Main.hs + +benchmark benchmark + import: defaults, warnings + -- Depends on shake-bench which is unbuildable after this point + type: exitcode-stdio-1.0 + ghc-options: -threaded + main-is: Main.hs + hs-source-dirs: bench + build-tool-depends: + haskell-language-server:ghcide-bench, + eventlog2html:eventlog2html, + default-extensions: + LambdaCase + RecordWildCards + ViewPatterns + + build-depends: + , aeson + , containers + , data-default + , directory + , extra + , filepath + , haskell-language-server:ghcide-bench-lib + , haskell-language-server + , hls-plugin-api + , lens + , lens-aeson + , shake + , shake-bench == 0.2.* + , text + , yaml + +flag test-exe + description: Build the ghcide-test-preprocessor executable + default: True + +executable ghcide-test-preprocessor + import: warnings + default-language: GHC2021 + hs-source-dirs: ghcide-test/preprocessor + main-is: Main.hs + build-depends: base >=4 && <5 + + if !flag(test-exe) + buildable: False + +test-suite ghcide-tests + import: warnings, defaults + type: exitcode-stdio-1.0 + default-language: GHC2021 + build-tool-depends: + , ghcide:ghcide + , haskell-language-server:ghcide-test-preprocessor + , implicit-hie:gen-hie + + build-depends: + , aeson + , containers + , data-default + , directory + , enummapset + , extra + , filepath + , ghcide + , hls-plugin-api + , lens + , list-t + , lsp + , lsp-test ^>=0.17.1 + , lsp-types + , mtl + , network-uri + , QuickCheck + , random + , regex-tdfa ^>=1.3.1 + , shake + , sqlite-simple + , stm + , stm-containers + , tasty + , tasty-expected-failure + , tasty-hunit >=0.10 + , tasty-quickcheck + , tasty-rerun + , text + , text-rope + , unordered-containers + , hls-test-utils == 2.11.0.0 + + if impl(ghc <9.3) + build-depends: ghc-typelits-knownnat + + hs-source-dirs: ghcide-test/exe + ghc-options: -threaded -O0 + + main-is: Main.hs + other-modules: + Config + AsyncTests + BootTests + ClientSettingsTests + CodeLensTests + CompletionTests + CPPTests + CradleTests + DependentFileTest + DiagnosticTests + ExceptionTests + FindDefinitionAndHoverTests + FindImplementationAndHoverTests + 
FuzzySearch + GarbageCollectionTests + HaddockTests + HieDbRetry + HighlightTests + IfaceTests + InitializeResponseTests + LogType + NonLspCommandLine + OpenCloseTest + OutlineTests + PluginSimpleTests + PositionMappingTests + PreprocessorTests + Progress + ReferenceTests + ResolveTests + RootUriTests + SafeTests + SymlinkTests + THTests + UnitTests + WatchedFileTests + + -- Tests that have been pulled out of the main file + default-extensions: + LambdaCase + OverloadedStrings + RecordWildCards + ViewPatterns + +flag ghcide-bench + description: Build the ghcide-bench executable + default: True + +executable ghcide-bench + import: defaults + if !flag(ghcide-bench) + buildable: False + build-depends: + aeson, + bytestring, + containers, + data-default, + extra, + filepath, + hls-plugin-api, + hls-test-utils, + lens, + lsp-test, + lsp-types, + optparse-applicative, + process, + safe-exceptions, + hls-graph, + shake, + tasty-hunit >= 0.10, + text, + haskell-language-server:ghcide-bench-lib, + hs-source-dirs: ghcide-bench/exe + ghc-options: -threaded -Wall -Wno-name-shadowing -rtsopts + main-is: Main.hs + default-extensions: + LambdaCase + OverloadedStrings + RecordWildCards + ViewPatterns + +library ghcide-bench-lib + import: defaults + hs-source-dirs: ghcide-bench/src + ghc-options: -Wall -Wno-name-shadowing + exposed-modules: + Experiments.Types + Experiments + build-depends: + aeson, + async, + binary, + bytestring, + deepseq, + directory, + extra, + filepath, + ghcide:{ghcide}, + hashable, + lens, + lsp-test, + lsp-types, + optparse-applicative, + parser-combinators, + process, + safe-exceptions, + shake, + text, + hls-test-utils, + row-types + default-extensions: + LambdaCase + RecordWildCards + ViewPatterns + + +test-suite ghcide-bench-test + import: defaults + type: exitcode-stdio-1.0 + build-tool-depends: + ghcide:ghcide, + main-is: Main.hs + hs-source-dirs: ghcide-bench/test + ghc-options: -Wunused-packages + ghc-options: -threaded -Wall + build-depends: + extra, + haskell-language-server:ghcide-bench-lib, + lsp-test ^>= 0.17, + tasty, + tasty-hunit >= 0.10, + tasty-rerun + default-extensions: + LambdaCase + OverloadedStrings + RecordWildCards + ViewPatterns + +executable plugin-tutorial + import: defaults + -- The plugin tutorial is only compatible with 9.6 and 9.8. + -- No particular reason, just to avoid excessive CPP. + if (impl(ghc >= 9.6) && impl(ghc < 9.10)) + buildable: True + else + buildable: False + ghc-options: -pgmL markdown-unlit + main-is: docs/contributing/plugin-tutorial.lhs + build-tool-depends: markdown-unlit:markdown-unlit + build-depends: + base, + ghcide, + hls-plugin-api, + aeson, + lsp, + lsp-types, + markdown-unlit, + text, + unordered-containers, + containers, + transformers, + ghc, diff --git a/hie.yaml.cbl b/hie.yaml.cbl deleted file mode 100644 index fb92662e1b..0000000000 --- a/hie.yaml.cbl +++ /dev/null @@ -1,38 +0,0 @@ -# This is a sample hie.yaml file for opening haskell-language-server -# in hie, using cabal as the build system. 
To use is, copy it to a -# file called 'hie.yaml' -cradle: - multi: - - path: "./test/testdata/" - config: { cradle: { none: } } - - - path: "./" - config: - cradle: - cabal: - - path: "./test/functional/" - component: "haskell-language-server:func-test" - - - path: "./test/utils/" - component: "haskell-language-server:func-test" - - - path: "./exe/Main.hs" - component: "haskell-language-server:exe:haskell-language-server" - - - path: "./exe/Arguments.hs" - component: "haskell-language-server:exe:haskell-language-server" - - - path: "./exe/Wrapper.hs" - component: "haskell-language-server:exe:haskell-language-server-wrapper" - - - path: "./src" - component: "lib:haskell-language-server" - - - path: "./.stack-work/" - component: "lib:haskell-language-server" - - - path: "./ghcide/src" - component: "ghcide:lib:ghcide" - - - path: "./ghcide/exe" - component: "ghcide:exe:ghcide" diff --git a/hie.yaml.stack b/hie.yaml.stack deleted file mode 100644 index 54f5f3abb0..0000000000 --- a/hie.yaml.stack +++ /dev/null @@ -1,38 +0,0 @@ -# This is a sample hie.yaml file for opening haskell-language-server -# in hie, using stack as the build system. To use is, copy it to a -# file called 'hie.yaml' -cradle: - multi: - - path: "./test/testdata/" - config: { cradle: { none: } } - - - path: "./" - config: - cradle: - stack: - - path: "./test/functional/" - component: "haskell-language-server:func-test" - - - path: "./test/utils/" - component: "haskell-language-server:func-test" - - - path: "./exe/Main.hs" - component: "haskell-language-server:exe:haskell-language-server" - - - path: "./exe/Arguments.hs" - component: "haskell-language-server:exe:haskell-language-server" - - - path: "./exe/Wrapper.hs" - component: "haskell-language-server:exe:haskell-language-server-wrapper" - - - path: "./src" - component: "haskell-language-server:lib" - - - path: "./.stack-work/" - component: "haskell-language-server:lib" - - - path: "./ghcide/src" - component: "ghcide:lib:ghcide" - - - path: "./ghcide/exe" - component: "ghcide:exe:ghcide" diff --git a/hls-graph/LICENSE b/hls-graph/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/hls-graph/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/hls-graph/README.md b/hls-graph/README.md new file mode 100644 index 0000000000..802b7b1016 --- /dev/null +++ b/hls-graph/README.md @@ -0,0 +1,17 @@ +# hls-graph - a limited reimplementation of Shake for in-memory build graphs + +`ghcide` was originally built on top of [Shake](http://shakebuild.com), a Haskell build system. Nowadays Shake has been replaced by a special purpose implementation of a build graph called hls-graph, which drops all the persistency features in exchange for simplicity and performance. 
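+
+As a rough sketch of the core idea (not the actual hls-graph API, whose entry points live under `Development.IDE.Graph`), the toy program below shows a demand-driven build over user-defined rules: a rule discovers its dependencies while it runs, and results are memoised in memory only. All names in it are invented for illustration.
+
+```haskell
+{-# LANGUAGE LambdaCase #-}
+-- Illustration only: a sketch of demand-driven building with dynamic
+-- dependencies and an in-memory cache, in the spirit of hls-graph.
+import           Control.Monad.State
+import qualified Data.Map.Strict     as Map
+
+type Key   = String
+type Value = String
+type Build = State (Map.Map Key Value)   -- the in-memory "database"
+
+-- A user-defined rule: how to compute one key, given a way to fetch others.
+rule :: Key -> (Key -> Build Value) -> Build Value
+rule key fetch = case key of
+  "B.o" -> do
+    src <- fetch "B.hs"
+    pure ("compile(" ++ src ++ ")")
+  "A.o" -> do
+    dep <- fetch "B.o"               -- dependency discovered while the rule runs
+    src <- fetch "A.hs"
+    pure ("compile(" ++ src ++ " using " ++ dep ++ ")")
+  _ -> pure (key ++ "-contents")     -- leaf inputs
+
+-- Demand-driven build with memoisation: each key is computed at most once per run.
+build :: Key -> Build Value
+build key =
+  gets (Map.lookup key) >>= \case
+    Just v  -> pure v
+    Nothing -> do
+      v <- rule key build
+      modify (Map.insert key v)
+      pure v
+
+main :: IO ()
+main = print (runState (build "A.o") Map.empty)
+```
+
+The real implementation layers change tracking and profiling on top of this core shape; see the features below and the modules listed in `hls-graph.cabal`.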
+ +Features: + +* Dynamic dependencies +* User defined rules (there are no predefined File rules as in Shake) +* Build reports (a la Shake profiling) +* "Reactive" change tracking for minimal rebuilds (not available in Shake) + +What's missing: + +* Persistence +* A default set of rules for file system builds +* A testsuite +* General purpose application - many design decisions make assumptions specific to ghcide diff --git a/hls-graph/hls-graph.cabal b/hls-graph/hls-graph.cabal new file mode 100644 index 0000000000..5eccb4d75e --- /dev/null +++ b/hls-graph/hls-graph.cabal @@ -0,0 +1,142 @@ +cabal-version: 2.4 +name: hls-graph +version: 2.11.0.0 +synopsis: Haskell Language Server internal graph API +description: + Please see the README on GitHub at + +homepage: https://github.com/haskell/haskell-language-server#readme +bug-reports: https://github.com/haskell/haskell-language-server/issues +license: Apache-2.0 +license-file: LICENSE +author: The Haskell IDE Team +maintainer: The Haskell IDE Team +copyright: The Haskell IDE Team +category: Development +build-type: Simple +data-files: + html/profile.html + html/shake.js + +extra-source-files: README.md + +flag pedantic + description: Enable -Werror + default: False + manual: True + +flag embed-files + default: False + manual: True + description: Embed data files into the shake library + +flag stm-stats + default: False + manual: True + description: Collect STM transaction stats + +source-repository head + type: git + location: https://github.com/haskell/haskell-language-server + +common warnings + ghc-options: + -Wall + -Wredundant-constraints + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + +library + import: warnings + exposed-modules: + Control.Concurrent.STM.Stats + Development.IDE.Graph + Development.IDE.Graph.Classes + Development.IDE.Graph.Database + Development.IDE.Graph.Internal.Action + Development.IDE.Graph.Internal.Database + Development.IDE.Graph.Internal.Options + Development.IDE.Graph.Internal.Key + Development.IDE.Graph.Internal.Paths + Development.IDE.Graph.Internal.Profile + Development.IDE.Graph.Internal.Rules + Development.IDE.Graph.Internal.Types + Development.IDE.Graph.KeyMap + Development.IDE.Graph.KeySet + Development.IDE.Graph.Rule + Paths_hls_graph + + autogen-modules: Paths_hls_graph + hs-source-dirs: src + build-depends: + , aeson + , async >=2.0 + , base >=4.12 && <5 + , bytestring + , containers + , deepseq + , exceptions + , extra + , filepath + , focus >=1.0.3.2 + , hashable + , js-dgtable + , js-flot + , js-jquery + , list-t + , stm + , stm-containers + , text + , time + , transformers + , unliftio + , unordered-containers + + if flag(embed-files) + cpp-options: -DFILE_EMBED + build-depends: + , file-embed >=0.0.11 + , template-haskell + else + build-depends: + directory + + if flag(stm-stats) + cpp-options: -DSTM_STATS + + if flag(pedantic) + ghc-options: -Werror + + default-language: GHC2021 + default-extensions: + DataKinds + +test-suite tests + import: warnings + type: exitcode-stdio-1.0 + default-language: GHC2021 + hs-source-dirs: test + main-is: Main.hs + other-modules: + ActionSpec + DatabaseSpec + Example + RulesSpec + Spec + + ghc-options: + -threaded -rtsopts -with-rtsopts=-N -fno-ignore-asserts + + build-depends: + , base + , extra + , hls-graph + , hspec + , stm + , stm-containers + , tasty + , tasty-hspec >= 1.2 + , tasty-rerun + + build-tool-depends: hspec-discover:hspec-discover diff --git 
a/hls-graph/html/README.md b/hls-graph/html/README.md new file mode 100644 index 0000000000..4881424df4 --- /dev/null +++ b/hls-graph/html/README.md @@ -0,0 +1,24 @@ +# Shake HTML + +HTML files originally used by Shake and now by hls-graph, for profiling build runs. + +## Files + +* `profile.html` is the actual profiler. +* `profile-data.js`, `progress-data.js` and `metadata.js` are files with sample data, matching that generated by the compiler. +* `shake.js` is generated from the code in `ts`. + +## Development + +Before doing any work you need the jQuery, Flot and dgtable JavaScript dependencies. These can be found in the `js-jquery`, `js-flot` and `js-dgtable` repos. Copy the associated JavaScript files into a `lib` directory, renaming to take away version numbers and `.min` parts. + +To build and test `shake.js` from `html` run: + + tsc -p ts # generated shake.js + tslint -p ts # run the linter + +Or, for the one liner: + + tsc -p ts && tslint -p ts + +To test out the `profile.html` just open it after doing the above steps. diff --git a/hls-graph/html/data/metadata.js b/hls-graph/html/data/metadata.js new file mode 100644 index 0000000000..e30f25e9f0 --- /dev/null +++ b/hls-graph/html/data/metadata.js @@ -0,0 +1,2 @@ +var version = "HEAD"; +var generated = "10:33pm 30-Mar-2019"; diff --git a/hls-graph/html/data/profile-data.js b/hls-graph/html/data/profile-data.js new file mode 100644 index 0000000000..4bf8a7ffbf --- /dev/null +++ b/hls-graph/html/data/profile-data.js @@ -0,0 +1,590 @@ +var profile = +[["doesFileExist ../../src/General/GetOpt.hs",0.0001,0,0] +,["../../src/Development/Ninja/All.hs",0.0003,0,0] +,["doesFileExist ../../src/System/IO/Error.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/ByteString.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Unique.hs",0.0001,0,0] +,["doesFileExist ../../src/System/IO/Unsafe.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/Trans/Maybe.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Progress.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Binary/Builder.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/Profile.hs",0.0003,0,0] +,["../../src/Development/Shake/Internal/Core/Types.hs",0.0003,0,0] +,["../../src/Development/Shake/Internal/FilePattern.hs",0.002,0,0] +,["doesFileExist ../../src/Foreign/Ptr.hs",0.0001,0,0] +,["doesFileExist ../../src/General/Ids.hs",0.0002,0,0] +,["doesFileExist ../../src/Paths.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Process.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/History/Shared.hs",0.0005,0,0] +,["doesFileExist ../../src/Development/Ninja/Env.hs",0.0001,0,0] +,["doesFileExist ../../src/General/Process.hs",0.0001,0,0] +,["../../src/Development/Shake/FilePath.hs",0.0003,0,0] +,["../../src/Development/Shake/Internal/Core/Pool.hs",0.0002,0,0] +,["doesFileExist ../../src/System/FilePath.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/Trans/Class.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/OrderOnly.hs",0.0002,0,0] +,["../../src/General/ListBuilder.hs",0.0003,0,0] +,["../../src/Development/Shake/Internal/Core/Storage.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/Args.hs",0.0009,0,0] +,["doesFileExist ../../src/GHC/Exts.hs",0.0001,0,0] +,["../../src/Development/Shake/Classes.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/Version.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Exit.hs",0.0001,0,0] +,["../../src/General/Pool.hs",0.0002,0,0] +,["doesFileExist 
../../src/System/Console/GetOpt.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Time.hs",0.0002,0,0] +,["doesFileExist ../../src/General/Cleanup.hs",0.0006,0,0] +,["doesFileExist ../../src/General/Makefile.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Args.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Run.hs",0.0001,0,0] +,["doesFileExist ../../src/General/FileLock.hs",0.0001,0,0] +,["../../src/Development/Ninja/Parse.hs",0.0019,0,0] +,["../../src/Development/Shake/Internal/Core/Rules.hs",0.0011,0,0] +,["doesFileExist ../../src/Foreign/C/String.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/ByteString/Lazy.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/IORef.hs",0.0001,0,0] +,["doesFileExist ../../src/GHC/IO/Exception.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/ByteString/Char8.hs",0.0001,0,0] +,["../../src/General/Fence.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Action.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Hashable.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/History/Network.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/Default.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Info.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/List.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Derived.hs",0.0003,0,0] +,["doesFileExist ../../src/Control/Exception.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Rules/File.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/IORef/Extra.hs",0.0617,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Build.hs",0.0001,0,0] +,["../../src/General/TypeMap.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Char.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Posix/Files/ByteString.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Typeable.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Random.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Command.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/History/Bloom.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Paths.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Bits.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/FileName.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/CompactUI.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/History/Shared.hs",0.0001,0,0] +,["../../src/Run.hs",0.0003,0,0] +,["../../src/Development/Shake/Internal/History/Cloud.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Ninja/Type.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Pool.hs",0.0001,0,0] +,["doesFileExist ../../src/Prelude.hs",0.0001,0,0] +,["../../src/General/Bilist.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/FileName.hs",0.0001,0,0] +,["../../src/General/Chunks.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/Primitive/Array.hs",0.0001,0,0] +,["doesFileExist ../../src/Network/HTTP.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Data.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Demo.hs",0.0001,0,0] +,["../../src/General/Template.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Ninja/All.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/History/Types.hs",0.0002,0,0] +,["Development/Shake/Internal/History/Types.dep",0.0017,0,0,[[84],[3]]] +,["Development/Shake/Internal/History/Types.deps",0.0012,0,0,[[85]]] +,["doesFileExist 
../../src/Data/HashMap/Strict.hs",0.0001,0,0] +,["doesFileExist ../../src/System/FilePath/Posix.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/FilePattern.hs",0.0001,0,0] +,["doesFileExist ../../src/Network/URI.hs",0.0001,0,0] +,["../../src/General/FileLock.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/Rules/Directory.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Ninja/Lexer.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/Fix.hs",0.0001,0,0] +,["doesFileExist ../../src/General/Fence.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/History/Cloud.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Environment.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Heap.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/HashSet.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/IO/Class.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/Rules/Rerun.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Rules.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/Trans/Reader.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/ST.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Binary/Put.hs",0.0002,0,0] +,["doesFileExist ../../src/System/Posix/IO.hs",0.0001,0,0] +,["doesFileExist ../../src/System/IO.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Resource.hs",0.0003,0,0] +,["doesFileExist ../../src/Unsafe/Coerce.hs",0.0009,0,0] +,["../../src/Development/Shake/Internal/Rules/Files.hs",0.0003,0,0] +,["doesFileExist ../../src/General/Bilist.hs",0.0001,0,0] +,["../../src/General/EscCodes.hs",0.0003,0,0] +,["../../src/Development/Shake/Internal/CompactUI.hs",0.0004,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/History/Types.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/History/Network.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/List/Extra.hs",0.0001,0,0] +,["../../src/General/Process.hs",0.0002,0,0] +,["doesFileExist ../../src/System/IO/Extra.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/FileInfo.hs",0.0007,0,0] +,["../../src/Development/Shake/Internal/History/Symlink.hs",0.0004,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/Directory.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Rules/Oracle.hs",0.0006,0,0] +,["../../src/Development/Shake/Internal/Rules/Default.hs",0.0004,0,0] +,["../../src/Development/Shake/Internal/Progress.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/ByteString/Lazy/Char8.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Dynamic.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/ByteString/UTF8.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Monoid.hs",0.0004,0,0] +,["doesFileExist ../../src/Language/Javascript/Flot.hs",0.0002,0,0] +,["doesFileExist ../../src/General/Wait.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/FilePath.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/History/Server.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Ninja/Parse.hs",0.0001,0,0] +,["doesFileExist ../../src/System/FilePattern/Directory.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Tuple/Extra.hs",0.0001,0,0] +,["../../src/Development/Ninja/Type.hs",0.0005,0,0] +,["doesFileExist ../../src/Numeric/Extra.hs",0.0007,0,0] +,["doesFileExist ../../src/GHC/Conc.hs",0.0009,0,0] +,["doesFileExist ../../src/Data/Binary/Get.hs",0.0001,0,0] 
+,["../../src/Development/Shake/Internal/Core/Run.hs",0.0003,0,0] +,["doesFileExist ../../src/General/Template.hs",0.0001,0,0] +,["OracleQ (GhcPkg ())",0.1414,0,0,[],[["ghc-pkg",0.4508,0.5896]]] +,["doesFileExist ../../src/Language/Javascript/JQuery.hs",0.004,0,0] +,["../../src/Development/Shake/Internal/Demo.hs",0.0003,0,0] +,["doesFileExist ../../src/General/TypeMap.hs",0.0001,0,0] +,["doesFileExist ../../src/General/Timing.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/History/Bloom.hs",0.0001,0,0] +,["../../src/Development/Shake.hs",0.0003,0,0] +,["doesFileExist ../../src/Control/Concurrent.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Time/Extra.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/CmdOption.hs",0.0005,0,0] +,["Development/Shake/Internal/CmdOption.dep",0.0145,0,0,[[152],[80],[126]]] +,["Development/Shake/Internal/CmdOption.deps",0.0013,0,0,[[153]]] +,["doesFileExist ../../src/Development/Shake/Internal/Resource.hs",0.0007,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/Files.hs",0.0001,0,0] +,["doesFileExist ../../src/General/ListBuilder.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Errors.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Storage.hs",0.0001,0,0] +,["doesFileExist ../../src/Foreign.hs",0.0001,0,0] +,["../../src/General/Makefile.hs",0.0003,0,0] +,["General/Makefile.dep",0.0026,0,0,[[161],[45],[59]]] +,["General/Makefile.deps",0.0018,0,0,[[162]]] +,["../../src/General/Cleanup.hs",0.0006,0,0] +,["doesFileExist ../../src/Foreign/Marshal/Alloc.hs",0.0001,0,0] +,["../../src/General/Thread.hs",0.0002,0,0] +,["doesFileExist ../../src/General/Pool.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Monad/Extra.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Core/Build.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Either.hs",0.0001,0,0] +,["doesFileExist ../../src/Foreign/C/Error.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Value.hs",0.0001,0,0] +,["../../src/Development/Shake/Internal/Core/Monad.hs",0.0005,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Errors.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/Oracle.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/Rules/OrderOnly.hs",0.0003,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Profile.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/Options.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/Function.hs",0.0001,0,0] +,["doesFileExist ../../src/General/EscCodes.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Database.hs",0.0001,0,0] +,["doesFileExist ../../src/General/Intern.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Monad.hs",0.0001,0,0] +,["doesFileExist ../../src/Control/Exception/Extra.hs",0.0001,0,0] +,["General/Template.dep",0.0041,0,0,[[82],[88],[184],[59],[5],[126],[130],[144]]] +,["General/Template.deps",0.0011,0,0,[[185]]] +,["doesFileExist ../../src/Control/DeepSeq.hs",0.0002,0,0] +,["doesFileExist ../../src/Control/Monad.hs",0.0001,0,0] +,["Development/Ninja/Parse.dep",0.0092,0,0,[[39],[45],[17],[72],[93],[188]]] +,["doesFileExist ../../src/Development/Shake/Internal/Derived.hs",0.0001,0,0] +,["../../shake.cabal",0.0002,0,0] +,[".pkgs",0.0044,0,0,[[191]]] +,["OracleQ (GhcFlags ())",0.0005,0,0,[[192]]] +,["Development/Shake/Internal/History/Types.o Development/Shake/Internal/History/Types.hi",0.532,0,0,[[86],[84],[143],[193]],[["ghc",3.3763,3.903]]] 
+,["Development/Shake/Internal/History/Types.hi",0.0004,0,0,[[194]]] +,["Development/Shake/Internal/History/Types.o",0.0006,0,0,[[194]]] +,["Development/Shake/Internal/CmdOption.o Development/Shake/Internal/CmdOption.hi",0.7516,0,0,[[154],[152],[143],[193]],[["ghc",1.8268,2.5737]]] +,["Development/Shake/Internal/CmdOption.o",0.0003,0,0,[[197]]] +,["Development/Shake/Internal/CmdOption.hi",0.0003,0,0,[[197]]] +,["General/Makefile.o General/Makefile.hi",0.5911,0,0,[[163],[161],[143],[193]],[["ghc",1.0346,1.6191]]] +,["General/Makefile.hi",0.0003,0,0,[[200]]] +,["General/Makefile.o",0.0003,0,0,[[200]]] +,["General/Template.o General/Template.hi",0.6362,0,0,[[186],[82],[143],[193]],[["ghc",2.8643,3.4941]]] +,["General/Template.o",0.0003,0,0,[[203]]] +,["General/Template.hi",0.0006,0,0,[[203]]] +,["doesFileExist ../../src/GHC/Stack.hs",0.0002,0,0] +,["doesFileExist ../../src/Control/Applicative.hs",0.0001,0,0] +,["../../src/Development/Shake/Command.hs",0.0002,0,0] +,["../../src/Development/Ninja/Lexer.hs",0.0005,0,0] +,["doesFileExist ../../src/General/Binary.hs",0.0001,0,0] +,["doesFileExist ../../src/General/Thread.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Functor.hs",0.0001,0,0] +,["General/TypeMap.dep",0.0029,0,0,[[58],[87],[61],[109],[212],[74]]] +,["General/TypeMap.deps",0.0012,0,0,[[213]]] +,["General/TypeMap.o General/TypeMap.hi",0.5423,0,0,[[214],[58],[143],[193]],[["ghc",0.6005,1.1343]]] +,["General/TypeMap.o",0.0014,0,0,[[215]]] +,["General/TypeMap.hi",0.0003,0,0,[[215]]] +,["doesFileExist ../../src/Control/Monad/Fail.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/Maybe.hs",0.0001,0,0] +,["Development/Shake/Internal/History/Network.dep",0.0024,0,0,[[49],[79],[90],[52],[219],[42]]] +,["Development/Shake/Internal/History/Network.deps",0.0012,0,0,[[220]]] +,["Development/Shake/Internal/History/Network.o Development/Shake/Internal/History/Network.hi",0.7966,0,0,[[221],[49],[143],[193]],[["ghc",3.5012,4.2912]]] +,["Development/Shake/Internal/History/Network.hi",0.0014,0,0,[[222]]] +,["Development/Shake/Internal/History/Network.o",0.0007,0,0,[[222]]] +,["Development/Ninja/Type.dep",0.0152,0,0,[[137],[17],[45],[219]]] +,["General/Cleanup.dep",0.0052,0,0,[[164],[54],[87],[43],[117],[219]]] +,["General/Cleanup.deps",0.0017,0,0,[[226]]] +,["General/Cleanup.o General/Cleanup.hi",0.6072,0,0,[[227],[164],[143],[193]],[["ghc",2.258,2.8553]]] +,["General/Cleanup.hi",0.0004,0,0,[[228]]] +,["General/Cleanup.o",0.0003,0,0,[[228]]] +,["../../src/General/Wait.hs",0.0002,0,0] +,["../../src/General/GetOpt.hs",0.0003,0,0] +,["General/GetOpt.dep",0.0039,0,0,[[232],[32],[99],[219],[170],[117]]] +,["General/GetOpt.deps",0.0015,0,0,[[233]]] +,["General/GetOpt.o General/GetOpt.hi",0.6806,0,0,[[234],[232],[143],[193]],[["ghc",4.5104,5.1844]]] +,["General/GetOpt.o",0.0005,0,0,[[235]]] +,["General/GetOpt.hi",0.0003,0,0,[[235]]] +,["../../src/Paths.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Classes.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Options.hs",0.0002,0,0] +,["../../src/General/Intern.hs",0.0002,0,0] +,["doesFileExist ../../src/Foreign/Storable.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/History/Server.hs",0.0002,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/Rerun.hs",0.0002,0,0] +,["../../src/Development/Shake/Internal/History/Serialise.hs",0.0004,0,0] +,["../../src/Development/Shake/Internal/FileInfo.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/Binary.hs",0.0001,0,0] 
+,["Development/Shake/Classes.dep",0.0028,0,0,[[28],[48],[61],[247],[187]]] +,["Development/Shake/Classes.deps",0.0013,0,0,[[248]]] +,["Development/Shake/Classes.o Development/Shake/Classes.hi",0.4995,0,0,[[249],[28],[143],[193]],[["ghc",0.6007,1.0927]]] +,["Development/Shake/Classes.hi",0.0008,0,0,[[250]]] +,["Development/Shake/Classes.o",0.0005,0,0,[[250]]] +,["../../src/Development/Ninja/Env.hs",0.0004,0,0] +,["Development/Ninja/Env.dep",0.0041,0,0,[[253],[87],[48],[43]]] +,["Development/Ninja/Env.deps",0.0014,0,0,[[254]]] +,["Development/Ninja/Env.o Development/Ninja/Env.hi",0.6363,0,0,[[255],[253],[143],[193]],[["ghc",1.625,2.2508]]] +,["Development/Ninja/Env.o",0.0005,0,0,[[256]]] +,["Development/Ninja/Env.hi",0.0003,0,0,[[256]]] +,["Development/Ninja/Type.deps",0.0027,0,0,[[225],[255]]] +,["Development/Ninja/Type.o Development/Ninja/Type.hi",0.6863,0,0,[[259],[137,258],[143],[193]],[["ghc",4.2972,4.9755]]] +,["Development/Ninja/Type.hi",0.0003,0,0,[[260]]] +,["Development/Ninja/Type.o",0.0003,0,0,[[260]]] +,["../../src/Development/Shake/Internal/Core/Action.hs",0.0004,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/CmdOption.hs",0.0002,0,0] +,["../../src/General/Ids.hs",0.0003,0,0] +,["doesFileExist ../../src/GHC/IO.hs",0.0001,0,0] +,["General/Ids.dep",0.0079,0,0,[[265],[56],[78],[54],[182],[168],[117],[219],[212],[87],[74],[266],[27]]] +,["doesFileExist ../../src/System/Directory.hs",0.0001,0,0] +,["../../src/General/Timing.hs",0.0002,0,0] +,["doesFileExist ../../src/Data/Either/Extra.hs",0.0001,0,0] +,["General/Fence.dep",0.0023,0,0,[[46],[188],[100],[219],[270],[43]]] +,["General/Fence.deps",0.001,0,0,[[271]]] +,["General/Fence.o General/Fence.hi",0.6501,0,0,[[272],[46],[143],[193]],[["ghc",1.1762,1.8127]]] +,["General/Fence.hi",0.0004,0,0,[[273]]] +,["General/Fence.o",0.0008,0,0,[[273]]] +,["doesFileExist ../../src/System/Info/Extra.hs",0.0002,0,0] +,["Development/Shake/FilePath.dep",0.0042,0,0,[[19],[276],[21],[88]]] +,["Development/Shake/FilePath.deps",0.0014,0,0,[[277]]] +,["Development/Shake/FilePath.o Development/Shake/FilePath.hi",0.576,0,0,[[278],[19],[143],[193]],[["ghc",0.6008,1.1707]]] +,["Development/Shake/FilePath.o",0.0003,0,0,[[279]]] +,["Development/Shake/FilePath.hi",0.0003,0,0,[[279]]] +,["Development/Shake/Internal/FilePattern.dep",0.0145,0,0,[[11],[174],[21],[117],[188],[59],[219],[276]]] +,["Development/Shake/Internal/FileName.dep",0.0097,0,0,[[67],[45],[128],[239],[21],[210],[276],[52]]] +,["doesFileExist ../../src/Development/Shake/Internal/History/Symlink.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Rules/File.hs",0.0001,0,0] +,["Development/Shake/Internal/Rules/Default.dep",0.0071,0,0,[[124],[102],[122],[285],[156],[244]]] +,["doesFileExist ../../src/System/Posix/Files.hs",0.0001,0,0] +,["doesFileExist ../../src/Data/ByteString/Lazy/Internal.hs",0.0033,0,0] +,["../../src/Development/Shake/Internal/Value.hs",0.0003,0,0] +,["Development/Shake/Internal/Value.dep",0.0088,0,0,[[289],[239],[174],[61],[66],[109]]] +,["doesFileExist ../../src/Data/ByteString/Unsafe.hs",0.0001,0,0] +,["doesFileExist ../../src/System/FilePattern.hs",0.0001,0,0] +,["doesFileExist ../../src/System/Directory/Extra.hs",0.0001,0,0] +,["Development/Shake/Internal/Args.dep",0.03,0,0,[[26],[65],[240],[102],[174],[68],[81],[47],[132],[285],[7],[181],[147],[211],[0],[180],[136],[184],[188],[170],[52],[219],[293],[97],[30],[151]]] +,["doesFileExist ../../src/General/Extra.hs",0.0001,0,0] 
+,["Development/Shake/Internal/History/Symlink.dep",0.0046,0,0,[[121],[168],[295],[268],[21],[12],[41],[287]]] +,["Development/Shake/Internal/Errors.dep",0.006,0,0,[[158],[136],[184],[100],[295],[61],[117],[219]]] +,["Development/Shake/Internal/CompactUI.dep",0.0071,0,0,[[113],[264],[240],[7],[151],[295],[54],[211],[180],[43],[168]]] +,["Run.dep",0.0066,0,0,[[70],[83],[97],[115],[132],[147],[168],[184],[219],[268],[295],[0],[15],[30]]] +,["Development/Shake/Internal/Demo.dep",0.0662,0,0,[[145],[65],[63],[184],[188],[59],[52],[219],[268],[30],[21],[295],[132],[107],[276]]] +,["General/Timing.dep",0.0088,0,0,[[269],[43],[5],[136],[138],[295],[151]]] +,["Development/Ninja/All.dep",0.014,0,0,[[1],[17],[72],[134],[115],[239],[3],[45],[268],[87],[99],[136],[184],[188],[219],[59],[117],[276],[295],[147],[35],[76],[120],[174],[285],[23]]] +,["General/Process.dep",0.0099,0,0,[[118],[150],[187],[184],[168],[117],[219],[171],[30],[119],[276],[15],[151],[4],[43],[45],[42],[295],[174],[44]]] +,["../../src/General/Extra.hs",0.0003,0,0] +,["doesFileExist ../../src/Data/Version/Extra.hs",0.0001,0,0] +,["Paths_shake.dep",0.0013,0,0,[[238],[305]]] +,["Paths_shake.deps",0.001,0,0,[[306]]] +,["Paths_shake.o Paths_shake.hi",0.4356,0,0,[[307],[238],[143],[193]],[["ghc",0.6004,1.0302]]] +,["Paths_shake.o",0.0014,0,0,[[308]]] +,["Paths_shake.hi",0.0003,0,0,[[308]]] +,["doesFileExist ../../src/Numeric.hs",0.0001,0,0] +,["doesFileExist ../../src/Development/Shake/Internal/Core/Types.hs",0.0001,0,0] +,["Development/Shake/Internal/Rules/Rerun.dep",0.0032,0,0,[[101],[102],[312],[57],[47],[239],[3],[210]]] +,["Development/Shake/Internal/Rules/Oracle.dep",0.0121,0,0,[[123],[312],[102],[240],[57],[172],[239],[3],[42],[188],[247],[210],[295]]] +,["Development/Shake/Internal/Core/Pool.dep",0.0057,0,0,[[20],[54],[167],[312],[183],[151],[270],[100],[95]]] +,["Development/Shake/Internal/Rules/OrderOnly.dep",0.0052,0,0,[[176],[312],[47],[285],[45]]] +,["Development/Shake/Internal/Rules/Files.dep",0.0132,0,0,[[110],[188],[100],[219],[117],[61],[210],[47],[312],[57],[102],[174],[295],[76],[239],[244],[285],[89],[132],[120],[240],[129],[74]]] +,["Development/Shake/Internal/Core/Build.dep",0.0239,0,0,[[169],[239],[167],[172],[174],[312],[47],[69],[96],[240],[183],[131],[45],[100],[295],[182],[54],[168],[138],[87],[13],[102],[61],[56],[219],[117],[270],[151]]] +,["Development/Shake.dep",0.0184,0,0,[[149],[74],[100],[172],[240],[312],[47],[102],[155],[190],[174],[7],[36],[63],[89],[122],[285],[156],[175],[23],[244]]] +,["Development/Shake/Internal/Rules/Directory.dep",0.0104,0,0,[[92],[168],[100],[219],[247],[52],[136],[99],[268],[97],[312],[47],[102],[57],[172],[239],[132],[89],[295],[210]]] +,["Development/Shake/Internal/Progress.dep",0.0183,0,0,[[125],[207],[136],[184],[168],[268],[15],[21],[59],[43],[52],[219],[240],[312],[45],[126],[138],[142],[180],[295],[65],[151],[13],[41]]] +,["Development/Shake/Internal/Derived.dep",0.0175,0,0,[[53],[168],[100],[268],[21],[107],[119],[174],[155],[312],[47],[102],[240],[285],[3],[87],[295],[117],[48],[61],[127]]] +,["Development/Shake/Internal/Profile.dep",0.0085,0,0,[[9],[142],[136],[179],[117],[219],[21],[119],[138],[295],[174],[312],[172],[13],[99],[65],[239],[151],[87],[126],[45],[182]]] +,["doesFileExist ../../src/General/Chunks.hs",0.0001,0,0] +,["Development/Shake/Internal/History/Shared.dep",0.0076,0,0,[[16],[172],[114],[284],[239],[210],[295],[324],[168],[293],[21],[107],[311],[120],[131],[76],[129],[100],[219],[3],[74]]] +,["doesFileExist 
../../src/Control/Concurrent/Extra.hs",0.0001,0,0] +,["General/Pool.dep",0.0089,0,0,[[31],[326],[151],[54],[168],[147],[211],[98],[99],[56],[62]]] +,["Development/Shake/Internal/Core/Run.dep",0.0127,0,0,[[141],[54],[136],[326],[100],[210],[239],[159],[69],[96],[13],[182],[146],[131],[168],[61],[138],[117],[87],[127],[219],[43],[268],[151],[3],[312],[47],[102],[167],[7],[172],[177],[240],[174],[147],[211],[295],[34],[129],[74]]] +,["General/Thread.dep",0.0067,0,0,[[166],[34],[48],[326],[54],[295],[168]]] +,["Development/Shake/Internal/History/Cloud.dep",0.0104,0,0,[[71],[172],[114],[116],[133],[148],[326],[151],[188],[100],[6],[22],[95],[87],[61],[270],[210],[295],[131]]] +,["Development/Shake/Internal/Resource.dep",0.0092,0,0,[[108],[179],[5],[326],[95],[184],[136],[43],[168],[111],[167],[47],[312],[183],[73],[100],[151]]] +,["General/Wait.dep",0.004,0,0,[[231],[168],[100],[326],[56],[117],[78],[27],[218]]] +,["General/Wait.deps",0.001,0,0,[[332]]] +,["General/Wait.o General/Wait.hi",1.0318,0,0,[[333],[231],[143],[193]],[["ghc",1.0995,2.123]]] +,["General/Wait.hi",0.0003,0,0,[[334]]] +,["General/Wait.o",0.0014,0,0,[[334]]] +,["Development/Shake/Internal/Core/Action.dep",0.0147,0,0,[[263],[54],[168],[100],[187],[61],[268],[292],[135],[326],[219],[136],[43],[117],[138],[295],[87],[13],[182],[239],[183],[69],[167],[312],[102],[73],[172],[120],[76],[240],[174],[34],[95]]] +,["General/Extra.dep",0.0188,0,0,[[304],[184],[59],[117],[97],[132],[187],[311],[34],[61],[119],[151],[5],[276],[62],[268],[30],[138],[242],[326],[219],[48],[78],[188],[104],[139],[206]]] +,["General/Extra.deps",0.0041,0,0,[[338],[278,227]]] +,["General/Extra.o General/Extra.hi",1.364,0,0,[[339],[304,281,229],[143],[193]],[["ghc",3.4583,4.8013]]] +,["General/Extra.hi",0.001,0,0,[[340]]] +,["General/Extra.o",0.0003,0,0,[[340]]] +,["Development/Shake/Internal/History/Symlink.deps",0.0029,0,0,[[296],[339]]] +,["Development/Shake/Internal/History/Symlink.o Development/Shake/Internal/History/Symlink.hi",1.1794,0,0,[[343],[121,281,229,341],[143],[193]],[["ghc",4.8226,5.9703]]] +,["Development/Shake/Internal/History/Symlink.hi",0.0009,0,0,[[344]]] +,["Development/Shake/Internal/History/Symlink.o",0.0004,0,0,[[344]]] +,["Development/Shake/Internal/Errors.deps",0.0063,0,0,[[297],[339]]] +,["Development/Shake/Internal/Errors.o Development/Shake/Internal/Errors.hi",1.0817,0,0,[[347],[158,281,229,341],[143],[193]],[["ghc",4.9817,6.0552]]] +,["Development/Shake/Internal/Errors.o",0.0003,0,0,[[348]]] +,["Development/Shake/Internal/Errors.hi",0.0003,0,0,[[348]]] +,["Development/Shake/Internal/FilePattern.deps",0.0084,0,0,[[282],[347]]] +,["Development/Shake/Internal/FilePattern.o Development/Shake/Internal/FilePattern.hi",0.844,0,0,[[351],[11,281,350,229,341],[143],[193]],[["ghc",6.7192,7.5541]]] +,["Development/Shake/Internal/FilePattern.o",0.0006,0,0,[[352]]] +,["Development/Shake/Internal/FilePattern.hi",0.0006,0,0,[[352]]] +,["Development/Shake/Internal/Value.deps",0.0033,0,0,[[290],[249,347]]] +,["Development/Shake/Internal/Value.o Development/Shake/Internal/Value.hi",0.8159,0,0,[[355],[289,251,281,350,229,341],[143],[193]],[["ghc",6.3347,7.1393]]] +,["Development/Shake/Internal/Value.hi",0.0006,0,0,[[356]]] +,["Development/Shake/Internal/Value.o",0.0003,0,0,[[356]]] +,["General/Process.deps",0.0155,0,0,[[303],[339,347]]] +,["General/Process.o General/Process.hi",0.9122,0,0,[[359],[118,281,350,229,341],[143],[193]],[["ghc",6.2048,7.1061]]] +,["General/Process.o",0.0003,0,0,[[360]]] +,["General/Process.hi",0.0005,0,0,[[360]]] 
+,["General/Timing.deps",0.007,0,0,[[301],[339]]] +,["General/Timing.o General/Timing.hi",0.6649,0,0,[[363],[269,281,229,341],[143],[193]],[["ghc",4.8699,5.5256]]] +,["General/Timing.o",0.0003,0,0,[[364]]] +,["General/Timing.hi",0.0005,0,0,[[364]]] +,["General/Thread.deps",0.0034,0,0,[[329],[227,339]]] +,["General/Thread.o General/Thread.hi",0.8005,0,0,[[367],[166,281,229,341],[143],[193]],[["ghc",5.5352,6.3282]]] +,["General/Thread.hi",0.0003,0,0,[[368]]] +,["General/Thread.o",0.0003,0,0,[[368]]] +,["General/Pool.deps",0.0052,0,0,[[327],[363,367]]] +,["General/Pool.o General/Pool.hi",0.8336,0,0,[[371],[31,281,229,341,369,366],[143],[193]],[["ghc",7.1157,7.9387]]] +,["General/Pool.o",0.0008,0,0,[[372]]] +,["General/Pool.hi",0.0004,0,0,[[372]]] +,["doesFileExist ../../src/Development/Shake/Internal/History/Serialise.hs",0.0002,0,0] +,["Development/Shake/Internal/History/Server.dep",0.006,0,0,[[243],[148],[375],[172],[210],[295],[87],[42],[3],[120],[114],[116],[61]]] +,["../../src/Development/Shake/Database.hs",0.0003,0,0] +,["Development/Shake/Database.dep",0.0127,0,0,[[377],[326],[54],[188],[100],[43],[34],[174],[240],[102],[37],[312],[50]]] +,["doesFileExist ../../src/Data/Word.hs",0.0002,0,0] +,["General/EscCodes.dep",0.0068,0,0,[[112],[59],[107],[97],[5],[379],[66],[12],[242],[165]]] +,["General/EscCodes.deps",0.0012,0,0,[[380]]] +,["General/EscCodes.o General/EscCodes.hi",0.88,0,0,[[381],[112],[143],[193]],[["ghc",2.5782,3.4493]]] +,["General/EscCodes.o",0.0006,0,0,[[382]]] +,["General/EscCodes.hi",0.0003,0,0,[[382]]] +,["General/Intern.dep",0.0025,0,0,[[241],[239],[242],[379],[74],[87],[52]]] +,["General/Intern.deps",0.0021,0,0,[[385],[249]]] +,["General/Intern.o General/Intern.hi",0.7673,0,0,[[386],[241,251],[143],[193]],[["ghc",2.132,2.8908]]] +,["General/Intern.o",0.0003,0,0,[[387]]] +,["General/Intern.hi",0.0005,0,0,[[387]]] +,["General/Ids.deps",0.0076,0,0,[[267],[386]]] +,["General/Ids.o General/Ids.hi",0.8581,0,0,[[390],[265,251,389],[143],[193]],[["ghc",2.8992,3.7487]]] +,["General/Ids.o",0.001,0,0,[[391]]] +,["General/Ids.hi",0.0003,0,0,[[391]]] +,["Development/Shake/Internal/Rules/File.dep",0.0145,0,0,[[55],[168],[100],[61],[52],[219],[45],[99],[242],[379],[129],[210],[295],[312],[102],[57],[47],[76],[244],[239],[132],[89],[120],[240],[174],[21],[5],[74]]] +,["Development/Shake/Internal/FileInfo.dep",0.0102,0,0,[[246],[48],[184],[239],[76],[288],[59],[379],[311],[107],[160],[2],[268],[33],[174],[188],[45],[41],[44],[60]]] +,["General/FileLock.dep",0.0049,0,0,[[91],[184],[21],[295],[34],[188],[66],[379],[12],[41],[107],[106]]] +,["General/FileLock.deps",0.0024,0,0,[[396],[339,227]]] +,["General/FileLock.o General/FileLock.hi",0.7159,0,0,[[397],[91,281,229,341],[143],[193]],[["ghc",6.0052,6.7105]]] +,["General/FileLock.o",0.0003,0,0,[[398]]] +,["General/FileLock.hi",0.0003,0,0,[[398]]] +,["Development/Shake/Internal/Core/Storage.dep",0.0086,0,0,[[25],[324],[34],[210],[182],[240],[174],[147],[38],[13],[184],[168],[129],[270],[33],[59],[379],[51],[239],[311],[295],[117],[219],[21],[128],[87],[45],[3],[74]]] +,["General/Chunks.dep",0.0063,0,0,[[77],[151],[21],[326],[168],[54],[107],[268],[3],[379],[129],[210],[295],[34],[211],[74]]] +,["doesFileExist ../../src/Data/ByteString/Internal.hs",0.0002,0,0] +,["Development/Ninja/Lexer.dep",0.0116,0,0,[[209],[136],[59],[45],[291],[72],[403],[5],[379],[12],[242],[27]]] +,["Development/Ninja/Lexer.deps",0.006,0,0,[[404],[259]]] +,["Development/Ninja/Lexer.o 
Development/Ninja/Lexer.hi",1.0702,0,0,[[405],[209,258,261],[143],[193]],[["ghc",7.7163,8.7791]]] +,["Development/Ninja/Lexer.o",0.0003,0,0,[[406]]] +,["Development/Ninja/Lexer.hi",0.0003,0,0,[[406]]] +,["Development/Ninja/Parse.deps",0.0057,0,0,[[189],[255,259,405]]] +,["Development/Ninja/Parse.o Development/Ninja/Parse.hi",0.6308,0,0,[[409],[39,258,408,261],[143],[193]],[["ghc",8.7861,9.4093]]] +,["Development/Ninja/Parse.hi",0.0004,0,0,[[410]]] +,["Development/Ninja/Parse.o",0.0003,0,0,[[410]]] +,["doesFileExist ../../src/Data/Semigroup.hs",0.0003,0,0] +,["General/ListBuilder.dep",0.0018,0,0,[[24],[413]]] +,["General/ListBuilder.deps",0.001,0,0,[[414]]] +,["General/ListBuilder.o General/ListBuilder.hi",0.5663,0,0,[[415],[24],[143],[193]],[["ghc",7.1502,7.7115]]] +,["General/ListBuilder.o",0.0003,0,0,[[416]]] +,["General/ListBuilder.hi",0.0004,0,0,[[416]]] +,["General/Bilist.dep",0.0018,0,0,[[75],[413]]] +,["General/Bilist.deps",0.0012,0,0,[[419]]] +,["General/Bilist.o General/Bilist.hi",0.6522,0,0,[[420],[75],[143],[193]],[["ghc",2.7239,3.3698]]] +,["General/Bilist.hi",0.0003,0,0,[[421]]] +,["General/Bilist.o",0.0011,0,0,[[421]]] +,["Development/Shake/Internal/Core/Monad.dep",0.0154,0,0,[[173],[184],[100],[43],[413],[74],[218]]] +,["Development/Shake/Internal/Core/Monad.deps",0.0013,0,0,[[424]]] +,["Development/Shake/Internal/Core/Monad.o Development/Shake/Internal/Core/Monad.hi",0.9616,0,0,[[425],[173],[143],[193]],[["ghc",3.909,4.8636]]] +,["Development/Shake/Internal/Core/Monad.o",0.0006,0,0,[[426]]] +,["Development/Shake/Internal/Core/Monad.hi",0.0004,0,0,[[426]]] +,["Development/Shake/Command.dep",0.0187,0,0,[[208],[136],[168],[100],[59],[270],[117],[219],[413],[268],[97],[30],[119],[15],[276],[151],[5],[3],[126],[295],[18],[264],[47],[312],[132],[89],[240],[285],[190]]] +,["Development/Shake/Internal/History/Serialise.dep",0.0064,0,0,[[245],[148],[295],[210],[13],[117],[172],[120],[114],[87],[413],[61],[74]]] +,["Development/Shake/Internal/Core/Rules.dep",0.0166,0,0,[[40],[207],[136],[54],[168],[94],[100],[103],[239],[210],[295],[61],[80],[117],[87],[146],[219],[43],[413],[42],[8],[105],[140],[157],[218],[312],[183],[172],[240],[174]]] +,["Development/Shake/Internal/Options.dep",0.0238,0,0,[[178],[80],[117],[136],[219],[127],[188],[295],[87],[89],[45],[128],[264],[413],[74]]] +,["Development/Shake/Internal/Options.deps",0.0096,0,0,[[432],[339,351,154]]] +,["Development/Shake/Internal/Options.o Development/Shake/Internal/Options.hi",1.5102,0,0,[[433],[178,281,199,350,354,229,341],[143],[193]],[["ghc",7.5631,9.0593]]] +,["Development/Shake/Internal/Options.hi",0.0004,0,0,[[434]]] +,["Development/Shake/Internal/Options.o",0.0005,0,0,[[434]]] +,["Development/Shake/Internal/Core/Types.dep",0.0089,0,0,[[10],[100],[187],[242],[379],[61],[210],[219],[52],[54],[295],[326],[69],[96],[114],[131],[174],[146],[43],[45],[138],[151],[182],[99],[87],[13],[136],[167],[183],[172],[240],[239],[413],[34],[74],[218]]] +,["Development/Shake/Internal/History/Bloom.dep",0.0027,0,0,[[64],[379],[66],[48],[413],[242],[12],[74]]] +,["Development/Shake/Internal/History/Bloom.deps",0.0015,0,0,[[438]]] +,["Development/Shake/Internal/History/Bloom.o Development/Shake/Internal/History/Bloom.hi",0.6861,0,0,[[439],[64],[143],[193]],[["ghc",1.1417,1.8181]]] +,["Development/Shake/Internal/History/Bloom.o",0.0004,0,0,[[440]]] +,["Development/Shake/Internal/History/Bloom.hi",0.0004,0,0,[[440]]] +,["../../src/Development/Shake/Internal/Paths.hs",0.0002,0,0] 
+,["Development/Shake/Internal/Paths.dep",0.0066,0,0,[[443],[14],[54],[168],[29],[268],[21],[276],[5],[97],[295],[42]]] +,["Development/Shake/Internal/Paths.deps",0.0121,0,0,[[444],[307,339]]] +,["Development/Shake/Internal/Paths.o Development/Shake/Internal/Paths.hi",1.0141,0,0,[[445],[443,281,229,341,310],[143],[193]],[["ghc",5.1914,6.1978]]] +,["Development/Shake/Internal/Paths.hi",0.0004,0,0,[[446]]] +,["Development/Shake/Internal/Paths.o",0.0003,0,0,[[446]]] +,["../../src/General/Binary.hs",0.0003,0,0] +,["General/Binary.dep",0.0082,0,0,[[449],[239],[188],[247],[117],[136],[242],[12],[5],[3],[403],[291],[42],[128],[413]]] +,["General/Binary.deps",0.0022,0,0,[[450],[249]]] +,["General/Binary.o General/Binary.hi",0.897,0,0,[[451],[449,251],[143],[193]],[["ghc",1.8267,2.7129]]] +,["General/Binary.hi",0.0003,0,0,[[452]]] +,["General/Binary.o",0.0003,0,0,[[452]]] +,["Development/Shake/Internal/FileName.deps",0.0072,0,0,[[283],[249,451]]] +,["Development/Shake/Internal/FileName.o Development/Shake/Internal/FileName.hi",0.7527,0,0,[[455],[67,251,453],[143],[193]],[["ghc",3.7574,4.5026]]] +,["Development/Shake/Internal/FileName.hi",0.0006,0,0,[[456]]] +,["Development/Shake/Internal/FileName.o",0.001,0,0,[[456]]] +,["Development/Shake/Internal/FileInfo.deps",0.0056,0,0,[[395],[249,455,347]]] +,["Development/Shake/Internal/FileInfo.o Development/Shake/Internal/FileInfo.hi",0.8833,0,0,[[459],[246,251,281,350,457,453,229,341],[143],[193]],[["ghc",6.0642,6.9357]]] +,["Development/Shake/Internal/FileInfo.o",0.0003,0,0,[[460]]] +,["Development/Shake/Internal/FileInfo.hi",0.0007,0,0,[[460]]] +,["Development/Shake/Internal/History/Serialise.deps",0.0068,0,0,[[430],[439,339,451,390,355,459,86]]] +,["Development/Shake/Internal/History/Serialise.o Development/Shake/Internal/History/Serialise.hi",0.9927,0,0,[[463],[245,251,281,350,462,457,442,195,357,453,229,341,393,389],[143],[193]],[["ghc",7.9511,8.9334]]] +,["Development/Shake/Internal/History/Serialise.hi",0.0004,0,0,[[464]]] +,["Development/Shake/Internal/History/Serialise.o",0.0005,0,0,[[464]]] +,["Development/Shake/Internal/History/Server.deps",0.0068,0,0,[[376],[439,463,355,451,339,459,86,221]]] +,["Development/Shake/Internal/History/Server.o Development/Shake/Internal/History/Server.hi",0.5586,0,0,[[467],[243,251,281,350,462,457,442,223,465,195,357,453,229,341,393,389],[143],[193]],[["ghc",8.943,9.4933]]] +,["Development/Shake/Internal/History/Server.hi",0.0005,0,0,[[468]]] +,["Development/Shake/Internal/History/Server.o",0.0005,0,0,[[468]]] +,["Development/Shake/Internal/History/Cloud.deps",0.0065,0,0,[[330],[355,86,221,467,439,272,451,339,333]]] +,["Development/Shake/Internal/History/Cloud.o Development/Shake/Internal/History/Cloud.hi",0.8027,0,0,[[471],[71,251,281,350,462,457,442,223,465,469,195,357,453,229,341,274,393,389,335],[143],[193]],[["ghc",9.5012,10.2923]]] +,["Development/Shake/Internal/History/Cloud.hi",0.0005,0,0,[[472]]] +,["Development/Shake/Internal/History/Cloud.o",0.001,0,0,[[472]]] +,["General/Chunks.deps",0.0086,0,0,[[402],[451,339,227,367]]] +,["General/Chunks.o General/Chunks.hi",0.7743,0,0,[[475],[77,251,281,453,229,341,369],[143],[193]],[["ghc",6.9468,7.7111]]] +,["General/Chunks.hi",0.0008,0,0,[[476]]] +,["General/Chunks.o",0.0003,0,0,[[476]]] +,["Development/Shake/Internal/Core/Storage.deps",0.0071,0,0,[[401],[475,227,451,386,433,347,363,397,390,249,339]]] +,["Development/Shake/Internal/Core/Storage.o 
Development/Shake/Internal/Core/Storage.hi",0.889,0,0,[[479],[25,251,281,199,350,354,435,453,477,229,341,400,393,389,369,366],[143],[193]],[["ghc",9.0672,9.9391]]] +,["Development/Shake/Internal/Core/Storage.hi",0.0003,0,0,[[480]]] +,["Development/Shake/Internal/Core/Storage.o",0.0004,0,0,[[480]]] +,["Development/Shake/Internal/History/Shared.deps",0.0081,0,0,[[325],[355,86,343,249,451,339,475,459,333,455]]] +,["Development/Shake/Internal/History/Shared.o Development/Shake/Internal/History/Shared.hi",0.856,0,0,[[483],[16,251,281,350,462,457,345,195,357,453,477,229,341,369,335],[143],[193]],[["ghc",7.7229,8.5677]]] +,["Development/Shake/Internal/History/Shared.hi",0.0004,0,0,[[484]]] +,["Development/Shake/Internal/History/Shared.o",0.0006,0,0,[[484]]] +,["Development/Shake/Internal/Core/Types.deps",0.0113,0,0,[[437],[451,339,483,471,86,333,347,214,386,390,371,425,355,433,249,227]]] +,["Development/Shake/Internal/Core/Types.o Development/Shake/Internal/Core/Types.hi",1.3355,0,0,[[487],[10,251,281,199,428,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,374,369,366,217,335],[143],[193]],[["ghc",10.3035,11.6223]]] +,["Development/Shake/Internal/Core/Types.o",0.0005,0,0,[[488]]] +,["Development/Shake/Internal/Core/Types.hi",0.0004,0,0,[[488]]] +,["Development/Shake/Internal/Profile.deps",0.0051,0,0,[[323],[186,339,347,487,355,390,445,249,386]]] +,["Development/Shake/Internal/Profile.o Development/Shake/Internal/Profile.hi",0.9127,0,0,[[491],[9,251,281,199,428,490,350,462,457,354,442,473,223,465,469,485,345,195,435,447,357,453,477,229,341,274,393,389,374,205,369,366,217,335,310],[143],[193]],[["ghc",11.6373,12.5327]]] +,["Development/Shake/Internal/Profile.hi",0.0005,0,0,[[492]]] +,["Development/Shake/Internal/Profile.o",0.0005,0,0,[[492]]] +,["Development/Shake/Internal/Core/Pool.deps",0.0059,0,0,[[315],[371,487,425,272]]] +,["Development/Shake/Internal/Core/Pool.o Development/Shake/Internal/Core/Pool.hi",0.5991,0,0,[[495],[20,251,281,199,428,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,374,369,366,217,335],[143],[193]],[["ghc",11.6377,12.2258]]] +,["Development/Shake/Internal/Core/Pool.hi",0.0004,0,0,[[496]]] +,["Development/Shake/Internal/Core/Pool.o",0.0007,0,0,[[496]]] +,["Development/Shake/Internal/Progress.deps",0.0064,0,0,[[321],[433,487,186,381,339,445,390]]] +,["Development/Shake/Internal/Progress.o Development/Shake/Internal/Progress.hi",1.0471,0,0,[[499],[125,251,281,199,428,490,350,462,457,354,442,473,223,465,469,485,345,195,435,447,357,453,477,229,384,341,274,393,389,374,205,369,366,217,335,310],[143],[193]],[["ghc",11.6377,12.6697]]] +,["Development/Shake/Internal/Progress.hi",0.0007,0,0,[[500]]] +,["Development/Shake/Internal/Progress.o",0.0003,0,0,[[500]]] +,["Development/Shake/Internal/CompactUI.deps",0.0063,0,0,[[298],[154,433,499,339,367,381]]] +,["Development/Shake/Internal/CompactUI.o Development/Shake/Internal/CompactUI.hi",0.6297,0,0,[[503],[113,251,281,199,428,490,350,462,457,354,442,473,223,465,469,485,345,195,435,447,501,357,453,477,229,384,341,274,393,389,374,205,369,366,217,335,310],[143],[193]],[["ghc",12.6827,13.3019]]] +,["Development/Shake/Internal/CompactUI.o",0.0003,0,0,[[504]]] +,["Development/Shake/Internal/CompactUI.hi",0.0004,0,0,[[504]]] +,["Development/Shake/Internal/Core/Rules.deps",0.0081,0,0,[[431],[249,451,339,214,415,487,425,355,433,347]]] +,["Development/Shake/Internal/Core/Rules.o 
Development/Shake/Internal/Core/Rules.hi",1.0384,0,0,[[507],[40,251,281,199,428,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",11.6377,12.6618]]] +,["Development/Shake/Internal/Core/Rules.hi",0.0008,0,0,[[508]]] +,["Development/Shake/Internal/Core/Rules.o",0.0003,0,0,[[508]]] +,["Development/Shake/Internal/Core/Action.deps",0.0094,0,0,[[337],[339,390,386,249,425,483,371,487,507,495,355,459,455,433,347,227,272]]] +,["Development/Shake/Internal/Core/Action.o Development/Shake/Internal/Core/Action.hi",1.2343,0,0,[[511],[263,251,281,199,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",12.6736,13.8914]]] +,["Development/Shake/Internal/Core/Action.o",0.0007,0,0,[[512]]] +,["Development/Shake/Internal/Core/Action.hi",0.0003,0,0,[[512]]] +,["Development/Shake/Internal/Resource.deps",0.0069,0,0,[[331],[272,420,371,511,487,425,495]]] +,["Development/Shake/Internal/Resource.o Development/Shake/Internal/Resource.hi",0.7486,0,0,[[515],[108,251,281,199,514,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,422,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",13.908,14.6456]]] +,["Development/Shake/Internal/Resource.o",0.0003,0,0,[[516]]] +,["Development/Shake/Internal/Resource.hi",0.0003,0,0,[[516]]] +,["Development/Shake/Internal/Core/Run.deps",0.0097,0,0,[[328],[451,249,479,483,471,390,386,214,333,487,511,507,371,499,355,491,433,347,363,367,339,227]]] +,["Development/Shake/Internal/Core/Run.o Development/Shake/Internal/Core/Run.hi",1.1905,0,0,[[519],[141,251,281,199,514,428,497,509,481,490,350,462,457,354,442,473,223,465,469,485,345,195,435,447,493,501,357,453,477,229,384,341,274,400,393,389,418,374,205,369,366,217,335,310],[143],[193]],[["ghc",13.9076,15.08]]] +,["Development/Shake/Internal/Core/Run.hi",0.0004,0,0,[[520]]] +,["Development/Shake/Internal/Core/Run.o",0.0004,0,0,[[520]]] +,["Development/Shake/Internal/Core/Build.deps",0.0083,0,0,[[318],[249,371,355,347,487,511,483,471,433,425,333,339,386,390,507]]] +,["Development/Shake/Internal/Core/Build.o Development/Shake/Internal/Core/Build.hi",1.0772,0,0,[[523],[169,251,281,199,514,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",13.908,14.9701]]] +,["Development/Shake/Internal/Core/Build.hi",0.0004,0,0,[[524]]] +,["Development/Shake/Internal/Core/Build.o",0.0007,0,0,[[524]]] +,["Development/Shake/Internal/Rules/Oracle.deps",0.0066,0,0,[[314],[487,507,433,523,355,249,451,339]]] +,["Development/Shake/Internal/Rules/Oracle.o Development/Shake/Internal/Rules/Oracle.hi",0.9125,0,0,[[527],[123,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",14.9848,15.8686]]] +,["Development/Shake/Internal/Rules/Oracle.o",0.0005,0,0,[[528]]] +,["Development/Shake/Internal/Rules/Oracle.hi",0.0006,0,0,[[528]]] +,["Development/Shake/Internal/Rules/Rerun.deps",0.0067,0,0,[[313],[507,487,523,511,249,451]]] +,["Development/Shake/Internal/Rules/Rerun.o Development/Shake/Internal/Rules/Rerun.hi",0.9982,0,0,[[531],[101,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",14.9844,15.9726]]] 
+,["Development/Shake/Internal/Rules/Rerun.hi",0.0004,0,0,[[532]]] +,["Development/Shake/Internal/Rules/Rerun.o",0.0005,0,0,[[532]]] +,["Development/Shake/Internal/Rules/File.deps",0.0142,0,0,[[394],[451,339,487,507,523,511,455,531,249,278,351,459,433,347]]] +,["Development/Shake/Internal/Rules/File.o Development/Shake/Internal/Rules/File.hi",1.0247,0,0,[[535],[55,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,533,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",15.9807,16.9911]]] +,["Development/Shake/Internal/Rules/File.o",0.0004,0,0,[[536]]] +,["Development/Shake/Internal/Rules/File.hi",0.0004,0,0,[[536]]] +,["Development/Shake/Internal/Rules/OrderOnly.deps",0.0073,0,0,[[316],[487,511,535]]] +,["Development/Shake/Internal/Rules/OrderOnly.o Development/Shake/Internal/Rules/OrderOnly.hi",0.6053,0,0,[[539],[176,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,538,533,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",17.0047,17.6002]]] +,["Development/Shake/Internal/Rules/OrderOnly.o",0.0003,0,0,[[540]]] +,["Development/Shake/Internal/Rules/OrderOnly.hi",0.0005,0,0,[[540]]] +,["Development/Shake/Internal/Derived.deps",0.0097,0,0,[[322],[347,515,487,511,507,433,535,339]]] +,["Development/Shake/Internal/Derived.o Development/Shake/Internal/Derived.hi",0.8974,0,0,[[543],[53,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,518,538,533,357,422,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",17.0048,17.8887]]] +,["Development/Shake/Internal/Derived.hi",0.0004,0,0,[[544]]] +,["Development/Shake/Internal/Derived.o",0.0005,0,0,[[544]]] +,["Development/Shake/Command.deps",0.007,0,0,[[429],[339,359,154,511,487,278,351,433,535,543]]] +,["Development/Shake/Command.o Development/Shake/Command.hi",1.3214,0,0,[[547],[208,251,281,199,514,525,428,497,509,490,545,350,462,457,354,442,473,223,465,469,485,345,195,435,518,538,533,357,422,453,477,229,341,274,393,389,418,374,362,369,366,217,335],[143],[193]],[["ghc",17.9004,19.2094]]] +,["Development/Shake/Command.o",0.0004,0,0,[[548]]] +,["Development/Shake/Command.hi",0.0003,0,0,[[548]]] +,["Development/Shake/Internal/Demo.deps",0.0056,0,0,[[300],[445,547,339,278]]] +,["Development/Shake/Internal/Demo.o Development/Shake/Internal/Demo.hi",0.8708,0,0,[[551],[145,251,550,281,199,514,525,428,497,509,490,545,350,462,457,354,442,473,223,465,469,485,345,195,435,447,518,538,533,357,422,453,477,229,341,274,393,389,418,374,362,369,366,217,335,310],[143],[193]],[["ghc",19.2207,20.0818]]] +,["Development/Shake/Internal/Demo.hi",0.0004,0,0,[[552]]] +,["Development/Shake/Internal/Demo.o",0.0004,0,0,[[552]]] +,["Development/Shake/Internal/Rules/Files.deps",0.0132,0,0,[[317],[451,511,487,523,507,347,339,455,249,531,535,351,278,459,433]]] +,["Development/Shake/Internal/Rules/Files.o Development/Shake/Internal/Rules/Files.hi",0.997,0,0,[[555],[110,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,538,533,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",17.0044,17.9879]]] +,["Development/Shake/Internal/Rules/Files.hi",0.001,0,0,[[556]]] +,["Development/Shake/Internal/Rules/Files.o",0.0004,0,0,[[556]]] +,["Development/Shake/Internal/Rules/Directory.deps",0.0074,0,0,[[320],[487,511,507,523,355,249,278,351,339,451]]] +,["Development/Shake/Internal/Rules/Directory.o 
Development/Shake/Internal/Rules/Directory.hi",1.0173,0,0,[[559],[92,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",15.0203,15.9896]]] +,["Development/Shake/Internal/Rules/Directory.hi",0.0005,0,0,[[560]]] +,["Development/Shake/Internal/Rules/Directory.o",0.0005,0,0,[[560]]] +,["Development/Shake/Internal/Rules/Default.deps",0.0058,0,0,[[286],[507,559,535,555,531]]] +,["Development/Shake/Internal/Rules/Default.o Development/Shake/Internal/Rules/Default.hi",0.5379,0,0,[[563],[124,251,281,199,514,525,428,497,509,490,350,462,457,354,442,473,223,465,469,485,345,195,435,561,538,557,533,357,453,477,229,341,274,393,389,418,374,369,366,217,335],[143],[193]],[["ghc",17.999,18.5225]]] +,["Development/Shake/Internal/Rules/Default.hi",0.0009,0,0,[[564]]] +,["Development/Shake/Internal/Rules/Default.o",0.0004,0,0,[[564]]] +,["Development/Shake/Database.deps",0.0039,0,0,[[378],[227,347,433,507,519,487,563]]] +,["Development/Shake/Database.o Development/Shake/Database.hi",0.8172,0,0,[[567],[377,251,281,199,514,525,428,497,509,521,481,490,350,462,457,354,442,473,223,465,469,485,345,195,435,447,493,501,565,561,538,557,533,357,453,477,229,384,341,274,400,393,389,418,374,205,369,366,217,335,310],[143],[193]],[["ghc",18.5297,19.3381]]] +,["Development/Shake/Database.o",0.0004,0,0,[[568]]] +,["Development/Shake/Database.hi",0.0003,0,0,[[568]]] +,["Development/Shake/Internal/Args.deps",0.0095,0,0,[[294],[445,433,507,347,503,551,511,278,535,499,567,363,367,234,381]]] +,["Development/Shake/Internal/Args.o Development/Shake/Internal/Args.hi",1.0868,0,0,[[571],[26,251,550,570,281,199,506,514,525,428,497,509,521,481,490,553,545,350,462,457,354,442,473,223,465,469,485,345,195,435,447,493,501,518,565,561,538,557,533,357,422,453,477,229,384,341,274,400,237,393,389,418,374,362,205,369,366,217,335,310],[143],[193]],[["ghc",20.0902,21.1625]]] +,["Development/Shake/Internal/Args.o",0.0004,0,0,[[572]]] +,["Development/Shake/Internal/Args.hi",0.0003,0,0,[[572]]] +,["Development/Shake.deps",0.0108,0,0,[[319],[355,433,487,511,507,515,543,347,499,571,547,351,559,535,555,527,539,531]]] +,["Development/Shake.o Development/Shake.hi",0.6938,0,0,[[575],[149,251,550,570,281,574,199,506,514,525,428,497,509,521,481,490,553,545,350,462,457,354,442,473,223,465,469,485,345,195,435,447,493,501,518,565,561,538,557,530,542,533,357,422,453,477,229,384,341,274,400,237,393,389,418,374,362,205,369,366,217,335,310],[143],[193]],[["ghc",21.1754,21.8577]]] +,["Development/Shake.hi",0.0003,0,0,[[576]]] +,["Development/Shake.o",0.0003,0,0,[[576]]] +,["Development/Ninja/All.deps",0.0084,0,0,[[302],[255,259,409,575,249,339,363,163,455,459,347,535,539]]] +,["Development/Ninja/All.o Development/Ninja/All.hi",1.0557,0,0,[[579],[1,258,408,411,261,577,251,550,570,281,574,199,506,514,525,428,497,509,521,481,490,553,545,350,462,457,354,442,473,223,465,469,485,345,195,435,447,493,501,518,565,561,538,557,530,542,533,357,422,453,477,229,384,341,274,400,237,393,389,418,201,374,362,205,369,366,217,335,310],[143],[193]],[["ghc",21.8681,22.9021]]] +,["Development/Ninja/All.hi",0.0003,0,0,[[580]]] +,["Development/Ninja/All.o",0.0005,0,0,[[580]]] +,["Run.deps",0.006,0,0,[[299],[579,575,278,363,339,234]]] +,["Run.o 
Run.hi",0.7432,0,0,[[583],[70,581,258,408,411,261,577,251,550,570,281,574,199,506,514,525,428,497,509,521,481,490,553,545,350,462,457,354,442,473,223,465,469,485,345,195,435,447,493,501,518,565,561,538,557,530,542,533,357,422,453,477,229,384,341,274,400,237,393,389,418,201,374,362,205,369,366,217,335,310],[143],[193]],[["ghc",22.916,23.6482]]] +,["Run.o",0.0003,0,0,[[584]]] +,["Main.exe",3.8846,0,0,[[583],[585,582,257,407,412,262,578,252,549,569,280,573,198,505,513,526,427,498,510,522,482,489,554,546,349,461,458,353,441,474,224,466,470,486,346,196,436,448,494,502,517,566,562,537,558,529,541,534,358,423,454,478,230,383,342,275,399,236,392,388,417,202,373,361,204,370,365,216,336,309],[143],[193]],[["ghc",23.6565,27.5353]]] +,["Root",0,0,0,[[586]],[["",27.538,27.538]]] +] diff --git a/hls-graph/html/data/progress-data.js b/hls-graph/html/data/progress-data.js new file mode 100644 index 0000000000..ea20c00b2d --- /dev/null +++ b/hls-graph/html/data/progress-data.js @@ -0,0 +1,183 @@ +var progress = +[{"name":"self-zero-j2.prog", "values": + [{"idealSecs":76.7, "idealPerc":1.3, "actualSecs":195.6, "actualPerc":0.5} + ,{"idealSecs":74.0, "idealPerc":4.8, "actualSecs":563.2, "actualPerc":0.7} + ,{"idealSecs":72.7, "idealPerc":6.5, "actualSecs":183.8, "actualPerc":2.5} + ,{"idealSecs":71.4, "idealPerc":8.2, "actualSecs":130.9, "actualPerc":4.3} + ,{"idealSecs":70.1, "idealPerc":9.8, "actualSecs":109.3, "actualPerc":6.1} + ,{"idealSecs":68.8, "idealPerc":11.4, "actualSecs":97.2, "actualPerc":7.8} + ,{"idealSecs":67.6, "idealPerc":13.0, "actualSecs":89.6, "actualPerc":9.6} + ,{"idealSecs":66.3, "idealPerc":14.7, "actualSecs":84.0, "actualPerc":11.3} + ,{"idealSecs":65.0, "idealPerc":16.3, "actualSecs":79.6, "actualPerc":13.0} + ,{"idealSecs":63.8, "idealPerc":17.9, "actualSecs":75.9, "actualPerc":14.8} + ,{"idealSecs":62.5, "idealPerc":19.6, "actualSecs":72.7, "actualPerc":16.5} + ,{"idealSecs":61.2, "idealPerc":21.2, "actualSecs":70.0, "actualPerc":18.2} + ,{"idealSecs":60.0, "idealPerc":22.8, "actualSecs":67.5, "actualPerc":20.0} + ,{"idealSecs":58.7, "idealPerc":24.5, "actualSecs":65.1, "actualPerc":21.8} + ,{"idealSecs":57.4, "idealPerc":26.1, "actualSecs":62.5, "actualPerc":23.6} + ,{"idealSecs":56.2, "idealPerc":27.7, "actualSecs":60.5, "actualPerc":25.4} + ,{"idealSecs":54.9, "idealPerc":29.3, "actualSecs":58.6, "actualPerc":27.1} + ,{"idealSecs":53.6, "idealPerc":31.0, "actualSecs":57.0, "actualPerc":28.8} + ,{"idealSecs":52.1, "idealPerc":32.9, "actualSecs":55.0, "actualPerc":30.9} + ,{"idealSecs":50.5, "idealPerc":35.0, "actualSecs":53.6, "actualPerc":32.8} + ,{"idealSecs":49.3, "idealPerc":36.6, "actualSecs":51.5, "actualPerc":34.7} + ,{"idealSecs":48.0, "idealPerc":38.2, "actualSecs":49.4, "actualPerc":36.7} + ,{"idealSecs":46.7, "idealPerc":39.8, "actualSecs":47.8, "actualPerc":38.5} + ,{"idealSecs":45.5, "idealPerc":41.5, "actualSecs":46.2, "actualPerc":40.2} + ,{"idealSecs":44.2, "idealPerc":43.1, "actualSecs":44.7, "actualPerc":42.0} + ,{"idealSecs":43.2, "idealPerc":44.4, "actualSecs":44.7, "actualPerc":42.0} + ,{"idealSecs":41.9, "idealPerc":46.1, "actualSecs":42.6, "actualPerc":44.9} + ,{"idealSecs":40.7, "idealPerc":47.7, "actualSecs":40.0, "actualPerc":47.3} + ,{"idealSecs":39.4, "idealPerc":49.3, "actualSecs":38.5, "actualPerc":49.1} + ,{"idealSecs":38.1, "idealPerc":51.0, "actualSecs":37.1, "actualPerc":50.9} + ,{"idealSecs":36.7, "idealPerc":52.8, "actualSecs":35.7, "actualPerc":52.8} + ,{"idealSecs":35.4, "idealPerc":54.4, "actualSecs":33.8, "actualPerc":54.9} + 
,{"idealSecs":34.2, "idealPerc":56.0, "actualSecs":32.3, "actualPerc":56.7} + ,{"idealSecs":32.4, "idealPerc":58.3, "actualSecs":29.1, "actualPerc":60.2} + ,{"idealSecs":31.4, "idealPerc":59.6, "actualSecs":29.1, "actualPerc":60.2} + ,{"idealSecs":29.7, "idealPerc":61.7, "actualSecs":28.0, "actualPerc":62.6} + ,{"idealSecs":28.5, "idealPerc":63.4, "actualSecs":28.0, "actualPerc":62.6} + ,{"idealSecs":27.2, "idealPerc":65.0, "actualSecs":27.4, "actualPerc":64.4} + ,{"idealSecs":26.0, "idealPerc":66.6, "actualSecs":24.8, "actualPerc":67.2} + ,{"idealSecs":24.7, "idealPerc":68.2, "actualSecs":23.3, "actualPerc":69.0} + ,{"idealSecs":23.4, "idealPerc":69.9, "actualSecs":21.9, "actualPerc":70.8} + ,{"idealSecs":22.1, "idealPerc":71.5, "actualSecs":20.5, "actualPerc":72.7} + ,{"idealSecs":20.8, "idealPerc":73.2, "actualSecs":18.7, "actualPerc":74.9} + ,{"idealSecs":19.5, "idealPerc":74.9, "actualSecs":17.3, "actualPerc":76.7} + ,{"idealSecs":18.2, "idealPerc":76.5, "actualSecs":15.5, "actualPerc":79.0} + ,{"idealSecs":16.5, "idealPerc":78.7, "actualSecs":12.8, "actualPerc":82.4} + ,{"idealSecs":15.5, "idealPerc":80.0, "actualSecs":12.8, "actualPerc":82.4} + ,{"idealSecs":13.9, "idealPerc":82.1, "actualSecs":11.7, "actualPerc":84.3} + ,{"idealSecs":10.4, "idealPerc":86.6, "actualSecs":10.9, "actualPerc":85.9} + ,{"idealSecs":8.6, "idealPerc":89.0, "actualSecs":7.1, "actualPerc":90.5} + ,{"idealSecs":6.8, "idealPerc":91.3, "actualSecs":5.9, "actualPerc":92.2} + ,{"idealSecs":5.0, "idealPerc":93.5, "actualSecs":4.8, "actualPerc":93.8} + ,{"idealSecs":4.0, "idealPerc":94.8, "actualSecs":4.8, "actualPerc":93.8} + ,{"idealSecs":3.0, "idealPerc":96.1, "actualSecs":4.8, "actualPerc":93.8} + ,{"idealSecs":2.0, "idealPerc":97.4, "actualSecs":4.8, "actualPerc":93.8} + ,{"idealSecs":1.0, "idealPerc":98.7, "actualSecs":4.8, "actualPerc":93.8} + ,{"idealSecs":0.0, "idealPerc":100.0, "actualSecs":4.8, "actualPerc":93.8} + ] +},{"name":"self-rebuild-j2.prog", "values": + [{"idealSecs":79.3, "idealPerc":4.3, "actualSecs":461.8, "actualPerc":0.8} + ,{"idealSecs":78.0, "idealPerc":5.9, "actualSecs":164.3, "actualPerc":2.7} + ,{"idealSecs":76.7, "idealPerc":7.5, "actualSecs":117.9, "actualPerc":4.6} + ,{"idealSecs":75.5, "idealPerc":9.0, "actualSecs":99.1, "actualPerc":6.5} + ,{"idealSecs":74.2, "idealPerc":10.5, "actualSecs":88.3, "actualPerc":8.4} + ,{"idealSecs":72.9, "idealPerc":12.1, "actualSecs":66.6, "actualPerc":12.1} + ,{"idealSecs":71.6, "idealPerc":13.6, "actualSecs":66.6, "actualPerc":12.1} + ,{"idealSecs":70.3, "idealPerc":15.2, "actualSecs":72.7, "actualPerc":14.0} + ,{"idealSecs":69.1, "idealPerc":16.7, "actualSecs":69.2, "actualPerc":15.9} + ,{"idealSecs":67.8, "idealPerc":18.3, "actualSecs":66.3, "actualPerc":17.8} + ,{"idealSecs":66.5, "idealPerc":19.8, "actualSecs":63.7, "actualPerc":19.7} + ,{"idealSecs":65.2, "idealPerc":21.4, "actualSecs":61.2, "actualPerc":21.6} + ,{"idealSecs":63.9, "idealPerc":22.9, "actualSecs":59.2, "actualPerc":23.5} + ,{"idealSecs":62.6, "idealPerc":24.5, "actualSecs":57.2, "actualPerc":25.4} + ,{"idealSecs":61.3, "idealPerc":26.1, "actualSecs":55.1, "actualPerc":27.3} + ,{"idealSecs":59.2, "idealPerc":28.6, "actualSecs":54.0, "actualPerc":29.7} + ,{"idealSecs":57.9, "idealPerc":30.2, "actualSecs":51.2, "actualPerc":32.0} + ,{"idealSecs":56.6, "idealPerc":31.7, "actualSecs":49.5, "actualPerc":33.8} + ,{"idealSecs":55.4, "idealPerc":33.2, "actualSecs":47.7, "actualPerc":35.8} + ,{"idealSecs":53.4, "idealPerc":35.6, "actualSecs":46.1, "actualPerc":38.2} + 
,{"idealSecs":51.8, "idealPerc":37.5, "actualSecs":43.7, "actualPerc":40.8} + ,{"idealSecs":50.5, "idealPerc":39.1, "actualSecs":41.9, "actualPerc":42.8} + ,{"idealSecs":49.3, "idealPerc":40.6, "actualSecs":40.3, "actualPerc":44.7} + ,{"idealSecs":48.0, "idealPerc":42.2, "actualSecs":38.8, "actualPerc":46.6} + ,{"idealSecs":46.7, "idealPerc":43.7, "actualSecs":37.3, "actualPerc":48.5} + ,{"idealSecs":45.4, "idealPerc":45.3, "actualSecs":35.9, "actualPerc":50.3} + ,{"idealSecs":44.0, "idealPerc":47.0, "actualSecs":34.5, "actualPerc":52.3} + ,{"idealSecs":42.6, "idealPerc":48.6, "actualSecs":33.0, "actualPerc":54.3} + ,{"idealSecs":41.3, "idealPerc":50.2, "actualSecs":31.5, "actualPerc":56.3} + ,{"idealSecs":39.9, "idealPerc":51.8, "actualSecs":30.1, "actualPerc":58.2} + ,{"idealSecs":38.7, "idealPerc":53.4, "actualSecs":28.7, "actualPerc":60.0} + ,{"idealSecs":37.1, "idealPerc":55.3, "actualSecs":27.8, "actualPerc":61.6} + ,{"idealSecs":36.1, "idealPerc":56.5, "actualSecs":26.2, "actualPerc":63.5} + ,{"idealSecs":35.1, "idealPerc":57.7, "actualSecs":26.2, "actualPerc":63.5} + ,{"idealSecs":32.1, "idealPerc":61.3, "actualSecs":26.1, "actualPerc":65.7} + ,{"idealSecs":30.1, "idealPerc":63.7, "actualSecs":24.8, "actualPerc":67.7} + ,{"idealSecs":28.8, "idealPerc":65.3, "actualSecs":22.1, "actualPerc":70.6} + ,{"idealSecs":27.5, "idealPerc":66.8, "actualSecs":20.7, "actualPerc":72.4} + ,{"idealSecs":26.5, "idealPerc":68.0, "actualSecs":19.2, "actualPerc":74.2} + ,{"idealSecs":23.9, "idealPerc":71.2, "actualSecs":18.4, "actualPerc":75.9} + ,{"idealSecs":22.6, "idealPerc":72.8, "actualSecs":17.0, "actualPerc":77.7} + ,{"idealSecs":21.3, "idealPerc":74.3, "actualSecs":15.4, "actualPerc":79.8} + ,{"idealSecs":20.0, "idealPerc":75.9, "actualSecs":14.0, "actualPerc":81.6} + ,{"idealSecs":17.8, "idealPerc":78.5, "actualSecs":11.6, "actualPerc":84.6} + ,{"idealSecs":16.8, "idealPerc":79.7, "actualSecs":11.6, "actualPerc":84.6} + ,{"idealSecs":15.3, "idealPerc":81.6, "actualSecs":10.5, "actualPerc":86.4} + ,{"idealSecs":11.7, "idealPerc":85.9, "actualSecs":9.7, "actualPerc":88.0} + ,{"idealSecs":10.7, "idealPerc":87.1, "actualSecs":7.9, "actualPerc":90.1} + ,{"idealSecs":9.4, "idealPerc":88.7, "actualSecs":6.3, "actualPerc":92.0} + ,{"idealSecs":7.2, "idealPerc":91.3, "actualSecs":4.7, "actualPerc":94.1} + ,{"idealSecs":5.0, "idealPerc":93.9, "actualSecs":3.8, "actualPerc":95.3} + ,{"idealSecs":4.0, "idealPerc":95.2, "actualSecs":3.8, "actualPerc":95.3} + ,{"idealSecs":3.0, "idealPerc":96.4, "actualSecs":3.8, "actualPerc":95.3} + ,{"idealSecs":2.0, "idealPerc":97.6, "actualSecs":3.8, "actualPerc":95.3} + ,{"idealSecs":1.0, "idealPerc":98.8, "actualSecs":3.8, "actualPerc":95.3} + ,{"idealSecs":0.0, "idealPerc":100.0, "actualSecs":3.8, "actualPerc":95.3} + ] +},{"name":"self-clean-j2.prog", "values": + [{"idealSecs":87.2, "idealPerc":1.3, "actualSecs":0.6, "actualPerc":63.7} + ,{"idealSecs":85.8, "idealPerc":2.8, "actualSecs":4.8, "actualPerc":35.3} + ,{"idealSecs":83.1, "idealPerc":5.8, "actualSecs":21.5, "actualPerc":19.1} + ,{"idealSecs":81.8, "idealPerc":7.3, "actualSecs":27.1, "actualPerc":18.5} + ,{"idealSecs":80.6, "idealPerc":8.8, "actualSecs":33.1, "actualPerc":18.1} + ,{"idealSecs":79.2, "idealPerc":10.3, "actualSecs":32.8, "actualPerc":20.7} + ,{"idealSecs":77.9, "idealPerc":11.7, "actualSecs":38.2, "actualPerc":20.4} + ,{"idealSecs":76.6, "idealPerc":13.2, "actualSecs":38.1, "actualPerc":22.5} + ,{"idealSecs":75.3, "idealPerc":14.7, "actualSecs":41.0, "actualPerc":23.0} + ,{"idealSecs":74.1, 
"idealPerc":16.1, "actualSecs":38.6, "actualPerc":25.9} + ,{"idealSecs":72.8, "idealPerc":17.6, "actualSecs":39.4, "actualPerc":27.3} + ,{"idealSecs":71.5, "idealPerc":19.0, "actualSecs":43.4, "actualPerc":27.0} + ,{"idealSecs":70.2, "idealPerc":20.5, "actualSecs":40.1, "actualPerc":30.1} + ,{"idealSecs":68.9, "idealPerc":22.0, "actualSecs":35.9, "actualPerc":34.1} + ,{"idealSecs":66.8, "idealPerc":24.3, "actualSecs":41.6, "actualPerc":33.1} + ,{"idealSecs":65.5, "idealPerc":25.8, "actualSecs":43.5, "actualPerc":33.4} + ,{"idealSecs":64.2, "idealPerc":27.3, "actualSecs":46.4, "actualPerc":33.3} + ,{"idealSecs":62.7, "idealPerc":28.9, "actualSecs":40.7, "actualPerc":37.7} + ,{"idealSecs":61.4, "idealPerc":30.4, "actualSecs":43.5, "actualPerc":37.3} + ,{"idealSecs":60.1, "idealPerc":32.0, "actualSecs":46.1, "actualPerc":37.2} + ,{"idealSecs":58.8, "idealPerc":33.4, "actualSecs":46.4, "actualPerc":38.1} + ,{"idealSecs":57.5, "idealPerc":34.9, "actualSecs":48.6, "actualPerc":38.0} + ,{"idealSecs":56.5, "idealPerc":36.1, "actualSecs":48.6, "actualPerc":38.0} + ,{"idealSecs":55.2, "idealPerc":37.5, "actualSecs":50.1, "actualPerc":39.2} + ,{"idealSecs":53.8, "idealPerc":39.0, "actualSecs":53.4, "actualPerc":38.5} + ,{"idealSecs":52.5, "idealPerc":40.5, "actualSecs":37.4, "actualPerc":48.1} + ,{"idealSecs":51.2, "idealPerc":42.0, "actualSecs":39.2, "actualPerc":47.9} + ,{"idealSecs":49.9, "idealPerc":43.5, "actualSecs":30.8, "actualPerc":54.8} + ,{"idealSecs":48.6, "idealPerc":45.0, "actualSecs":28.5, "actualPerc":57.6} + ,{"idealSecs":46.6, "idealPerc":47.2, "actualSecs":28.3, "actualPerc":58.9} + ,{"idealSecs":45.3, "idealPerc":48.7, "actualSecs":27.5, "actualPerc":60.4} + ,{"idealSecs":43.9, "idealPerc":50.2, "actualSecs":26.3, "actualPerc":62.2} + ,{"idealSecs":42.9, "idealPerc":51.4, "actualSecs":24.9, "actualPerc":64.0} + ,{"idealSecs":41.2, "idealPerc":53.3, "actualSecs":23.9, "actualPerc":65.8} + ,{"idealSecs":40.2, "idealPerc":54.4, "actualSecs":23.9, "actualPerc":65.8} + ,{"idealSecs":38.4, "idealPerc":56.5, "actualSecs":23.5, "actualPerc":67.6} + ,{"idealSecs":37.4, "idealPerc":57.6, "actualSecs":23.5, "actualPerc":67.6} + ,{"idealSecs":36.4, "idealPerc":58.8, "actualSecs":23.5, "actualPerc":67.6} + ,{"idealSecs":33.4, "idealPerc":62.2, "actualSecs":24.5, "actualPerc":69.0} + ,{"idealSecs":32.1, "idealPerc":63.7, "actualSecs":25.2, "actualPerc":68.9} + ,{"idealSecs":30.8, "idealPerc":65.2, "actualSecs":25.0, "actualPerc":69.5} + ,{"idealSecs":29.5, "idealPerc":66.6, "actualSecs":25.4, "actualPerc":69.6} + ,{"idealSecs":28.2, "idealPerc":68.1, "actualSecs":19.1, "actualPerc":75.7} + ,{"idealSecs":26.3, "idealPerc":70.3, "actualSecs":19.9, "actualPerc":75.5} + ,{"idealSecs":25.0, "idealPerc":71.7, "actualSecs":18.2, "actualPerc":77.5} + ,{"idealSecs":23.7, "idealPerc":73.2, "actualSecs":17.3, "actualPerc":78.7} + ,{"idealSecs":22.4, "idealPerc":74.7, "actualSecs":13.6, "actualPerc":82.7} + ,{"idealSecs":20.2, "idealPerc":77.2, "actualSecs":8.8, "actualPerc":88.5} + ,{"idealSecs":19.2, "idealPerc":78.3, "actualSecs":8.8, "actualPerc":88.5} + ,{"idealSecs":17.5, "idealPerc":80.2, "actualSecs":7.5, "actualPerc":90.4} + ,{"idealSecs":13.9, "idealPerc":84.2, "actualSecs":6.1, "actualPerc":92.4} + ,{"idealSecs":12.9, "idealPerc":85.4, "actualSecs":4.6, "actualPerc":94.3} + ,{"idealSecs":11.4, "idealPerc":87.0, "actualSecs":3.1, "actualPerc":96.1} + ,{"idealSecs":9.2, "idealPerc":89.6, "actualSecs":1.6, "actualPerc":98.1} + ,{"idealSecs":7.0, "idealPerc":92.0, "actualSecs":0.5, 
"actualPerc":99.4} + ,{"idealSecs":6.0, "idealPerc":93.2, "actualSecs":0.5, "actualPerc":99.4} + ,{"idealSecs":5.0, "idealPerc":94.3, "actualSecs":0.5, "actualPerc":99.4} + ,{"idealSecs":4.0, "idealPerc":95.4, "actualSecs":0.5, "actualPerc":99.4} + ,{"idealSecs":3.0, "idealPerc":96.6, "actualSecs":0.5, "actualPerc":99.4} + ,{"idealSecs":2.0, "idealPerc":97.7, "actualSecs":0.5, "actualPerc":99.4} + ,{"idealSecs":1.0, "idealPerc":98.9, "actualSecs":0.5, "actualPerc":99.4} + ,{"idealSecs":0.0, "idealPerc":100.0, "actualSecs":0.5, "actualPerc":99.4} + ] +}] diff --git a/hls-graph/html/profile.html b/hls-graph/html/profile.html new file mode 100644 index 0000000000..8ef85ab061 --- /dev/null +++ b/hls-graph/html/profile.html @@ -0,0 +1,211 @@ + + + + + +Shake report + + + + + + + + + + + + + + + + + + Loading... + + diff --git a/hls-graph/html/shake.js b/hls-graph/html/shake.js new file mode 100644 index 0000000000..03e9f78f71 --- /dev/null +++ b/hls-graph/html/shake.js @@ -0,0 +1,1058 @@ +"use strict"; +function bindPlot(element, data, options) { + const redraw = () => { + if ($(element).is(":visible")) + $.plot($(element), data.get(), options); + }; + window.setTimeout(redraw, 1); + $(window).on("resize", redraw); + data.event(redraw); +} +function varLink(name) { + return React.createElement("a", { href: "https://hackage.haskell.org/package/shake/docs/Development-Shake.html#v:" + name }, + React.createElement("tt", null, name)); +} +function newTable(columns, data, sortColumn, sortDescend) { + const f = (x) => ({ name: x.field, label: x.label, width: x.width, cellClasses: x.alignRight ? "right" : "" }); + const formatters = {}; + for (const c of columns) + formatters[c.field] = c.show || ((x) => x); + const table = new DGTable({ + adjustColumnWidthForSortArrow: false, + cellFormatter: (val, colname) => formatters[colname](val), + columns: columns.map(f), + width: DGTable.Width.SCROLL + }); + $(table.el).css("height", "100%"); + window.setTimeout(() => { + table.render(); + table.tableHeightChanged(); + if (sortColumn) + table.sort(sortColumn, sortDescend); + table.setRows(data.get(), true); + }, 1); + let toRender = false; + data.event(xs => { + table.setRows(xs, true); + if ($(table.el).is(":visible")) + table.render(); + else + toRender = true; + }); + $(window).on("resize", () => { + if ($(table.el).is(":visible")) { + table.tableHeightChanged(); + if (toRender) { + table.render(); + toRender = false; + } + } + }); + return React.createElement("div", { style: "height:100%;width:100%;" }, table.el); +} +// These are global variables mutated/queried by query execution +let environmentAll; // All the profiles +let environmentThis; // The specific profile under test +let environmentGroup; // The group produced as a result +function group(x) { + environmentGroup.push(x); + return true; +} +function leaf() { + return environmentThis.depends.length === 0; +} +function run(i) { + if (i === undefined) + return environmentThis.built; + else + return environmentThis.built === i; +} +function changed() { + return environmentThis.changed === environmentThis.built; +} +function visited(i) { + if (i === undefined) + return environmentThis.visited; + else + return environmentThis.visited === i; +} +function unchanged() { + return !unchanged(); +} +function named(r, groupName) { + if (r === undefined) + return environmentThis.name; + const res = execRegExp(r, environmentThis.name); + if (res === null) { + if (groupName === undefined) + return false; + else { + group(groupName); + 
return true; + } + } + if (res.length !== 1) { + for (let i = 1; i < res.length; i++) + group(res[i]); + } + return true; +} +function command(r, groupName) { + const n = (environmentThis.traces || []).length; + if (r === undefined) + return n === 0 ? "" : environmentThis.traces[0].command; + for (const t of environmentThis.traces) { + const res = execRegExp(r, t.command); + if (res === null) + continue; + if (res.length !== 1) { + for (let j = 1; j < res.length; j++) + group(res[j]); + } + return true; + } + if (groupName === undefined) + return false; + else { + group(groupName); + return true; + } +} +function profileLoaded(profileRaw, buildRaw) { + $(document.body).empty().append(profileRoot(unraw(profileRaw), unrawBuild(buildRaw))); +} +function unraw(xs) { + const ans = xs.map((x, i) => ({ + index: i, + name: x[0], + execution: x[1], + built: x[2], + changed: x[3], + visited: x[4], + depends: x.length > 5 ? x[5] : [], + rdepends: [], + traces: [] + })); + for (const p of ans) + for (const ds of p.depends) + for (const d of ds) + ans[d].rdepends.push(p.index); + return ans; +} +function unrawBuild(b) { + return { dirtyKeys: b.length > 0 ? b[0] : null }; +} +function profileRoot(profile, build) { + const [s, search] = createSearch(profile); + const t = createTabs([["Summary", () => reportSummary(profile, build)], + ["Rules", () => reportRuleTable(profile, search)], + ["Parallelizability", () => reportParallelism(profile)], + ["Details", () => reportDetails(profile, search)] + // , ["Why rebuild", () => reportRebuild(profile, search)] + ]); + return React.createElement("table", { class: "fill" }, + React.createElement("tr", null, + React.createElement("td", { style: "padding-top: 8px; padding-bottom: 8px;" }, + React.createElement("a", { href: "https://shakebuild.com/", style: "font-size: 20px; text-decoration: none; color: #3131a7; font-weight: bold;" }, "Shake profile report"), + React.createElement("span", { style: "color:gray;white-space:pre;" }, + " - generated at ", + generated, + " by hls-graph v", + version))), + React.createElement("tr", null, + React.createElement("td", null, s)), + React.createElement("tr", null, + React.createElement("td", { height: "100%" }, t))); +} +function createTabs(xs) { + const bodies = xs.map(x => { + const el = React.createElement("div", { style: "padding:5px;width:100%;height:100%;min-width:150px;min-height:150px;overflow:auto;display:none;" }); + const upd = lazy(() => $(el).append(x[1]())); + return pair(el, upd); + }); + let lbls = []; + const f = (i) => () => { + bodies[i][1](); + lbls.map((x, j) => $(x).toggleClass("active", i === j)); + bodies.map((x, j) => $(x[0]).toggle(i === j)); + $(window).trigger("resize"); + }; + lbls = xs.map((x, i) => React.createElement("a", { onclick: f(i) }, x[0])); + f(0)(); + return React.createElement("table", { class: "fill" }, + React.createElement("tr", null, + React.createElement("td", null, + React.createElement("table", { width: "100%", style: "border-spacing:0px;" }, + React.createElement("tr", { class: "tabstrip" }, + React.createElement("td", { width: "20", class: "bottom" }, "\u00A0"), + React.createElement("td", { style: "padding:0px;" }, lbls), + React.createElement("td", { width: "100%", class: "bottom" }, "\u00A0"))))), + React.createElement("tr", { height: "100%" }, + React.createElement("td", { style: "background-color:white;" }, bodies.map(fst)))); +} +// A mapping from names (rule names or those matched from rule parts) +// to the indicies in profiles. 
+class Search { + profile; + mapping; + constructor(profile, mapping) { + this.profile = profile; + if (mapping !== undefined) + this.mapping = mapping; + else { + this.mapping = {}; + for (const p of profile) + this.mapping[p.name] = [p.index]; + } + } + forEachProfiles(f) { + for (const s in this.mapping) + f(this.mapping[s].map(i => this.profile[i]), s); + } + forEachProfile(f) { + this.forEachProfiles((ps, group) => ps.forEach(p => f(p, group))); + } + mapProfiles(f) { + const res = []; + this.forEachProfiles((ps, group) => res.push(f(ps, group))); + return res; + } + mapProfile(f) { + const res = []; + this.forEachProfile((p, group) => res.push(f(p, group))); + return res; + } +} +function createSearch(profile) { + const caption = React.createElement("div", null, + "Found ", + profile.length, + " entries, not filtered or grouped."); + const input = React.createElement("input", { id: "search", type: "text", value: "", placeholder: "Filter and group", style: "width: 100%; font-size: 16px; border-radius: 8px; padding: 5px 10px; border: 2px solid #999;" }); + const res = new Prop(new Search(profile)); + $(input).on("change keyup paste", () => { + const s = $(input).val(); + if (s === "") { + res.set(new Search(profile)); + $(caption).text("Found " + profile.length + " entries, not filtered or grouped."); + } + else if (s.indexOf("(") === -1) { + const mapping = {}; + let found = 0; + for (const p of profile) { + if (p.name.indexOf(s) !== -1) { + found++; + mapping[p.name] = [p.index]; + } + } + res.set(new Search(profile, mapping)); + $(caption).text("Substring filtered to " + found + " / " + profile.length + " entries, not grouped."); + } + else { + let f; + try { + f = new Function("return " + s); + } + catch (e) { + $(caption).text("Error compiling function, " + e); + return; + } + const mapping = {}; + let groups = 0; + let found = 0; + environmentAll = profile; + for (const p of profile) { + environmentThis = p; + environmentGroup = []; + let bool; + try { + bool = f(); + } + catch (e) { + $(caption).text("Error running function, " + e); + return; + } + if (bool) { + found++; + const name = environmentGroup.length === 0 ? p.name : environmentGroup.join(" "); + if (name in mapping) + mapping[name].push(p.index); + else { + groups++; + mapping[name] = [p.index]; + } + } + } + res.set(new Search(profile, mapping)); + $(caption).text("Function filtered to " + found + " / " + profile.length + " entries, " + + (groups === found ? "not grouped." : groups + " groups.")); + } + }); + const body = React.createElement("table", { width: "100%", style: "padding-bottom: 17px;" }, + React.createElement("tr", null, + React.createElement("td", { width: "100%" }, input), + React.createElement("td", { style: "padding-left:6px;padding-right: 6px;" }, searchHelp(input))), + React.createElement("tr", null, + React.createElement("td", null, caption))); + return [body, res]; +} +function searchHelp(input) { + const examples = [["Only the last run", "run(0)"], + ["Only the last visited", "visited(0)"], + ["Named 'Main'", "named(\"Main\")"], + ["Group by file extension", "named(/(\\.[_0-9a-z]+)$/)"], + ["No dependencies (an input)", "leaf()"], + ["Didn't change when it last rebuilt", "unchanged()"], + ["Ran 'gcc'", "command(\"gcc\")"] + ]; + const f = (code) => () => { + $(input).val((i, x) => x + (x === "" ? 
"" : " && ") + code); + $(input).trigger("change"); + }; + const dropdown = React.createElement("div", { class: "dropdown", style: "display:none;" }, + React.createElement("ul", { style: "padding-left:30px;" }, examples.map(([desc, code]) => React.createElement("li", null, + React.createElement("a", { onclick: f(code) }, + React.createElement("tt", null, code)), + " ", + React.createElement("span", { class: "note" }, desc))))); + const arrow_down = React.createElement("span", { style: "vertical-align:middle;font-size:80%;" }, "\u25BC"); + const arrow_up = React.createElement("span", { style: "vertical-align:middle;font-size:80%;display:none;" }, "\u25B2"); + const show_inner = () => { $(dropdown).toggle(); $(arrow_up).toggle(); $(arrow_down).toggle(); }; + return React.createElement("div", null, + React.createElement("button", { style: "white-space:nowrap;padding-top:5px;padding-bottom:5px;", onclick: show_inner }, + React.createElement("b", { style: "font-size:150%;vertical-align:middle;" }, "+"), + "\u00A0 Filter and Group \u00A0", + arrow_down, + arrow_up), + dropdown); +} +function initProgress() { + $(function () { + $(".version").html("Generated by Shake " + version + "."); + $("#output").html(""); + for (const x of progress) { + var actual = []; + var ideal = []; + // Start at t = 5 seconds, since the early progress jumps a lot + for (var t = 5; t < x.values.length; t++) { + var y = x.values[t]; + actual.push([y.idealSecs, y.actualSecs]); + ideal.push([y.idealSecs, y.idealSecs]); + } + var ys = [{ data: ideal, color: "gray" }, { label: x.name, data: actual, color: "red" }]; + var div = $("
"); + $("#output").append(div); + $.plot(div, ys, { + xaxis: { + transform: function (v) { return -v; }, + inverseTransform: function (v) { return -v; } + } + }); + } + }); +} +// Stuff that Shake generates and injects in +function untraced(p) { + return Math.max(0, p.execution - p.traces.map(t => t.stop - t.start).sum()); +} +///////////////////////////////////////////////////////////////////// +// BASIC UI TOOLKIT +class Prop { + val; + callback; + constructor(val) { this.val = val; this.callback = () => { return; }; } + get() { return this.val; } + set(val) { + this.val = val; + this.callback(val); + } + event(next) { + const old = this.callback; + this.callback = val => { old(val); next(val); }; + next(this.val); + } + map(f) { + const res = new Prop(f(this.get())); + this.event(a => res.set(f(a))); + return res; + } +} +jQuery.fn.enable = function (x) { + // Set the values to enabled/disabled + return this.each(function () { + if (x) + $(this).removeAttr("disabled"); + else + $(this).attr("disabled", "disabled"); + }); +}; +///////////////////////////////////////////////////////////////////// +// BROWSER HELPER METHODS +// Given "?foo=bar&baz=1" returns {foo:"bar",baz:"1"} +function uriQueryParameters(s) { + // From https://stackoverflow.com/questions/901115/get-querystring-values-with-jquery/3867610#3867610 + const params = {}; + const a = /\+/g; // Regex for replacing addition symbol with a space + const r = /([^&=]+)=?([^&]*)/g; + const d = (x) => decodeURIComponent(x.replace(a, " ")); + const q = s.substring(1); + while (true) { + const e = r.exec(q); + if (!e) + break; + params[d(e[1])] = d(e[2]); + } + return params; +} +///////////////////////////////////////////////////////////////////// +// STRING FORMATTING +function showTime(x) { + function digits(x) { const s = String(x); return s.length === 1 ? "0" + s : s; } + if (x >= 3600) { + x = Math.round(x / 60); + return Math.floor(x / 60) + "h" + digits(x % 60) + "m"; + } + else if (x >= 60) { + x = Math.round(x); + return Math.floor(x / 60) + "m" + digits(x % 60) + "s"; + } + else + return x.toFixed(2) + "s"; +} +function showPerc(x) { + return (x * 100).toFixed(2) + "%"; +} +function showInt(x) { + // From https://stackoverflow.com/questions/2901102/how-to-print-a-number-with-commas-as-thousands-separators-in-javascript + // Show, with commas + return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +} +function showRun(run) { + return run === 0 ? "Latest run" : run + " run" + plural(run) + " ago"; +} +function plural(n, not1 = "s", is1 = "") { + return n === 1 ? is1 : not1; +} +///////////////////////////////////////////////////////////////////// +// MISC +function compareFst(a, b) { + return a[0] - b[0]; +} +function compareSnd(a, b) { + return a[1] - b[1]; +} +function compareSndRev(a, b) { + return b[1] - a[1]; +} +function pair(a, b) { + return [a, b]; +} +function triple(a, b, c) { + return [a, b, c]; +} +function fst([x, _]) { + return x; +} +function snd([_, x]) { + return x; +} +function execRegExp(r, s) { + if (typeof r === "string") + return s.indexOf(r) === -1 ? 
null : []; + else + return r.exec(s); +} +function cache(key, op) { + const store = {}; + return k => { + const s = key(k); + if (!(s in store)) + store[s] = op(k); + return store[s]; + }; +} +function lazy(thunk) { + let store = null; + let done = false; + return () => { + if (!done) { + store = thunk(); + done = true; + } + return store; + }; +} +Array.prototype.sum = function () { + let res = 0; + for (const x of this) + res += x; + return res; +}; +Array.prototype.insertSorted = function (x, compare) { + let start = 0; + let stop = this.length - 1; + let middle = 0; + while (start <= stop) { + middle = Math.floor((start + stop) / 2); + if (compare(this[middle], x) > 0) + stop = middle - 1; + else + start = middle + 1; + } + this.splice(start, 0, x); + return this; +}; +Array.prototype.concatLength = function () { + let res = 0; + for (const x of this) + res += x.length; + return res; +}; +Array.prototype.sortOn = function (f) { + return this.map(x => pair(f(x), x)).sort(compareFst).map(snd); +}; +Array.prototype.last = function () { + return this[this.length - 1]; +}; +Array.prototype.maximum = function (def) { + if (this.length === 0) + return def; + let res = this[0]; + for (let i = 1; i < this.length; i++) + res = Math.max(res, this[i]); + return res; +}; +Array.prototype.minimum = function (def) { + if (this.length === 0) + return def; + let res = this[0]; + for (let i = 1; i < this.length; i++) + res = Math.min(res, this[i]); + return res; +}; +// Use JSX with el instead of React.createElement +// Originally from https://gist.github.com/sergiodxa/a493c98b7884128081bb9a281952ef33 +// our element factory +function createElement(type, props, ...children) { + const element = document.createElement(type); + for (const name in props || {}) { + if (name.substr(0, 2) === "on") + element.addEventListener(name.substr(2), props[name]); + else + element.setAttribute(name, props[name]); + } + for (const child of children.flat(10)) { + const c = typeof child === "object" ? child : document.createTextNode(child.toString()); + element.appendChild(c); + } + return element; +} +// How .tsx gets desugared +const React = { createElement }; +function reportCmdPlot(profile) { + // first find the end point + const runs = findRuns(profile); + if (runs.length === 0) { + return React.createElement("div", null, + React.createElement("h2", null, "No data found"), + React.createElement("p", null, "The Shake database contains no rules which ran traced commands."), + React.createElement("p", null, + "You can populate this information by using ", + varLink("cmd"), + " or wrapping your ", + React.createElement("tt", null, "IO"), + " actions in ", + varLink("traced"), + ".")); + } + const combo = React.createElement("select", null, + runs.map(([run, time], i) => React.createElement("option", null, + showRun(run) + " (" + showTime(time) + ") ", + i === 0 ? 
"" : " - may be incomplete")), + ";"); + const warning = React.createElement("i", null); + const plot = React.createElement("div", { style: "width:100%; height:100%;" }); + const plotData = new Prop([]); + bindPlot(plot, plotData, { + legend: { show: true, position: "nw", sorted: "reverse" }, + series: { stack: true, lines: { fill: 1, lineWidth: 0 } }, + yaxis: { min: 0 }, + xaxis: { tickFormatter: showTime } + }); + function setPlotData(runsIndex) { + const [run, end] = runs[runsIndex]; + const profileRun = profile.filter(p => p.built === run); + // Make sure we max(0,) every step in the process, in case one does parallelism of threads + const missing = profileRun.map(untraced).sum(); + $(warning).text(missing < 1 ? "" : "Warning: " + showTime(missing) + " of execution was not traced."); + const series = calcPlotData(end, profileRun, 100); + const res = []; + for (const s in series) + res.push({ label: s, data: series[s].map((x, i) => pair(end * i / 100, x)) }); + plotData.set(res); + } + setPlotData(0); + $(combo).change(() => setPlotData(combo.selectedIndex)); + return React.createElement("table", { class: "fill" }, + React.createElement("tr", null, + React.createElement("td", { width: "100%", style: "text-align:center;" }, + React.createElement("h2", null, "Number of commands executing over time")), + React.createElement("td", null, combo)), + React.createElement("tr", null, + React.createElement("td", { height: "100%", colspan: "2" }, plot)), + React.createElement("tr", null, + React.createElement("td", { colspan: "2", style: "text-align:center;" }, + "Time since the start of building. ", + warning))); +} +// Find which runs had traced commands and when the last stopped, sort so most recent first +function findRuns(profile) { + const runs = {}; + for (const p of profile) { + if (p.traces.length > 0) { + if (p.traces.length === 1 && p.traces[0].command === "") + continue; // the fake end command + const old = runs[p.built]; + const end = p.traces.last().stop; + runs[p.built] = old === undefined ? 
end : Math.max(old, end); + } + } + const runsList = []; + for (const i in runs) + runsList.push(pair(Number(i), runs[i])); + runsList.sort(compareFst); + return runsList; +} +function calcPlotData(end, profile, buckets) { + const ans = {}; + for (const p of profile) { + for (const t of p.traces) { + let xs; + if (t.command in ans) + xs = ans[t.command]; + else { + xs = []; + for (let i = 0; i < buckets; i++) + xs.push(0); // fill with 1 more element, but the last bucket will always be 0 + ans[t.command] = xs; + } + const start = t.start * buckets / end; + const stop = t.stop * buckets / end; + if (Math.floor(start) === Math.floor(stop)) + xs[Math.floor(start)] += stop - start; + else { + for (let j = Math.ceil(start); j < Math.floor(stop); j++) + xs[j]++; + xs[Math.floor(start)] += Math.ceil(start) - start; + xs[Math.floor(stop)] += stop - Math.floor(stop); + } + } + } + return ans; +} +function reportCmdTable(profile, search) { + const columns = [{ field: "name", label: "Name", width: 200 }, + { field: "count", label: "Count", width: 65, alignRight: true, show: showInt }, + { field: "total", label: "Total", width: 75, alignRight: true, show: showTime }, + { field: "average", label: "Average", width: 75, alignRight: true, show: showTime }, + { field: "max", label: "Max", width: 75, alignRight: true, show: showTime } + ]; + return newTable(columns, search.map(cmdData), "total", true); +} +function cmdData(search) { + const res = {}; + search.forEachProfile(p => { + for (const t of p.traces) { + const time = t.stop - t.start; + if (t.command === "") + continue; // do nothing + else if (!(t.command in res)) + res[t.command] = { count: 1, total: time, max: time }; + else { + const ans = res[t.command]; + ans.count++; + ans.total += time; + ans.max = Math.max(ans.max, time); + } + } + }); + const res2 = []; + for (const i in res) + res2.push({ name: i, average: res[i].total / res[i].count, ...res[i] }); + return res2; +} +function reportDetails(profile, search) { + const result = React.createElement("div", { class: "details" }); + const self = new Prop(0); + search.event(xs => self.set(xs.mapProfile((p, _) => p.index).maximum())); + const f = (i) => React.createElement("a", { onclick: () => self.set(i) }, profile[i].name); + self.event(i => { + const p = profile[i]; + const content = React.createElement("ul", null, + React.createElement("li", null, + React.createElement("b", null, "Name:"), + " ", + p.name), + React.createElement("li", null, + React.createElement("b", null, "Built:"), + " ", + showRun(p.built)), + React.createElement("li", null, + React.createElement("b", null, "Changed:"), + " ", + showRun(p.changed)), + React.createElement("li", null, + React.createElement("b", null, "Execution time:"), + showTime(p.execution)), + React.createElement("li", null, + React.createElement("b", null, "Traced commands:"), + React.createElement("ol", null, p.traces.map(t => React.createElement("li", null, + t.command, + " took ", + showTime(t.stop - t.start))))), + React.createElement("li", null, + React.createElement("b", null, "Dependencies:"), + React.createElement("ol", null, p.depends.map(ds => React.createElement("li", null, + React.createElement("ul", null, ds.map(d => React.createElement("li", null, f(d)))))))), + React.createElement("li", null, + React.createElement("b", null, "Things that depend on me:"), + React.createElement("ul", null, p.rdepends.map(d => React.createElement("li", null, f(d)))))); + $(result).empty().append(content); + }); + return result; +} +function 
reportParallelism(profile) { + // now simulate for -j1 .. -j24 + const plotData = [{ label: "Realistic (based on current dependencies)", data: [], color: "#3131a7" }, + { label: "Ideal (if no dependencies and perfect speedup)", data: [], color: "green" }, + { label: "Gap", data: [], color: "orange" } + ]; + let threads1; + for (let threads = 1; threads <= 24; threads++) { + const taken = simulateThreads(profile, threads)[0]; + if (threads === 1) + threads1 = taken; + plotData[0].data.push([threads, taken]); + plotData[1].data.push([threads, threads1 / threads]); + plotData[2].data.push([threads, Math.max(0, taken - (threads1 / threads))]); + } + const plot = React.createElement("div", { style: "width:100%; height:100%;" }); + bindPlot(plot, new Prop(plotData), { + xaxis: { tickDecimals: 0 }, + yaxis: { min: 0, tickFormatter: showTime } + }); + return React.createElement("table", { class: "fill" }, + React.createElement("tr", null, + React.createElement("td", { style: "text-align:center;" }, + React.createElement("h2", null, "Time to build at different number of threads"))), + React.createElement("tr", null, + React.createElement("td", { height: "100%" }, plot)), + React.createElement("tr", null, + React.createElement("td", { style: "text-align:center;" }, "Number of threads available."))); +} +// Simulate running N threads over the profile, return: +// [total time take, point at which each entry kicked off] +function simulateThreads(profile, threads) { + // How far are we through this simulation + let timestamp = 0; + // Who is currently running, with the highest seconds FIRST + const running = []; + const started = []; + // Things that are done + const ready = profile.filter(x => x.depends.length === 0); + const waiting = profile.map(x => x.depends.concatLength()); // number I am waiting on before I am done + function runningWait() { + const [ind, time] = running.pop(); + timestamp = time; + for (const d of profile[ind].rdepends) { + waiting[d]--; + if (waiting[d] === 0) + ready.push(profile[d]); + } + } + while (true) { + // Queue up as many people as we can + while (running.length < threads && ready.length > 0) { + const p = ready.pop(); + started[p.index] = timestamp; + running.insertSorted([p.index, timestamp + p.execution], compareSndRev); + } + if (running.length === 0) { + if (waiting.maximum(0) > 0) + throw new Error("Failed to run all tasks"); + return [timestamp, started]; + } + runningWait(); + } +} +function reportRebuild(profile, search) { + const depth = []; + for (const p of profile) { + depth[p.index] = p.depends.flat().map(d => depth[d] + 1).maximum(0); + } + const ind = search.get().mapProfile((p, _) => p.index).sortOn(i => -depth[i])[0]; + const p = profile[ind]; + function f(p) { + const res = []; + while (p.depends.length !== 0) { + const ds = p.depends.flat().sortOn(i => -depth[i]); + res.push(React.createElement("li", null, + React.createElement("select", { style: "width:400px;" }, ds.slice(0, 1).map(x => React.createElement("option", null, profile[x].name))))); + p = profile[ds[0]]; + } + return res; + } + return React.createElement("div", null, + React.createElement("h2", null, "Why did it rebuild?"), + React.createElement("p", null, + "Rule ", + p.name + " " + (p.built === 0 ? 
"rebuild in the last run" : "did not rebuild")), + React.createElement("ul", null, f(p))); +} +function reportRuleTable(profile, search) { + const [etimes, wtimes] = calcEWTimes(profile, 24); + const columns = [{ field: "name", label: "Name", width: 400 }, + { field: "count", label: "Count", width: 65, alignRight: true, show: showInt }, + { field: "leaf", label: "Leaf", width: 60, alignRight: true }, + { field: "visited", label: "Visit", width: 50, alignRight: true }, + { field: "run", label: "Run", width: 50, alignRight: true }, + { field: "changed", label: "Change", width: 60, alignRight: true }, + { field: "time", label: "Time", width: 75, alignRight: true, show: showTime }, + { field: "etime", label: "ETime", width: 75, alignRight: true, show: showTime }, + { field: "wtime", label: "WTime", width: 75, alignRight: true, show: showTime } + ]; + return newTable(columns, search.map(s => ruleData(etimes, wtimes, s)), "time", true); +} +// Calculate the exclusive time of each rule at some number of threads +function calcEWTimes(profile, threads) { + const [_, started] = simulateThreads(profile, threads); + const starts = started.map((s, i) => pair(i, s)).sort(compareSnd); + const costs = starts.map(([ind, start], i) => { + // find out who else runs before I finish + const execution = profile[ind].execution; + const end = start + execution; + let overlap = 0; // how much time I am overlapped for + let exclusive = 0; // how much time I am the only runner + let finisher = start; // the first overlapping person to finish + for (let j = i + 1; j < starts.length; j++) { + const [jInd, jStarts] = starts[j]; + if (jStarts > end) + break; + overlap += Math.min(end - jStarts, profile[jInd].execution); + exclusive += Math.max(0, Math.min(jStarts, end) - finisher); + finisher = Math.max(finisher, jStarts + profile[jInd].execution); + } + exclusive += Math.max(0, end - finisher); + return triple(ind, execution === 0 ? 
0 : execution * (execution / (execution + overlap)), exclusive); + }); + const etimes = []; + const wtimes = []; + for (const [ind, etime, wtime] of costs) { + etimes[ind] = etime; + wtimes[ind] = wtime; + } + return [etimes, wtimes]; +} +function ruleData(etimes, wtimes, search) { + return search.mapProfiles((ps, name) => ({ + name, + count: ps.length, + leaf: ps.every(p => p.depends.length === 0), + run: ps.map(p => p.built).minimum(), + visited: ps.map(p => p.visited).minimum(), + changed: ps.some(p => p.built === p.changed), + time: ps.map(p => p.execution).sum(), + etime: ps.map(p => etimes[p.index]).sum(), + wtime: ps.map(p => wtimes[p.index]).sum(), + })); +} +function reportSummary(profile, build) { + let countLast = 0; // number of rules run in the last run + let visitedLast = 0; // number of rules visited in the last run + let highestRun = 0; // highest run you have seen (add 1 to get the count of runs) + let sumExecution = 0; // build time in total + let sumExecutionLast = 0; // build time in total + let countTrace = -1; + let countTraceLast = -1; // traced commands run + // start both are -1 because the end command will have run in the previous step + let maxTraceStopLast = 0; // time the last traced command stopped + for (const p of profile) { + sumExecution += p.execution; + highestRun = Math.max(highestRun, p.changed); // changed is always greater or equal to built + countTrace += p.traces.length; + if (p.built === 0) { + sumExecutionLast += p.execution; + countLast++; + countTraceLast += p.traces.length; + if (p.traces.length > 0) + maxTraceStopLast = Math.max(maxTraceStopLast, p.traces.last().stop); + } + if (p.visited === 0) { + visitedLast++; + } + } + return React.createElement("div", null, + React.createElement("h2", null, "Totals"), + React.createElement("ul", null, + React.createElement("li", null, + React.createElement("b", null, "Runs:"), + " ", + showInt(highestRun + 1), + " ", + React.createElement("span", { class: "note" }, "total number of runs so far.")), + React.createElement("li", null, + React.createElement("b", null, "Rules:"), + " ", + showInt(profile.length), + " (", + showInt(countLast), + " in last run) ", + React.createElement("span", { class: "note" }, "number of defined build rules."))), + React.createElement("h2", null, "Performance"), + React.createElement("ul", null, + React.createElement("li", null, + React.createElement("b", null, "Build time:"), + " ", + showTime(sumExecution), + " ", + React.createElement("span", { class: "note" }, "how long a complete build would take single threaded.")), + React.createElement("li", null, + React.createElement("b", null, "Last build time:"), + " ", + showTime(maxTraceStopLast), + " ", + React.createElement("span", { class: "note" }, "how long the last build take.")), + React.createElement("li", null, + React.createElement("b", null, "Parallelism:"), + " ", + (maxTraceStopLast === 0 ? 
0 : sumExecutionLast / maxTraceStopLast).toFixed(2), + " ", + React.createElement("span", { class: "note" }, "average number of commands executing simultaneously in the last build.")), + React.createElement("li", null, + React.createElement("b", null, "Speculative critical path:"), + " ", + showTime(speculativeCriticalPath(profile)), + " ", + React.createElement("span", { class: "note" }, "how long it would take on infinite CPUs.")), + React.createElement("li", null, + React.createElement("b", null, "Precise critical path:"), + " ", + showTime(preciseCriticalPath(profile)), + " ", + React.createElement("span", { class: "note" }, "critical path not speculatively executing."))), + React.createElement("h2", null, "This run"), + React.createElement("ul", null, + React.createElement("li", null, + React.createElement("b", null, "Rules built:"), + " ", + showInt(countLast), + " ", + React.createElement("span", { class: "note" }, "Total number of rules built in this run")), + React.createElement("li", null, + React.createElement("b", null, "Rules visited:"), + " ", + showInt(visitedLast - countLast), + " ", + React.createElement("span", { class: "note" }, "Total number of rules looked up from the values store in this run")), + React.createElement("li", null, + React.createElement("b", null, "Dirty set:"), + renderDirtySet(build, profile)))); +} +function renderDirtySet(build, profile) { + if (build.dirtyKeys === null) { + return "ALL"; + } + else { + return React.createElement("ul", null, build.dirtyKeys.map(d => { return React.createElement("li", null, profile[d - 1].name); })); + } +} +function speculativeCriticalPath(profile) { + const criticalPath = []; // the critical path to any element + let maxCriticalPath = 0; + for (const p of profile) { + let cost = 0; + for (const ds of p.depends) + for (const d of ds) + cost = Math.max(cost, criticalPath[d]); + cost += p.execution; + maxCriticalPath = Math.max(cost, maxCriticalPath); + criticalPath[p.index] = cost; + } + return maxCriticalPath; +} +/* +Calculating a precise critical path, taking into account the deep dependency structure, is non-obvious. +Dependencies have the type [{X}], e.g: + + X = [{a,b},{c,d}] + +That is r builds a and b, then after those both complete (assuming they don't change), it builds c and d, +then it is finished. Importantly, r doesn't start building c/d until after a and b have finished. This +detail extends the critical path. + +To calculate the precise critical path, we simulate with the notion of demand and waiting. +*/ +function preciseCriticalPath(profile) { + const waiting = profile.map(x => x.depends.concatLength()); // number I am waiting on before I am done + const demanded = []; // I have been demanded by someone + const oncomplete = []; // Completion functions + const complete = []; // Who is complete already + const running = []; + let timestamp = 0; + // demand dependency set N of a rule + function demandN(p, round) { + for (; round < p.depends.length; round++) { + let todo = p.depends[round].length; // Number before we continue + const step = () => { + todo--; + if (todo === 0) + demandN(p, round + 1); + }; + for (const d of p.depends[round]) { + if (complete[d]) + todo--; + else { + const old = oncomplete[d]; + oncomplete[d] = !old ? 
step : () => { old(); step(); }; + demand(profile[d]); + } + } + if (todo !== 0) + break; + // todo === 0, so continue (equivalent to calling step but tail recursive) + } + } + // demand a particular rule + function demand(p) { + if (demanded[p.index]) + return; + demanded[p.index] = true; + if (waiting[p.index] === 0) + running.insertSorted([p.index, timestamp + p.execution], compareSndRev); + else + demandN(p, 0); + } + // We don't know the targets we ask for, so we approximate by saying the ones which nothing depends on + for (const p of profile) { + if (p.rdepends.length === 0) + demand(p); + } + while (running.length > 0) { + const [ind, time] = running.pop(); + timestamp = time; + complete[ind] = true; + if (oncomplete[ind]) { + oncomplete[ind](); + delete oncomplete[ind]; + } + for (const d of profile[ind].rdepends) { + waiting[d]--; + if (waiting[d] === 0 && demanded[d]) + running.insertSorted([d, timestamp + profile[d].execution], compareSndRev); + } + } + for (let i = 0; i < profile.length; i++) + if (!complete[i]) + throw new Error("Failed to run all tasks"); + return timestamp; +} diff --git a/hls-graph/html/ts/elements.tsx b/hls-graph/html/ts/elements.tsx new file mode 100644 index 0000000000..266d8001d2 --- /dev/null +++ b/hls-graph/html/ts/elements.tsx @@ -0,0 +1,77 @@ + + +function bindPlot(element: HTMLElement, data: Prop, options: jquery.flot.plotOptions): void { + const redraw = () => { + if ($(element).is(":visible")) + $.plot($(element), data.get(), options); + }; + window.setTimeout(redraw, 1); + $(window).on("resize", redraw); + data.event(redraw); +} + + +function varLink(name: string): HTMLElement { + return {name}; +} + + +interface Column { + field: string; + label: string; + width: int; + alignRight?: boolean; + show?: (x: any) => string; +} + +// A simple approximation of what DGTable provides +declare class DGTable { + public static Width: {SCROLL: void}; + public el: HTMLElement; + constructor(options: any); + public setRows(rows: object[], resort: boolean): void; + public render(): void; + public tableHeightChanged(): void; + public sort(x: string, descending: boolean): void; +} + +function newTable(columns: Column[], data: Prop, sortColumn?: string, sortDescend?: boolean): HTMLElement { + const f = (x: Column) => ({name: x.field, label: x.label, width: x.width, cellClasses: x.alignRight ? "right" : ""}); + const formatters = {}; + for (const c of columns) + formatters[c.field] = c.show || ((x: any) => x); + + const table = new DGTable({ + adjustColumnWidthForSortArrow: false, + cellFormatter: (val: any, colname: string) => formatters[colname](val), + columns: columns.map(f), + width: DGTable.Width.SCROLL + }); + $(table.el).css("height", "100%"); + window.setTimeout(() => { + table.render(); + table.tableHeightChanged(); + if (sortColumn) + table.sort(sortColumn, sortDescend); + table.setRows(data.get(), true); + }, 1); + + let toRender = false; + data.event(xs => { + table.setRows(xs, true); + if ($(table.el).is(":visible")) + table.render(); + else + toRender = true; + }); + $(window).on("resize", () => { + if ($(table.el).is(":visible")) { + table.tableHeightChanged(); + if (toRender) { + table.render(); + toRender = false; + } + } + }); + return
<div style="height:100%;width:100%;">{table.el}</div>
; +} diff --git a/hls-graph/html/ts/environment.ts b/hls-graph/html/ts/environment.ts new file mode 100644 index 0000000000..44b73be4a1 --- /dev/null +++ b/hls-graph/html/ts/environment.ts @@ -0,0 +1,87 @@ + +// These are global variables mutated/queried by query execution +let environmentAll: Profile[]; // All the profiles +let environmentThis: Profile; // The specific profile under test +let environmentGroup: string[]; // The group produced as a result + +function group(x: string): boolean { + environmentGroup.push(x); + return true; +} + +function leaf(): boolean { + return environmentThis.depends.length === 0; +} + +function run(): number; +function run(i: timestamp): boolean; +function run(i?: timestamp): number | boolean { + if (i === undefined) + return environmentThis.built; + else + return environmentThis.built === i; +} + +function changed(): boolean { + return environmentThis.changed === environmentThis.built; +} + +function visited(): number; +function visited(i:timestamp): boolean; +function visited(i?: timestamp): number | boolean { + if(i === undefined) + return environmentThis.visited; + else + return environmentThis.visited === i; +} + +function unchanged(): boolean { + return !unchanged(); +} + +function named(): string; +function named(r: string | RegExp, groupName?: string): boolean; +function /* export */ named(r?: string | RegExp, groupName?: string): string | boolean { + if (r === undefined) + return environmentThis.name; + + const res = execRegExp(r, environmentThis.name); + if (res === null) { + if (groupName === undefined) + return false; + else { + group(groupName); + return true; + } + } + if (res.length !== 1) { + for (let i = 1; i < res.length; i++) + group(res[i]); + } + return true; +} + +function command(): string; +function command(r: string | RegExp, groupName?: string): boolean; +function /* export */ command(r?: any, groupName?: any): any { + const n = (environmentThis.traces || []).length; + if (r === undefined) + return n === 0 ? "" : environmentThis.traces[0].command; + + for (const t of environmentThis.traces) { + const res = execRegExp(r, t.command); + if (res === null) + continue; + if (res.length !== 1) { + for (let j = 1; j < res.length; j++) + group(res[j]); + } + return true; + } + if (groupName === undefined) + return false; + else { + group(groupName); + return true; + } +} diff --git a/hls-graph/html/ts/jquery.d.ts b/hls-graph/html/ts/jquery.d.ts new file mode 100644 index 0000000000..8401753e35 --- /dev/null +++ b/hls-graph/html/ts/jquery.d.ts @@ -0,0 +1,3190 @@ +// Type definitions for jQuery 1.10.x / 2.0.x +// Project: http://jquery.com/ +// Definitions by: Boris Yankov , Christian Hoffmeister , Steve Fenton , Diullei Gomes , Tass Iliopoulos , Jason Swearingen , Sean Hill , Guus Goossens , Kelly Summerlin , Basarat Ali Syed , Nicholas Wolverson , Derek Cicerone , Andrew Gaspar , James Harrison Fisher , Seikichi Kondo , Benjamin Jackman , Poul Sorensen , Josh Strobl , John Reilly , Dick van den Brink +// Definitions: https://github.com/borisyankov/DefinitelyTyped + +/* ***************************************************************************** +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. 
You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 + +THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. + +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ + + +/** + * Interface for the AJAX setting that will configure the AJAX request + */ +interface JQueryAjaxSettings { + /** + * The content type sent in the request header that tells the server what kind of response it will accept in return. If the accepts setting needs modification, it is recommended to do so once in the $.ajaxSetup() method. + */ + accepts?: any; + /** + * By default, all requests are sent asynchronously (i.e. this is set to true by default). If you need synchronous requests, set this option to false. Cross-domain requests and dataType: "jsonp" requests do not support synchronous operation. Note that synchronous requests may temporarily lock the browser, disabling any actions while the request is active. As of jQuery 1.8, the use of async: false with jqXHR ($.Deferred) is deprecated; you must use the success/error/complete callback options instead of the corresponding methods of the jqXHR object such as jqXHR.done() or the deprecated jqXHR.success(). + */ + async?: boolean; + /** + * A pre-request callback function that can be used to modify the jqXHR (in jQuery 1.4.x, XMLHTTPRequest) object before it is sent. Use this to set custom headers, etc. The jqXHR and settings objects are passed as arguments. This is an Ajax Event. Returning false in the beforeSend function will cancel the request. As of jQuery 1.5, the beforeSend option will be called regardless of the type of request. + */ + beforeSend? (jqXHR: JQueryXHR, settings: JQueryAjaxSettings): any; + /** + * If set to false, it will force requested pages not to be cached by the browser. Note: Setting cache to false will only work correctly with HEAD and GET requests. It works by appending "_={timestamp}" to the GET parameters. The parameter is not needed for other types of requests, except in IE8 when a POST is made to a URL that has already been requested by a GET. + */ + cache?: boolean; + /** + * A function to be called when the request finishes (after success and error callbacks are executed). The function gets passed two arguments: The jqXHR (in jQuery 1.4.x, XMLHTTPRequest) object and a string categorizing the status of the request ("success", "notmodified", "error", "timeout", "abort", or "parsererror"). As of jQuery 1.5, the complete setting can accept an array of functions. Each function will be called in turn. This is an Ajax Event. + */ + complete? (jqXHR: JQueryXHR, textStatus: string): any; + /** + * An object of string/regular-expression pairs that determine how jQuery will parse the response, given its content type. (version added: 1.5) + */ + contents?: { [key: string]: any; }; + //According to jQuery.ajax source code, ajax's option actually allows contentType to set to "false" + // https://github.com/borisyankov/DefinitelyTyped/issues/742 + /** + * When sending data to the server, use this content type. Default is "application/x-www-form-urlencoded; charset=UTF-8", which is fine for most cases. 
If you explicitly pass in a content-type to $.ajax(), then it is always sent to the server (even if no data is sent). The W3C XMLHttpRequest specification dictates that the charset is always UTF-8; specifying another charset will not force the browser to change the encoding. + */ + contentType?: any; + /** + * This object will be made the context of all Ajax-related callbacks. By default, the context is an object that represents the ajax settings used in the call ($.ajaxSettings merged with the settings passed to $.ajax). + */ + context?: any; + /** + * An object containing dataType-to-dataType converters. Each converter's value is a function that returns the transformed value of the response. (version added: 1.5) + */ + converters?: { [key: string]: any; }; + /** + * If you wish to force a crossDomain request (such as JSONP) on the same domain, set the value of crossDomain to true. This allows, for example, server-side redirection to another domain. (version added: 1.5) + */ + crossDomain?: boolean; + /** + * Data to be sent to the server. It is converted to a query string, if not already a string. It's appended to the url for GET-requests. See processData option to prevent this automatic processing. Object must be Key/Value pairs. If value is an Array, jQuery serializes multiple values with same key based on the value of the traditional setting (described below). + */ + data?: any; + /** + * A function to be used to handle the raw response data of XMLHttpRequest.This is a pre-filtering function to sanitize the response. You should return the sanitized data. The function accepts two arguments: The raw data returned from the server and the 'dataType' parameter. + */ + dataFilter? (data: any, ty: any): any; + /** + * The type of data that you're expecting back from the server. If none is specified, jQuery will try to infer it based on the MIME type of the response (an XML MIME type will yield XML, in 1.4 JSON will yield a JavaScript object, in 1.4 script will execute the script, and anything else will be returned as a string). + */ + dataType?: string; + /** + * A function to be called if the request fails. The function receives three arguments: The jqXHR (in jQuery 1.4.x, XMLHttpRequest) object, a string describing the type of error that occurred and an optional exception object, if one occurred. Possible values for the second argument (besides null) are "timeout", "error", "abort", and "parsererror". When an HTTP error occurs, errorThrown receives the textual portion of the HTTP status, such as "Not Found" or "Internal Server Error." As of jQuery 1.5, the error setting can accept an array of functions. Each function will be called in turn. Note: This handler is not called for cross-domain script and cross-domain JSONP requests. This is an Ajax Event. + */ + error? (jqXHR: JQueryXHR, textStatus: string, errorThrown: string): any; + /** + * Whether to trigger global Ajax event handlers for this request. The default is true. Set to false to prevent the global handlers like ajaxStart or ajaxStop from being triggered. This can be used to control various Ajax Events. + */ + global?: boolean; + /** + * An object of additional header key/value pairs to send along with requests using the XMLHttpRequest transport. The header X-Requested-With: XMLHttpRequest is always added, but its default XMLHttpRequest value can be changed here. Values in the headers setting can also be overwritten from within the beforeSend function. 
(version added: 1.5) + */ + headers?: { [key: string]: any; }; + /** + * Allow the request to be successful only if the response has changed since the last request. This is done by checking the Last-Modified header. Default value is false, ignoring the header. In jQuery 1.4 this technique also checks the 'etag' specified by the server to catch unmodified data. + */ + ifModified?: boolean; + /** + * Allow the current environment to be recognized as "local," (e.g. the filesystem), even if jQuery does not recognize it as such by default. The following protocols are currently recognized as local: file, *-extension, and widget. If the isLocal setting needs modification, it is recommended to do so once in the $.ajaxSetup() method. (version added: 1.5.1) + */ + isLocal?: boolean; + /** + * Override the callback function name in a jsonp request. This value will be used instead of 'callback' in the 'callback=?' part of the query string in the url. So {jsonp:'onJSONPLoad'} would result in 'onJSONPLoad=?' passed to the server. As of jQuery 1.5, setting the jsonp option to false prevents jQuery from adding the "?callback" string to the URL or attempting to use "=?" for transformation. In this case, you should also explicitly set the jsonpCallback setting. For example, { jsonp: false, jsonpCallback: "callbackName" } + */ + jsonp?: any; + /** + * Specify the callback function name for a JSONP request. This value will be used instead of the random name automatically generated by jQuery. It is preferable to let jQuery generate a unique name as it'll make it easier to manage the requests and provide callbacks and error handling. You may want to specify the callback when you want to enable better browser caching of GET requests. As of jQuery 1.5, you can also use a function for this setting, in which case the value of jsonpCallback is set to the return value of that function. + */ + jsonpCallback?: any; + /** + * The HTTP method to use for the request (e.g. "POST", "GET", "PUT"). (version added: 1.9.0) + */ + method?: string; + /** + * A mime type to override the XHR mime type. (version added: 1.5.1) + */ + mimeType?: string; + /** + * A password to be used with XMLHttpRequest in response to an HTTP access authentication request. + */ + password?: string; + /** + * By default, data passed in to the data option as an object (technically, anything other than a string) will be processed and transformed into a query string, fitting to the default content-type "application/x-www-form-urlencoded". If you want to send a DOMDocument, or other non-processed data, set this option to false. + */ + processData?: boolean; + /** + * Only applies when the "script" transport is used (e.g., cross-domain requests with "jsonp" or "script" dataType and "GET" type). Sets the charset attribute on the script tag used in the request. Used when the character set on the local page is not the same as the one on the remote script. + */ + scriptCharset?: string; + /** + * An object of numeric HTTP codes and functions to be called when the response has the corresponding code. f the request is successful, the status code functions take the same parameters as the success callback; if it results in an error (including 3xx redirect), they take the same parameters as the error callback. (version added: 1.5) + */ + statusCode?: { [key: string]: any; }; + /** + * A function to be called if the request succeeds. 
The function gets passed three arguments: The data returned from the server, formatted according to the dataType parameter; a string describing the status; and the jqXHR (in jQuery 1.4.x, XMLHttpRequest) object. As of jQuery 1.5, the success setting can accept an array of functions. Each function will be called in turn. This is an Ajax Event. + */ + success? (data: any, textStatus: string, jqXHR: JQueryXHR): any; + /** + * Set a timeout (in milliseconds) for the request. This will override any global timeout set with $.ajaxSetup(). The timeout period starts at the point the $.ajax call is made; if several other requests are in progress and the browser has no connections available, it is possible for a request to time out before it can be sent. In jQuery 1.4.x and below, the XMLHttpRequest object will be in an invalid state if the request times out; accessing any object members may throw an exception. In Firefox 3.0+ only, script and JSONP requests cannot be cancelled by a timeout; the script will run even if it arrives after the timeout period. + */ + timeout?: number; + /** + * Set this to true if you wish to use the traditional style of param serialization. + */ + traditional?: boolean; + /** + * The type of request to make ("POST" or "GET"), default is "GET". Note: Other HTTP request methods, such as PUT and DELETE, can also be used here, but they are not supported by all browsers. + */ + type?: string; + /** + * A string containing the URL to which the request is sent. + */ + url?: string; + /** + * A username to be used with XMLHttpRequest in response to an HTTP access authentication request. + */ + username?: string; + /** + * Callback for creating the XMLHttpRequest object. Defaults to the ActiveXObject when available (IE), the XMLHttpRequest otherwise. Override to provide your own implementation for XMLHttpRequest or enhancements to the factory. + */ + xhr?: any; + /** + * An object of fieldName-fieldValue pairs to set on the native XHR object. For example, you can use it to set withCredentials to true for cross-domain requests if needed. In jQuery 1.5, the withCredentials property was not propagated to the native XHR and thus CORS requests requiring it would ignore this flag. For this reason, we recommend using jQuery 1.5.1+ should you require the use of it. (version added: 1.5.1) + */ + xhrFields?: { [key: string]: any; }; +} + +/** + * Interface for the jqXHR object + */ +interface JQueryXHR extends XMLHttpRequest, JQueryPromise { + /** + * The .overrideMimeType() method may be used in the beforeSend() callback function, for example, to modify the response content-type header. As of jQuery 1.5.1, the jqXHR object also contains the overrideMimeType() method (it was available in jQuery 1.4.x, as well, but was temporarily removed in jQuery 1.5). + */ + overrideMimeType(mimeType: string): any; + /** + * Cancel the request. + * + * @param statusText A string passed as the textStatus parameter for the done callback. Default value: "canceled" + */ + abort(statusText?: string): void; + /** + * Incorporates the functionality of the .done() and .fail() methods, allowing (as of jQuery 1.8) the underlying Promise to be manipulated. Refer to deferred.then() for implementation details. 
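As a usage illustration only (not part of the definitions being added): a typical $.ajax call built from the JQueryAjaxSettings fields documented above, assuming the `$: JQueryStatic` global that the full file declares elsewhere.

const settings: JQueryAjaxSettings = {
    url: "/api/items",                 // request target
    method: "GET",
    dataType: "json",                  // expected response type
    timeout: 5000,                     // milliseconds
    beforeSend: (jqXHR) => { jqXHR.setRequestHeader("X-Example", "1"); },
    success: (data, textStatus) => console.log(textStatus, data),
    error: (jqXHR, textStatus, errorThrown) => console.error(textStatus, errorThrown)
};
$.ajax(settings);                      // returns a JQueryXHR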
+ */ + then(doneCallback: (data: any, textStatus: string, jqXHR: JQueryXHR) => void, failCallback?: (jqXHR: JQueryXHR, textStatus: string, errorThrown: any) => void): JQueryPromise; + /** + * Property containing the parsed response if the response Content-Type is json + */ + responseJSON?: any; + /** + * A function to be called if the request fails. + */ + error(xhr: JQueryXHR, textStatus: string, errorThrown: string): void; +} + +/** + * Interface for the JQuery callback + */ +interface JQueryCallback { + /** + * Add a callback or a collection of callbacks to a callback list. + * + * @param callbacks A function, or array of functions, that are to be added to the callback list. + */ + add(callbacks: Function): JQueryCallback; + /** + * Add a callback or a collection of callbacks to a callback list. + * + * @param callbacks A function, or array of functions, that are to be added to the callback list. + */ + add(callbacks: Function[]): JQueryCallback; + + /** + * Disable a callback list from doing anything more. + */ + disable(): JQueryCallback; + + /** + * Determine if the callbacks list has been disabled. + */ + disabled(): boolean; + + /** + * Remove all of the callbacks from a list. + */ + empty(): JQueryCallback; + + /** + * Call all of the callbacks with the given arguments + * + * @param arguments The argument or list of arguments to pass back to the callback list. + */ + fire(...arguments: any[]): JQueryCallback; + + /** + * Determine if the callbacks have already been called at least once. + */ + fired(): boolean; + + /** + * Call all callbacks in a list with the given context and arguments. + * + * @param context A reference to the context in which the callbacks in the list should be fired. + * @param arguments An argument, or array of arguments, to pass to the callbacks in the list. + */ + fireWith(context?: any, args?: any[]): JQueryCallback; + + /** + * Determine whether a supplied callback is in a list + * + * @param callback The callback to search for. + */ + has(callback: Function): boolean; + + /** + * Lock a callback list in its current state. + */ + lock(): JQueryCallback; + + /** + * Determine if the callbacks list has been locked. + */ + locked(): boolean; + + /** + * Remove a callback or a collection of callbacks from a callback list. + * + * @param callbacks A function, or array of functions, that are to be removed from the callback list. + */ + remove(callbacks: Function): JQueryCallback; + /** + * Remove a callback or a collection of callbacks from a callback list. + * + * @param callbacks A function, or array of functions, that are to be removed from the callback list. + */ + remove(callbacks: Function[]): JQueryCallback; +} + +/** + * Allows jQuery Promises to interop with non-jQuery promises + */ +interface JQueryGenericPromise { + /** + * Add handlers to be called when the Deferred object is resolved, rejected, or still in progress. + * + * @param doneFilter A function that is called when the Deferred is resolved. + * @param failFilter An optional function that is called when the Deferred is rejected. + */ + then(doneFilter: (value?: T, ...values: any[]) => U|JQueryPromise, failFilter?: (...reasons: any[]) => any, progressFilter?: (...progression: any[]) => any): JQueryPromise; + + /** + * Add handlers to be called when the Deferred object is resolved, rejected, or still in progress. + * + * @param doneFilter A function that is called when the Deferred is resolved. + * @param failFilter An optional function that is called when the Deferred is rejected. 
+ */ + then(doneFilter: (value?: T, ...values: any[]) => void, failFilter?: (...reasons: any[]) => any, progressFilter?: (...progression: any[]) => any): JQueryPromise; +} + +/** + * Interface for the JQuery promise/deferred callbacks + */ +interface JQueryPromiseCallback { + (value?: T, ...args: any[]): void; +} + +interface JQueryPromiseOperator { + (callback1: JQueryPromiseCallback|JQueryPromiseCallback[], ...callbacksN: Array|JQueryPromiseCallback[]>): JQueryPromise; +} + +/** + * Interface for the JQuery promise, part of callbacks + */ +interface JQueryPromise extends JQueryGenericPromise { + /** + * Determine the current state of a Deferred object. + */ + state(): string; + /** + * Add handlers to be called when the Deferred object is either resolved or rejected. + * + * @param alwaysCallbacks1 A function, or array of functions, that is called when the Deferred is resolved or rejected. + * @param alwaysCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved or rejected. + */ + always(alwaysCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...alwaysCallbacksN: Array|JQueryPromiseCallback[]>): JQueryPromise; + /** + * Add handlers to be called when the Deferred object is resolved. + * + * @param doneCallbacks1 A function, or array of functions, that are called when the Deferred is resolved. + * @param doneCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved. + */ + done(doneCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...doneCallbackN: Array|JQueryPromiseCallback[]>): JQueryPromise; + /** + * Add handlers to be called when the Deferred object is rejected. + * + * @param failCallbacks1 A function, or array of functions, that are called when the Deferred is rejected. + * @param failCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is rejected. + */ + fail(failCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...failCallbacksN: Array|JQueryPromiseCallback[]>): JQueryPromise; + /** + * Add handlers to be called when the Deferred object generates progress notifications. + * + * @param progressCallbacks A function, or array of functions, to be called when the Deferred generates progress notifications. + */ + progress(progressCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...progressCallbackN: Array|JQueryPromiseCallback[]>): JQueryPromise; + + // Deprecated - given no typings + pipe(doneFilter?: (x: any) => any, failFilter?: (x: any) => any, progressFilter?: (x: any) => any): JQueryPromise; +} + +/** + * Interface for the JQuery deferred, part of callbacks + */ +interface JQueryDeferred extends JQueryGenericPromise { + /** + * Determine the current state of a Deferred object. + */ + state(): string; + /** + * Add handlers to be called when the Deferred object is either resolved or rejected. + * + * @param alwaysCallbacks1 A function, or array of functions, that is called when the Deferred is resolved or rejected. + * @param alwaysCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved or rejected. + */ + always(alwaysCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...alwaysCallbacksN: Array|JQueryPromiseCallback[]>): JQueryDeferred; + /** + * Add handlers to be called when the Deferred object is resolved. + * + * @param doneCallbacks1 A function, or array of functions, that are called when the Deferred is resolved. 
+ * @param doneCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved. + */ + done(doneCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...doneCallbackN: Array|JQueryPromiseCallback[]>): JQueryDeferred; + /** + * Add handlers to be called when the Deferred object is rejected. + * + * @param failCallbacks1 A function, or array of functions, that are called when the Deferred is rejected. + * @param failCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is rejected. + */ + fail(failCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...failCallbacksN: Array|JQueryPromiseCallback[]>): JQueryDeferred; + /** + * Add handlers to be called when the Deferred object generates progress notifications. + * + * @param progressCallbacks A function, or array of functions, to be called when the Deferred generates progress notifications. + */ + progress(progressCallback1?: JQueryPromiseCallback|JQueryPromiseCallback[], ...progressCallbackN: Array|JQueryPromiseCallback[]>): JQueryDeferred; + + /** + * Call the progressCallbacks on a Deferred object with the given args. + * + * @param args Optional arguments that are passed to the progressCallbacks. + */ + notify(value?: any, ...args: any[]): JQueryDeferred; + + /** + * Call the progressCallbacks on a Deferred object with the given context and args. + * + * @param context Context passed to the progressCallbacks as the this object. + * @param args Optional arguments that are passed to the progressCallbacks. + */ + notifyWith(context: any, value?: any[]): JQueryDeferred; + + /** + * Reject a Deferred object and call any failCallbacks with the given args. + * + * @param args Optional arguments that are passed to the failCallbacks. + */ + reject(value?: any, ...args: any[]): JQueryDeferred; + /** + * Reject a Deferred object and call any failCallbacks with the given context and args. + * + * @param context Context passed to the failCallbacks as the this object. + * @param args An optional array of arguments that are passed to the failCallbacks. + */ + rejectWith(context: any, value?: any[]): JQueryDeferred; + + /** + * Resolve a Deferred object and call any doneCallbacks with the given args. + * + * @param value First argument passed to doneCallbacks. + * @param args Optional subsequent arguments that are passed to the doneCallbacks. + */ + resolve(value?: T, ...args: any[]): JQueryDeferred; + + /** + * Resolve a Deferred object and call any doneCallbacks with the given context and args. + * + * @param context Context passed to the doneCallbacks as the this object. + * @param args An optional array of arguments that are passed to the doneCallbacks. + */ + resolveWith(context: any, value?: T[]): JQueryDeferred; + + /** + * Return a Deferred's Promise object. 
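A minimal sketch of producing and consuming a Deferred with the members typed above, assuming the $.Deferred() factory declared later in this file:

const dfd = $.Deferred();
const promise = dfd.promise();          // read-only view for consumers
promise
    .progress((msg) => console.log("progress:", msg))
    .done((value) => console.log("resolved with", value))
    .fail((err) => console.error("rejected:", err));
dfd.notify("halfway there");            // fires the progressCallbacks
dfd.resolve("all finished");            // fires the doneCallbacks; state() is now "resolved"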
+ * + * @param target Object onto which the promise methods have to be attached + */ + promise(target?: any): JQueryPromise; + + // Deprecated - given no typings + pipe(doneFilter?: (x: any) => any, failFilter?: (x: any) => any, progressFilter?: (x: any) => any): JQueryPromise; +} + +/** + * Interface of the JQuery extension of the W3C event object + */ +interface BaseJQueryEventObject extends Event { + data: any; + delegateTarget: Element; + isDefaultPrevented(): boolean; + isImmediatePropagationStopped(): boolean; + isPropagationStopped(): boolean; + namespace: string; + originalEvent: Event; + preventDefault(): any; + relatedTarget: Element; + result: any; + stopImmediatePropagation(): void; + stopPropagation(): void; + target: Element; + pageX: number; + pageY: number; + which: number; + metaKey: boolean; +} + +interface JQueryInputEventObject extends BaseJQueryEventObject { + altKey: boolean; + ctrlKey: boolean; + metaKey: boolean; + shiftKey: boolean; +} + +interface JQueryMouseEventObject extends JQueryInputEventObject { + button: number; + clientX: number; + clientY: number; + offsetX: number; + offsetY: number; + pageX: number; + pageY: number; + screenX: number; + screenY: number; +} + +interface JQueryKeyEventObject extends JQueryInputEventObject { + char: any; + charCode: number; + key: any; + keyCode: number; +} + +interface JQueryEventObject extends BaseJQueryEventObject, JQueryInputEventObject, JQueryMouseEventObject, JQueryKeyEventObject{ +} + +/* + Collection of properties of the current browser +*/ + +interface JQuerySupport { + ajax?: boolean; + boxModel?: boolean; + changeBubbles?: boolean; + checkClone?: boolean; + checkOn?: boolean; + cors?: boolean; + cssFloat?: boolean; + hrefNormalized?: boolean; + htmlSerialize?: boolean; + leadingWhitespace?: boolean; + noCloneChecked?: boolean; + noCloneEvent?: boolean; + opacity?: boolean; + optDisabled?: boolean; + optSelected?: boolean; + scriptEval? (): boolean; + style?: boolean; + submitBubbles?: boolean; + tbody?: boolean; +} + +interface JQueryParam { + /** + * Create a serialized representation of an array or object, suitable for use in a URL query string or Ajax request. + * + * @param obj An array or object to serialize. + */ + (obj: any): string; + + /** + * Create a serialized representation of an array or object, suitable for use in a URL query string or Ajax request. + * + * @param obj An array or object to serialize. + * @param traditional A Boolean indicating whether to perform a traditional "shallow" serialization. + */ + (obj: any, traditional: boolean): string; +} + +/** + * The interface used to construct jQuery events (with $.Event). It is + * defined separately instead of inline in JQueryStatic to allow + * overriding the construction function with specific strings + * returning specific event objects. + */ +interface JQueryEventConstructor { + (name: string, eventProperties?: any): JQueryEventObject; + new (name: string, eventProperties?: any): JQueryEventObject; +} + +/** + * The interface used to specify coordinates. + */ +interface JQueryCoordinates { + left: number; + top: number; +} + +/** + * Elements in the array returned by serializeArray() + */ +interface JQuerySerializeArrayElement { + name: string; + value: string; +} + +interface JQueryAnimationOptions { + /** + * A string or number determining how long the animation will run. + */ + duration?: any; + /** + * A string indicating which easing function to use for the transition. 
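A hedged usage sketch of the event object interfaces above; `.on` belongs to the full JQuery instance interface and is not shown in this excerpt.

$("a.external").on("click", function (e: JQueryEventObject) {
    if (e.which === 1 && !e.metaKey) {  // plain left click, no modifier key
        e.preventDefault();
        console.log("clicked at", e.pageX, e.pageY);
    }
});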
+ */ + easing?: string; + /** + * A function to call once the animation is complete. + */ + complete?: Function; + /** + * A function to be called for each animated property of each animated element. This function provides an opportunity to modify the Tween object to change the value of the property before it is set. + */ + step?: (now: number, tween: any) => any; + /** + * A function to be called after each step of the animation, only once per animated element regardless of the number of animated properties. (version added: 1.8) + */ + progress?: (animation: JQueryPromise, progress: number, remainingMs: number) => any; + /** + * A function to call when the animation begins. (version added: 1.8) + */ + start?: (animation: JQueryPromise) => any; + /** + * A function to be called when the animation completes (its Promise object is resolved). (version added: 1.8) + */ + done?: (animation: JQueryPromise, jumpedToEnd: boolean) => any; + /** + * A function to be called when the animation fails to complete (its Promise object is rejected). (version added: 1.8) + */ + fail?: (animation: JQueryPromise, jumpedToEnd: boolean) => any; + /** + * A function to be called when the animation completes or stops without completing (its Promise object is either resolved or rejected). (version added: 1.8) + */ + always?: (animation: JQueryPromise, jumpedToEnd: boolean) => any; + /** + * A Boolean indicating whether to place the animation in the effects queue. If false, the animation will begin immediately. As of jQuery 1.7, the queue option can also accept a string, in which case the animation is added to the queue represented by that string. When a custom queue name is used the animation does not automatically start; you must call .dequeue("queuename") to start it. + */ + queue?: any; + /** + * A map of one or more of the CSS properties defined by the properties argument and their corresponding easing functions. (version added: 1.4) + */ + specialEasing?: Object; +} + +/** + * Static members of jQuery (those on $ and jQuery themselves) + */ +interface JQueryStatic { + + /** + * Perform an asynchronous HTTP (Ajax) request. + * + * @param settings A set of key/value pairs that configure the Ajax request. All settings are optional. A default can be set for any option with $.ajaxSetup(). + */ + ajax(settings: JQueryAjaxSettings): JQueryXHR; + /** + * Perform an asynchronous HTTP (Ajax) request. + * + * @param url A string containing the URL to which the request is sent. + * @param settings A set of key/value pairs that configure the Ajax request. All settings are optional. A default can be set for any option with $.ajaxSetup(). + */ + ajax(url: string, settings?: JQueryAjaxSettings): JQueryXHR; + + /** + * Handle custom Ajax options or modify existing options before each request is sent and before they are processed by $.ajax(). + * + * @param dataTypes An optional string containing one or more space-separated dataTypes + * @param handler A handler to set default values for future Ajax requests. + */ + ajaxPrefilter(dataTypes: string, handler: (opts: any, originalOpts: JQueryAjaxSettings, jqXHR: JQueryXHR) => any): void; + /** + * Handle custom Ajax options or modify existing options before each request is sent and before they are processed by $.ajax(). + * + * @param handler A handler to set default values for future Ajax requests. 
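A small sketch of driving .animate() with the JQueryAnimationOptions fields above (the .animate() overloads appear further down in this file); the selector and values are made up for illustration:

const opts: JQueryAnimationOptions = {
    duration: 400,
    easing: "swing",
    queue: true,                                   // run via the fx queue
    step: (now, tween) => console.log("step value:", now),
    complete: () => console.log("animation finished")
};
$("#panel").animate({ opacity: 0.5, height: "200px" }, opts);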
+ */ + ajaxPrefilter(handler: (opts: any, originalOpts: JQueryAjaxSettings, jqXHR: JQueryXHR) => any): void; + + ajaxSettings: JQueryAjaxSettings; + + /** + * Set default values for future Ajax requests. Its use is not recommended. + * + * @param options A set of key/value pairs that configure the default Ajax request. All options are optional. + */ + ajaxSetup(options: JQueryAjaxSettings): void; + + /** + * Load data from the server using a HTTP GET request. + * + * @param url A string containing the URL to which the request is sent. + * @param success A callback function that is executed if the request succeeds. + * @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, or html). + */ + get(url: string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR; + /** + * Load data from the server using a HTTP GET request. + * + * @param url A string containing the URL to which the request is sent. + * @param data A plain object or string that is sent to the server with the request. + * @param success A callback function that is executed if the request succeeds. + * @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, or html). + */ + get(url: string, data?: Object|string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR; + /** + * Load JSON-encoded data from the server using a GET HTTP request. + * + * @param url A string containing the URL to which the request is sent. + * @param success A callback function that is executed if the request succeeds. + */ + getJSON(url: string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any): JQueryXHR; + /** + * Load JSON-encoded data from the server using a GET HTTP request. + * + * @param url A string containing the URL to which the request is sent. + * @param data A plain object or string that is sent to the server with the request. + * @param success A callback function that is executed if the request succeeds. + */ + getJSON(url: string, data?: Object|string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any): JQueryXHR; + /** + * Load a JavaScript file from the server using a GET HTTP request, then execute it. + * + * @param url A string containing the URL to which the request is sent. + * @param success A callback function that is executed if the request succeeds. + */ + getScript(url: string, success?: (script: string, textStatus: string, jqXHR: JQueryXHR) => any): JQueryXHR; + + /** + * Create a serialized representation of an array or object, suitable for use in a URL query string or Ajax request. + */ + param: JQueryParam; + + /** + * Load data from the server using a HTTP POST request. + * + * @param url A string containing the URL to which the request is sent. + * @param success A callback function that is executed if the request succeeds. Required if dataType is provided, but can be null in that case. + * @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, text, html). + */ + post(url: string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR; + /** + * Load data from the server using a HTTP POST request. + * + * @param url A string containing the URL to which the request is sent. + * @param data A plain object or string that is sent to the server with the request. 
+ * @param success A callback function that is executed if the request succeeds. Required if dataType is provided, but can be null in that case. + * @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, text, html). + */ + post(url: string, data?: Object|string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR; + + /** + * A multi-purpose callbacks list object that provides a powerful way to manage callback lists. + * + * @param flags An optional list of space-separated flags that change how the callback list behaves. + */ + Callbacks(flags?: string): JQueryCallback; + + /** + * Holds or releases the execution of jQuery's ready event. + * + * @param hold Indicates whether the ready hold is being requested or released + */ + holdReady(hold: boolean): void; + + /** + * Accepts a string containing a CSS selector which is then used to match a set of elements. + * + * @param selector A string containing a selector expression + * @param context A DOM Element, Document, or jQuery to use as context + */ + (selector: string, context?: Element|JQuery): JQuery; + + /** + * Accepts a string containing a CSS selector which is then used to match a set of elements. + * + * @param element A DOM element to wrap in a jQuery object. + */ + (element: Element): JQuery; + + /** + * Accepts a string containing a CSS selector which is then used to match a set of elements. + * + * @param elementArray An array containing a set of DOM elements to wrap in a jQuery object. + */ + (elementArray: Element[]): JQuery; + + /** + * Binds a function to be executed when the DOM has finished loading. + * + * @param callback A function to execute after the DOM is ready. + */ + (callback: (jQueryAlias?: JQueryStatic) => any): JQuery; + + /** + * Accepts a string containing a CSS selector which is then used to match a set of elements. + * + * @param object A plain object to wrap in a jQuery object. + */ + (object: {}): JQuery; + + /** + * Accepts a string containing a CSS selector which is then used to match a set of elements. + * + * @param object An existing jQuery object to clone. + */ + (object: JQuery): JQuery; + + /** + * Specify a function to execute when the DOM is fully loaded. + */ + (): JQuery; + + /** + * Creates DOM elements on the fly from the provided string of raw HTML. + * + * @param html A string of HTML to create on the fly. Note that this parses HTML, not XML. + * @param ownerDocument A document in which the new elements will be created. + */ + (html: string, ownerDocument?: Document): JQuery; + + /** + * Creates DOM elements on the fly from the provided string of raw HTML. + * + * @param html A string defining a single, standalone, HTML element (e.g.
<div/> or <div></div>
). + * @param attributes An object of attributes, events, and methods to call on the newly-created element. + */ + (html: string, attributes: Object): JQuery; + + /** + * Relinquish jQuery's control of the $ variable. + * + * @param removeAll A Boolean indicating whether to remove all jQuery variables from the global scope (including jQuery itself). + */ + noConflict(removeAll?: boolean): Object; + + /** + * Provides a way to execute callback functions based on one or more objects, usually Deferred objects that represent asynchronous events. + * + * @param deferreds One or more Deferred objects, or plain JavaScript objects. + */ + when(...deferreds: Array/* as JQueryDeferred */>): JQueryPromise; + + /** + * Hook directly into jQuery to override how particular CSS properties are retrieved or set, normalize CSS property naming, or create custom properties. + */ + cssHooks: { [key: string]: any; }; + cssNumber: any; + + /** + * Store arbitrary data associated with the specified element. Returns the value that was set. + * + * @param element The DOM element to associate with the data. + * @param key A string naming the piece of data to set. + * @param value The new data value. + */ + data(element: Element, key: string, value: T): T; + /** + * Returns value at named data store for the element, as set by jQuery.data(element, name, value), or the full data store for the element. + * + * @param element The DOM element to associate with the data. + * @param key A string naming the piece of data to set. + */ + data(element: Element, key: string): any; + /** + * Returns value at named data store for the element, as set by jQuery.data(element, name, value), or the full data store for the element. + * + * @param element The DOM element to associate with the data. + */ + data(element: Element): any; + + /** + * Execute the next function on the queue for the matched element. + * + * @param element A DOM element from which to remove and execute a queued function. + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + */ + dequeue(element: Element, queueName?: string): void; + + /** + * Determine whether an element has any jQuery data associated with it. + * + * @param element A DOM element to be checked for data. + */ + hasData(element: Element): boolean; + + /** + * Show the queue of functions to be executed on the matched element. + * + * @param element A DOM element to inspect for an attached queue. + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + */ + queue(element: Element, queueName?: string): any[]; + /** + * Manipulate the queue of functions to be executed on the matched element. + * + * @param element A DOM element where the array of queued functions is attached. + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + * @param newQueue An array of functions to replace the current queue contents. + */ + queue(element: Element, queueName: string, newQueue: Function[]): JQuery; + /** + * Manipulate the queue of functions to be executed on the matched element. + * + * @param element A DOM element on which to add a queued function. + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + * @param callback The new function to add to the queue. + */ + queue(element: Element, queueName: string, callback: Function): JQuery; + + /** + * Remove a previously-stored piece of data. 
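An illustrative combination of the Ajax helpers and $.when typed above; with jqXHR arguments, each value handed to .done() is the [data, textStatus, jqXHR] triple of one request:

const users = $.getJSON("/api/users");
const posts = $.get("/api/posts", { limit: 10 }, undefined, "json");
$.when(users, posts).done((usersArgs, postsArgs) => {
    console.log(usersArgs[0], postsArgs[0]);       // the parsed response bodies
});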
+ * + * @param element A DOM element from which to remove data. + * @param name A string naming the piece of data to remove. + */ + removeData(element: Element, name?: string): JQuery; + + /** + * A constructor function that returns a chainable utility object with methods to register multiple callbacks into callback queues, invoke callback queues, and relay the success or failure state of any synchronous or asynchronous function. + * + * @param beforeStart A function that is called just before the constructor returns. + */ + Deferred(beforeStart?: (deferred: JQueryDeferred) => any): JQueryDeferred; + + /** + * Effects + */ + fx: { + tick: () => void; + /** + * The rate (in milliseconds) at which animations fire. + */ + interval: number; + stop: () => void; + speeds: { slow: number; fast: number; }; + /** + * Globally disable all animations. + */ + off: boolean; + step: any; + }; + + /** + * Takes a function and returns a new one that will always have a particular context. + * + * @param fnction The function whose context will be changed. + * @param context The object to which the context (this) of the function should be set. + * @param additionalArguments Any number of arguments to be passed to the function referenced in the function argument. + */ + proxy(fnction: (...args: any[]) => any, context: Object, ...additionalArguments: any[]): any; + /** + * Takes a function and returns a new one that will always have a particular context. + * + * @param context The object to which the context (this) of the function should be set. + * @param name The name of the function whose context will be changed (should be a property of the context object). + * @param additionalArguments Any number of arguments to be passed to the function named in the name argument. + */ + proxy(context: Object, name: string, ...additionalArguments: any[]): any; + + Event: JQueryEventConstructor; + + /** + * Takes a string and throws an exception containing it. + * + * @param message The message to send out. + */ + error(message: any): JQuery; + + expr: any; + fn: any; //TODO: Decide how we want to type this + + isReady: boolean; + + // Properties + support: JQuerySupport; + + /** + * Check to see if a DOM element is a descendant of another DOM element. + * + * @param container The DOM element that may contain the other element. + * @param contained The DOM element that may be contained by (a descendant of) the other element. + */ + contains(container: Element, contained: Element): boolean; + + /** + * A generic iterator function, which can be used to seamlessly iterate over both objects and arrays. Arrays and array-like objects with a length property (such as a function's arguments object) are iterated by numeric index, from 0 to length-1. Other objects are iterated via their named properties. + * + * @param collection The object or array to iterate over. + * @param callback The function that will be executed on every object. + */ + each( + collection: T[], + callback: (indexInArray: number, valueOfElement: T) => any + ): any; + + /** + * A generic iterator function, which can be used to seamlessly iterate over both objects and arrays. Arrays and array-like objects with a length property (such as a function's arguments object) are iterated by numeric index, from 0 to length-1. Other objects are iterated via their named properties. + * + * @param collection The object or array to iterate over. + * @param callback The function that will be executed on every object. 
+ */ + each( + collection: any, + callback: (indexInArray: any, valueOfElement: any) => any + ): any; + + /** + * Merge the contents of two or more objects together into the first object. + * + * @param target An object that will receive the new properties if additional objects are passed in or that will extend the jQuery namespace if it is the sole argument. + * @param object1 An object containing additional properties to merge in. + * @param objectN Additional objects containing properties to merge in. + */ + extend(target: any, object1?: any, ...objectN: any[]): any; + /** + * Merge the contents of two or more objects together into the first object. + * + * @param deep If true, the merge becomes recursive (aka. deep copy). + * @param target The object to extend. It will receive the new properties. + * @param object1 An object containing additional properties to merge in. + * @param objectN Additional objects containing properties to merge in. + */ + extend(deep: boolean, target: any, object1?: any, ...objectN: any[]): any; + + /** + * Execute some JavaScript code globally. + * + * @param code The JavaScript code to execute. + */ + globalEval(code: string): any; + + /** + * Finds the elements of an array which satisfy a filter function. The original array is not affected. + * + * @param array The array to search through. + * @param func The function to process each item against. The first argument to the function is the item, and the second argument is the index. The function should return a Boolean value. this will be the global window object. + * @param invert If "invert" is false, or not provided, then the function returns an array consisting of all elements for which "callback" returns true. If "invert" is true, then the function returns an array consisting of all elements for which "callback" returns false. + */ + grep(array: T[], func: (elementOfArray: T, indexInArray: number) => boolean, invert?: boolean): T[]; + + /** + * Search for a specified value within an array and return its index (or -1 if not found). + * + * @param value The value to search for. + * @param array An array through which to search. + * @param fromIndex he index of the array at which to begin the search. The default is 0, which will search the whole array. + */ + inArray(value: T, array: T[], fromIndex?: number): number; + + /** + * Determine whether the argument is an array. + * + * @param obj Object to test whether or not it is an array. + */ + isArray(obj: any): boolean; + /** + * Check to see if an object is empty (contains no enumerable properties). + * + * @param obj The object that will be checked to see if it's empty. + */ + isEmptyObject(obj: any): boolean; + /** + * Determine if the argument passed is a Javascript function object. + * + * @param obj Object to test whether or not it is a function. + */ + isFunction(obj: any): boolean; + /** + * Determines whether its argument is a number. + * + * @param obj The value to be tested. + */ + isNumeric(value: any): boolean; + /** + * Check to see if an object is a plain object (created using "{}" or "new Object"). + * + * @param obj The object that will be checked to see if it's a plain object. + */ + isPlainObject(obj: any): boolean; + /** + * Determine whether the argument is a window. + * + * @param obj Object to test whether or not it is a window. + */ + isWindow(obj: any): boolean; + /** + * Check to see if a DOM node is within an XML document (or is an XML document). 
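A short sketch of the array/object utilities typed above:

const defaults = { retries: 3, verbose: false };
const options = $.extend({}, defaults, { verbose: true });    // shallow merge
$.each([10, 20, 30], (index, value) => console.log(index, value));
const doubled = $.map([1, 2, 3], (el) => el * 2);             // [2, 4, 6]
const evens = $.grep([1, 2, 3, 4], (el) => el % 2 === 0);     // [2, 4]
console.log($.inArray(20, [10, 20, 30]));                     // 1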
+ * + * @param node he DOM node that will be checked to see if it's in an XML document. + */ + isXMLDoc(node: Node): boolean; + + /** + * Convert an array-like object into a true JavaScript array. + * + * @param obj Any object to turn into a native Array. + */ + makeArray(obj: any): any[]; + + /** + * Translate all items in an array or object to new array of items. + * + * @param array The Array to translate. + * @param callback The function to process each item against. The first argument to the function is the array item, the second argument is the index in array The function can return any value. Within the function, this refers to the global (window) object. + */ + map(array: T[], callback: (elementOfArray: T, indexInArray: number) => U): U[]; + /** + * Translate all items in an array or object to new array of items. + * + * @param arrayOrObject The Array or Object to translate. + * @param callback The function to process each item against. The first argument to the function is the value; the second argument is the index or key of the array or object property. The function can return any value to add to the array. A returned array will be flattened into the resulting array. Within the function, this refers to the global (window) object. + */ + map(arrayOrObject: any, callback: (value: any, indexOrKey: any) => any): any; + + /** + * Merge the contents of two arrays together into the first array. + * + * @param first The first array to merge, the elements of second added. + * @param second The second array to merge into the first, unaltered. + */ + merge(first: T[], second: T[]): T[]; + + /** + * An empty function. + */ + noop(): any; + + /** + * Return a number representing the current time. + */ + now(): number; + + /** + * Takes a well-formed JSON string and returns the resulting JavaScript object. + * + * @param json The JSON string to parse. + */ + parseJSON(json: string): any; + + /** + * Parses a string into an XML document. + * + * @param data a well-formed XML string to be parsed + */ + parseXML(data: string): XMLDocument; + + /** + * Remove the whitespace from the beginning and end of a string. + * + * @param str Remove the whitespace from the beginning and end of a string. + */ + trim(str: string): string; + + /** + * Determine the internal JavaScript [[Class]] of an object. + * + * @param obj Object to get the internal JavaScript [[Class]] of. + */ + type(obj: any): string; + + /** + * Sorts an array of DOM elements, in place, with the duplicates removed. Note that this only works on arrays of DOM elements, not strings or numbers. + * + * @param array The Array of DOM elements. + */ + unique(array: Element[]): Element[]; + + /** + * Parses a string into an array of DOM nodes. + * + * @param data HTML string to be parsed + * @param context DOM element to serve as the context in which the HTML fragment will be created + * @param keepScripts A Boolean indicating whether to include scripts passed in the HTML string + */ + parseHTML(data: string, context?: HTMLElement, keepScripts?: boolean): any[]; + + /** + * Parses a string into an array of DOM nodes. 
+ * + * @param data HTML string to be parsed + * @param context DOM element to serve as the context in which the HTML fragment will be created + * @param keepScripts A Boolean indicating whether to include scripts passed in the HTML string + */ + parseHTML(data: string, context?: Document, keepScripts?: boolean): any[]; +} + +/** + * The jQuery instance members + */ +interface JQuery { + /** + * Register a handler to be called when Ajax requests complete. This is an AjaxEvent. + * + * @param handler The function to be invoked. + */ + ajaxComplete(handler: (event: JQueryEventObject, XMLHttpRequest: XMLHttpRequest, ajaxOptions: any) => any): JQuery; + /** + * Register a handler to be called when Ajax requests complete with an error. This is an Ajax Event. + * + * @param handler The function to be invoked. + */ + ajaxError(handler: (event: JQueryEventObject, jqXHR: JQueryXHR, ajaxSettings: JQueryAjaxSettings, thrownError: any) => any): JQuery; + /** + * Attach a function to be executed before an Ajax request is sent. This is an Ajax Event. + * + * @param handler The function to be invoked. + */ + ajaxSend(handler: (event: JQueryEventObject, jqXHR: JQueryXHR, ajaxOptions: JQueryAjaxSettings) => any): JQuery; + /** + * Register a handler to be called when the first Ajax request begins. This is an Ajax Event. + * + * @param handler The function to be invoked. + */ + ajaxStart(handler: () => any): JQuery; + /** + * Register a handler to be called when all Ajax requests have completed. This is an Ajax Event. + * + * @param handler The function to be invoked. + */ + ajaxStop(handler: () => any): JQuery; + /** + * Attach a function to be executed whenever an Ajax request completes successfully. This is an Ajax Event. + * + * @param handler The function to be invoked. + */ + ajaxSuccess(handler: (event: JQueryEventObject, XMLHttpRequest: XMLHttpRequest, ajaxOptions: JQueryAjaxSettings) => any): JQuery; + + /** + * Load data from the server and place the returned HTML into the matched element. + * + * @param url A string containing the URL to which the request is sent. + * @param data A plain object or string that is sent to the server with the request. + * @param complete A callback function that is executed when the request completes. + */ + load(url: string, data?: string|Object, complete?: (responseText: string, textStatus: string, XMLHttpRequest: XMLHttpRequest) => any): JQuery; + + /** + * Encode a set of form elements as a string for submission. + */ + serialize(): string; + /** + * Encode a set of form elements as an array of names and values. + */ + serializeArray(): JQuerySerializeArrayElement[]; + + /** + * Adds the specified class(es) to each of the set of matched elements. + * + * @param className One or more space-separated classes to be added to the class attribute of each matched element. + */ + addClass(className: string): JQuery; + /** + * Adds the specified class(es) to each of the set of matched elements. + * + * @param function A function returning one or more space-separated class names to be added to the existing class name(s). Receives the index position of the element in the set and the existing class name(s) as arguments. Within the function, this refers to the current element in the set. + */ + addClass(func: (index: number, className: string) => string): JQuery; + + /** + * Add the previous set of elements on the stack to the current set, optionally filtered by a selector. 
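A usage sketch of the instance-level Ajax hooks and .load()/.serialize() typed above (element IDs and URLs are hypothetical):

$(document).ajaxStart(() => $("#spinner").css("display", "block"));
$(document).ajaxStop(() => $("#spinner").css("display", "none"));
$("#result").load("/fragments/summary.html", undefined, (responseText, textStatus) => {
    if (textStatus === "error") { console.error("load failed"); }
});
console.log($("form#search").serialize());   // e.g. "q=term&page=2"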
+ */ + addBack(selector?: string): JQuery; + + /** + * Get the value of an attribute for the first element in the set of matched elements. + * + * @param attributeName The name of the attribute to get. + */ + attr(attributeName: string): string; + /** + * Set one or more attributes for the set of matched elements. + * + * @param attributeName The name of the attribute to set. + * @param value A value to set for the attribute. + */ + attr(attributeName: string, value: string|number): JQuery; + /** + * Set one or more attributes for the set of matched elements. + * + * @param attributeName The name of the attribute to set. + * @param func A function returning the value to set. this is the current element. Receives the index position of the element in the set and the old attribute value as arguments. + */ + attr(attributeName: string, func: (index: number, attr: string) => string|number): JQuery; + /** + * Set one or more attributes for the set of matched elements. + * + * @param attributes An object of attribute-value pairs to set. + */ + attr(attributes: Object): JQuery; + + /** + * Determine whether any of the matched elements are assigned the given class. + * + * @param className The class name to search for. + */ + hasClass(className: string): boolean; + + /** + * Get the HTML contents of the first element in the set of matched elements. + */ + html(): string; + /** + * Set the HTML contents of each element in the set of matched elements. + * + * @param htmlString A string of HTML to set as the content of each matched element. + */ + html(htmlString: string): JQuery; + /** + * Set the HTML contents of each element in the set of matched elements. + * + * @param func A function returning the HTML content to set. Receives the index position of the element in the set and the old HTML value as arguments. jQuery empties the element before calling the function; use the oldhtml argument to reference the previous content. Within the function, this refers to the current element in the set. + */ + html(func: (index: number, oldhtml: string) => string): JQuery; + /** + * Set the HTML contents of each element in the set of matched elements. + * + * @param func A function returning the HTML content to set. Receives the index position of the element in the set and the old HTML value as arguments. jQuery empties the element before calling the function; use the oldhtml argument to reference the previous content. Within the function, this refers to the current element in the set. + */ + + /** + * Get the value of a property for the first element in the set of matched elements. + * + * @param propertyName The name of the property to get. + */ + prop(propertyName: string): any; + /** + * Set one or more properties for the set of matched elements. + * + * @param propertyName The name of the property to set. + * @param value A value to set for the property. + */ + prop(propertyName: string, value: string|number|boolean): JQuery; + /** + * Set one or more properties for the set of matched elements. + * + * @param properties An object of property-value pairs to set. + */ + prop(properties: Object): JQuery; + /** + * Set one or more properties for the set of matched elements. + * + * @param propertyName The name of the property to set. + * @param func A function returning the value to set. Receives the index position of the element in the set and the old property value as arguments. Within the function, the keyword this refers to the current element. 
+ */ + prop(propertyName: string, func: (index: number, oldPropertyValue: any) => any): JQuery; + + /** + * Remove an attribute from each element in the set of matched elements. + * + * @param attributeName An attribute to remove; as of version 1.7, it can be a space-separated list of attributes. + */ + removeAttr(attributeName: string): JQuery; + + /** + * Remove a single class, multiple classes, or all classes from each element in the set of matched elements. + * + * @param className One or more space-separated classes to be removed from the class attribute of each matched element. + */ + removeClass(className?: string): JQuery; + /** + * Remove a single class, multiple classes, or all classes from each element in the set of matched elements. + * + * @param function A function returning one or more space-separated class names to be removed. Receives the index position of the element in the set and the old class value as arguments. + */ + removeClass(func: (index: number, className: string) => string): JQuery; + + /** + * Remove a property for the set of matched elements. + * + * @param propertyName The name of the property to remove. + */ + removeProp(propertyName: string): JQuery; + + /** + * Add or remove one or more classes from each element in the set of matched elements, depending on either the class's presence or the value of the switch argument. + * + * @param className One or more class names (separated by spaces) to be toggled for each element in the matched set. + * @param swtch A Boolean (not just truthy/falsy) value to determine whether the class should be added or removed. + */ + toggleClass(className: string, swtch?: boolean): JQuery; + /** + * Add or remove one or more classes from each element in the set of matched elements, depending on either the class's presence or the value of the switch argument. + * + * @param swtch A boolean value to determine whether the class should be added or removed. + */ + toggleClass(swtch?: boolean): JQuery; + /** + * Add or remove one or more classes from each element in the set of matched elements, depending on either the class's presence or the value of the switch argument. + * + * @param func A function that returns class names to be toggled in the class attribute of each element in the matched set. Receives the index position of the element in the set, the old class value, and the switch as arguments. + * @param swtch A boolean value to determine whether the class should be added or removed. + */ + toggleClass(func: (index: number, className: string, swtch: boolean) => string, swtch?: boolean): JQuery; + + /** + * Get the current value of the first element in the set of matched elements. + */ + val(): any; + /** + * Set the value of each element in the set of matched elements. + * + * @param value A string of text or an array of strings corresponding to the value of each matched element to set as selected/checked. + */ + val(value: string|string[]): JQuery; + /** + * Set the value of each element in the set of matched elements. + * + * @param func A function returning the value to set. this is the current element. Receives the index position of the element in the set and the old value as arguments. + */ + val(func: (index: number, value: string) => string): JQuery; + + + /** + * Get the value of style properties for the first element in the set of matched elements. + * + * @param propertyName A CSS property. + */ + css(propertyName: string): string; + /** + * Set one or more CSS properties for the set of matched elements. 
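A chained usage sketch of the attribute, class, value and CSS accessors typed above (selector and values invented for illustration):

const $field = $("input[name='email']");
$field
    .attr("placeholder", "[email protected]")
    .addClass("required")
    .toggleClass("invalid", $field.val() === "")
    .css("border-color", $field.hasClass("invalid") ? "red" : "");
console.log($field.prop("disabled"), $field.val());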
+ * + * @param propertyName A CSS property name. + * @param value A value to set for the property. + */ + css(propertyName: string, value: string|number): JQuery; + /** + * Set one or more CSS properties for the set of matched elements. + * + * @param propertyName A CSS property name. + * @param value A function returning the value to set. this is the current element. Receives the index position of the element in the set and the old value as arguments. + */ + css(propertyName: string, value: (index: number, value: string) => string|number): JQuery; + /** + * Set one or more CSS properties for the set of matched elements. + * + * @param properties An object of property-value pairs to set. + */ + css(properties: Object): JQuery; + + /** + * Get the current computed height for the first element in the set of matched elements. + */ + height(): number; + /** + * Set the CSS height of every matched element. + * + * @param value An integer representing the number of pixels, or an integer with an optional unit of measure appended (as a string). + */ + height(value: number|string): JQuery; + /** + * Set the CSS height of every matched element. + * + * @param func A function returning the height to set. Receives the index position of the element in the set and the old height as arguments. Within the function, this refers to the current element in the set. + */ + height(func: (index: number, height: number) => number|string): JQuery; + + /** + * Get the current computed height for the first element in the set of matched elements, including padding but not border. + */ + innerHeight(): number; + + /** + * Sets the inner height on elements in the set of matched elements, including padding but not border. + * + * @param value An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string). + */ + innerHeight(height: number|string): JQuery; + + /** + * Get the current computed width for the first element in the set of matched elements, including padding but not border. + */ + innerWidth(): number; + + /** + * Sets the inner width on elements in the set of matched elements, including padding but not border. + * + * @param value An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string). + */ + innerWidth(width: number|string): JQuery; + + /** + * Get the current coordinates of the first element in the set of matched elements, relative to the document. + */ + offset(): JQueryCoordinates; + /** + * An object containing the properties top and left, which are integers indicating the new top and left coordinates for the elements. + * + * @param coordinates An object containing the properties top and left, which are integers indicating the new top and left coordinates for the elements. + */ + offset(coordinates: JQueryCoordinates): JQuery; + /** + * An object containing the properties top and left, which are integers indicating the new top and left coordinates for the elements. + * + * @param func A function to return the coordinates to set. Receives the index of the element in the collection as the first argument and the current coordinates as the second argument. The function should return an object with the new top and left properties. + */ + offset(func: (index: number, coords: JQueryCoordinates) => JQueryCoordinates): JQuery; + + /** + * Get the current computed height for the first element in the set of matched elements, including padding, border, and optionally margin. 
Returns an integer (without "px") representation of the value or null if called on an empty set of elements. + * + * @param includeMargin A Boolean indicating whether to include the element's margin in the calculation. + */ + outerHeight(includeMargin?: boolean): number; + + /** + * Sets the outer height on elements in the set of matched elements, including padding and border. + * + * @param value An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string). + */ + outerHeight(height: number|string): JQuery; + + /** + * Get the current computed width for the first element in the set of matched elements, including padding and border. + * + * @param includeMargin A Boolean indicating whether to include the element's margin in the calculation. + */ + outerWidth(includeMargin?: boolean): number; + + /** + * Sets the outer width on elements in the set of matched elements, including padding and border. + * + * @param value An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string). + */ + outerWidth(width: number|string): JQuery; + + /** + * Get the current coordinates of the first element in the set of matched elements, relative to the offset parent. + */ + position(): JQueryCoordinates; + + /** + * Get the current horizontal position of the scroll bar for the first element in the set of matched elements or set the horizontal position of the scroll bar for every matched element. + */ + scrollLeft(): number; + /** + * Set the current horizontal position of the scroll bar for each of the set of matched elements. + * + * @param value An integer indicating the new position to set the scroll bar to. + */ + scrollLeft(value: number): JQuery; + + /** + * Get the current vertical position of the scroll bar for the first element in the set of matched elements or set the vertical position of the scroll bar for every matched element. + */ + scrollTop(): number; + /** + * Set the current vertical position of the scroll bar for each of the set of matched elements. + * + * @param value An integer indicating the new position to set the scroll bar to. + */ + scrollTop(value: number): JQuery; + + /** + * Get the current computed width for the first element in the set of matched elements. + */ + width(): number; + /** + * Set the CSS width of each element in the set of matched elements. + * + * @param value An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string). + */ + width(value: number|string): JQuery; + /** + * Set the CSS width of each element in the set of matched elements. + * + * @param func A function returning the width to set. Receives the index position of the element in the set and the old width as arguments. Within the function, this refers to the current element in the set. + */ + width(func: (index: number, width: number) => number|string): JQuery; + + /** + * Remove from the queue all items that have not yet been run. + * + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + */ + clearQueue(queueName?: string): JQuery; + + /** + * Store arbitrary data associated with the matched elements. + * + * @param key A string naming the piece of data to set. + * @param value The new data value; it can be any Javascript type including Array or Object. 
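+ *
+ * Illustrative sketch (editor's addition; the "#cart" id and the key name are assumptions):
+ *
+ *     $("#cart").data("itemCount", 3);
+ *     const count = $("#cart").data("itemCount"); // 3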
+ */ + data(key: string, value: any): JQuery; + /** + * Return the value at the named data store for the first element in the jQuery collection, as set by data(name, value) or by an HTML5 data-* attribute. + * + * @param key Name of the data stored. + */ + data(key: string): any; + /** + * Store arbitrary data associated with the matched elements. + * + * @param obj An object of key-value pairs of data to update. + */ + data(obj: { [key: string]: any; }): JQuery; + /** + * Return the value at the named data store for the first element in the jQuery collection, as set by data(name, value) or by an HTML5 data-* attribute. + */ + data(): any; + + /** + * Execute the next function on the queue for the matched elements. + * + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + */ + dequeue(queueName?: string): JQuery; + + /** + * Remove a previously-stored piece of data. + * + * @param name A string naming the piece of data to delete or space-separated string naming the pieces of data to delete. + */ + removeData(name: string): JQuery; + /** + * Remove a previously-stored piece of data. + * + * @param list An array of strings naming the pieces of data to delete. + */ + removeData(list: string[]): JQuery; + /** + * Remove all previously-stored piece of data. + */ + removeData(): JQuery; + + /** + * Return a Promise object to observe when all actions of a certain type bound to the collection, queued or not, have finished. + * + * @param type The type of queue that needs to be observed. (default: fx) + * @param target Object onto which the promise methods have to be attached + */ + promise(type?: string, target?: Object): JQueryPromise; + + /** + * Perform a custom animation of a set of CSS properties. + * + * @param properties An object of CSS properties and values that the animation will move toward. + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + animate(properties: Object, duration?: string|number, complete?: Function): JQuery; + /** + * Perform a custom animation of a set of CSS properties. + * + * @param properties An object of CSS properties and values that the animation will move toward. + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. (default: swing) + * @param complete A function to call once the animation is complete. + */ + animate(properties: Object, duration?: string|number, easing?: string, complete?: Function): JQuery; + /** + * Perform a custom animation of a set of CSS properties. + * + * @param properties An object of CSS properties and values that the animation will move toward. + * @param options A map of additional options to pass to the method. + */ + animate(properties: Object, options: JQueryAnimationOptions): JQuery; + + /** + * Set a timer to delay execution of subsequent items in the queue. + * + * @param duration An integer indicating the number of milliseconds to delay execution of the next item in the queue. + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + */ + delay(duration: number, queueName?: string): JQuery; + + /** + * Display the matched elements by fading them to opaque. + * + * @param duration A string or number determining how long the animation will run. 
+ * @param complete A function to call once the animation is complete. + */ + fadeIn(duration?: number|string, complete?: Function): JQuery; + /** + * Display the matched elements by fading them to opaque. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + fadeIn(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Display the matched elements by fading them to opaque. + * + * @param options A map of additional options to pass to the method. + */ + fadeIn(options: JQueryAnimationOptions): JQuery; + + /** + * Hide the matched elements by fading them to transparent. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + fadeOut(duration?: number|string, complete?: Function): JQuery; + /** + * Hide the matched elements by fading them to transparent. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + fadeOut(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Hide the matched elements by fading them to transparent. + * + * @param options A map of additional options to pass to the method. + */ + fadeOut(options: JQueryAnimationOptions): JQuery; + + /** + * Adjust the opacity of the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param opacity A number between 0 and 1 denoting the target opacity. + * @param complete A function to call once the animation is complete. + */ + fadeTo(duration: string|number, opacity: number, complete?: Function): JQuery; + /** + * Adjust the opacity of the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param opacity A number between 0 and 1 denoting the target opacity. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + fadeTo(duration: string|number, opacity: number, easing?: string, complete?: Function): JQuery; + + /** + * Display or hide the matched elements by animating their opacity. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + fadeToggle(duration?: number|string, complete?: Function): JQuery; + /** + * Display or hide the matched elements by animating their opacity. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + fadeToggle(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Display or hide the matched elements by animating their opacity. + * + * @param options A map of additional options to pass to the method. + */ + fadeToggle(options: JQueryAnimationOptions): JQuery; + + /** + * Stop the currently-running animation, remove all queued animations, and complete all animations for the matched elements. 
+ * + * @param queue The name of the queue in which to stop animations. + */ + finish(queue?: string): JQuery; + + /** + * Hide the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + hide(duration?: number|string, complete?: Function): JQuery; + /** + * Hide the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + hide(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Hide the matched elements. + * + * @param options A map of additional options to pass to the method. + */ + hide(options: JQueryAnimationOptions): JQuery; + + /** + * Display the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + show(duration?: number|string, complete?: Function): JQuery; + /** + * Display the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + show(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Display the matched elements. + * + * @param options A map of additional options to pass to the method. + */ + show(options: JQueryAnimationOptions): JQuery; + + /** + * Display the matched elements with a sliding motion. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + slideDown(duration?: number|string, complete?: Function): JQuery; + /** + * Display the matched elements with a sliding motion. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + slideDown(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Display the matched elements with a sliding motion. + * + * @param options A map of additional options to pass to the method. + */ + slideDown(options: JQueryAnimationOptions): JQuery; + + /** + * Display or hide the matched elements with a sliding motion. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + slideToggle(duration?: number|string, complete?: Function): JQuery; + /** + * Display or hide the matched elements with a sliding motion. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + slideToggle(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Display or hide the matched elements with a sliding motion. + * + * @param options A map of additional options to pass to the method. 
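+ *
+ * Illustrative sketch (editor's addition; the "#menu" id is an assumption):
+ *
+ *     $("#menu").slideToggle({ duration: 400, easing: "swing", complete: function () { console.log("toggled"); } });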
+ */ + slideToggle(options: JQueryAnimationOptions): JQuery; + + /** + * Hide the matched elements with a sliding motion. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + slideUp(duration?: number|string, complete?: Function): JQuery; + /** + * Hide the matched elements with a sliding motion. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + slideUp(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Hide the matched elements with a sliding motion. + * + * @param options A map of additional options to pass to the method. + */ + slideUp(options: JQueryAnimationOptions): JQuery; + + /** + * Stop the currently-running animation on the matched elements. + * + * @param clearQueue A Boolean indicating whether to remove queued animation as well. Defaults to false. + * @param jumpToEnd A Boolean indicating whether to complete the current animation immediately. Defaults to false. + */ + stop(clearQueue?: boolean, jumpToEnd?: boolean): JQuery; + /** + * Stop the currently-running animation on the matched elements. + * + * @param queue The name of the queue in which to stop animations. + * @param clearQueue A Boolean indicating whether to remove queued animation as well. Defaults to false. + * @param jumpToEnd A Boolean indicating whether to complete the current animation immediately. Defaults to false. + */ + stop(queue?: string, clearQueue?: boolean, jumpToEnd?: boolean): JQuery; + + /** + * Display or hide the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param complete A function to call once the animation is complete. + */ + toggle(duration?: number|string, complete?: Function): JQuery; + /** + * Display or hide the matched elements. + * + * @param duration A string or number determining how long the animation will run. + * @param easing A string indicating which easing function to use for the transition. + * @param complete A function to call once the animation is complete. + */ + toggle(duration?: number|string, easing?: string, complete?: Function): JQuery; + /** + * Display or hide the matched elements. + * + * @param options A map of additional options to pass to the method. + */ + toggle(options: JQueryAnimationOptions): JQuery; + /** + * Display or hide the matched elements. + * + * @param showOrHide A Boolean indicating whether to show or hide the elements. + */ + toggle(showOrHide: boolean): JQuery; + + /** + * Attach a handler to an event for the elements. + * + * @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names. + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + bind(eventType: string, eventData: any, handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Attach a handler to an event for the elements. + * + * @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names. + * @param handler A function to execute each time the event is triggered. 
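+ *
+ * Illustrative sketch (editor's addition; the "#save" id is an assumption — .on() is generally preferred over .bind() since jQuery 1.7):
+ *
+ *     $("#save").bind("click", function (e) { e.preventDefault(); });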
+ */
+ bind(eventType: string, handler: (eventObject: JQueryEventObject) => any): JQuery;
+ /**
+ * Attach a handler to an event for the elements.
+ *
+ * @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names.
+ * @param eventData An object containing data that will be passed to the event handler.
+ * @param preventBubble Setting the third argument to false will attach a function that prevents the default action from occurring and stops the event from bubbling. The default is true.
+ */
+ bind(eventType: string, eventData: any, preventBubble: boolean): JQuery;
+ /**
+ * Attach a handler to an event for the elements.
+ *
+ * @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names.
+ * @param preventBubble Setting the third argument to false will attach a function that prevents the default action from occurring and stops the event from bubbling. The default is true.
+ */
+ bind(eventType: string, preventBubble: boolean): JQuery;
+ /**
+ * Attach a handler to an event for the elements.
+ *
+ * @param events An object containing one or more DOM event types and functions to execute for them.
+ */
+ bind(events: any): JQuery;
+
+ /**
+ * Trigger the "blur" event on an element.
+ */
+ blur(): JQuery;
+ /**
+ * Bind an event handler to the "blur" JavaScript event
+ *
+ * @param handler A function to execute each time the event is triggered.
+ */
+ blur(handler: (eventObject: JQueryEventObject) => any): JQuery;
+ /**
+ * Bind an event handler to the "blur" JavaScript event
+ *
+ * @param eventData An object containing data that will be passed to the event handler.
+ * @param handler A function to execute each time the event is triggered.
+ */
+ blur(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
+
+ /**
+ * Trigger the "change" event on an element.
+ */
+ change(): JQuery;
+ /**
+ * Bind an event handler to the "change" JavaScript event
+ *
+ * @param handler A function to execute each time the event is triggered.
+ */
+ change(handler: (eventObject: JQueryEventObject) => any): JQuery;
+ /**
+ * Bind an event handler to the "change" JavaScript event
+ *
+ * @param eventData An object containing data that will be passed to the event handler.
+ * @param handler A function to execute each time the event is triggered.
+ */
+ change(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
+
+ /**
+ * Trigger the "click" event on an element.
+ */
+ click(): JQuery;
+ /**
+ * Bind an event handler to the "click" JavaScript event
+ *
+ * @param handler A function to execute each time the event is triggered.
+ */
+ click(handler: (eventObject: JQueryEventObject) => any): JQuery;
+ /**
+ * Bind an event handler to the "click" JavaScript event
+ *
+ * @param eventData An object containing data that will be passed to the event handler.
+ * @param handler A function to execute each time the event is triggered.
+ */
+ click(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
+
+ /**
+ * Trigger the "dblclick" event on an element.
+ */
+ dblclick(): JQuery;
+ /**
+ * Bind an event handler to the "dblclick" JavaScript event
+ *
+ * @param handler A function to execute each time the event is triggered.
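+ *
+ * Illustrative sketch (editor's addition; the ".row" selector is an assumption):
+ *
+ *     $(".row").dblclick(function () { $(this).toggleClass("selected"); });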
+ */ + dblclick(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "dblclick" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + dblclick(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery; + + delegate(selector: any, eventType: string, handler: (eventObject: JQueryEventObject) => any): JQuery; + delegate(selector: any, eventType: string, eventData: any, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "focus" event on an element. + */ + focus(): JQuery; + /** + * Bind an event handler to the "focus" JavaScript event + * + * @param handler A function to execute each time the event is triggered. + */ + focus(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "focus" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + focus(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "focusin" event on an element. + */ + focusin(): JQuery; + /** + * Bind an event handler to the "focusin" JavaScript event + * + * @param handler A function to execute each time the event is triggered. + */ + focusin(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "focusin" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + focusin(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "focusout" event on an element. + */ + focusout(): JQuery; + /** + * Bind an event handler to the "focusout" JavaScript event + * + * @param handler A function to execute each time the event is triggered. + */ + focusout(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "focusout" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + focusout(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Bind two handlers to the matched elements, to be executed when the mouse pointer enters and leaves the elements. + * + * @param handlerIn A function to execute when the mouse pointer enters the element. + * @param handlerOut A function to execute when the mouse pointer leaves the element. + */ + hover(handlerIn: (eventObject: JQueryEventObject) => any, handlerOut: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind a single handler to the matched elements, to be executed when the mouse pointer enters or leaves the elements. + * + * @param handlerInOut A function to execute when the mouse pointer enters or leaves the element. + */ + hover(handlerInOut: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "keydown" event on an element. + */ + keydown(): JQuery; + /** + * Bind an event handler to the "keydown" JavaScript event + * + * @param handler A function to execute each time the event is triggered. 
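+ *
+ * Illustrative sketch (editor's addition; the "#dialog" id is an assumption — key code 27 is the Escape key):
+ *
+ *     $(document).keydown(function (e) { if (e.which === 27) { $("#dialog").hide(); } });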
+ */ + keydown(handler: (eventObject: JQueryKeyEventObject) => any): JQuery; + /** + * Bind an event handler to the "keydown" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + keydown(eventData?: any, handler?: (eventObject: JQueryKeyEventObject) => any): JQuery; + + /** + * Trigger the "keypress" event on an element. + */ + keypress(): JQuery; + /** + * Bind an event handler to the "keypress" JavaScript event + * + * @param handler A function to execute each time the event is triggered. + */ + keypress(handler: (eventObject: JQueryKeyEventObject) => any): JQuery; + /** + * Bind an event handler to the "keypress" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + keypress(eventData?: any, handler?: (eventObject: JQueryKeyEventObject) => any): JQuery; + + /** + * Trigger the "keyup" event on an element. + */ + keyup(): JQuery; + /** + * Bind an event handler to the "keyup" JavaScript event + * + * @param handler A function to execute each time the event is triggered. + */ + keyup(handler: (eventObject: JQueryKeyEventObject) => any): JQuery; + /** + * Bind an event handler to the "keyup" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + keyup(eventData?: any, handler?: (eventObject: JQueryKeyEventObject) => any): JQuery; + + /** + * Bind an event handler to the "load" JavaScript event. + * + * @param handler A function to execute when the event is triggered. + */ + load(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "load" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + load(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "mousedown" event on an element. + */ + mousedown(): JQuery; + /** + * Bind an event handler to the "mousedown" JavaScript event. + * + * @param handler A function to execute when the event is triggered. + */ + mousedown(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to the "mousedown" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mousedown(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Trigger the "mouseenter" event on an element. + */ + mouseenter(): JQuery; + /** + * Bind an event handler to be fired when the mouse enters an element. + * + * @param handler A function to execute when the event is triggered. + */ + mouseenter(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to be fired when the mouse enters an element. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mouseenter(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Trigger the "mouseleave" event on an element. 
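+ *
+ * Illustrative sketch (editor's addition; the "#preview" id is an assumption):
+ *
+ *     $("#preview").mouseleave();   // runs any handlers attached via .mouseleave(handler) or .on("mouseleave", ...)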
+ */ + mouseleave(): JQuery; + /** + * Bind an event handler to be fired when the mouse leaves an element. + * + * @param handler A function to execute when the event is triggered. + */ + mouseleave(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to be fired when the mouse leaves an element. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mouseleave(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Trigger the "mousemove" event on an element. + */ + mousemove(): JQuery; + /** + * Bind an event handler to the "mousemove" JavaScript event. + * + * @param handler A function to execute when the event is triggered. + */ + mousemove(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to the "mousemove" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mousemove(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Trigger the "mouseout" event on an element. + */ + mouseout(): JQuery; + /** + * Bind an event handler to the "mouseout" JavaScript event. + * + * @param handler A function to execute when the event is triggered. + */ + mouseout(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to the "mouseout" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mouseout(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Trigger the "mouseover" event on an element. + */ + mouseover(): JQuery; + /** + * Bind an event handler to the "mouseover" JavaScript event. + * + * @param handler A function to execute when the event is triggered. + */ + mouseover(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to the "mouseover" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mouseover(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Trigger the "mouseup" event on an element. + */ + mouseup(): JQuery; + /** + * Bind an event handler to the "mouseup" JavaScript event. + * + * @param handler A function to execute when the event is triggered. + */ + mouseup(handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + /** + * Bind an event handler to the "mouseup" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + mouseup(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery; + + /** + * Remove an event handler. + */ + off(): JQuery; + /** + * Remove an event handler. + * + * @param events One or more space-separated event types and optional namespaces, or just namespaces, such as "click", "keydown.myPlugin", or ".myPlugin". + * @param selector A selector which should match the one originally passed to .on() when attaching event handlers. 
+ * @param handler A handler function previously attached for the event(s), or the special value false. + */ + off(events: string, selector?: string, handler?: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Remove an event handler. + * + * @param events One or more space-separated event types and optional namespaces, or just namespaces, such as "click", "keydown.myPlugin", or ".myPlugin". + * @param handler A handler function previously attached for the event(s), or the special value false. + */ + off(events: string, handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Remove an event handler. + * + * @param events An object where the string keys represent one or more space-separated event types and optional namespaces, and the values represent handler functions previously attached for the event(s). + * @param selector A selector which should match the one originally passed to .on() when attaching event handlers. + */ + off(events: { [key: string]: any; }, selector?: string): JQuery; + + /** + * Attach an event handler function for one or more events to the selected elements. + * + * @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin". + * @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. Rest parameter args is for optional parameters passed to jQuery.trigger(). Note that the actual parameters on the event handler function must be marked as optional (? syntax). + */ + on(events: string, handler: (eventObject: JQueryEventObject, ...args: any[]) => any): JQuery; + /** + * Attach an event handler function for one or more events to the selected elements. + * + * @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin". + * @param data Data to be passed to the handler in event.data when an event is triggered. + * @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. + */ + on(events: string, data : any, handler: (eventObject: JQueryEventObject, ...args: any[]) => any): JQuery; + /** + * Attach an event handler function for one or more events to the selected elements. + * + * @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin". + * @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element. + * @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. + */ + on(events: string, selector: string, handler: (eventObject: JQueryEventObject, ...eventData: any[]) => any): JQuery; + /** + * Attach an event handler function for one or more events to the selected elements. + * + * @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin". + * @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element. + * @param data Data to be passed to the handler in event.data when an event is triggered. 
+ * @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. + */ + on(events: string, selector: string, data: any, handler: (eventObject: JQueryEventObject, ...eventData: any[]) => any): JQuery; + /** + * Attach an event handler function for one or more events to the selected elements. + * + * @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s). + * @param selector A selector string to filter the descendants of the selected elements that will call the handler. If the selector is null or omitted, the handler is always called when it reaches the selected element. + * @param data Data to be passed to the handler in event.data when an event occurs. + */ + on(events: { [key: string]: any; }, selector?: string, data?: any): JQuery; + /** + * Attach an event handler function for one or more events to the selected elements. + * + * @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s). + * @param data Data to be passed to the handler in event.data when an event occurs. + */ + on(events: { [key: string]: any; }, data?: any): JQuery; + + /** + * Attach a handler to an event for the elements. The handler is executed at most once per element per event type. + * + * @param events A string containing one or more JavaScript event types, such as "click" or "submit," or custom event names. + * @param handler A function to execute at the time the event is triggered. + */ + one(events: string, handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Attach a handler to an event for the elements. The handler is executed at most once per element per event type. + * + * @param events A string containing one or more JavaScript event types, such as "click" or "submit," or custom event names. + * @param data An object containing data that will be passed to the event handler. + * @param handler A function to execute at the time the event is triggered. + */ + one(events: string, data: Object, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Attach a handler to an event for the elements. The handler is executed at most once per element per event type. + * + * @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin". + * @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element. + * @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. + */ + one(events: string, selector: string, handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Attach a handler to an event for the elements. The handler is executed at most once per element per event type. + * + * @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin". + * @param selector A selector string to filter the descendants of the selected elements that trigger the event. 
If the selector is null or omitted, the event is always triggered when it reaches the selected element. + * @param data Data to be passed to the handler in event.data when an event is triggered. + * @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. + */ + one(events: string, selector: string, data: any, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Attach a handler to an event for the elements. The handler is executed at most once per element per event type. + * + * @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s). + * @param selector A selector string to filter the descendants of the selected elements that will call the handler. If the selector is null or omitted, the handler is always called when it reaches the selected element. + * @param data Data to be passed to the handler in event.data when an event occurs. + */ + one(events: { [key: string]: any; }, selector?: string, data?: any): JQuery; + + /** + * Attach a handler to an event for the elements. The handler is executed at most once per element per event type. + * + * @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s). + * @param data Data to be passed to the handler in event.data when an event occurs. + */ + one(events: { [key: string]: any; }, data?: any): JQuery; + + + /** + * Specify a function to execute when the DOM is fully loaded. + * + * @param handler A function to execute after the DOM is ready. + */ + ready(handler: (jQueryAlias?: JQueryStatic) => any): JQuery; + + /** + * Trigger the "resize" event on an element. + */ + resize(): JQuery; + /** + * Bind an event handler to the "resize" JavaScript event. + * + * @param handler A function to execute each time the event is triggered. + */ + resize(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "resize" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + resize(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "scroll" event on an element. + */ + scroll(): JQuery; + /** + * Bind an event handler to the "scroll" JavaScript event. + * + * @param handler A function to execute each time the event is triggered. + */ + scroll(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "scroll" JavaScript event. + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + scroll(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "select" event on an element. + */ + select(): JQuery; + /** + * Bind an event handler to the "select" JavaScript event. + * + * @param handler A function to execute each time the event is triggered. + */ + select(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "select" JavaScript event. 
+ * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + select(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Trigger the "submit" event on an element. + */ + submit(): JQuery; + /** + * Bind an event handler to the "submit" JavaScript event + * + * @param handler A function to execute each time the event is triggered. + */ + submit(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "submit" JavaScript event + * + * @param eventData An object containing data that will be passed to the event handler. + * @param handler A function to execute each time the event is triggered. + */ + submit(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Execute all handlers and behaviors attached to the matched elements for the given event type. + * + * @param eventType A string containing a JavaScript event type, such as click or submit. + * @param extraParameters Additional parameters to pass along to the event handler. + */ + trigger(eventType: string, extraParameters?: any[]|Object): JQuery; + /** + * Execute all handlers and behaviors attached to the matched elements for the given event type. + * + * @param event A jQuery.Event object. + * @param extraParameters Additional parameters to pass along to the event handler. + */ + trigger(event: JQueryEventObject, extraParameters?: any[]|Object): JQuery; + + /** + * Execute all handlers attached to an element for an event. + * + * @param eventType A string containing a JavaScript event type, such as click or submit. + * @param extraParameters An array of additional parameters to pass along to the event handler. + */ + triggerHandler(eventType: string, ...extraParameters: any[]): Object; + + /** + * Execute all handlers attached to an element for an event. + * + * @param event A jQuery.Event object. + * @param extraParameters An array of additional parameters to pass along to the event handler. + */ + triggerHandler(event: JQueryEventObject, ...extraParameters: any[]): Object; + + /** + * Remove a previously-attached event handler from the elements. + * + * @param eventType A string containing a JavaScript event type, such as click or submit. + * @param handler The function that is to be no longer executed. + */ + unbind(eventType?: string, handler?: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Remove a previously-attached event handler from the elements. + * + * @param eventType A string containing a JavaScript event type, such as click or submit. + * @param fls Unbinds the corresponding 'return false' function that was bound using .bind( eventType, false ). + */ + unbind(eventType: string, fls: boolean): JQuery; + /** + * Remove a previously-attached event handler from the elements. + * + * @param evt A JavaScript event object as passed to an event handler. + */ + unbind(evt: any): JQuery; + + /** + * Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements. + */ + undelegate(): JQuery; + /** + * Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements. + * + * @param selector A selector which will be used to filter the event results. 
+ * @param eventType A string containing a JavaScript event type, such as "click" or "keydown" + * @param handler A function to execute at the time the event is triggered. + */ + undelegate(selector: string, eventType: string, handler?: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements. + * + * @param selector A selector which will be used to filter the event results. + * @param events An object of one or more event types and previously bound functions to unbind from them. + */ + undelegate(selector: string, events: Object): JQuery; + /** + * Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements. + * + * @param namespace A string containing a namespace to unbind all events from. + */ + undelegate(namespace: string): JQuery; + + /** + * Bind an event handler to the "unload" JavaScript event. (DEPRECATED from v1.8) + * + * @param handler A function to execute when the event is triggered. + */ + unload(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "unload" JavaScript event. (DEPRECATED from v1.8) + * + * @param eventData A plain object of data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + unload(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * The DOM node context originally passed to jQuery(); if none was passed then context will likely be the document. (DEPRECATED from v1.10) + */ + context: Element; + + jquery: string; + + /** + * Bind an event handler to the "error" JavaScript event. (DEPRECATED from v1.8) + * + * @param handler A function to execute when the event is triggered. + */ + error(handler: (eventObject: JQueryEventObject) => any): JQuery; + /** + * Bind an event handler to the "error" JavaScript event. (DEPRECATED from v1.8) + * + * @param eventData A plain object of data that will be passed to the event handler. + * @param handler A function to execute when the event is triggered. + */ + error(eventData: any, handler: (eventObject: JQueryEventObject) => any): JQuery; + + /** + * Add a collection of DOM elements onto the jQuery stack. + * + * @param elements An array of elements to push onto the stack and make into a new jQuery object. + */ + pushStack(elements: any[]): JQuery; + /** + * Add a collection of DOM elements onto the jQuery stack. + * + * @param elements An array of elements to push onto the stack and make into a new jQuery object. + * @param name The name of a jQuery method that generated the array of elements. + * @param arguments The arguments that were passed in to the jQuery method (for serialization). + */ + pushStack(elements: any[], name: string, arguments: any[]): JQuery; + + /** + * Insert content, specified by the parameter, after each element in the set of matched elements. + * + * param content1 HTML string, DOM element, array of elements, or jQuery object to insert after each element in the set of matched elements. + * param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert after each element in the set of matched elements. + */ + after(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery; + /** + * Insert content, specified by the parameter, after each element in the set of matched elements. 
+ * + * param func A function that returns an HTML string, DOM element(s), or jQuery object to insert after each element in the set of matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set. + */ + after(func: (index: number, html: string) => string|Element|JQuery): JQuery; + + /** + * Insert content, specified by the parameter, to the end of each element in the set of matched elements. + * + * param content1 DOM element, array of elements, HTML string, or jQuery object to insert at the end of each element in the set of matched elements. + * param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert at the end of each element in the set of matched elements. + */ + append(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery; + /** + * Insert content, specified by the parameter, to the end of each element in the set of matched elements. + * + * param func A function that returns an HTML string, DOM element(s), or jQuery object to insert at the end of each element in the set of matched elements. Receives the index position of the element in the set and the old HTML value of the element as arguments. Within the function, this refers to the current element in the set. + */ + append(func: (index: number, html: string) => string|Element|JQuery): JQuery; + + /** + * Insert every element in the set of matched elements to the end of the target. + * + * @param target A selector, element, HTML string, array of elements, or jQuery object; the matched set of elements will be inserted at the end of the element(s) specified by this parameter. + */ + appendTo(target: JQuery|any[]|Element|string): JQuery; + + /** + * Insert content, specified by the parameter, before each element in the set of matched elements. + * + * param content1 HTML string, DOM element, array of elements, or jQuery object to insert before each element in the set of matched elements. + * param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert before each element in the set of matched elements. + */ + before(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery; + /** + * Insert content, specified by the parameter, before each element in the set of matched elements. + * + * param func A function that returns an HTML string, DOM element(s), or jQuery object to insert before each element in the set of matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set. + */ + before(func: (index: number, html: string) => string|Element|JQuery): JQuery; + + /** + * Create a deep copy of the set of matched elements. + * + * param withDataAndEvents A Boolean indicating whether event handlers and data should be copied along with the elements. The default value is false. + * param deepWithDataAndEvents A Boolean indicating whether event handlers and data for all children of the cloned element should be copied. By default its value matches the first argument's value (which defaults to false). + */ + clone(withDataAndEvents?: boolean, deepWithDataAndEvents?: boolean): JQuery; + + /** + * Remove the set of matched elements from the DOM. + * + * param selector A selector expression that filters the set of matched elements to be removed. 
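+ *
+ * Illustrative sketch (editor's addition; the ids and the ".done" class are assumptions). Unlike .remove(),
+ * .detach() keeps jQuery data and event handlers, so the elements can be re-inserted later:
+ *
+ *     const done = $("#todo li").detach(".done");
+ *     $("#archive").append(done);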
+ */ + detach(selector?: string): JQuery; + + /** + * Remove all child nodes of the set of matched elements from the DOM. + */ + empty(): JQuery; + + /** + * Insert every element in the set of matched elements after the target. + * + * param target A selector, element, array of elements, HTML string, or jQuery object; the matched set of elements will be inserted after the element(s) specified by this parameter. + */ + insertAfter(target: JQuery|any[]|Element|Text|string): JQuery; + + /** + * Insert every element in the set of matched elements before the target. + * + * param target A selector, element, array of elements, HTML string, or jQuery object; the matched set of elements will be inserted before the element(s) specified by this parameter. + */ + insertBefore(target: JQuery|any[]|Element|Text|string): JQuery; + + /** + * Insert content, specified by the parameter, to the beginning of each element in the set of matched elements. + * + * param content1 DOM element, array of elements, HTML string, or jQuery object to insert at the beginning of each element in the set of matched elements. + * param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert at the beginning of each element in the set of matched elements. + */ + prepend(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery; + /** + * Insert content, specified by the parameter, to the beginning of each element in the set of matched elements. + * + * param func A function that returns an HTML string, DOM element(s), or jQuery object to insert at the beginning of each element in the set of matched elements. Receives the index position of the element in the set and the old HTML value of the element as arguments. Within the function, this refers to the current element in the set. + */ + prepend(func: (index: number, html: string) => string|Element|JQuery): JQuery; + + /** + * Insert every element in the set of matched elements to the beginning of the target. + * + * @param target A selector, element, HTML string, array of elements, or jQuery object; the matched set of elements will be inserted at the beginning of the element(s) specified by this parameter. + */ + prependTo(target: JQuery|any[]|Element|string): JQuery; + + /** + * Remove the set of matched elements from the DOM. + * + * @param selector A selector expression that filters the set of matched elements to be removed. + */ + remove(selector?: string): JQuery; + + /** + * Replace each target element with the set of matched elements. + * + * @param target A selector string, jQuery object, DOM element, or array of elements indicating which element(s) to replace. + */ + replaceAll(target: JQuery|any[]|Element|string): JQuery; + + /** + * Replace each element in the set of matched elements with the provided new content and return the set of elements that was removed. + * + * param newContent The content to insert. May be an HTML string, DOM element, array of DOM elements, or jQuery object. + */ + replaceWith(newContent: JQuery|any[]|Element|Text|string): JQuery; + /** + * Replace each element in the set of matched elements with the provided new content and return the set of elements that was removed. + * + * param func A function that returns content with which to replace the set of matched elements. + */ + replaceWith(func: () => Element|JQuery): JQuery; + + /** + * Get the combined text contents of each element in the set of matched elements, including their descendants. 
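+ *
+ * Illustrative sketch (editor's addition; the "#status" id is an assumption):
+ *
+ *     const label = $("#status").text();   // combined text of #status and its descendants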
+ */ + text(): string; + /** + * Set the content of each element in the set of matched elements to the specified text. + * + * @param text The text to set as the content of each matched element. When Number or Boolean is supplied, it will be converted to a String representation. + */ + text(text: string|number|boolean): JQuery; + /** + * Set the content of each element in the set of matched elements to the specified text. + * + * @param func A function returning the text content to set. Receives the index position of the element in the set and the old text value as arguments. + */ + text(func: (index: number, text: string) => string): JQuery; + + /** + * Retrieve all the elements contained in the jQuery set, as an array. + */ + toArray(): any[]; + + /** + * Remove the parents of the set of matched elements from the DOM, leaving the matched elements in their place. + */ + unwrap(): JQuery; + + /** + * Wrap an HTML structure around each element in the set of matched elements. + * + * @param wrappingElement A selector, element, HTML string, or jQuery object specifying the structure to wrap around the matched elements. + */ + wrap(wrappingElement: JQuery|Element|string): JQuery; + /** + * Wrap an HTML structure around each element in the set of matched elements. + * + * @param func A callback function returning the HTML content or jQuery object to wrap around the matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set. + */ + wrap(func: (index: number) => string|JQuery): JQuery; + + /** + * Wrap an HTML structure around all elements in the set of matched elements. + * + * @param wrappingElement A selector, element, HTML string, or jQuery object specifying the structure to wrap around the matched elements. + */ + wrapAll(wrappingElement: JQuery|Element|string): JQuery; + wrapAll(func: (index: number) => string): JQuery; + + /** + * Wrap an HTML structure around the content of each element in the set of matched elements. + * + * @param wrappingElement An HTML snippet, selector expression, jQuery object, or DOM element specifying the structure to wrap around the content of the matched elements. + */ + wrapInner(wrappingElement: JQuery|Element|string): JQuery; + /** + * Wrap an HTML structure around the content of each element in the set of matched elements. + * + * @param func A callback function which generates a structure to wrap around the content of the matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set. + */ + wrapInner(func: (index: number) => string): JQuery; + + /** + * Iterate over a jQuery object, executing a function for each matched element. + * + * @param func A function to execute for each matched element. + */ + each(func: (index: number, elem: Element) => any): JQuery; + + /** + * Retrieve one of the elements matched by the jQuery object. + * + * @param index A zero-based integer indicating which element to retrieve. + */ + get(index: number): HTMLElement; + /** + * Retrieve the elements matched by the jQuery object. + */ + get(): any[]; + + /** + * Search for a given element from among the matched elements. + */ + index(): number; + /** + * Search for a given element from among the matched elements. + * + * @param selector A selector representing a jQuery collection in which to look for an element. 
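+ *
+ * Illustrative sketch (editor's addition; the id and class names are assumptions):
+ *
+ *     // position of #current within the collection matched by "li.item", or -1 if not found
+ *     const pos = $("#current").index("li.item");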
+ */ + index(selector: string|JQuery|Element): number; + + /** + * The number of elements in the jQuery object. + */ + length: number; + /** + * A selector representing selector passed to jQuery(), if any, when creating the original set. + * version deprecated: 1.7, removed: 1.9 + */ + selector: string; + [index: string]: any; + [index: number]: HTMLElement; + + /** + * Add elements to the set of matched elements. + * + * @param selector A string representing a selector expression to find additional elements to add to the set of matched elements. + * @param context The point in the document at which the selector should begin matching; similar to the context argument of the $(selector, context) method. + */ + add(selector: string, context?: Element): JQuery; + /** + * Add elements to the set of matched elements. + * + * @param elements One or more elements to add to the set of matched elements. + */ + add(...elements: Element[]): JQuery; + /** + * Add elements to the set of matched elements. + * + * @param html An HTML fragment to add to the set of matched elements. + */ + add(html: string): JQuery; + /** + * Add elements to the set of matched elements. + * + * @param obj An existing jQuery object to add to the set of matched elements. + */ + add(obj: JQuery): JQuery; + + /** + * Get the children of each element in the set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + children(selector?: string): JQuery; + + /** + * For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree. + * + * @param selector A string containing a selector expression to match elements against. + */ + closest(selector: string): JQuery; + /** + * For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree. + * + * @param selector A string containing a selector expression to match elements against. + * @param context A DOM element within which a matching element may be found. If no context is passed in then the context of the jQuery set will be used instead. + */ + closest(selector: string, context?: Element): JQuery; + /** + * For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree. + * + * @param obj A jQuery object to match elements against. + */ + closest(obj: JQuery): JQuery; + /** + * For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree. + * + * @param element An element to match elements against. + */ + closest(element: Element): JQuery; + + /** + * Get an array of all the elements and selectors matched against the current element up through the DOM tree. + * + * @param selectors An array or string containing a selector expression to match elements against (can also be a jQuery object). + * @param context A DOM element within which a matching element may be found. If no context is passed in then the context of the jQuery set will be used instead. + */ + closest(selectors: any, context?: Element): any[]; + + /** + * Get the children of each element in the set of matched elements, including text and comment nodes. 
+ */ + contents(): JQuery; + + /** + * End the most recent filtering operation in the current chain and return the set of matched elements to its previous state. + */ + end(): JQuery; + + /** + * Reduce the set of matched elements to the one at the specified index. + * + * @param index An integer indicating the 0-based position of the element. OR An integer indicating the position of the element, counting backwards from the last element in the set. + * + */ + eq(index: number): JQuery; + + /** + * Reduce the set of matched elements to those that match the selector or pass the function's test. + * + * @param selector A string containing a selector expression to match the current set of elements against. + */ + filter(selector: string): JQuery; + /** + * Reduce the set of matched elements to those that match the selector or pass the function's test. + * + * @param func A function used as a test for each element in the set. this is the current DOM element. + */ + filter(func: (index: number, element: Element) => any): JQuery; + /** + * Reduce the set of matched elements to those that match the selector or pass the function's test. + * + * @param element An element to match the current set of elements against. + */ + filter(element: Element): JQuery; + /** + * Reduce the set of matched elements to those that match the selector or pass the function's test. + * + * @param obj An existing jQuery object to match the current set of elements against. + */ + filter(obj: JQuery): JQuery; + + /** + * Get the descendants of each element in the current set of matched elements, filtered by a selector, jQuery object, or element. + * + * @param selector A string containing a selector expression to match elements against. + */ + find(selector: string): JQuery; + /** + * Get the descendants of each element in the current set of matched elements, filtered by a selector, jQuery object, or element. + * + * @param element An element to match elements against. + */ + find(element: Element): JQuery; + /** + * Get the descendants of each element in the current set of matched elements, filtered by a selector, jQuery object, or element. + * + * @param obj A jQuery object to match elements against. + */ + find(obj: JQuery): JQuery; + + /** + * Reduce the set of matched elements to the first in the set. + */ + first(): JQuery; + + /** + * Reduce the set of matched elements to those that have a descendant that matches the selector or DOM element. + * + * @param selector A string containing a selector expression to match elements against. + */ + has(selector: string): JQuery; + /** + * Reduce the set of matched elements to those that have a descendant that matches the selector or DOM element. + * + * @param contained A DOM element to match elements against. + */ + has(contained: Element): JQuery; + + /** + * Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments. + * + * @param selector A string containing a selector expression to match elements against. + */ + is(selector: string): boolean; + /** + * Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments. + * + * @param func A function used as a test for the set of elements. It accepts one argument, index, which is the element's index in the jQuery collection.Within the function, this refers to the current DOM element. 
+ */ + is(func: (index: number, element: Element) => boolean): boolean; + /** + * Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments. + * + * @param obj An existing jQuery object to match the current set of elements against. + */ + is(obj: JQuery): boolean; + /** + * Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments. + * + * @param elements One or more elements to match the current set of elements against. + */ + is(elements: any): boolean; + + /** + * Reduce the set of matched elements to the final one in the set. + */ + last(): JQuery; + + /** + * Pass each element in the current matched set through a function, producing a new jQuery object containing the return values. + * + * @param callback A function object that will be invoked for each element in the current set. + */ + map(callback: (index: number, domElement: Element) => any): JQuery; + + /** + * Get the immediately following sibling of each element in the set of matched elements. If a selector is provided, it retrieves the next sibling only if it matches that selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + next(selector?: string): JQuery; + + /** + * Get all following siblings of each element in the set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + nextAll(selector?: string): JQuery; + + /** + * Get all following siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object passed. + * + * @param selector A string containing a selector expression to indicate where to stop matching following sibling elements. + * @param filter A string containing a selector expression to match elements against. + */ + nextUntil(selector?: string, filter?: string): JQuery; + /** + * Get all following siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object passed. + * + * @param element A DOM node or jQuery object indicating where to stop matching following sibling elements. + * @param filter A string containing a selector expression to match elements against. + */ + nextUntil(element?: Element, filter?: string): JQuery; + /** + * Get all following siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object passed. + * + * @param obj A DOM node or jQuery object indicating where to stop matching following sibling elements. + * @param filter A string containing a selector expression to match elements against. + */ + nextUntil(obj?: JQuery, filter?: string): JQuery; + + /** + * Remove elements from the set of matched elements. + * + * @param selector A string containing a selector expression to match elements against. + */ + not(selector: string): JQuery; + /** + * Remove elements from the set of matched elements. + * + * @param func A function used as a test for each element in the set. this is the current DOM element. + */ + not(func: (index: number, element: Element) => boolean): JQuery; + /** + * Remove elements from the set of matched elements. + * + * @param elements One or more DOM elements to remove from the matched set. 
+ */ + not(elements: Element|Element[]): JQuery; + /** + * Remove elements from the set of matched elements. + * + * @param obj An existing jQuery object to match the current set of elements against. + */ + not(obj: JQuery): JQuery; + + /** + * Get the closest ancestor element that is positioned. + */ + offsetParent(): JQuery; + + /** + * Get the parent of each element in the current set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + parent(selector?: string): JQuery; + + /** + * Get the ancestors of each element in the current set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + parents(selector?: string): JQuery; + + /** + * Get the ancestors of each element in the current set of matched elements, up to but not including the element matched by the selector, DOM node, or jQuery object. + * + * @param selector A string containing a selector expression to indicate where to stop matching ancestor elements. + * @param filter A string containing a selector expression to match elements against. + */ + parentsUntil(selector?: string, filter?: string): JQuery; + /** + * Get the ancestors of each element in the current set of matched elements, up to but not including the element matched by the selector, DOM node, or jQuery object. + * + * @param element A DOM node or jQuery object indicating where to stop matching ancestor elements. + * @param filter A string containing a selector expression to match elements against. + */ + parentsUntil(element?: Element, filter?: string): JQuery; + /** + * Get the ancestors of each element in the current set of matched elements, up to but not including the element matched by the selector, DOM node, or jQuery object. + * + * @param obj A DOM node or jQuery object indicating where to stop matching ancestor elements. + * @param filter A string containing a selector expression to match elements against. + */ + parentsUntil(obj?: JQuery, filter?: string): JQuery; + + /** + * Get the immediately preceding sibling of each element in the set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + prev(selector?: string): JQuery; + + /** + * Get all preceding siblings of each element in the set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + prevAll(selector?: string): JQuery; + + /** + * Get all preceding siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object. + * + * @param selector A string containing a selector expression to indicate where to stop matching preceding sibling elements. + * @param filter A string containing a selector expression to match elements against. + */ + prevUntil(selector?: string, filter?: string): JQuery; + /** + * Get all preceding siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object. + * + * @param element A DOM node or jQuery object indicating where to stop matching preceding sibling elements. + * @param filter A string containing a selector expression to match elements against. 
+ */ + prevUntil(element?: Element, filter?: string): JQuery; + /** + * Get all preceding siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object. + * + * @param obj A DOM node or jQuery object indicating where to stop matching preceding sibling elements. + * @param filter A string containing a selector expression to match elements against. + */ + prevUntil(obj?: JQuery, filter?: string): JQuery; + + /** + * Get the siblings of each element in the set of matched elements, optionally filtered by a selector. + * + * @param selector A string containing a selector expression to match elements against. + */ + siblings(selector?: string): JQuery; + + /** + * Reduce the set of matched elements to a subset specified by a range of indices. + * + * @param start An integer indicating the 0-based position at which the elements begin to be selected. If negative, it indicates an offset from the end of the set. + * @param end An integer indicating the 0-based position at which the elements stop being selected. If negative, it indicates an offset from the end of the set. If omitted, the range continues until the end of the set. + */ + slice(start: number, end?: number): JQuery; + + /** + * Show the queue of functions to be executed on the matched elements. + * + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + */ + queue(queueName?: string): any[]; + /** + * Manipulate the queue of functions to be executed, once for each matched element. + * + * @param newQueue An array of functions to replace the current queue contents. + */ + queue(newQueue: Function[]): JQuery; + /** + * Manipulate the queue of functions to be executed, once for each matched element. + * + * @param callback The new function to add to the queue, with a function to call that will dequeue the next item. + */ + queue(callback: Function): JQuery; + /** + * Manipulate the queue of functions to be executed, once for each matched element. + * + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + * @param newQueue An array of functions to replace the current queue contents. + */ + queue(queueName: string, newQueue: Function[]): JQuery; + /** + * Manipulate the queue of functions to be executed, once for each matched element. + * + * @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue. + * @param callback The new function to add to the queue, with a function to call that will dequeue the next item. 
+ */ + queue(queueName: string, callback: Function): JQuery; +} +declare module "jquery" { + export = $; +} +declare var jQuery: JQueryStatic; +declare var $: JQueryStatic; diff --git a/hls-graph/html/ts/jquery.flot.d.ts b/hls-graph/html/ts/jquery.flot.d.ts new file mode 100644 index 0000000000..8535c915fa --- /dev/null +++ b/hls-graph/html/ts/jquery.flot.d.ts @@ -0,0 +1,240 @@ +// Type definitions for Flot +// Project: http://www.flotcharts.org/ +// Definitions by: Matt Burland +// Definitions: https://github.com/borisyankov/DefinitelyTyped + + +declare module jquery.flot { + interface plotOptions { + colors?: any[]; + series?: seriesOptions; + legend?: legendOptions; + xaxis?: axisOptions; + yaxis?: axisOptions; + xaxes?: axisOptions[]; + yaxes?: axisOptions[]; + grid?: gridOptions; + interaction?: interaction; + hooks?: hooks; + } + + interface hooks { + processOptions: { (plot: plot, options: plotOptions): void; } []; + processRawData: { (plot: plot, series: dataSeries, data: any[], datapoints: datapoints): void; }[]; + processDatapoints: { (plot: plot, series: dataSeries, datapoints: datapoints): void; }[]; + processOffset: { (plot: plot, offset: canvasPoint): void; }[]; + drawBackground: { (plot: plot, context: CanvasRenderingContext2D): void; }[]; + drawSeries: { (plot: plot, context: CanvasRenderingContext2D, series: dataSeries): void; }[]; + draw: { (plot: plot, context: CanvasRenderingContext2D): void; }[]; + bindEvents: { (plot: plot, eventHolder: JQuery): void; }[]; + drawOverlay: { (plot: plot, context: CanvasRenderingContext2D): void; }[]; + shutdown: { (plot: plot, eventHolder: JQuery): void; }[]; + } + + interface interaction { + redrawOverlayInterval?: number; + } + + interface gridOptions { + show?: boolean; + aboveData?: boolean; + color?: any; // color + backgroundColor?: any; //color/gradient or null + margin?: any; // number or margin object + labelMargin?: number; + axisMargin?: number; + markings?: any; //array of markings or (fn: axes -> array of markings) + borderWidth?: any; // number or width object + borderColor?: any; // color or null + minBorderMargin?: number; // or null + clickable?: boolean; + hoverable?: boolean; + autoHighlight?: boolean; + mouseActiveRadius?: number; + tickColor?: any; + markingsColor?: any; + markingsLineWidth?: number; + } + + interface legendOptions { + show?: boolean; + labelFormatter?: (label: string, series: any) => string; // null or (fn: string, series object -> string) + labelBoxBorderColor?: any; //color + noColumns?: number; + position?: string; //"ne" or "nw" or "se" or "sw" + margin?: any; //number of pixels or [x margin, y margin] + backgroundColor?: any; //null or color + backgroundOpacity?: number; // between 0 and 1 + container?: JQuery; // null or jQuery object/DOM element/jQuery expression + sorted?: any; //null/false, true, "ascending", "descending" or a comparator + } + + interface seriesOptions { + color?: any; // color or number + label?: string; + lines?: linesOptions; + bars?: barsOptions; + points?: pointsOptions; + xaxis?: number; + yaxis?: number; + clickable?: boolean; + hoverable?: boolean; + shadowSize?: number; + highlightColor?: any; + stack?: boolean; // NEIL: Since we use the Stack plugin + } + + interface dataSeries extends seriesOptions { + data: any[]; + } + + interface axisOptions { + show?: boolean; // null or true/false + position?: string; // "bottom" or "top" or "left" or "right" + + color?: any; // null or color spec + tickColor?: any; // null or 
color spec + font?: any; // null or font spec object + + min?: number; + max?: number; + autoscaleMargin?: number; + + transform?: (v: number) => number; // null or fn: number -> number + inverseTransform?: (v: number) => number; // null or fn: number -> number + + ticks?: any; // null or number or ticks array or (fn: axis -> ticks array) + tickSize?: any; // number or array + minTickSize?: any; // number or array + tickFormatter?: (t: number, a?: axis) => string; // (fn: number, object -> string) or string + tickDecimals?: number; + + labelWidth?: number; + labelHeight?: number; + reserveSpace?: boolean; + + tickLength?: number; + + alignTicksWithAxis?: number; + } + + interface seriesTypeBase { + show?: boolean; + lineWidth?: number; + fill?: any; //boolean or number + fillColor?: any; //null or color/gradient + } + + interface linesOptions extends seriesTypeBase { + steps?: boolean; + } + + interface barsOptions extends seriesTypeBase { + barWidth?: number; + align?: string; + horizontal?: boolean; + } + + interface pointsOptions extends seriesTypeBase { + radius?: number; + symbol?: any; + } + + interface gradient { + colors: any[]; + } + + interface item { + datapoint: number[]; // the point, e.g. [0, 2] + dataIndex: number; // the index of the point in the data array + series: dataSeries; //the series object + seriesIndex: number; //the index of the series + pageX: number; + pageY: number; //the global screen coordinates of the point + } + + interface datapoints { + points: number[]; + pointsize: number; + format: datapointFormat[]; + } + + interface datapointFormat { + x?: boolean; + y?: boolean; + number: boolean; + required: boolean; + defaultValue?: number; + } + + interface point { + x: number; + y: number; + } + + interface offset { + left: number; + top: number; + } + + interface canvasPoint { + top: number; + left: number; + bottom?: number; + right?: number; + } + + interface axes { + xaxis: axis; + yaxis: axis; + x2axis?: axis; + y2axis?: axis; + } + + interface axis extends axisOptions { + options: axisOptions; + p2c(point: point):canvasPoint; + c2p(canvasPoint: canvasPoint):point; + } + + interface plugin { + init(options: plotOptions): any; + options?: any; + name?: string; + version?: string; + } + + interface plot { + highlight(series: dataSeries, datapoint: item): void; + unhighlight(): void; + unhighlight(series: dataSeries, datapoint: item): void; + setData(data: any): void; + setupGrid(): void; + draw(): void; + triggerRedrawOverlay(): void; + width(): number; + height(): number; + offset(): JQueryCoordinates; + pointOffset(point: point): offset; + resize(): void; + shutdown(): void; + getData(): dataSeries[]; + getAxes(): axes; + getXAxes(): axis[]; + getYAxes(): axis[]; + getPlaceholder(): JQuery; + getCanvas(): HTMLCanvasElement; + getPlotOffset(): canvasPoint; + getOptions(): plotOptions; + } + + interface plotStatic { + (placeholder: JQuery, data: dataSeries[], options?: plotOptions): plot; + (placeholder: JQuery, data: any[], options?: plotOptions): plot; + plugins: plugin[]; + } +} + +interface JQueryStatic { + plot: jquery.flot.plotStatic; +} diff --git a/hls-graph/html/ts/profile.tsx b/hls-graph/html/ts/profile.tsx new file mode 100644 index 0000000000..a721ff418f --- /dev/null +++ b/hls-graph/html/ts/profile.tsx @@ -0,0 +1,80 @@ + +function profileLoaded(profileRaw: ProfileRaw[], buildRaw: BuildRaw): void { + $(document.body).empty().append(profileRoot(unraw(profileRaw), unrawBuild(buildRaw))); +} + +function unraw(xs: ProfileRaw[]): Profile[] { + const 
ans = xs.map((x, i) => ({ + index: i, + name: x[0], + execution: x[1], + built: x[2], + changed: x[3], + visited: x[4], + depends: x.length > 5 ? x[5] : [], + rdepends: [], + traces: [] + } as Profile)); + for (const p of ans) + for (const ds of p.depends) + for (const d of ds) + ans[d].rdepends.push(p.index); + return ans; +} + +function unrawBuild(b: BuildRaw): Build { + return { dirtyKeys: b.length > 0 ? b[0] : null }; +} + +function profileRoot(profile: Profile[], build: Build): HTMLElement { + const [s, search] = createSearch(profile); + const t = createTabs( + [ ["Summary", () => reportSummary(profile, build)] + , ["Rules", () => reportRuleTable(profile, search)] + , ["Parallelizability", () => reportParallelism(profile)] + , ["Details", () => reportDetails(profile, search)] + // , ["Why rebuild", () => reportRebuild(profile, search)] + ]); + return + + + +
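// A minimal sketch of the decoding that unraw performs above (the file names and
// numbers below are hypothetical, not taken from a real profile):
//
//   const raw: ProfileRaw[] = [
//       ["Foo.hs", 0.5, 0, 0, 0, [],    []],   // no dependencies
//       ["Bar.hs", 1.2, 0, 0, 0, [[0]], []],   // depends on entry 0
//   ];
//   const ps = unraw(raw);
//   // ps[1].depends is [[0]], and the back-fill loop gives ps[0].rdepends = [1];
//   // traces are initialised to [] by unraw.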
+ + Shake profile report + + - generated at {generated} by hls-graph v{version} +
{s}
{t}
; +} + + +function createTabs(xs: Array<[string, () => HTMLElement]>): HTMLElement { + const bodies: Array< [HTMLElement, () => void] > = xs.map(x => { + const el =
; + const upd = lazy(() => $(el).append(x[1]())); + return pair(el, upd); + }); + let lbls = []; + const f = (i: int) => () => { + bodies[i][1](); + lbls.map((x, j) => $(x).toggleClass("active", i === j)); + bodies.map((x, j) => $(x[0]).toggle(i === j)); + $(window).trigger("resize"); + }; + lbls = xs.map((x, i) => {x[0]}); + f(0)(); + return + + + + +
+ + + + +
 {lbls} 
+
+ {bodies.map(fst)} +
; +} diff --git a/hls-graph/html/ts/reports/cmdplot.tsx b/hls-graph/html/ts/reports/cmdplot.tsx new file mode 100644 index 0000000000..0c5cd7b4da --- /dev/null +++ b/hls-graph/html/ts/reports/cmdplot.tsx @@ -0,0 +1,112 @@ + +function reportCmdPlot(profile: Profile[]): HTMLElement { + // first find the end point + const runs = findRuns(profile); + + if (runs.length === 0) { + return
+

No data found

+

+ The Shake database contains no rules which ran traced commands. +

+ You can populate this information by using {varLink("cmd")} or wrapping your IO actions in {varLink("traced")}. +

+
; + } + + const combo = ; + + const warning = ; + const plot =
; + const plotData: Prop = new Prop([]); + bindPlot(plot, plotData, { + legend: { show: true, position: "nw", sorted: "reverse" }, + series: { stack: true, lines: { fill: 1, lineWidth: 0 } }, + yaxis: { min: 0 }, + xaxis: { tickFormatter: showTime } + }); + + function setPlotData(runsIndex: int) { + const [run, end] = runs[runsIndex]; + const profileRun = profile.filter(p => p.built === run); + // Make sure we max(0,) every step in the process, in case one does parallelism of threads + const missing = profileRun.map(untraced).sum(); + $(warning).text(missing < 1 ? "" : "Warning: " + showTime(missing) + " of execution was not traced."); + const series = calcPlotData(end, profileRun, 100); + const res = []; + for (const s in series) + res.push({label: s, data: series[s].map((x, i) => pair(end * i / 100, x))}); + plotData.set(res); + } + setPlotData(0); + $(combo).change(() => setPlotData(combo.selectedIndex)); + + return + + + + + + + + + + +

Number of commands executing over time

{combo}
{plot}
Time since the start of building. {warning}
; +} + +// Find which runs had traced commands and when the last stopped, sort so most recent first +function findRuns(profile: Profile[]): Array<[timestamp, seconds]> { + const runs: MapInt = {}; + for (const p of profile) { + if (p.traces.length > 0) { + if (p.traces.length === 1 && p.traces[0].command === "") + continue; // the fake end command + const old = runs[p.built]; + const end = p.traces.last().stop; + runs[p.built] = old === undefined ? end : Math.max(old, end); + } + } + + const runsList: Array<[timestamp, seconds]> = []; + for (const i in runs) + runsList.push(pair(Number(i), runs[i])); + runsList.sort(compareFst); + return runsList; +} + +function calcPlotData(end: seconds, profile: Profile[], buckets: int): MapString { + const ans: MapString = {}; + for (const p of profile) { + for (const t of p.traces) { + let xs: number[]; + if (t.command in ans) + xs = ans[t.command]; + else { + xs = []; + for (let i = 0; i < buckets; i++) + xs.push(0); // fill with 1 more element, but the last bucket will always be 0 + ans[t.command] = xs; + } + + const start = t.start * buckets / end; + const stop = t.stop * buckets / end; + + if (Math.floor(start) === Math.floor(stop)) + xs[Math.floor(start)] += stop - start; + else { + for (let j = Math.ceil(start); j < Math.floor(stop); j++) + xs[j]++; + xs[Math.floor(start)] += Math.ceil(start) - start; + xs[Math.floor(stop)] += stop - Math.floor(stop); + } + } + } + return ans; +} diff --git a/hls-graph/html/ts/reports/cmdtable.tsx b/hls-graph/html/ts/reports/cmdtable.tsx new file mode 100644 index 0000000000..ef6eeda212 --- /dev/null +++ b/hls-graph/html/ts/reports/cmdtable.tsx @@ -0,0 +1,34 @@ + +function reportCmdTable(profile: Profile[], search: Prop): HTMLElement { + const columns: Column[] = + [ {field: "name", label: "Name", width: 200} + , {field: "count", label: "Count", width: 65, alignRight: true, show: showInt} + , {field: "total", label: "Total", width: 75, alignRight: true, show: showTime} + , {field: "average", label: "Average", width: 75, alignRight: true, show: showTime} + , {field: "max", label: "Max", width: 75, alignRight: true, show: showTime} + ]; + return newTable(columns, search.map(cmdData), "total", true); +} + +function cmdData(search: Search): object[] { + const res: MapString< {count: int, total: seconds, max: seconds} > = {}; + search.forEachProfile(p => { + for (const t of p.traces) { + const time = t.stop - t.start; + if (t.command === "") + continue; // do nothing + else if (!(t.command in res)) + res[t.command] = {count: 1, total: time, max: time}; + else { + const ans = res[t.command]; + ans.count++; + ans.total += time; + ans.max = Math.max(ans.max, time); + } + } + }); + const res2 = []; + for (const i in res) + res2.push({name: i, average: res[i].total / res[i].count, ...res[i]}); + return res2; +} diff --git a/hls-graph/html/ts/reports/details.tsx b/hls-graph/html/ts/reports/details.tsx new file mode 100644 index 0000000000..eb31bf6ee1 --- /dev/null +++ b/hls-graph/html/ts/reports/details.tsx @@ -0,0 +1,33 @@ + +function reportDetails(profile: Profile[], search: Prop): HTMLElement { + const result =
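// A rough worked example of the bucketing in calcPlotData above (the numbers are
// made up): with end = 10s and buckets = 100 each bucket spans 0.1s, so a command
// traced from 1.23s to 1.47s lands at positions 12.3..14.7 and contributes 0.7 to
// bucket 12, 1.0 to bucket 13 and 0.7 to bucket 14, i.e. the fraction of each
// bucket during which it was running.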
; + const self: Prop = new Prop(0); + search.event(xs => self.set(xs.mapProfile((p, _) => p.index).maximum())); + const f = (i: pindex) => self.set(i)}>{profile[i].name}; + self.event(i => { + const p = profile[i]; + const content =
    +
  • Name: {p.name}
  • +
  • Built: {showRun(p.built)}
  • +
  • Changed: {showRun(p.changed)}
  • +
  • Execution time:{showTime(p.execution)}
  • +
  • Traced commands: +
      + {p.traces.map(t =>
    1. {t.command} took {showTime(t.stop - t.start)}
    2. )} +
    +
  • +
  • Dependencies: +
      + {p.depends.map(ds =>
      • {ds.map(d =>
      • {f(d)}
      • )}
    1. )} +
    +
  • +
  • Things that depend on me: +
      + {p.rdepends.map(d =>
    • {f(d)}
    • )} +
    +
  • +
; + $(result).empty().append(content); + }); + return result; +} diff --git a/hls-graph/html/ts/reports/parallelism.tsx b/hls-graph/html/ts/reports/parallelism.tsx new file mode 100644 index 0000000000..459514fe4a --- /dev/null +++ b/hls-graph/html/ts/reports/parallelism.tsx @@ -0,0 +1,74 @@ + +function reportParallelism(profile: Profile[]): HTMLElement { + // now simulate for -j1 .. -j24 + const plotData: jquery.flot.dataSeries[] = + [ {label: "Realistic (based on current dependencies)", data: [], color: "#3131a7"} + , {label: "Ideal (if no dependencies and perfect speedup)", data: [], color: "green"} + , {label: "Gap", data: [], color: "orange"} + ]; + let threads1: seconds; + for (let threads = 1; threads <= 24; threads++) { + const taken = simulateThreads(profile, threads)[0]; + if (threads === 1) threads1 = taken; + plotData[0].data.push([threads, taken]); + plotData[1].data.push([threads, threads1 / threads]); + plotData[2].data.push([threads, Math.max(0, taken - (threads1 / threads))]); + } + + const plot =
; + bindPlot(plot, new Prop(plotData), { + xaxis: { tickDecimals: 0 }, + yaxis: { min: 0, tickFormatter: showTime } + }); + return + + + + + + + + + +

Time to build at different numbers of threads

{plot}
Number of threads available.
; +} + +// Simulate running N threads over the profile, return: +// [total time take, point at which each entry kicked off] +function simulateThreads(profile: Profile[], threads: int): [seconds, seconds[]] { + // How far are we through this simulation + let timestamp: seconds = 0; + + // Who is currently running, with the highest seconds FIRST + const running: Array<[pindex, seconds]> = []; + const started: seconds[] = []; + + // Things that are done + const ready: Profile[] = profile.filter(x => x.depends.length === 0); + const waiting: int[] = profile.map(x => x.depends.concatLength()) ; // number I am waiting on before I am done + + function runningWait(): void { + const [ind, time] = running.pop(); + timestamp = time; + for (const d of profile[ind].rdepends) { + waiting[d]--; + if (waiting[d] === 0) + ready.push(profile[d]); + } + } + + while (true) { + // Queue up as many people as we can + while (running.length < threads && ready.length > 0) { + const p = ready.pop(); + started[p.index] = timestamp; + running.insertSorted([p.index, timestamp + p.execution], compareSndRev); + } + if (running.length === 0) { + if (waiting.maximum(0) > 0) + throw new Error("Failed to run all tasks"); + return [timestamp, started]; + } + runningWait(); + } +} diff --git a/hls-graph/html/ts/reports/rebuild.tsx b/hls-graph/html/ts/reports/rebuild.tsx new file mode 100644 index 0000000000..5fdd83a1d1 --- /dev/null +++ b/hls-graph/html/ts/reports/rebuild.tsx @@ -0,0 +1,30 @@ + +function reportRebuild(profile: Profile[], search: Prop): HTMLElement { + const depth: int[] = []; + for (const p of profile) { + depth[p.index] = p.depends.flat().map(d => depth[d] + 1).maximum(0); + } + + const ind: pindex = search.get().mapProfile((p, _) => p.index).sortOn(i => -depth[i])[0]; + const p = profile[ind]; + + function f(p: Profile): HTMLElement[] { + const res = []; + while (p.depends.length !== 0) { + const ds = p.depends.flat().sortOn(i => -depth[i]); + res.push(
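// A small usage sketch for simulateThreads above (the variable names are illustrative):
//
//   const [makespan, startTimes] = simulateThreads(profile, 4);
//   // makespan   : estimated wall-clock seconds if up to 4 rules run at once
//   // startTimes : for each profile index, when that rule would have started
//
// The schedule is greedy (any ready rule is started as soon as a slot is free),
// so this is an estimate rather than an optimal parallel schedule.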
  • ); + p = profile[ds[0]]; + } + return res; + } + + return
    +

    Why did it rebuild?

    +

    + Rule {p.name + " " + (p.built === 0 ? "rebuilt in the last run" : "did not rebuild")} +

    +
      + {f(p)} +
    +
    ; +} diff --git a/hls-graph/html/ts/reports/ruletable.tsx b/hls-graph/html/ts/reports/ruletable.tsx new file mode 100644 index 0000000000..7f5d2646b1 --- /dev/null +++ b/hls-graph/html/ts/reports/ruletable.tsx @@ -0,0 +1,61 @@ + +function reportRuleTable(profile: Profile[], search: Prop): HTMLElement { + const [etimes, wtimes] = calcEWTimes(profile, 24); + const columns: Column[] = + [ {field: "name", label: "Name", width: 400} + , {field: "count", label: "Count", width: 65, alignRight: true, show: showInt} + , {field: "leaf", label: "Leaf", width: 60, alignRight: true} + , {field: "visited", label: "Visit", width: 50, alignRight: true} + , {field: "run", label: "Run", width: 50, alignRight: true} + , {field: "changed", label: "Change", width: 60, alignRight: true} + , {field: "time", label: "Time", width: 75, alignRight: true, show: showTime} + , {field: "etime", label: "ETime", width: 75, alignRight: true, show: showTime} + , {field: "wtime", label: "WTime", width: 75, alignRight: true, show: showTime} + ]; + return newTable(columns, search.map(s => ruleData(etimes, wtimes, s)), "time", true); +} + +// Calculate the exclusive time of each rule at some number of threads +function calcEWTimes(profile: Profile[], threads: int): [seconds[], seconds[]] { + const [_, started] = simulateThreads(profile, threads); + const starts = started.map((s, i) => pair(i, s)).sort(compareSnd); + const costs = starts.map(([ind, start], i) => { + // find out who else runs before I finish + const execution = profile[ind].execution; + const end = start + execution; + let overlap = 0; // how much time I am overlapped for + let exclusive = 0; // how much time I am the only runner + let finisher = start; // the first overlapping person to finish + + for (let j = i + 1; j < starts.length; j++) { + const [jInd, jStarts] = starts[j]; + if (jStarts > end) break; + overlap += Math.min(end - jStarts, profile[jInd].execution); + exclusive += Math.max(0, Math.min(jStarts, end) - finisher); + finisher = Math.max(finisher, jStarts + profile[jInd].execution); + } + exclusive += Math.max(0, end - finisher); + return triple(ind, execution === 0 ? 
0 : execution * (execution / (execution + overlap)), exclusive); + }); + const etimes: seconds[] = []; + const wtimes: seconds[] = []; + for (const [ind, etime, wtime] of costs) { + etimes[ind] = etime; + wtimes[ind] = wtime; + } + return [etimes, wtimes]; +} + +function ruleData(etimes: seconds[], wtimes: seconds[], search: Search): object[] { + return search.mapProfiles((ps, name) => ({ + name, + count: ps.length, + leaf: ps.every(p => p.depends.length === 0), + run: ps.map(p => p.built).minimum(), + visited: ps.map(p => p.visited).minimum(), + changed: ps.some(p => p.built === p.changed), + time: ps.map(p => p.execution).sum(), + etime: ps.map(p => etimes[p.index]).sum(), + wtime: ps.map(p => wtimes[p.index]).sum(), + })); +} diff --git a/hls-graph/html/ts/reports/summary.tsx b/hls-graph/html/ts/reports/summary.tsx new file mode 100644 index 0000000000..bc888f5111 --- /dev/null +++ b/hls-graph/html/ts/reports/summary.tsx @@ -0,0 +1,155 @@ + +function reportSummary(profile: Profile[], build: Build): HTMLElement { + let countLast: int = 0; // number of rules run in the last run + let visitedLast: int = 0; // number of rules visited in the last run + let highestRun: timestamp = 0; // highest run you have seen (add 1 to get the count of runs) + let sumExecution: seconds = 0; // build time in total + let sumExecutionLast: seconds = 0; // build time in total + let countTrace: int = -1; let countTraceLast: int = -1; // traced commands run + // start both are -1 because the end command will have run in the previous step + let maxTraceStopLast: seconds = 0; // time the last traced command stopped + + for (const p of profile) { + sumExecution += p.execution; + highestRun = Math.max(highestRun, p.changed); // changed is always greater or equal to built + countTrace += p.traces.length; + if (p.built === 0) { + sumExecutionLast += p.execution; + countLast++; + countTraceLast += p.traces.length; + if (p.traces.length > 0) + maxTraceStopLast = Math.max(maxTraceStopLast, p.traces.last().stop); + } + if (p.visited === 0) { + visitedLast++; + } + } + + return
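// A rough reading of the two derived columns computed by calcEWTimes above
// (the numbers here are invented): for a rule that executed for 10s while other
// rules overlapped it for a combined 30s,
//   etime = 10 * (10 / (10 + 30)) = 2.5s   // execution shared out over the overlap
//   wtime = the part of those 10s during which it was the only rule running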
    +

    Totals

    +
      +
    • Runs: {showInt(highestRun + 1)} total number of runs so far.
    • +
    • Rules: {showInt(profile.length)} ({showInt(countLast)} in last run) number of defined build rules.
    • +
    +

    Performance

    +
      +
    • Build time: {showTime(sumExecution)} how long a complete build would take single threaded.
    • +
    • Last build time: {showTime(maxTraceStopLast)} how long the last build took.
    • +
    • Parallelism: {(maxTraceStopLast === 0 ? 0 : sumExecutionLast / maxTraceStopLast).toFixed(2)} average number of commands executing simultaneously in the last build.
    • +
    • Speculative critical path: {showTime(speculativeCriticalPath(profile))} how long it would take on infinite CPUs.
    • +
    • Precise critical path: {showTime(preciseCriticalPath(profile))} the critical path without speculative execution.
    • +
    +

    This run

    +
      +
    • Rules built: {showInt(countLast)} total number of rules built in this run.
    • +
    • Rules visited: {showInt(visitedLast - countLast)} total number of rules looked up from the values store in this run.
    • +
    • Dirty set: {renderDirtySet(build, profile)}
    • +
    +
    ; +} + +function renderDirtySet(build: Build, profile: Profile[]) { + if(build.dirtyKeys === null) { + return "ALL"; + } + else { + return
      + {build.dirtyKeys.map( d => {return
    • {profile[d].name}
    • }) + } +
    ; + } +} + +function speculativeCriticalPath(profile: Profile[]): seconds { + const criticalPath: seconds[] = []; // the critical path to any element + let maxCriticalPath: seconds = 0; + for (const p of profile) { + let cost = 0; + for (const ds of p.depends) + for (const d of ds) + cost = Math.max(cost, criticalPath[d]); + cost += p.execution; + maxCriticalPath = Math.max(cost, maxCriticalPath); + criticalPath[p.index] = cost; + } + return maxCriticalPath; +} + +/* +Calculating a precise critical path, taking into account the deep dependeny structure, is non-obvious. +Dependencies have the type [{X}], e.g: + + X = [{a,b},{c,d}] + +That is r builds a and b, then after those both complete (assuming they don't change), it builds c and d, +then it is finished. Importantly, r doesn't start building c/d until after a and b have finished. This +detail extends the critical path. + +To calculate the precise critical path, we simulate with the notion of demand and waiting. +*/ +function preciseCriticalPath(profile: Profile[]): seconds { + const waiting: int[] = profile.map(x => x.depends.concatLength()) ; // number I am waiting on before I am done + const demanded: boolean[] = []; // I have been demanded by someone + const oncomplete: Array<() => void> = []; // Completion functions + const complete: boolean[] = []; // Who is complete already + const running: Array<[pindex, seconds]> = []; + let timestamp: seconds = 0; + + // demand dependency set N of a rule + function demandN(p: Profile, round: int): void { + for (; round < p.depends.length; round++) { + let todo = p.depends[round].length; // Number before we continue + const step = () => { + todo--; + if (todo === 0) + demandN(p, round + 1); + }; + for (const d of p.depends[round]) { + if (complete[d]) + todo--; + else { + const old = oncomplete[d]; + oncomplete[d] = !old ? step : () => { old(); step(); }; + demand(profile[d]); + } + } + if (todo !== 0) break; + // todo === 0, so continue (equivalent to calling step but tail recursive) + } + } + + // demand a particular rule + function demand(p: Profile): void { + if (demanded[p.index]) return; + demanded[p.index] = true; + if (waiting[p.index] === 0) + running.insertSorted([p.index, timestamp + p.execution], compareSndRev); + else + demandN(p, 0); + } + + // We don't know the targets we ask for, so we approximate by saying the ones which nothing depends on + for (const p of profile) { + if (p.rdepends.length === 0) + demand(p); + } + + while (running.length > 0) { + const [ind, time] = running.pop(); + timestamp = time; + complete[ind] = true; + if (oncomplete[ind]) { + oncomplete[ind](); + delete oncomplete[ind]; + } + for (const d of profile[ind].rdepends) { + waiting[d]--; + if (waiting[d] === 0 && demanded[d]) + running.insertSorted([d, timestamp + profile[d].execution], compareSndRev); + } + } + for (let i = 0; i < profile.length; i++) + if (!complete[i]) + throw new Error("Failed to run all tasks"); + return timestamp; +} diff --git a/hls-graph/html/ts/search.tsx b/hls-graph/html/ts/search.tsx new file mode 100644 index 0000000000..ca0e7ffb4a --- /dev/null +++ b/hls-graph/html/ts/search.tsx @@ -0,0 +1,139 @@ + +// A mapping from names (rule names or those matched from rule parts) +// to the indicies in profiles. 
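// A worked comparison of the two critical-path figures above, assuming every rule
// takes 1s and rule r has depends = [{a,b},{c,d}] as in the comment:
// speculativeCriticalPath lets c and d start immediately, so r's path is 1s + 1s = 2s;
// preciseCriticalPath only demands c and d after a and b have finished, so the path
// is 1s (a,b) + 1s (c,d) + 1s (r) = 3s, closer to what a real build would see.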
+class Search { + private profile: Profile[]; + private mapping: MapString; + + constructor(profile: Profile[], mapping?: MapString) { + this.profile = profile; + if (mapping !== undefined) + this.mapping = mapping; + else { + this.mapping = {}; + for (const p of profile) + this.mapping[p.name] = [p.index]; + } + } + public forEachProfiles(f: (ps: Profile[], group: string) => void): void { + for (const s in this.mapping) + f(this.mapping[s].map(i => this.profile[i]), s); + } + public forEachProfile(f: (p: Profile, group: string) => void): void { + this.forEachProfiles((ps, group) => ps.forEach(p => f(p, group))); + } + public mapProfiles(f: (ps: Profile[], group: string) => A): A[] { + const res: A[] = []; + this.forEachProfiles((ps, group) => res.push(f(ps, group))); + return res; + } + public mapProfile(f: (p: Profile, group: string) => A): A[] { + const res: A[] = []; + this.forEachProfile((p, group) => res.push(f(p, group))); + return res; + } +} + + +function createSearch(profile: Profile[]): [HTMLElement, Prop] { + const caption =
    Found {profile.length} entries, not filtered or grouped.
    ; + const input = ; + const res = new Prop(new Search(profile)); + $(input).on("change keyup paste", () => { + const s: string = $(input).val(); + if (s === "") { + res.set(new Search(profile)); + $(caption).text("Found " + profile.length + " entries, not filtered or grouped."); + } else if (s.indexOf("(") === -1) { + const mapping = {}; + let found = 0 ; + for (const p of profile) { + if (p.name.indexOf(s) !== -1) { + found++; + mapping[p.name] = [p.index]; + } + } + res.set(new Search(profile, mapping)); + $(caption).text("Substring filtered to " + found + " / " + profile.length + " entries, not grouped."); + } else { + let f; + try { + f = new Function("return " + s); + } catch (e) { + $(caption).text("Error compiling function, " + e); + return; + } + const mapping = {}; + let groups = 0; + let found = 0; + environmentAll = profile; + for (const p of profile) { + environmentThis = p; + environmentGroup = []; + let bool: boolean; + try { + bool = f(); + } catch (e) { + $(caption).text("Error running function, " + e); + return; + } + if (bool) { + found++; + const name = environmentGroup.length === 0 ? p.name : environmentGroup.join(" "); + if (name in mapping) + mapping[name].push(p.index); + else { + groups++; + mapping[name] = [p.index]; + } + } + } + res.set(new Search(profile, mapping)); + $(caption).text("Function filtered to " + found + " / " + profile.length + " entries, " + + (groups === found ? "not grouped." : groups + " groups.")); + } + }); + + const body = + + + + + + + + +
    {input}{searchHelp(input)}
    {caption}
    ; + return [body, res]; +} + +function searchHelp(input: HTMLElement): HTMLElement { + const examples: Array<[string, string]> = + [ ["Only the last run", "run(0)"] + , ["Only the last visited", "visited(0)"] + , ["Named 'Main'", "named(\"Main\")"] + , ["Group by file extension", "named(/(\\.[_0-9a-z]+)$/)"] + , ["No dependencies (an input)", "leaf()"] + , ["Didn't change when it last rebuilt", "unchanged()"] + , ["Ran 'gcc'", "command(\"gcc\")"] + ]; + const f = (code: string) => () => { + $(input).val((i, x) => x + (x === "" ? "" : " && ") + code); + $(input).trigger("change"); + }; + const dropdown =
    ; + const arrow_down = ; + const arrow_up = ; + const show_inner = () => { $(dropdown).toggle(); $(arrow_up).toggle(); $(arrow_down).toggle(); }; + return
    + {dropdown} +
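// How the search box above is interpreted, as far as this file shows: a plain string
// is a substring match on rule names, while anything containing "(" is compiled via
// new Function("return " + s) and run once per profile entry, e.g. typing
//
//   named("Main") && run(0)
//
// Helpers such as named, run and leaf are assumed to be defined elsewhere (they are
// not in this file); the loop exposes the current entry as environmentThis, and any
// labels pushed onto environmentGroup become the grouping key.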
    ; +} diff --git a/hls-graph/html/ts/shake-progress.ts b/hls-graph/html/ts/shake-progress.ts new file mode 100644 index 0000000000..db1a9741b6 --- /dev/null +++ b/hls-graph/html/ts/shake-progress.ts @@ -0,0 +1,28 @@ +/* tslint:disable */ +"use strict"; + +function initProgress() { + $(function () { + $(".version").html("Generated by Shake " + version + "."); + $("#output").html(""); + for (const x of progress) { + var actual: [number, number][] = []; + var ideal: [number, number][] = []; + // Start at t = 5 seconds, since the early progress jumps a lot + for (var t = 5; t < x.values.length; t++) { + var y = x.values[t]; + actual.push([y.idealSecs, y.actualSecs]); + ideal.push([y.idealSecs, y.idealSecs]); + } + var ys = [{ data: ideal, color: "gray" }, { label: x.name, data: actual, color: "red" }]; + var div = $("
    "); + $("#output").append(div); + $.plot(div, ys, { + xaxis: { + transform: function (v) { return -v; }, + inverseTransform: function (v) { return -v; } + } + }); + } + }) +} diff --git a/hls-graph/html/ts/tsconfig.json b/hls-graph/html/ts/tsconfig.json new file mode 100644 index 0000000000..b6ec148dda --- /dev/null +++ b/hls-graph/html/ts/tsconfig.json @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "target": "esnext", + "outFile": "../shake.js", + "newLine": "lf", + "jsx": "react", + "alwaysStrict": true + } +} diff --git a/hls-graph/html/ts/tslint.json b/hls-graph/html/ts/tslint.json new file mode 100644 index 0000000000..abe4034b9f --- /dev/null +++ b/hls-graph/html/ts/tslint.json @@ -0,0 +1,19 @@ +{ + "defaultSeverity": "error", + "extends": [ + "tslint:recommended" + ], + "rules": { + "max-line-length": false, + "no-consecutive-blank-lines": [true, 2], + "variable-name": false, + "curly": false, + "trailing-comma": [true, {"multiline": "never", "singleline": "never"}], + "interface-name": false, + "interface-over-type-literal": false, + "no-shadowed-variable": false, + "arrow-parens": [true, "ban-single-arg-parens"], + "object-literal-sort-keys": [false], + "forin": false + } +} diff --git a/hls-graph/html/ts/types.ts b/hls-graph/html/ts/types.ts new file mode 100644 index 0000000000..7091a0a18c --- /dev/null +++ b/hls-graph/html/ts/types.ts @@ -0,0 +1,94 @@ +// Stuff that Shake generates and injects in + +// The version of Shake +declare const version: string; +declare const generated: string; + +///////////////////////////////////////////////////////////////////// +// PROFILE DATA + +type timestamp = int; + +interface Trace { + command: string; + start: seconds; + stop: seconds; +} + +type pindex = int; // an index into the list of profiles + +interface Profile { + index: pindex; // My index in the list of profiles + name: string; // Name of the thing I built + execution: seconds; // Seconds I took to execute + built: timestamp; // Timestamp at which I was recomputed + visited: timestamp; // Timestamp at which I was last visited + changed: timestamp; // Timestamp at which I last changed + depends: pindex[][]; // What I depend on (always lower than my index) + rdepends: pindex[]; // What depends on me + traces: Trace[]; // List of traces +} + +function untraced(p: Profile): seconds { + return Math.max(0, p.execution - p.traces.map(t => t.stop - t.start).sum()); +} + +interface Build { + dirtyKeys: pindex[]; +} + +type TraceRaw = + [ string + , seconds + , seconds + ]; + +type ProfileRaw = + [ string + , seconds + , timestamp + , timestamp + , timestamp + , pindex[][] // Optional + , TraceRaw[] // Optional + ]; + +type BuildRaw = + [ pindex[] // Optional + ]; + +///////////////////////////////////////////////////////////////////// +// PROGRESS DATA + +declare const progress: Array<{name: string, values: Progress[]}>; + +interface Progress { + idealSecs: number; + idealPerc: number; + actualSecs: number; + actualPerc: number; +} + +///////////////////////////////////////////////////////////////////// +// BASIC UI TOOLKIT + +class Prop { + private val: A; + private callback: ((val: A) => void); + constructor(val: A) { this.val = val; this.callback = () => { return; }; } + public get(): A { return this.val; } + public set(val: A): void { + this.val = val; + this.callback(val); + } + public event(next: (val: A) => void): void { + const old = this.callback; + this.callback = val => { old(val); next(val); }; + next(this.val); + } + public map(f: (val: A) => B): Prop { + const res = new 
Prop(f(this.get())); + this.event(a => res.set(f(a))); + return res; + } +} diff --git a/hls-graph/html/ts/util.ts b/hls-graph/html/ts/util.ts new file mode 100644 index 0000000000..d4ff5d332b --- /dev/null +++ b/hls-graph/html/ts/util.ts @@ -0,0 +1,235 @@ + +type key = string | number; + +type seconds = number; + +type color = string; + +type MapString = { [key: string]: T }; +type MapNumber = { [key: number]: T }; + +type int = number; +type MapInt = MapNumber; + + +///////////////////////////////////////////////////////////////////// +// JQUERY EXTENSIONS + +// tslint:disable-next-line: interface-name +interface JQuery { + enable(x: boolean): JQuery; +} + +jQuery.fn.enable = function(x: boolean) { + // Set the values to enabled/disabled + return this.each(function() { + if (x) + $(this).removeAttr("disabled"); + else + $(this).attr("disabled", "disabled"); + }); +}; + + +///////////////////////////////////////////////////////////////////// +// BROWSER HELPER METHODS + +// Given "?foo=bar&baz=1" returns {foo:"bar",baz:"1"} +function uriQueryParameters(s: string): MapString { + // From https://stackoverflow.com/questions/901115/get-querystring-values-with-jquery/3867610#3867610 + const params: MapString = {}; + const a = /\+/g; // Regex for replacing addition symbol with a space + const r = /([^&=]+)=?([^&]*)/g; + const d = (x: string) => decodeURIComponent(x.replace(a, " ")); + const q = s.substring(1); + + while (true) { + const e = r.exec(q); + if (!e) break; + params[d(e[1])] = d(e[2]); + } + return params; +} + + +///////////////////////////////////////////////////////////////////// +// STRING FORMATTING + +function showTime(x: seconds): string { + function digits(x: seconds) {const s = String(x); return s.length === 1 ? "0" + s : s; } + + if (x >= 3600) { + x = Math.round(x / 60); + return Math.floor(x / 60) + "h" + digits(x % 60) + "m"; + } else if (x >= 60) { + x = Math.round(x); + return Math.floor(x / 60) + "m" + digits(x % 60) + "s"; + } else + return x.toFixed(2) + "s"; +} + +function showPerc(x: number): string { + return (x * 100).toFixed(2) + "%"; +} + +function showInt(x: int): string { + // From https://stackoverflow.com/questions/2901102/how-to-print-a-number-with-commas-as-thousands-separators-in-javascript + // Show, with commas + return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +} + +function showRun(run: timestamp): string { + return run === 0 ? "Latest run" : run + " run" + plural(run) + " ago"; +} + +function plural(n: int, not1 = "s", is1 = ""): string { + return n === 1 ? is1 : not1; +} + + +///////////////////////////////////////////////////////////////////// +// MISC + +function compareFst(a: [number, A], b: [number, A]): number { + return a[0] - b[0]; +} + +function compareSnd(a: [A, number], b: [A, number]): number { + return a[1] - b[1]; +} + +function compareSndRev(a: [A, number], b: [A, number]): number { + return b[1] - a[1]; +} + +function pair(a: A, b: B): [A, B] { + return [a, b]; +} + +function triple(a: A, b: B, c: C): [A, B, C] { + return [a, b, c]; +} + +function fst([x, _]: [A, B]): A { + return x; +} + +function snd([_, x]: [A, B]): B { + return x; +} + +function execRegExp(r: string | RegExp, s: string): string[] { + if (typeof r === "string") + return s.indexOf(r) === -1 ? 
null : []; + else + return r.exec(s); +} + +function cache(key: (k: K) => string, op: (k: K) => V): (k: K) => V { + const store: MapString = {}; + return k => { + const s = key(k); + if (!(s in store)) + store[s] = op(k); + return store[s]; + }; +} + +function lazy(thunk: () => V): () => V { + let store: V = null; + let done = false; + return () => { + if (!done) { + store = thunk(); + done = true; + } + return store; + }; +} + +interface Array { + insertSorted(x: T, compare: (a: T, b: T) => number): T[]; + concatLength(): int; + sortOn(f: (x: T) => number): T[]; + last(): T; + sum(): number; + maximum(def?: number): number; + minimum(def?: number): number; +} + +Array.prototype.sum = function(this: number[]): number { + let res = 0; + for (const x of this) + res += x; + return res; +}; + +Array.prototype.insertSorted = function(this: T[], x: T, compare: (a: T, b: T) => number): T[] { + let start = 0; + let stop = this.length - 1; + let middle = 0; + while (start <= stop) { + middle = Math.floor((start + stop) / 2); + if (compare(this[middle], x) > 0) + stop = middle - 1; + else + start = middle + 1; + } + this.splice(start, 0, x); + return this; +}; + +Array.prototype.concatLength = function(this: A[][]): int { + let res = 0; + for (const x of this) + res += x.length; + return res; +}; + +Array.prototype.sortOn = function(this: T[], f: (x: T) => number): T[] { + return this.map(x => pair(f(x), x)).sort(compareFst).map(snd); +}; + +Array.prototype.last = function(this: T[]): T { + return this[this.length - 1]; +}; + +Array.prototype.maximum = function(this: number[], def?: number): number { + if (this.length === 0) return def; + let res: number = this[0]; + for (let i = 1; i < this.length; i++) + res = Math.max(res, this[i]); + return res; +}; + +Array.prototype.minimum = function(this: number[], def?: number): number { + if (this.length === 0) return def; + let res: number = this[0]; + for (let i = 1; i < this.length; i++) + res = Math.min(res, this[i]); + return res; +}; + + +// Use JSX with el instead of React.createElement +// Originally from https://gist.github.com/sergiodxa/a493c98b7884128081bb9a281952ef33 + +// our element factory +function createElement(type: string, props?: MapString, ...children: any[]) { + const element = document.createElement(type); + + for (const name in props || {}) { + if (name.substr(0, 2) === "on") + element.addEventListener(name.substr(2), props[name]); + else + element.setAttribute(name, props[name]); + } + for (const child of children.flat(10)) { + const c = typeof child === "object" ? 
child : document.createTextNode(child.toString()); + element.appendChild(c); + } + return element; +} + +// How .tsx gets desugared +const React = {createElement}; diff --git a/hls-graph/src/Control/Concurrent/STM/Stats.hs b/hls-graph/src/Control/Concurrent/STM/Stats.hs new file mode 100644 index 0000000000..a6e7d0459b --- /dev/null +++ b/hls-graph/src/Control/Concurrent/STM/Stats.hs @@ -0,0 +1,182 @@ +{-# LANGUAGE CPP #-} +#ifdef STM_STATS +{-# LANGUAGE RecordWildCards #-} +#endif +module Control.Concurrent.STM.Stats + ( atomicallyNamed + , atomically + , getSTMStats + , dumpSTMStats + , module Control.Concurrent.STM + ) where + +import Control.Concurrent.STM hiding (atomically) +import qualified Control.Concurrent.STM as STM +import Data.Map (Map) +#ifdef STM_STATS +import Control.Exception (BlockedIndefinitelyOnSTM, Exception, + catch, throwIO) +import Control.Monad +import Data.IORef +import qualified Data.Map.Strict as M +import Data.Time (getCurrentTime) +import GHC.Conc (unsafeIOToSTM) +import System.IO +import System.IO.Unsafe +import Text.Printf +#endif + +atomicallyNamed :: String -> STM a -> IO a +atomically :: STM a -> IO a +dumpSTMStats :: IO () +getSTMStats :: IO (Map String (Int,Int)) + +#ifndef STM_STATS + +getSTMStats = pure mempty +atomicallyNamed _ = atomically +dumpSTMStats = pure () +atomically = STM.atomically + +#else +-- adapted from the STM.Stats package + +atomicallyNamed = trackNamedSTM +atomically = trackSTM + +-- | Global state, seems to be unavoidable here. +globalRetryCountMap :: IORef (Map String (Int,Int)) +globalRetryCountMap = unsafePerformIO (newIORef M.empty) +{-# NOINLINE globalRetryCountMap #-} + + +-- | For the most general transaction tracking function, 'trackSTMConf', all +-- settings can be configured using a 'TrackSTMConf' value. +data TrackSTMConf = TrackSTMConf + { tryThreshold :: Maybe Int + -- ^ If the number of retries of one transaction run reaches this + -- count, a warning is issued at runtime. If set to @Nothing@, disables the warnings completely. + , globalThreshold :: Maybe Int + -- ^ If the total number of retries of one named transaction reaches + -- this count, a warning is issued. If set to @Nothing@, disables the + -- warnings completely. + , extendException :: Bool + -- ^ If this is set, a 'BlockedIndefinitelyOnSTM' exception is replaced + -- by a 'BlockedIndefinitelyOnNamedSTM' exception, carrying the name of + -- the exception. + , warnFunction :: String -> IO () + -- ^ Function to call when a warning is to be emitted. + , warnInSTMFunction :: String -> IO () + -- ^ Function to call when a warning is to be emitted during an STM + -- transaction. This is possibly dangerous, see the documentation to + -- 'unsafeIOToSTM', but can be useful to detect transactions that keep + -- retrying forever. + } + +-- | The default settings are: +-- +-- > defaultTrackSTMConf = TrackSTMConf +-- > { tryThreshold = Just 10 +-- > , globalThreshold = Just 3000 +-- > , exception = True +-- > , warnFunction = hPutStrLn stderr +-- > , warnInSTMFunction = \_ -> return () +-- > } +defaultTrackSTMConf :: TrackSTMConf +defaultTrackSTMConf = TrackSTMConf + { tryThreshold = Just 10 + , globalThreshold = Just 3000 + , extendException = True + , warnFunction = hPutStrLn stderr + , warnInSTMFunction = \_ -> return () + } + +-- | A drop-in replacement for 'atomically'. The statistics will list this, and +-- all other unnamed transactions, as \"@_anonymous_@\" and +-- 'BlockedIndefinitelyOnSTM' exceptions will not be replaced. 
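+--
+-- A minimal usage sketch (illustrative only; 'demoStats' and 'counter' are
+-- not part of this module): transactions wrapped in 'atomicallyNamed' are
+-- counted under their label when the code is built with the STM_STATS CPP
+-- flag, and fall back to plain 'atomically' otherwise.
+--
+-- > demoStats :: IO ()
+-- > demoStats = do
+-- >     counter <- newTVarIO (0 :: Int)
+-- >     atomicallyNamed "demo/increment" $ modifyTVar' counter (+ 1)
+-- >     stats <- getSTMStats          -- Map of name -> (commits, retries)
+-- >     print stats
+-- >     dumpSTMStats                  -- formatted table on stderr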
+-- See below for variants that give more control over the statistics and +-- generated warnings. +trackSTM :: STM a -> IO a +trackSTM = trackSTMConf defaultTrackSTMConf { extendException = False } "_anonymous_" + +-- | Run 'atomically' and collect the retry statistics under the given name and using the default configuration, 'defaultTrackSTMConf'. +trackNamedSTM :: String -> STM a -> IO a +trackNamedSTM = trackSTMConf defaultTrackSTMConf + +-- | Run 'atomically' and collect the retry statistics under the given name, +-- while issuing warnings when the configured thresholds are exceeded. +trackSTMConf :: TrackSTMConf -> String -> STM a -> IO a +trackSTMConf (TrackSTMConf {..}) name txm = do + counter <- newIORef 0 + let wrappedTx = + do unsafeIOToSTM $ do + i <- atomicModifyIORef' counter incCounter + when (warnPred i) $ + warnInSTMFunction $ msgPrefix ++ " reached try count of " ++ show i + txm + res <- if extendException + then STM.atomically wrappedTx + `catch` (\(_::BlockedIndefinitelyOnSTM) -> + throwIO (BlockedIndefinitelyOnNamedSTM name)) + else STM.atomically wrappedTx + i <- readIORef counter + doMB tryThreshold $ \threshold -> + when (i > threshold) $ + warnFunction $ msgPrefix ++ " finished after " ++ show (i-1) ++ " retries" + incGlobalRetryCount (i - 1) + return res + where + doMB Nothing _ = return () + doMB (Just x) m = m x + incCounter i = let j = i + 1 in (j, j) + warnPred j = case tryThreshold of + Nothing -> False + Just n -> j >= 2*n && (j >= 4 * n || j `mod` (2 * n) == 0) + msgPrefix = "STM transaction " ++ name + incGlobalRetryCount i = do + (k,k') <- atomicModifyIORef' globalRetryCountMap $ \m -> + let (oldVal, m') = M.insertLookupWithKey + (\_ (a1,b1) (a2,b2) -> ((,) $! a1+a2) $! b1+b2) + name + (1,i) + m + in (m', let j = maybe 0 snd oldVal in (j,j+i)) + doMB globalThreshold $ \globalRetryThreshold -> + when (k `div` globalRetryThreshold /= k' `div` globalRetryThreshold) $ + warnFunction $ msgPrefix ++ " reached global retry count of " ++ show k' + +-- | If 'extendException' is set (which is the case with 'trackNamedSTM'), an +-- occurrence of 'BlockedIndefinitelyOnSTM' is replaced by +-- 'BlockedIndefinitelyOnNamedSTM', carrying the name of the transaction and +-- thus giving more helpful error messages. +newtype BlockedIndefinitelyOnNamedSTM = BlockedIndefinitelyOnNamedSTM String + +instance Show BlockedIndefinitelyOnNamedSTM where + showsPrec _ (BlockedIndefinitelyOnNamedSTM name) = + showString $ "thread blocked indefinitely in STM transaction" ++ name + +instance Exception BlockedIndefinitelyOnNamedSTM + + + +-- | Fetches the current transaction statistics data. +-- +-- The map maps transaction names to counts of transaction commits and +-- transaction retries. +getSTMStats = readIORef globalRetryCountMap + +-- | Dumps the current transaction statistics data to 'System.IO.stderr'. 
+dumpSTMStats = do + stats <- getSTMStats + time <- show <$> getCurrentTime + hPutStrLn stderr $ "STM transaction statistics (" ++ time ++ "):" + sequence_ $ + hPrintf stderr "%-22s %10s %10s %10s\n" "Transaction" "Commits" "Retries" "Ratio" : + [ hPrintf stderr "%-22s %10d %10d %10.2f\n" name commits retries ratio + | (name,(commits,retries)) <- M.toList stats + , commits > 0 -- safeguard + , let ratio = fromIntegral retries / fromIntegral commits :: Double + ] + + +#endif diff --git a/hls-graph/src/Development/IDE/Graph.hs b/hls-graph/src/Development/IDE/Graph.hs new file mode 100644 index 0000000000..81ad3b3dfd --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE PatternSynonyms #-} +module Development.IDE.Graph( + shakeOptions, + Rules, + Action, action, + pattern Key, + newKey, renderKey, + actionFinally, actionBracket, actionCatch, actionFork, + -- * Configuration + ShakeOptions(shakeAllowRedefineRules, shakeExtra), + getShakeExtra, getShakeExtraRules, newShakeExtra, + -- * Explicit parallelism + parallel, + -- * Oracle rules + ShakeValue, RuleResult, + -- * Special rules + alwaysRerun, + -- * Actions for inspecting the keys in the database + getDirtySet, + getKeysAndVisitedAge, + module Development.IDE.Graph.KeyMap, + module Development.IDE.Graph.KeySet, + ) where + +import Development.IDE.Graph.Database +import Development.IDE.Graph.Internal.Action +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Options +import Development.IDE.Graph.Internal.Rules +import Development.IDE.Graph.Internal.Types +import Development.IDE.Graph.KeyMap +import Development.IDE.Graph.KeySet diff --git a/hls-graph/src/Development/IDE/Graph/Classes.hs b/hls-graph/src/Development/IDE/Graph/Classes.hs new file mode 100644 index 0000000000..ff9a37c5eb --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Classes.hs @@ -0,0 +1,8 @@ + +module Development.IDE.Graph.Classes( + Show(..), Typeable, Eq(..), Hashable(..), NFData(..) 
+ ) where + +import Control.DeepSeq +import Data.Hashable +import Data.Typeable diff --git a/hls-graph/src/Development/IDE/Graph/Database.hs b/hls-graph/src/Development/IDE/Graph/Database.hs new file mode 100644 index 0000000000..bd8601cd16 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Database.hs @@ -0,0 +1,85 @@ +module Development.IDE.Graph.Database( + ShakeDatabase, + ShakeValue, + shakeNewDatabase, + shakeRunDatabase, + shakeRunDatabaseForKeys, + shakeProfileDatabase, + shakeGetBuildStep, + shakeGetDatabaseKeys, + shakeGetDirtySet, + shakeGetCleanKeys + ,shakeGetBuildEdges) where +import Control.Concurrent.STM.Stats (readTVarIO) +import Data.Dynamic +import Data.Maybe +import Development.IDE.Graph.Classes () +import Development.IDE.Graph.Internal.Action +import Development.IDE.Graph.Internal.Database +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Options +import Development.IDE.Graph.Internal.Profile (writeProfile) +import Development.IDE.Graph.Internal.Rules +import Development.IDE.Graph.Internal.Types + + +-- Placeholder to be the 'extra' if the user doesn't set it +data NonExportedType = NonExportedType + +shakeNewDatabase :: ShakeOptions -> Rules () -> IO ShakeDatabase +shakeNewDatabase opts rules = do + let extra = fromMaybe (toDyn NonExportedType) $ shakeExtra opts + (theRules, actions) <- runRules extra rules + db <- newDatabase extra theRules + pure $ ShakeDatabase (length actions) actions db + +shakeRunDatabase :: ShakeDatabase -> [Action a] -> IO [a] +shakeRunDatabase = shakeRunDatabaseForKeys Nothing + +-- | Returns the set of dirty keys annotated with their age (in # of builds) +shakeGetDirtySet :: ShakeDatabase -> IO [(Key, Int)] +shakeGetDirtySet (ShakeDatabase _ _ db) = + Development.IDE.Graph.Internal.Database.getDirtySet db + +-- | Returns the build number +shakeGetBuildStep :: ShakeDatabase -> IO Int +shakeGetBuildStep (ShakeDatabase _ _ db) = do + Step s <- readTVarIO $ databaseStep db + return s + +-- Only valid if we never pull on the results, which we don't +unvoid :: Functor m => m () -> m a +unvoid = fmap undefined + +-- | Assumes that the database is not running a build +shakeRunDatabaseForKeys + :: Maybe [Key] + -- ^ Set of keys changed since last run. 'Nothing' means everything has changed + -> ShakeDatabase + -> [Action a] + -> IO [a] +shakeRunDatabaseForKeys keysChanged (ShakeDatabase lenAs1 as1 db) as2 = do + incDatabase db keysChanged + fmap (drop lenAs1) $ runActions db $ map unvoid as1 ++ as2 + +-- | Given a 'ShakeDatabase', write an HTML profile to the given file about the latest run. +shakeProfileDatabase :: ShakeDatabase -> FilePath -> IO () +shakeProfileDatabase (ShakeDatabase _ _ s) file = writeProfile file s + +-- | Returns the clean keys in the database +shakeGetCleanKeys :: ShakeDatabase -> IO [(Key, Result )] +shakeGetCleanKeys (ShakeDatabase _ _ db) = do + keys <- getDatabaseValues db + return [ (k,res) | (k, Clean res) <- keys] + +-- | Returns the total count of edges in the build graph +shakeGetBuildEdges :: ShakeDatabase -> IO Int +shakeGetBuildEdges (ShakeDatabase _ _ db) = do + keys <- getDatabaseValues db + let ress = mapMaybe (getResult . snd) keys + return $ sum $ map (lengthKeySet . getResultDepsDefault mempty . 
resultDeps) ress + +-- | Returns an approximation of the database keys, +-- annotated with how long ago (in # builds) they were visited +shakeGetDatabaseKeys :: ShakeDatabase -> IO [(Key, Int)] +shakeGetDatabaseKeys (ShakeDatabase _ _ db) = getKeysAndVisitAge db diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Action.hs b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs new file mode 100644 index 0000000000..6d47d9b511 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Action.hs @@ -0,0 +1,145 @@ +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Graph.Internal.Action +( ShakeValue +, actionFork +, actionBracket +, actionCatch +, actionFinally +, alwaysRerun +, apply1 +, apply +, applyWithoutDependency +, parallel +, runActions +, Development.IDE.Graph.Internal.Action.getDirtySet +, getKeysAndVisitedAge +) where + +import Control.Concurrent.Async +import Control.DeepSeq (force) +import Control.Exception +import Control.Monad.IO.Class +import Control.Monad.Trans.Class +import Control.Monad.Trans.Reader +import Data.Foldable (toList) +import Data.Functor.Identity +import Data.IORef +import Development.IDE.Graph.Classes +import Development.IDE.Graph.Internal.Database +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Rules (RuleResult) +import Development.IDE.Graph.Internal.Types +import System.Exit + +type ShakeValue a = (Show a, Typeable a, Eq a, Hashable a, NFData a) + +-- | Always rerun this rule when dirty, regardless of the dependencies. +alwaysRerun :: Action () +alwaysRerun = do + ref <- Action $ asks actionDeps + liftIO $ modifyIORef' ref (AlwaysRerunDeps mempty <>) + +parallel :: [Action a] -> Action [a] +parallel [] = pure [] +parallel [x] = fmap (:[]) x +parallel xs = do + a <- Action ask + deps <- liftIO $ readIORef $ actionDeps a + case deps of + UnknownDeps -> + -- if we are already in the rerun mode, nothing we do is going to impact our state + liftIO $ mapConcurrently (ignoreState a) xs + deps -> do + (newDeps, res) <- liftIO $ unzip <$> mapConcurrently (usingState a) xs + liftIO $ writeIORef (actionDeps a) $ mconcat $ deps : newDeps + pure res + where + usingState a x = do + ref <- newIORef mempty + res <- runReaderT (fromAction x) a{actionDeps=ref} + deps <- readIORef ref + pure (deps, res) + +ignoreState :: SAction -> Action b -> IO b +ignoreState a x = do + ref <- newIORef mempty + runReaderT (fromAction x) a{actionDeps=ref} + +actionFork :: Action a -> (Async a -> Action b) -> Action b +actionFork act k = do + a <- Action ask + deps <- liftIO $ readIORef $ actionDeps a + let db = actionDatabase a + case deps of + UnknownDeps -> do + -- if we are already in the rerun mode, nothing we do is going to impact our state + [res] <- liftIO $ withAsync (ignoreState a act) $ \as -> runActions db [k as] + return res + _ -> + error "please help me" + +isAsyncException :: SomeException -> Bool +isAsyncException e + | Just (_ :: AsyncCancelled) <- fromException e = True + | Just (_ :: AsyncException) <- fromException e = True + | Just (_ :: ExitCode) <- fromException e = True + | otherwise = False + + +actionCatch :: Exception e => Action a -> (e -> Action a) -> Action a +actionCatch a b = do + v <- Action ask + Action $ lift $ catchJust f (runReaderT (fromAction a) v) (\x -> runReaderT (fromAction (b x)) v) + where + -- Catch only catches exceptions that were caused by this code, not those that + -- are a result of program termination + f e | isAsyncException e = Nothing + | otherwise = fromException e + +actionBracket 
:: IO a -> (a -> IO b) -> (a -> Action c) -> Action c +actionBracket a b c = do + v <- Action ask + Action $ lift $ bracket a b (\x -> runReaderT (fromAction (c x)) v) + +actionFinally :: Action a -> IO b -> Action a +actionFinally a b = do + v <- Action ask + Action $ lift $ finally (runReaderT (fromAction a) v) b + +apply1 :: (RuleResult key ~ value, ShakeValue key, Typeable value) => key -> Action value +apply1 k = runIdentity <$> apply (Identity k) + +apply :: (Traversable f, RuleResult key ~ value, ShakeValue key, Typeable value) => f key -> Action (f value) +apply ks = do + db <- Action $ asks actionDatabase + stack <- Action $ asks actionStack + (is, vs) <- liftIO $ build db stack ks + ref <- Action $ asks actionDeps + let !ks = force $ fromListKeySet $ toList is + liftIO $ modifyIORef' ref (ResultDeps [ks] <>) + pure vs + +-- | Evaluate a list of keys without recording any dependencies. +applyWithoutDependency :: (Traversable f, RuleResult key ~ value, ShakeValue key, Typeable value) => f key -> Action (f value) +applyWithoutDependency ks = do + db <- Action $ asks actionDatabase + stack <- Action $ asks actionStack + (_, vs) <- liftIO $ build db stack ks + pure vs + +runActions :: Database -> [Action a] -> IO [a] +runActions db xs = do + deps <- newIORef mempty + runReaderT (fromAction $ parallel xs) $ SAction db deps emptyStack + +-- | Returns the set of dirty keys annotated with their age (in # of builds) +getDirtySet :: Action [(Key, Int)] +getDirtySet = do + db <- getDatabase + liftIO $ Development.IDE.Graph.Internal.Database.getDirtySet db + +getKeysAndVisitedAge :: Action [(Key, Int)] +getKeysAndVisitedAge = do + db <- getDatabase + liftIO $ Development.IDE.Graph.Internal.Database.getKeysAndVisitAge db diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Database.hs b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs new file mode 100644 index 0000000000..359e5ceb6a --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Database.hs @@ -0,0 +1,379 @@ +-- We deliberately want to ensure the function we add to the rule database +-- has the constraints we need on it when we get it out. 
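+--
+-- A small illustration of that point ('storeRule' and 'fetchRule' are
+-- hypothetical helpers with simplified types): rule functions are kept in
+-- the database as 'Dynamic' values and recovered with 'unwrapDynamic', which
+-- only succeeds when the recovered type matches the stored one exactly, so
+-- the constraints below are kept even where GHC would flag them as redundant.
+--
+-- > storeRule :: (Typeable key, Typeable value) => (key -> Action value) -> Dynamic
+-- > storeRule = toDyn
+-- >
+-- > fetchRule :: (Typeable key, Typeable value) => Dynamic -> (key -> Action value)
+-- > fetchRule = unwrapDynamic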
+{-# OPTIONS_GHC -Wno-redundant-constraints #-} + +{-# LANGUAGE CPP #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Graph.Internal.Database (compute, newDatabase, incDatabase, build, getDirtySet, getKeysAndVisitAge) where + +import Prelude hiding (unzip) + +import Control.Concurrent.Async +import Control.Concurrent.Extra +import Control.Concurrent.STM.Stats (STM, atomically, + atomicallyNamed, + modifyTVar', newTVarIO, + readTVarIO) +import Control.Exception +import Control.Monad +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Reader +import qualified Control.Monad.Trans.State.Strict as State +import Data.Dynamic +import Data.Either +import Data.Foldable (for_, traverse_) +import Data.IORef.Extra +import Data.Maybe +import Data.Traversable (for) +import Data.Tuple.Extra +import Debug.Trace (traceM) +import Development.IDE.Graph.Classes +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Rules +import Development.IDE.Graph.Internal.Types +import qualified Focus +import qualified ListT +import qualified StmContainers.Map as SMap +import System.IO.Unsafe +import System.Time.Extra (duration, sleep) + +#if MIN_VERSION_base(4,19,0) +import Data.Functor (unzip) +#else +import Data.List.NonEmpty (unzip) +#endif + + +newDatabase :: Dynamic -> TheRules -> IO Database +newDatabase databaseExtra databaseRules = do + databaseStep <- newTVarIO $ Step 0 + databaseValues <- atomically SMap.new + pure Database{..} + +-- | Increment the step and mark dirty. +-- Assumes that the database is not running a build +incDatabase :: Database -> Maybe [Key] -> IO () +-- only some keys are dirty +incDatabase db (Just kk) = do + atomicallyNamed "incDatabase" $ modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 + transitiveDirtyKeys <- transitiveDirtySet db kk + for_ (toListKeySet transitiveDirtyKeys) $ \k -> + -- Updating all the keys atomically is not necessary + -- since we assume that no build is mutating the db. + -- Therefore run one transaction per key to minimise contention. + atomicallyNamed "incDatabase" $ SMap.focus updateDirty k (databaseValues db) + +-- all keys are dirty +incDatabase db Nothing = do + atomically $ modifyTVar' (databaseStep db) $ \(Step i) -> Step $ i + 1 + let list = SMap.listT (databaseValues db) + atomicallyNamed "incDatabase - all " $ flip ListT.traverse_ list $ \(k,_) -> + SMap.focus updateDirty k (databaseValues db) + +updateDirty :: Monad m => Focus.Focus KeyDetails m () +updateDirty = Focus.adjust $ \(KeyDetails status rdeps) -> + let status' + | Running _ _ _ x <- status = Dirty x + | Clean x <- status = Dirty (Just x) + | otherwise = status + in KeyDetails status' rdeps +-- | Unwrap and build a list of keys in parallel +build + :: forall f key value . (Traversable f, RuleResult key ~ value, Typeable key, Show key, Hashable key, Eq key, Typeable value) + => Database -> Stack -> f key -> IO (f Key, f value) +-- build _ st k | traceShow ("build", st, k) False = undefined +build db stack keys = do + built <- runAIO $ do + built <- builder db stack (fmap newKey keys) + case built of + Left clean -> return clean + Right dirty -> liftIO dirty + let (ids, vs) = unzip built + pure (ids, fmap (asV . resultValue) vs) + where + asV :: Value -> value + asV (Value x) = unwrapDynamic x + +-- | Build a list of keys and return their results. 
+-- If none of the keys are dirty, we can return the results immediately. +-- Otherwise, a blocking computation is returned *which must be evaluated asynchronously* to avoid deadlock. +builder + :: Traversable f => Database -> Stack -> f Key -> AIO (Either (f (Key, Result)) (IO (f (Key, Result)))) +-- builder _ st kk | traceShow ("builder", st,kk) False = undefined +builder db@Database{..} stack keys = withRunInIO $ \(RunInIO run) -> do + -- Things that I need to force before my results are ready + toForce <- liftIO $ newTVarIO [] + current <- liftIO $ readTVarIO databaseStep + results <- liftIO $ for keys $ \id -> + -- Updating the status of all the dependencies atomically is not necessary. + -- Therefore, run one transaction per dep. to avoid contention + atomicallyNamed "builder" $ do + -- Spawn the id if needed + status <- SMap.lookup id databaseValues + val <- case viewDirty current $ maybe (Dirty Nothing) keyStatus status of + Clean r -> pure r + Running _ force val _ + | memberStack id stack -> throw $ StackException stack + | otherwise -> do + modifyTVar' toForce (Wait force :) + pure val + Dirty s -> do + let act = run (refresh db stack id s) + (force, val) = splitIO (join act) + SMap.focus (updateStatus $ Running current force val s) id databaseValues + modifyTVar' toForce (Spawn force:) + pure val + + pure (id, val) + + toForceList <- liftIO $ readTVarIO toForce + let waitAll = run $ waitConcurrently_ toForceList + case toForceList of + [] -> return $ Left results + _ -> return $ Right $ do + waitAll + pure results + + +-- | isDirty +-- only dirty when it's build time is older than the changed time of one of its dependencies +isDirty :: Foldable t => Result -> t (a, Result) -> Bool +isDirty me = any (\(_,dep) -> resultBuilt me < resultChanged dep) + +-- | Refresh dependencies for a key and compute the key: +-- The refresh the deps linearly(last computed order of the deps for the key). +-- If any of the deps is dirty in the process, we jump to the actual computation of the key +-- and shortcut the refreshing of the rest of the deps. +-- * If no dirty dependencies and we have evaluated the key previously, then we refresh it in the current thread. 
+-- This assumes that the implementation will be a lookup +-- * Otherwise, we spawn a new thread to refresh the dirty deps (if any) and the key itself +refreshDeps :: KeySet -> Database -> Stack -> Key -> Result -> [KeySet] -> AIO Result +refreshDeps visited db stack key result = \case + -- no more deps to refresh + [] -> liftIO $ compute db stack key RunDependenciesSame (Just result) + (dep:deps) -> do + let newVisited = dep <> visited + res <- builder db stack (toListKeySet (dep `differenceKeySet` visited)) + case res of + Left res -> if isDirty result res + -- restart the computation if any of the deps are dirty + then liftIO $ compute db stack key RunDependenciesChanged (Just result) + -- else kick the rest of the deps + else refreshDeps newVisited db stack key result deps + Right iores -> do + res <- liftIO iores + if isDirty result res + then liftIO $ compute db stack key RunDependenciesChanged (Just result) + else refreshDeps newVisited db stack key result deps + +-- | Refresh a key: +refresh :: Database -> Stack -> Key -> Maybe Result -> AIO (IO Result) +-- refresh _ st k _ | traceShow ("refresh", st, k) False = undefined +refresh db stack key result = case (addStack key stack, result) of + (Left e, _) -> throw e + (Right stack, Just me@Result{resultDeps = ResultDeps deps}) -> asyncWithCleanUp $ refreshDeps mempty db stack key me (reverse deps) + (Right stack, _) -> + asyncWithCleanUp $ liftIO $ compute db stack key RunDependenciesChanged result + +-- | Compute a key. +compute :: Database -> Stack -> Key -> RunMode -> Maybe Result -> IO Result +-- compute _ st k _ _ | traceShow ("compute", st, k) False = undefined +compute db@Database{..} stack key mode result = do + let act = runRule databaseRules key (fmap resultData result) mode + deps <- newIORef UnknownDeps + (execution, RunResult{..}) <- + duration $ runReaderT (fromAction act) $ SAction db deps stack + curStep <- readTVarIO databaseStep + deps <- readIORef deps + let lastChanged = maybe curStep resultChanged result + let lastBuild = maybe curStep resultBuilt result + -- changed time is always older than or equal to build time + let (changed, built) = case runChanged of + -- some thing changed + ChangedRecomputeDiff -> (curStep, curStep) + -- recomputed is the same + ChangedRecomputeSame -> (lastChanged, curStep) + -- nothing changed + ChangedNothing -> (lastChanged, lastBuild) + let -- only update the deps when the rule ran with changes + actualDeps = if runChanged /= ChangedNothing then deps else previousDeps + previousDeps= maybe UnknownDeps resultDeps result + let res = Result runValue built changed curStep actualDeps execution runStore + case getResultDepsDefault mempty actualDeps of + deps | not (nullKeySet deps) + && runChanged /= ChangedNothing + -> do + -- IMPORTANT: record the reverse deps **before** marking the key Clean. + -- If an async exception strikes before the deps have been recorded, + -- we won't be able to accurately propagate dirtiness for this key + -- on the next build. + void $ + updateReverseDeps key db + (getResultDepsDefault mempty previousDeps) + deps + _ -> pure () + atomicallyNamed "compute and run hook" $ do + runHook + SMap.focus (updateStatus $ Clean res) key databaseValues + pure res + +updateStatus :: Monad m => Status -> Focus.Focus KeyDetails m () +updateStatus res = Focus.alter + (Just . 
maybe (KeyDetails res mempty) + (\it -> it{keyStatus = res})) + +-- | Returns the set of dirty keys annotated with their age (in # of builds) +getDirtySet :: Database -> IO [(Key, Int)] +getDirtySet db = do + Step curr <- readTVarIO (databaseStep db) + dbContents <- getDatabaseValues db + let calcAge Result{resultBuilt = Step x} = curr - x + calcAgeStatus (Dirty x)=calcAge <$> x + calcAgeStatus _ = Nothing + return $ mapMaybe (secondM calcAgeStatus) dbContents + +-- | Returns an approximation of the database keys, +-- annotated with how long ago (in # builds) they were visited +getKeysAndVisitAge :: Database -> IO [(Key, Int)] +getKeysAndVisitAge db = do + values <- getDatabaseValues db + Step curr <- readTVarIO (databaseStep db) + let keysWithVisitAge = mapMaybe (secondM (fmap getAge . getResult)) values + getAge Result{resultVisited = Step s} = curr - s + return keysWithVisitAge +-------------------------------------------------------------------------------- +-- Lazy IO trick + +data Box a = Box {fromBox :: a} + +-- | Split an IO computation into an unsafe lazy value and a forcing computation +splitIO :: IO a -> (IO (), a) +splitIO act = do + let act2 = Box <$> act + let res = unsafePerformIO act2 + (void $ evaluate res, fromBox res) + +-------------------------------------------------------------------------------- +-- Reverse dependencies + +-- | Update the reverse dependencies of an Id +updateReverseDeps + :: Key -- ^ Id + -> Database + -> KeySet -- ^ Previous direct dependencies of Id + -> KeySet -- ^ Current direct dependencies of Id + -> IO () +-- mask to ensure that all the reverse dependencies are updated +updateReverseDeps myId db prev new = do + forM_ (toListKeySet $ prev `differenceKeySet` new) $ \d -> + doOne (deleteKeySet myId) d + forM_ (toListKeySet new) $ + doOne (insertKeySet myId) + where + alterRDeps f = + Focus.adjust (onKeyReverseDeps f) + -- updating all the reverse deps atomically is not needed. + -- Therefore, run individual transactions for each update + -- in order to avoid contention + doOne f id = atomicallyNamed "updateReverseDeps" $ + SMap.focus (alterRDeps f) id (databaseValues db) + +getReverseDependencies :: Database -> Key -> STM (Maybe KeySet) +getReverseDependencies db = (fmap.fmap) keyReverseDeps . flip SMap.lookup (databaseValues db) + +transitiveDirtySet :: Foldable t => Database -> t Key -> IO KeySet +transitiveDirtySet database = flip State.execStateT mempty . traverse_ loop + where + loop x = do + seen <- State.get + if x `memberKeySet` seen then pure () else do + State.put (insertKeySet x seen) + next <- lift $ atomically $ getReverseDependencies database x + traverse_ loop (maybe mempty toListKeySet next) + +-------------------------------------------------------------------------------- +-- Asynchronous computations with cancellation + +-- | A simple monad to implement cancellation on top of 'Async', +-- generalizing 'withAsync' to monadic scopes. +newtype AIO a = AIO { unAIO :: ReaderT (IORef [Async ()]) IO a } + deriving newtype (Applicative, Functor, Monad, MonadIO) + +-- | Run the monadic computation, cancelling all the spawned asyncs if an exception arises +runAIO :: AIO a -> IO a +runAIO (AIO act) = do + asyncs <- newIORef [] + runReaderT act asyncs `onException` cleanupAsync asyncs + +-- | Like 'async' but with built-in cancellation. +-- Returns an IO action to wait on the result. 
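+--
+-- A usage sketch ('demo' is illustrative and not part of this module): spawn
+-- two computations, then wait on both; if an exception escapes 'runAIO', all
+-- asyncs spawned through 'asyncWithCleanUp' are cancelled.
+--
+-- > demo :: IO Int
+-- > demo = runAIO $ do
+-- >     waitA <- asyncWithCleanUp $ liftIO (pure 1)
+-- >     waitB <- asyncWithCleanUp $ liftIO (pure 2)
+-- >     liftIO $ (+) <$> waitA <*> waitB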
+asyncWithCleanUp :: AIO a -> AIO (IO a) +asyncWithCleanUp act = do + st <- AIO ask + io <- unliftAIO act + -- mask to make sure we keep track of the spawned async + liftIO $ uninterruptibleMask $ \restore -> do + a <- async $ restore io + atomicModifyIORef'_ st (void a :) + return $ wait a + +unliftAIO :: AIO a -> AIO (IO a) +unliftAIO act = do + st <- AIO ask + return $ runReaderT (unAIO act) st + +newtype RunInIO = RunInIO (forall a. AIO a -> IO a) + +withRunInIO :: (RunInIO -> AIO b) -> AIO b +withRunInIO k = do + st <- AIO ask + k $ RunInIO (\aio -> runReaderT (unAIO aio) st) + +cleanupAsync :: IORef [Async a] -> IO () +-- mask to make sure we interrupt all the asyncs +cleanupAsync ref = uninterruptibleMask $ \unmask -> do + asyncs <- atomicModifyIORef' ref ([],) + -- interrupt all the asyncs without waiting + mapM_ (\a -> throwTo (asyncThreadId a) AsyncCancelled) asyncs + -- Wait until all the asyncs are done + -- But if it takes more than 10 seconds, log to stderr + unless (null asyncs) $ do + let warnIfTakingTooLong = unmask $ forever $ do + sleep 10 + traceM "cleanupAsync: waiting for asyncs to finish" + withAsync warnIfTakingTooLong $ \_ -> + mapM_ waitCatch asyncs + +data Wait + = Wait {justWait :: !(IO ())} + | Spawn {justWait :: !(IO ())} + +fmapWait :: (IO () -> IO ()) -> Wait -> Wait +fmapWait f (Wait io) = Wait (f io) +fmapWait f (Spawn io) = Spawn (f io) + +waitOrSpawn :: Wait -> IO (Either (IO ()) (Async ())) +waitOrSpawn (Wait io) = pure $ Left io +waitOrSpawn (Spawn io) = Right <$> async io + +waitConcurrently_ :: [Wait] -> AIO () +waitConcurrently_ [] = pure () +waitConcurrently_ [one] = liftIO $ justWait one +waitConcurrently_ many = do + ref <- AIO ask + -- spawn the async computations. + -- mask to make sure we keep track of all the asyncs. + (asyncs, syncs) <- liftIO $ uninterruptibleMask $ \unmask -> do + waits <- liftIO $ traverse (waitOrSpawn . fmapWait unmask) many + let (syncs, asyncs) = partitionEithers waits + liftIO $ atomicModifyIORef'_ ref (asyncs ++) + return (asyncs, syncs) + -- work on the sync computations + liftIO $ sequence_ syncs + -- wait for the async computations before returning + liftIO $ traverse_ wait asyncs diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Key.hs b/hls-graph/src/Development/IDE/Graph/Internal/Key.hs new file mode 100644 index 0000000000..85cebeb110 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Key.hs @@ -0,0 +1,182 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE ViewPatterns #-} + +module Development.IDE.Graph.Internal.Key + ( Key -- Opaque - don't expose constructor, use newKey to create + , KeyValue (..) 
+ , pattern Key + , newKey + , renderKey + -- * KeyMap + , KeyMap + , mapKeyMap + , insertKeyMap + , lookupKeyMap + , lookupDefaultKeyMap + , fromListKeyMap + , fromListWithKeyMap + , toListKeyMap + , elemsKeyMap + , restrictKeysKeyMap + -- * KeySet + , KeySet + , nullKeySet + , insertKeySet + , memberKeySet + , toListKeySet + , lengthKeySet + , filterKeySet + , singletonKeySet + , fromListKeySet + , deleteKeySet + , differenceKeySet + ) where + +--import Control.Monad.IO.Class () +import Control.Exception (evaluate) +import Data.Coerce +import Data.Dynamic +import qualified Data.HashMap.Strict as Map +import Data.IntMap (IntMap) +import qualified Data.IntMap.Strict as IM +import Data.IntSet (IntSet) +import qualified Data.IntSet as IS +import Data.IORef +import Data.Text (Text) +import qualified Data.Text as T +import Data.Typeable +import Development.IDE.Graph.Classes +import System.IO.Unsafe + + +newtype Key = UnsafeMkKey Int + +pattern Key :: () => (Typeable a, Hashable a, Show a) => a -> Key +pattern Key a <- (lookupKeyValue -> KeyValue a _) +{-# COMPLETE Key #-} + +data KeyValue = forall a . (Typeable a, Hashable a, Show a) => KeyValue a Text + +instance Eq KeyValue where + KeyValue a _ == KeyValue b _ = Just a == cast b +instance Hashable KeyValue where + hashWithSalt i (KeyValue x _) = hashWithSalt i (typeOf x, x) +instance Show KeyValue where + show (KeyValue _ t) = T.unpack t + +data GlobalKeyValueMap = GlobalKeyValueMap !(Map.HashMap KeyValue Key) !(IntMap KeyValue) {-# UNPACK #-} !Int + +keyMap :: IORef GlobalKeyValueMap +keyMap = unsafePerformIO $ newIORef (GlobalKeyValueMap Map.empty IM.empty 0) + +{-# NOINLINE keyMap #-} + +newKey :: (Typeable a, Hashable a, Show a) => a -> Key +newKey k = unsafePerformIO $ do + let !newKey = KeyValue k (T.pack (show k)) + atomicModifyIORef' keyMap $ \km@(GlobalKeyValueMap hm im n) -> + let new_key = Map.lookup newKey hm + in case new_key of + Just v -> (km, v) + Nothing -> + let !new_index = UnsafeMkKey n + in (GlobalKeyValueMap (Map.insert newKey new_index hm) (IM.insert n newKey im) (n+1), new_index) +{-# NOINLINE newKey #-} + +lookupKeyValue :: Key -> KeyValue +lookupKeyValue (UnsafeMkKey x) = unsafePerformIO $ do + -- NOTE: + -- The reason for this evaluate is that the x, if not forced yet, is a thunk + -- that forces the atomicModifyIORef' in the creation of the new key. If it + -- isn't forced *before* reading the keyMap, the keyMap will only obtain the new + -- key (x) *after* the IntMap is already copied out of the keyMap reference, + -- i.e. when it is forced for the lookup in the IntMap. + k <- evaluate x + GlobalKeyValueMap _ im _ <- readIORef keyMap + pure $! im IM.! k + +{-# NOINLINE lookupKeyValue #-} + +instance Eq Key where + UnsafeMkKey a == UnsafeMkKey b = a == b +instance Hashable Key where + hashWithSalt i (UnsafeMkKey x) = hashWithSalt i x +instance Show Key where + show (Key x) = show x + +renderKey :: Key -> Text +renderKey (lookupKeyValue -> KeyValue _ t) = t + +newtype KeySet = KeySet IntSet + deriving newtype (Eq, Ord, Semigroup, Monoid, NFData) + +instance Show KeySet where + showsPrec p (KeySet is)= showParen (p > 10) $ + showString "fromList " . 
shows ks + where ks = coerce (IS.toList is) :: [Key] + +insertKeySet :: Key -> KeySet -> KeySet +insertKeySet = coerce IS.insert + +memberKeySet :: Key -> KeySet -> Bool +memberKeySet = coerce IS.member + +toListKeySet :: KeySet -> [Key] +toListKeySet = coerce IS.toList + +nullKeySet :: KeySet -> Bool +nullKeySet = coerce IS.null + +differenceKeySet :: KeySet -> KeySet -> KeySet +differenceKeySet = coerce IS.difference + +deleteKeySet :: Key -> KeySet -> KeySet +deleteKeySet = coerce IS.delete + +fromListKeySet :: [Key] -> KeySet +fromListKeySet = coerce IS.fromList + +singletonKeySet :: Key -> KeySet +singletonKeySet = coerce IS.singleton + +filterKeySet :: (Key -> Bool) -> KeySet -> KeySet +filterKeySet = coerce IS.filter + +lengthKeySet :: KeySet -> Int +lengthKeySet = coerce IS.size + +newtype KeyMap a = KeyMap (IntMap a) + deriving newtype (Eq, Ord, Semigroup, Monoid) + +instance Show a => Show (KeyMap a) where + showsPrec p (KeyMap im)= showParen (p > 10) $ + showString "fromList " . shows ks + where ks = coerce (IM.toList im) :: [(Key,a)] + +mapKeyMap :: (a -> b) -> KeyMap a -> KeyMap b +mapKeyMap f (KeyMap m) = KeyMap (IM.map f m) + +insertKeyMap :: Key -> a -> KeyMap a -> KeyMap a +insertKeyMap (UnsafeMkKey k) v (KeyMap m) = KeyMap (IM.insert k v m) + +lookupKeyMap :: Key -> KeyMap a -> Maybe a +lookupKeyMap (UnsafeMkKey k) (KeyMap m) = IM.lookup k m + +lookupDefaultKeyMap :: a -> Key -> KeyMap a -> a +lookupDefaultKeyMap a (UnsafeMkKey k) (KeyMap m) = IM.findWithDefault a k m + +fromListKeyMap :: [(Key,a)] -> KeyMap a +fromListKeyMap xs = KeyMap (IM.fromList (coerce xs)) + +fromListWithKeyMap :: (a -> a -> a) -> [(Key,a)] -> KeyMap a +fromListWithKeyMap f xs = KeyMap (IM.fromListWith f (coerce xs)) + +toListKeyMap :: KeyMap a -> [(Key,a)] +toListKeyMap (KeyMap m) = coerce (IM.toList m) + +elemsKeyMap :: KeyMap a -> [a] +elemsKeyMap (KeyMap m) = IM.elems m + +restrictKeysKeyMap :: KeyMap a -> KeySet -> KeyMap a +restrictKeysKeyMap (KeyMap m) (KeySet s) = KeyMap (IM.restrictKeys m s) diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Options.hs b/hls-graph/src/Development/IDE/Graph/Internal/Options.hs new file mode 100644 index 0000000000..db8bd4e161 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Options.hs @@ -0,0 +1,27 @@ +module Development.IDE.Graph.Internal.Options where + +import Control.Monad.Trans.Reader +import Data.Dynamic +import Development.IDE.Graph.Internal.Types + +data ShakeOptions = ShakeOptions { + shakeExtra :: Maybe Dynamic, + shakeAllowRedefineRules :: Bool, + shakeTimings :: Bool + } + +shakeOptions :: ShakeOptions +shakeOptions = ShakeOptions Nothing False False + +getShakeExtra :: Typeable a => Action (Maybe a) +getShakeExtra = do + extra <- Action $ asks $ databaseExtra . actionDatabase + pure $ fromDynamic extra + +getShakeExtraRules :: Typeable a => Rules (Maybe a) +getShakeExtraRules = do + extra <- Rules $ asks rulesExtra + pure $ fromDynamic extra + +newShakeExtra :: Typeable a => a -> Maybe Dynamic +newShakeExtra = Just . 
toDyn diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Paths.hs b/hls-graph/src/Development/IDE/Graph/Internal/Paths.hs new file mode 100644 index 0000000000..bccc11198f --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Paths.hs @@ -0,0 +1,63 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TemplateHaskell #-} + +module Development.IDE.Graph.Internal.Paths (readDataFileHTML) where + +#ifndef FILE_EMBED +import Control.Exception (SomeException (SomeException), catch) +import Control.Monad (filterM) +import Paths_hls_graph +import System.Directory (doesFileExist, getCurrentDirectory) +import System.Environment (getExecutablePath) +import System.FilePath (takeDirectory, ()) +import System.IO.Unsafe (unsafePerformIO) +#endif +import qualified Data.ByteString.Lazy as LBS + +#ifdef FILE_EMBED +import qualified Data.ByteString as BS +import Data.FileEmbed + +htmlDataFiles :: [(FilePath, BS.ByteString)] +htmlDataFiles = + [ +#ifdef __GHCIDE__ + ("profile.html", $(embedFile "hls-graph/html/profile.html")) + , ("shake.js", $(embedFile "hls-graph/html/shake.js")) +#else + ("profile.html", $(embedFile "html/profile.html")) + , ("shake.js", $(embedFile "html/shake.js")) +#endif + ] + +readDataFileHTML :: FilePath -> IO LBS.ByteString +readDataFileHTML file = do + case lookup file htmlDataFiles of + Nothing -> fail $ "Could not find data file " ++ file ++ " in embedded data files!" + Just x -> pure (LBS.fromStrict x) + +#else +-- We want getDataFileName to be relative to the current directory on program startup, +-- even if we issue a change directory command. Therefore, first call caches, future ones read. + +{-# NOINLINE dataDirs #-} +dataDirs :: [String] +dataDirs = unsafePerformIO $ do + datdir <- getDataDir + exedir <- takeDirectory <$> getExecutablePath `catch` \SomeException{} -> pure "" + curdir <- getCurrentDirectory + pure $ [datdir] ++ [exedir | exedir /= ""] ++ [curdir] + + +getDataFile :: FilePath -> IO FilePath +getDataFile file = do + let poss = map ( file) dataDirs + res <- filterM doesFileExist poss + case res of + [] -> fail $ unlines $ ("Could not find data file " ++ file ++ ", looked in:") : map (" " ++) poss + x:_ -> pure x + +readDataFileHTML :: FilePath -> IO LBS.ByteString +readDataFileHTML file = LBS.readFile =<< getDataFile ("html" file) + +#endif diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Profile.hs b/hls-graph/src/Development/IDE/Graph/Internal/Profile.hs new file mode 100644 index 0000000000..5369c578f8 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Profile.hs @@ -0,0 +1,222 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE ViewPatterns #-} + +{- HLINT ignore "Redundant bracket" -} -- a result of CPP expansion + +module Development.IDE.Graph.Internal.Profile (writeProfile) where + +import Control.Concurrent.STM.Stats (readTVarIO) +import Data.Bifunctor +import qualified Data.ByteString.Lazy.Char8 as LBS +import Data.Char +import Data.Dynamic (toDyn) +import qualified Data.HashMap.Strict as Map +import Data.List (dropWhileEnd, + intercalate, + partition, sort, + sortBy) +import Data.List.Extra (nubOrd) +import Data.Maybe +import Data.Time (getCurrentTime) +import Data.Time.Format.ISO8601 (iso8601Show) +import Development.IDE.Graph.Internal.Database (getDirtySet) +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Paths +import Development.IDE.Graph.Internal.Types +import qualified Language.Javascript.DGTable as DGTable +import qualified 
Language.Javascript.Flot as Flot +import qualified Language.Javascript.JQuery as JQuery +import Numeric.Extra (showDP) +import System.FilePath +import System.IO.Unsafe (unsafePerformIO) +import System.Time.Extra (Seconds) + +#if !MIN_VERSION_base(4,20,0) +import Data.List (foldl') +#endif + +#ifdef FILE_EMBED +import Data.FileEmbed +import Language.Haskell.TH.Syntax (runIO) +#endif + +-- | Generates an report given some build system profiling data. +writeProfile :: FilePath -> Database -> IO () +writeProfile out db = do + (report, mapping) <- toReport db + dirtyKeysMapped <- do + dirtyIds <- fromListKeySet . fmap fst <$> getDirtySet db + let dirtyKeysMapped = mapMaybe (`lookupKeyMap` mapping) . toListKeySet $ dirtyIds + return $ Just $ sort dirtyKeysMapped + rpt <- generateHTML dirtyKeysMapped report + LBS.writeFile out rpt + +data ProfileEntry = ProfileEntry + {prfName :: !String, prfBuilt :: !Int, prfChanged :: !Int, prfVisited :: !Int, prfDepends :: [[Int]], prfExecution :: !Seconds} + +-- | Eliminate all errors from the database, pretending they don't exist +-- resultsOnly :: Map.HashMap Id (Key, Status) -> Map.HashMap Id (Key, Result (Either BS.ByteString Value)) +resultsOnly :: [(Key, Status)] -> KeyMap Result +resultsOnly mp = mapKeyMap (\r -> + r{resultDeps = mapResultDeps (filterKeySet (isJust . flip lookupKeyMap keep)) $ resultDeps r} + ) keep + where + keep = fromListKeyMap $ mapMaybe (traverse getResult) mp + +-- | Given a map of representing a dependency order (with a show for error messages), find an ordering for the items such +-- that no item points to an item before itself. +-- Raise an error if you end up with a cycle. +-- +-- Algorithm: +-- Divide everyone up into those who have no dependencies [Id] +-- And those who depend on a particular Id, Dep :-> Maybe [(Key,[Dep])] +-- Where d :-> Just (k, ds), k depends on firstly d, then remaining on ds +-- For each with no dependencies, add to list, then take its dep hole and +-- promote them either to Nothing (if ds == []) or into a new slot. +-- k :-> Nothing means the key has already been freed +dependencyOrder :: (Key -> String) -> [(Key, [Key])] -> [Key] +dependencyOrder shw status = + f (map fst noDeps) $ + mapKeyMap Just $ + fromListWithKeyMap (++) + [(d, [(k,ds)]) | (k,d:ds) <- hasDeps] + where + (noDeps, hasDeps) = partition (null . snd) status + + f [] mp | null bad = [] + | otherwise = error $ unlines $ + "Internal invariant broken, database seems to be cyclic" : + map (" " ++) bad ++ + ["... plus " ++ show (length badOverflow) ++ " more ..." | not $ null badOverflow] + where (bad,badOverflow) = splitAt 10 [shw i | (i, Just _) <- toListKeyMap mp] + + f (x:xs) mp = x : f (now++xs) later + where free = fromMaybe [] $ lookupDefaultKeyMap (Just []) x mp + (now,later) = foldl' g ([], insertKeyMap x Nothing mp) free + + g (free, mp) (k, []) = (k:free, mp) + g (free, mp) (k, d:ds) = case lookupDefaultKeyMap (Just []) d mp of + Nothing -> g (free, mp) (k, ds) + Just todo -> (free, insertKeyMap d (Just $ (k,ds) : todo) mp) + +prepareForDependencyOrder :: Database -> IO (KeyMap Result) +prepareForDependencyOrder db = do + current <- readTVarIO $ databaseStep db + insertKeyMap (newKey "alwaysRerun") (alwaysRerunResult current) . 
resultsOnly + <$> getDatabaseValues db + +-- | Returns a list of profile entries, and a mapping linking a non-error Id to its profile entry +toReport :: Database -> IO ([ProfileEntry], KeyMap Int) +toReport db = do + status <- prepareForDependencyOrder db + let order = dependencyOrder show + $ map (second (toListKeySet . getResultDepsDefault (singletonKeySet $ newKey "alwaysRerun") . resultDeps)) + $ toListKeyMap status + ids = fromListKeyMap $ zip order [0..] + + steps = let xs = nubOrd $ concat [[resultChanged, resultBuilt, resultVisited] | Result{..} <- elemsKeyMap status] + + in Map.fromList $ zip (sortBy (flip compare) xs) [0..] + + f k Result{..} = ProfileEntry + {prfName = show k + ,prfBuilt = fromStep resultBuilt + ,prfVisited = fromStep resultVisited + ,prfChanged = fromStep resultChanged + ,prfDepends = map pure $ elemsKeyMap $ restrictKeysKeyMap ids $ getResultDepsDefault (singletonKeySet $ newKey "alwaysRerun") resultDeps + ,prfExecution = resultExecution + } + where fromStep i = fromJust $ Map.lookup i steps + pure ([maybe (error "toReport") (f i) $ lookupKeyMap i status | i <- order], ids) + +alwaysRerunResult :: Step -> Result +alwaysRerunResult current = Result (Value $ toDyn "") (Step 0) (Step 0) current (ResultDeps mempty) 0 mempty + +generateHTML :: Maybe [Int] -> [ProfileEntry] -> IO LBS.ByteString +generateHTML dirtyKeys xs = do + report <- readDataFileHTML "profile.html" + let f "data/profile-data.js" = pure $ LBS.pack $ "var profile =\n" ++ generateJSONProfile xs + f "data/build-data.js" = pure $ LBS.pack $ "var build =\n" ++ generateJSONBuild dirtyKeys + f other = error other + runTemplate f report + +generateJSONBuild :: Maybe [Int] -> String +generateJSONBuild (Just dirtyKeys) = jsonList [jsonList (map show dirtyKeys)] +generateJSONBuild Nothing = jsonList [] + +generateJSONProfile :: [ProfileEntry] -> String +generateJSONProfile = jsonListLines . map showEntry + where + showEntry ProfileEntry{..} = jsonList $ + [show prfName + ,showTime prfExecution + ,show prfBuilt + ,show prfChanged + ,show prfVisited + ] ++ + [show prfDepends | not (null prfDepends)] + showTime x = if '.' `elem` y then dropWhileEnd (== '.') $ dropWhileEnd (== '0') y else y + where y = showDP 4 x + +jsonListLines :: [String] -> String +jsonListLines xs = "[" ++ intercalate "\n," xs ++ "\n]" + +jsonList :: [String] -> String +jsonList xs = "[" ++ intercalate "," xs ++ "]" + +-- Very hard to abstract over TH, so we do it with CPP +#ifdef FILE_EMBED +#define FILE(x) (pure (LBS.fromStrict $(embedFile =<< runIO (x)))) +#else +#define FILE(x) (LBS.readFile =<< (x)) +#endif + +libraries :: [(String, IO LBS.ByteString)] +libraries = + [("jquery.js", FILE(JQuery.file)) + ,("jquery.dgtable.js", FILE(DGTable.file)) + ,("jquery.flot.js", FILE(Flot.file Flot.Flot)) + ,("jquery.flot.stack.js", FILE(Flot.file Flot.FlotStack)) + ] + + +-- | Template Engine. Perform the following replacements on a line basis: +-- +-- * ==> +-- +-- * ==> +runTemplate :: (FilePath -> IO LBS.ByteString) -> LBS.ByteString -> IO LBS.ByteString +runTemplate ask = lbsMapLinesIO f + where + link = LBS.pack "\n" `LBS.append` res `LBS.append` LBS.pack "\n" + | Just file <- LBS.stripPrefix link y = do res <- grab file; pure $ LBS.pack "" + | otherwise = pure x + where + y = LBS.dropWhile isSpace x + grab = asker . takeWhile (/= '\"') . 
LBS.unpack + + asker o@(splitFileName -> ("lib/",x)) = + case lookup x libraries of + Nothing -> error $ "Template library, unknown library: " ++ o + Just act -> act + + asker "shake.js" = readDataFileHTML "shake.js" + asker "data/metadata.js" = do + time <- getCurrentTime + pure $ LBS.pack $ + "var version = \"0\"" ++ + "\nvar generated = " ++ iso8601Show time + asker x = ask x + +-- Perform a mapM on each line and put the result back together again +lbsMapLinesIO :: (LBS.ByteString -> IO LBS.ByteString) -> LBS.ByteString -> IO LBS.ByteString +-- If we do the obvious @fmap LBS.unlines . mapM f@ then all the monadic actions are run on all the lines +-- before it starts producing the lazy result, killing streaming and having more stack usage. +-- The real solution (albeit with too many dependencies for something small) is a streaming library, +-- but a little bit of unsafePerformIO does the trick too. +lbsMapLinesIO f = pure . LBS.unlines . map (unsafePerformIO . f) . LBS.lines diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Rules.hs b/hls-graph/src/Development/IDE/Graph/Internal/Rules.hs new file mode 100644 index 0000000000..9a5f36ca35 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Rules.hs @@ -0,0 +1,57 @@ +-- We deliberately want to ensure the function we add to the rule database +-- has the constraints we need on it when we get it out. +{-# OPTIONS_GHC -Wno-redundant-constraints #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Graph.Internal.Rules where + +import Control.Exception.Extra +import Control.Monad +import Control.Monad.IO.Class +import Control.Monad.Trans.Reader +import qualified Data.ByteString as BS +import Data.Dynamic +import qualified Data.HashMap.Strict as Map +import Data.IORef +import Data.Maybe +import Data.Typeable +import Development.IDE.Graph.Classes +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Types + +-- | The type mapping between the @key@ or a rule and the resulting @value@. +type family RuleResult key -- = value + +action :: Action a -> Rules () +action x = do + ref <- Rules $ asks rulesActions + liftIO $ modifyIORef' ref (void x:) + +addRule + :: forall key value . + (RuleResult key ~ value, Typeable key, Hashable key, Eq key, Typeable value) + => (key -> Maybe BS.ByteString -> RunMode -> Action (RunResult value)) + -> Rules () +addRule f = do + ref <- Rules $ asks rulesMap + liftIO $ modifyIORef' ref $ Map.insert (typeRep (Proxy :: Proxy key)) (toDyn f2) + where + f2 :: Key -> Maybe BS.ByteString -> RunMode -> Action (RunResult Value) + f2 (Key a) b c = do + v <- f (fromJust $ cast a :: key) b c + v <- liftIO $ evaluate v + pure $ Value . 
toDyn <$> v + +runRule + :: TheRules -> Key -> Maybe BS.ByteString -> RunMode -> Action (RunResult Value) +runRule rules key@(Key t) bs mode = case Map.lookup (typeOf t) rules of + Nothing -> liftIO $ errorIO $ "Could not find key: " ++ show key + Just x -> unwrapDynamic x key bs mode + +runRules :: Dynamic -> Rules () -> IO (TheRules, [Action ()]) +runRules rulesExtra (Rules rules) = do + rulesActions <- newIORef [] + rulesMap <- newIORef Map.empty + runReaderT rules SRules{..} + (,) <$> readIORef rulesMap <*> readIORef rulesActions diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Types.hs b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs new file mode 100644 index 0000000000..34bed42391 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Internal/Types.hs @@ -0,0 +1,277 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE RecordWildCards #-} + +module Development.IDE.Graph.Internal.Types where + +import Control.Concurrent.STM (STM) +import Control.Monad ((>=>)) +import Control.Monad.Catch +import Control.Monad.IO.Class +import Control.Monad.Trans.Reader +import Data.Aeson (FromJSON, ToJSON) +import Data.Bifunctor (second) +import qualified Data.ByteString as BS +import Data.Dynamic +import Data.Foldable (fold) +import qualified Data.HashMap.Strict as Map +import Data.IORef +import Data.List (intercalate) +import Data.Maybe +import Data.Typeable +import Development.IDE.Graph.Classes +import Development.IDE.Graph.Internal.Key +import GHC.Conc (TVar, atomically) +import GHC.Generics (Generic) +import qualified ListT +import qualified StmContainers.Map as SMap +import StmContainers.Map (Map) +import System.Time.Extra (Seconds) +import UnliftIO (MonadUnliftIO) + +#if !MIN_VERSION_base(4,18,0) +import Control.Applicative (liftA2) +#endif + +unwrapDynamic :: forall a . Typeable a => Dynamic -> a +unwrapDynamic x = fromMaybe (error msg) $ fromDynamic x + where msg = "unwrapDynamic failed: Expected " ++ show (typeRep (Proxy :: Proxy a)) ++ + ", but got " ++ show (dynTypeRep x) + +--------------------------------------------------------------------- +-- RULES + +type TheRules = Map.HashMap TypeRep Dynamic + +-- | A computation that defines all the rules that form part of the computation graph. +-- +-- 'Rules' has access to 'IO' through 'MonadIO'. Use of 'IO' is at your own risk: if +-- you write 'Rules' that throw exceptions, then you need to make sure to handle them +-- yourself when you run the resulting 'Rules'. +newtype Rules a = Rules (ReaderT SRules IO a) + deriving newtype (Monad, Applicative, Functor, MonadIO) + +data SRules = SRules { + rulesExtra :: !Dynamic, + rulesActions :: !(IORef [Action ()]), + rulesMap :: !(IORef TheRules) + } + +--------------------------------------------------------------------- +-- ACTIONS + +-- | An action representing something that can be run as part of a 'Rule'. +-- +-- 'Action's can be pure functions but also have access to 'IO' via 'MonadIO' and 'MonadUnliftIO. +-- It should be assumed that actions throw exceptions, these can be caught with +-- 'Development.IDE.Graph.Internal.Action.actionCatch'. In particular, it is +-- permissible to use the 'MonadFail' instance, which will lead to an 'IOException'. 
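+--
+-- A minimal sketch of how an 'Action' is typically obtained ('ExampleKey',
+-- 'exampleRules' and 'getLength' are hypothetical; 'addRule' and 'apply1'
+-- come from the Rules and Action modules in this package):
+--
+-- > newtype ExampleKey = ExampleKey FilePath
+-- >     deriving newtype (Eq, Show, Hashable, NFData)
+-- > type instance RuleResult ExampleKey = Int
+-- >
+-- > exampleRules :: Rules ()
+-- > exampleRules = addRule $ \(ExampleKey file) _previous _mode -> do
+-- >     len <- liftIO $ length <$> readFile file
+-- >     pure $ RunResult ChangedRecomputeDiff mempty len (pure ())
+-- >
+-- > getLength :: FilePath -> Action Int
+-- > getLength file = apply1 (ExampleKey file)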
+newtype Action a = Action {fromAction :: ReaderT SAction IO a} + deriving newtype (Monad, Applicative, Functor, MonadIO, MonadFail, MonadThrow, MonadCatch, MonadMask, MonadUnliftIO) + +data SAction = SAction { + actionDatabase :: !Database, + actionDeps :: !(IORef ResultDeps), + actionStack :: !Stack + } + +getDatabase :: Action Database +getDatabase = Action $ asks actionDatabase + +-- | waitForDatabaseRunningKeysAction waits for all keys in the database to finish running. +waitForDatabaseRunningKeysAction :: Action () +waitForDatabaseRunningKeysAction = getDatabase >>= liftIO . waitForDatabaseRunningKeys + +--------------------------------------------------------------------- +-- DATABASE + +data ShakeDatabase = ShakeDatabase !Int [Action ()] Database + +newtype Step = Step Int + deriving newtype (Eq,Ord,Hashable,Show) + +--------------------------------------------------------------------- +-- Keys + + + + +newtype Value = Value Dynamic + +data KeyDetails = KeyDetails { + keyStatus :: !Status, + keyReverseDeps :: !KeySet + } + +onKeyReverseDeps :: (KeySet -> KeySet) -> KeyDetails -> KeyDetails +onKeyReverseDeps f it@KeyDetails{..} = + it{keyReverseDeps = f keyReverseDeps} + +data Database = Database { + databaseExtra :: Dynamic, + databaseRules :: TheRules, + databaseStep :: !(TVar Step), + databaseValues :: !(Map Key KeyDetails) + } + +waitForDatabaseRunningKeys :: Database -> IO () +waitForDatabaseRunningKeys = getDatabaseValues >=> mapM_ (waitRunning . snd) + +getDatabaseValues :: Database -> IO [(Key, Status)] +getDatabaseValues = atomically + . (fmap.fmap) (second keyStatus) + . ListT.toList + . SMap.listT + . databaseValues + +data Status + = Clean !Result + | Dirty (Maybe Result) + | Running { + runningStep :: !Step, + runningWait :: !(IO ()), + runningResult :: Result, -- LAZY + runningPrev :: !(Maybe Result) + } + +viewDirty :: Step -> Status -> Status +viewDirty currentStep (Running s _ _ re) | currentStep /= s = Dirty re +viewDirty _ other = other + +getResult :: Status -> Maybe Result +getResult (Clean re) = Just re +getResult (Dirty m_re) = m_re +getResult (Running _ _ _ m_re) = m_re -- watch out: this returns the previous result + +waitRunning :: Status -> IO () +waitRunning Running{..} = runningWait +waitRunning _ = return () + +data Result = Result { + resultValue :: !Value, + resultBuilt :: !Step, -- ^ the step when it was last recomputed + resultChanged :: !Step, -- ^ the step when it last changed + resultVisited :: !Step, -- ^ the step when it was last looked up + resultDeps :: !ResultDeps, + resultExecution :: !Seconds, -- ^ How long it took, last time it ran + resultData :: !BS.ByteString + } + +-- Notice, invariant to maintain: +-- the ![KeySet] in ResultDeps need to be stored in reverse order, +-- so that we can append to it efficiently, and we need the ordering +-- so we can do a linear dependency refreshing in refreshDeps. 
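+--
+-- A worked example of that invariant (ks1, ks2 and ks3 stand for the key sets
+-- recorded by three successive 'apply' calls): each call prepends its set, so
+-- the newest dependencies end up at the head, and 'refreshDeps' is handed
+-- 'reverse deps' to revisit them in the original evaluation order.
+--
+-- > ResultDeps [ks3] <> (ResultDeps [ks2] <> (ResultDeps [ks1] <> UnknownDeps))
+-- >     == ResultDeps [ks3, ks2, ks1]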
+data ResultDeps = UnknownDeps | AlwaysRerunDeps !KeySet | ResultDeps ![KeySet] + deriving (Eq, Show) + +getResultDepsDefault :: KeySet -> ResultDeps -> KeySet +getResultDepsDefault _ (ResultDeps ids) = fold ids +getResultDepsDefault _ (AlwaysRerunDeps ids) = ids +getResultDepsDefault def UnknownDeps = def + +mapResultDeps :: (KeySet -> KeySet) -> ResultDeps -> ResultDeps +mapResultDeps f (ResultDeps ids) = ResultDeps $ fmap f ids +mapResultDeps f (AlwaysRerunDeps ids) = AlwaysRerunDeps $ f ids +mapResultDeps _ UnknownDeps = UnknownDeps + +instance Semigroup ResultDeps where + UnknownDeps <> x = x + x <> UnknownDeps = x + AlwaysRerunDeps ids <> x = AlwaysRerunDeps (ids <> getResultDepsDefault mempty x) + x <> AlwaysRerunDeps ids = AlwaysRerunDeps (getResultDepsDefault mempty x <> ids) + ResultDeps ids <> ResultDeps ids' = ResultDeps (ids <> ids') + +instance Monoid ResultDeps where + mempty = UnknownDeps + +--------------------------------------------------------------------- +-- Running builds + +-- | What mode a rule is running in, passed as an argument to 'BuiltinRun'. +data RunMode + = RunDependenciesSame -- ^ My dependencies have not changed. + | RunDependenciesChanged -- ^ At least one of my dependencies from last time have changed, or I have no recorded dependencies. + deriving (Eq,Show) + +instance NFData RunMode where rnf x = x `seq` () + +-- | How the output of a rule has changed. +data RunChanged + = ChangedNothing -- ^ Nothing has changed. + | ChangedRecomputeSame -- ^ I recomputed the value and it was the same. + | ChangedRecomputeDiff -- ^ I recomputed the value and it was different. + deriving (Eq,Show,Generic) + deriving anyclass (FromJSON, ToJSON) + +instance NFData RunChanged where rnf x = x `seq` () + +-- | The result of 'BuiltinRun'. +data RunResult value = RunResult + {runChanged :: RunChanged + -- ^ How has the 'RunResult' changed from what happened last time. + ,runStore :: BS.ByteString + -- ^ The value to store in the Shake database. + ,runValue :: value + -- ^ The value to return from 'Development.Shake.Rule.apply'. + ,runHook :: STM () + -- ^ The hook to run at the end of the build in the same transaction + -- when the key is marked as clean. + } deriving Functor + +--------------------------------------------------------------------- +-- EXCEPTIONS + +data GraphException = forall e. 
Exception e => GraphException { + target :: String, -- ^ The key that was being built + stack :: [String], -- ^ The stack of keys that led to this exception + inner :: e -- ^ The underlying exception +} + deriving (Exception) + +instance Show GraphException where + show GraphException{..} = unlines $ + ["GraphException: " ++ target] ++ + stack ++ + ["Inner exception: " ++ show inner] + +fromGraphException :: Typeable b => SomeException -> Maybe b +fromGraphException x = do + GraphException _ _ e <- fromException x + cast e + +--------------------------------------------------------------------- +-- CALL STACK + +data Stack = Stack [Key] !KeySet + +instance Show Stack where + show (Stack kk _) = "Stack: " <> intercalate " -> " (map show kk) + +newtype StackException = StackException Stack + deriving (Show) + +instance Exception StackException where + fromException = fromGraphException + toException this@(StackException (Stack stack _)) = toException $ + GraphException (show$ last stack) (map show stack) this + +addStack :: Key -> Stack -> Either StackException Stack +addStack k (Stack ks is) + | k `memberKeySet` is = Left $ StackException stack2 + | otherwise = Right stack2 + where stack2 = Stack (k:ks) (insertKeySet k is) + +memberStack :: Key -> Stack -> Bool +memberStack k (Stack _ ks) = k `memberKeySet` ks + +emptyStack :: Stack +emptyStack = Stack [] mempty +--------------------------------------------------------------------- +-- INSTANCES + +instance Semigroup a => Semigroup (Rules a) where + a <> b = liftA2 (<>) a b + +instance Monoid a => Monoid (Rules a) where + mempty = pure mempty diff --git a/hls-graph/src/Development/IDE/Graph/KeyMap.hs b/hls-graph/src/Development/IDE/Graph/KeyMap.hs new file mode 100644 index 0000000000..30ff4d6cfa --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/KeyMap.hs @@ -0,0 +1,15 @@ +module Development.IDE.Graph.KeyMap( + Key, + KeyMap, + mapKeyMap, + insertKeyMap, + lookupKeyMap, + lookupDefaultKeyMap, + fromListKeyMap, + fromListWithKeyMap, + toListKeyMap, + elemsKeyMap, + restrictKeysKeyMap, + ) where + +import Development.IDE.Graph.Internal.Key diff --git a/hls-graph/src/Development/IDE/Graph/KeySet.hs b/hls-graph/src/Development/IDE/Graph/KeySet.hs new file mode 100644 index 0000000000..cd0e76e675 --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/KeySet.hs @@ -0,0 +1,16 @@ +module Development.IDE.Graph.KeySet( + Key, + KeySet, + insertKeySet, + memberKeySet, + toListKeySet, + nullKeySet, + differenceKeySet, + deleteKeySet, + fromListKeySet, + singletonKeySet, + filterKeySet, + lengthKeySet, + ) where + +import Development.IDE.Graph.Internal.Key diff --git a/hls-graph/src/Development/IDE/Graph/Rule.hs b/hls-graph/src/Development/IDE/Graph/Rule.hs new file mode 100644 index 0000000000..34444b8fef --- /dev/null +++ b/hls-graph/src/Development/IDE/Graph/Rule.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Graph.Rule( + -- * Defining builtin rules + -- | Functions and types for defining new types of Shake rules. + addRule, + RunMode(..), RunChanged(..), RunResult(..), + -- * Calling builtin rules + -- | Wrappers around calling Shake rules. In general these should be specialised to a builtin rule. 
+ apply, apply1, applyWithoutDependency + ) where + +import Development.IDE.Graph.Internal.Action +import Development.IDE.Graph.Internal.Rules +import Development.IDE.Graph.Internal.Types diff --git a/hls-graph/src/Paths.hs b/hls-graph/src/Paths.hs new file mode 100644 index 0000000000..291acafad8 --- /dev/null +++ b/hls-graph/src/Paths.hs @@ -0,0 +1,12 @@ +-- | Fake cabal module for local building + +module Paths_hls_graph(getDataDir, version) where + +import Data.Version.Extra + +-- If hls_graph can't find files in the data directory it tries relative to the executable +getDataDir :: IO FilePath +getDataDir = pure "random_path_that_cannot_possibly_exist" + +version :: Version +version = makeVersion [0,0] diff --git a/hls-graph/test/ActionSpec.hs b/hls-graph/test/ActionSpec.hs new file mode 100644 index 0000000000..97ab5555ac --- /dev/null +++ b/hls-graph/test/ActionSpec.hs @@ -0,0 +1,131 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module ActionSpec where + +import Control.Concurrent (MVar, readMVar) +import qualified Control.Concurrent as C +import Control.Concurrent.STM +import Control.Monad.IO.Class (MonadIO (..)) +import Development.IDE.Graph (shakeOptions) +import Development.IDE.Graph.Database (shakeNewDatabase, + shakeRunDatabase, + shakeRunDatabaseForKeys) +import Development.IDE.Graph.Internal.Database (build, incDatabase) +import Development.IDE.Graph.Internal.Key +import Development.IDE.Graph.Internal.Types +import Development.IDE.Graph.Rule +import Example +import qualified StmContainers.Map as STM +import Test.Hspec + + + +spec :: Spec +spec = do + describe "apply1" $ it "Test build update, Buggy dirty mechanism in hls-graph #4237" $ do + let ruleStep1 :: MVar Int -> Rules () + ruleStep1 m = addRule $ \CountRule _old mode -> do + -- depends on ruleSubBranch, it always changed if dirty + _ :: Int <- apply1 SubBranchRule + let r = 1 + case mode of + -- it update the built step + RunDependenciesChanged -> do + _ <- liftIO $ C.modifyMVar m $ \x -> return (x+1, x) + return $ RunResult ChangedRecomputeSame "" r (return ()) + -- this won't update the built step + RunDependenciesSame -> + return $ RunResult ChangedNothing "" r (return ()) + count <- C.newMVar 0 + count1 <- C.newMVar 0 + db <- shakeNewDatabase shakeOptions $ do + ruleSubBranch count + ruleStep1 count1 + -- bootstrapping the database + _ <- shakeRunDatabase db $ pure $ apply1 CountRule -- count = 1 + let child = newKey SubBranchRule + let parent = newKey CountRule + -- instruct to RunDependenciesChanged then CountRule should be recomputed + -- result should be changed 0, build 1 + _res1 <- shakeRunDatabaseForKeys (Just [child]) db [apply1 CountRule] -- count = 2 + -- since child changed = parent build + -- instruct to RunDependenciesSame then CountRule should not be recomputed + -- result should be changed 0, build 1 + _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] -- count = 2 + -- invariant child changed = parent build should remains after RunDependenciesSame + -- this used to be a bug, with additional computation, see https://github.com/haskell/haskell-language-server/pull/4238 + _res3 <- shakeRunDatabaseForKeys (Just [parent]) db [apply1 CountRule] -- count = 2 + c1 <- readMVar count1 + c1 `shouldBe` 2 + describe "apply1" $ do + it "computes a rule with no dependencies" $ do + db <- shakeNewDatabase shakeOptions ruleUnit + res <- shakeRunDatabase db $ + pure $ apply1 (Rule @()) + res `shouldBe` [()] + it "computes a rule with one 
dependency" $ do + db <- shakeNewDatabase shakeOptions $ do + ruleUnit + ruleBool + res <- shakeRunDatabase db $ pure $ apply1 Rule + res `shouldBe` [True] + it "tracks direct dependencies" $ do + db@(ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + ruleUnit + ruleBool + let theKey = Rule @Bool + res <- shakeRunDatabase db $ + pure $ apply1 theKey + res `shouldBe` [True] + Just (Clean res) <- lookup (newKey theKey) <$> getDatabaseValues theDb + resultDeps res `shouldBe` ResultDeps [singletonKeySet $ newKey (Rule @())] + it "tracks reverse dependencies" $ do + db@(ShakeDatabase _ _ Database {..}) <- shakeNewDatabase shakeOptions $ do + ruleUnit + ruleBool + let theKey = Rule @Bool + res <- shakeRunDatabase db $ + pure $ apply1 theKey + res `shouldBe` [True] + Just KeyDetails {..} <- atomically $ STM.lookup (newKey (Rule @())) databaseValues + keyReverseDeps `shouldBe` singletonKeySet (newKey theKey) + it "rethrows exceptions" $ do + db <- shakeNewDatabase shakeOptions $ addRule $ \(Rule :: Rule ()) _old _mode -> error "boom" + let res = shakeRunDatabase db $ pure $ apply1 (Rule @()) + res `shouldThrow` anyErrorCall + it "computes a rule with branching dependencies does not invoke phantom dependencies #3423" $ do + cond <- C.newMVar True + count <- C.newMVar 0 + (ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + ruleUnit + ruleCond cond + ruleSubBranch count + ruleWithCond + -- build the one with the condition True + -- This should call the SubBranchRule once + -- cond rule would return different results each time + res0 <- build theDb emptyStack [BranchedRule] + snd res0 `shouldBe` [1 :: Int] + incDatabase theDb Nothing + -- build the one with the condition False + -- This should not call the SubBranchRule + res1 <- build theDb emptyStack [BranchedRule] + snd res1 `shouldBe` [2 :: Int] + -- SubBranchRule should be recomputed once before this (when the condition was True) + countRes <- build theDb emptyStack [SubBranchRule] + snd countRes `shouldBe` [1 :: Int] + + describe "applyWithoutDependency" $ it "does not track dependencies" $ do + db@(ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + ruleUnit + addRule $ \Rule _old _mode -> do + [()] <- applyWithoutDependency [Rule] + return $ RunResult ChangedRecomputeDiff "" True $ return () + + let theKey = Rule @Bool + res <- shakeRunDatabase db $ + pure $ applyWithoutDependency [theKey] + res `shouldBe` [[True]] + Just (Clean res) <- lookup (newKey theKey) <$> getDatabaseValues theDb + resultDeps res `shouldBe` UnknownDeps diff --git a/hls-graph/test/DatabaseSpec.hs b/hls-graph/test/DatabaseSpec.hs new file mode 100644 index 0000000000..9061bfa89d --- /dev/null +++ b/hls-graph/test/DatabaseSpec.hs @@ -0,0 +1,49 @@ +{-# LANGUAGE OverloadedStrings #-} + +module DatabaseSpec where + +import Development.IDE.Graph (newKey, shakeOptions) +import Development.IDE.Graph.Database (shakeNewDatabase, + shakeRunDatabase) +import Development.IDE.Graph.Internal.Action (apply1) +import Development.IDE.Graph.Internal.Database (compute, incDatabase) +import Development.IDE.Graph.Internal.Rules (addRule) +import Development.IDE.Graph.Internal.Types +import Example +import System.Time.Extra (timeout) +import Test.Hspec + + +spec :: Spec +spec = do + describe "Evaluation" $ do + it "detects cycles" $ do + db <- shakeNewDatabase shakeOptions $ do + ruleBool + addRule $ \Rule _old _mode -> do + True <- apply1 (Rule @Bool) + return $ RunResult ChangedRecomputeDiff "" () (return ()) + let res = shakeRunDatabase db $ pure $ 
apply1 (Rule @()) + timeout 1 res `shouldThrow` \StackException{} -> True + + describe "compute" $ do + it "build step and changed step updated correctly" $ do + (ShakeDatabase _ _ theDb) <- shakeNewDatabase shakeOptions $ do + ruleStep + + let k = newKey $ Rule @() + -- ChangedRecomputeSame + r1@Result{resultChanged=rc1, resultBuilt=rb1} <- compute theDb emptyStack k RunDependenciesChanged Nothing + incDatabase theDb Nothing + -- ChangedRecomputeSame + r2@Result{resultChanged=rc2, resultBuilt=rb2} <- compute theDb emptyStack k RunDependenciesChanged (Just r1) + incDatabase theDb Nothing + -- changed Nothing + Result{resultChanged=rc3, resultBuilt=rb3} <- compute theDb emptyStack k RunDependenciesSame (Just r2) + rc1 `shouldBe` Step 0 + rc2 `shouldBe` Step 0 + rc3 `shouldBe` Step 0 + + rb1 `shouldBe` Step 0 + rb2 `shouldBe` Step 1 + rb3 `shouldBe` Step 1 diff --git a/hls-graph/test/Example.hs b/hls-graph/test/Example.hs new file mode 100644 index 0000000000..c20ea79328 --- /dev/null +++ b/hls-graph/test/Example.hs @@ -0,0 +1,74 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE NoPolyKinds #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +module Example where + +import qualified Control.Concurrent as C +import Control.Monad.IO.Class (liftIO) +import Development.IDE.Graph +import Development.IDE.Graph.Classes +import Development.IDE.Graph.Rule +import GHC.Generics +import Type.Reflection (typeRep) + +data Rule a = Rule + deriving (Eq, Generic, Hashable, NFData) + +instance Typeable a => Show (Rule a) where + show Rule = show $ typeRep @a + +type instance RuleResult (Rule a) = a + +ruleStep :: Rules () +ruleStep = addRule $ \(Rule :: Rule ()) _old mode -> do + case mode of + RunDependenciesChanged -> return $ RunResult ChangedRecomputeSame "" () (return ()) + RunDependenciesSame -> return $ RunResult ChangedNothing "" () (return ()) + +ruleUnit :: Rules () +ruleUnit = addRule $ \(Rule :: Rule ()) _old _mode -> do + return $ RunResult ChangedRecomputeDiff "" () (return ()) + +-- | Depends on Rule @() +ruleBool :: Rules () +ruleBool = addRule $ \Rule _old _mode -> do + () <- apply1 Rule + return $ RunResult ChangedRecomputeDiff "" True (return ()) + + +data CondRule = CondRule + deriving (Eq, Generic, Hashable, NFData, Show) +type instance RuleResult CondRule = Bool + + +ruleCond :: C.MVar Bool -> Rules () +ruleCond mv = addRule $ \CondRule _old _mode -> do + r <- liftIO $ C.modifyMVar mv $ \x -> return (not x, x) + return $ RunResult ChangedRecomputeDiff "" r (return ()) + +data BranchedRule = BranchedRule + deriving (Eq, Generic, Hashable, NFData, Show) +type instance RuleResult BranchedRule = Int + +ruleWithCond :: Rules () +ruleWithCond = addRule $ \BranchedRule _old _mode -> do + r <- apply1 CondRule + if r then do + _ <- apply1 SubBranchRule + return $ RunResult ChangedRecomputeDiff "" (1 :: Int) (return ()) + else + return $ RunResult ChangedRecomputeDiff "" (2 :: Int) (return ()) + +data SubBranchRule = SubBranchRule + deriving (Eq, Generic, Hashable, NFData, Show) +type instance RuleResult SubBranchRule = Int + +ruleSubBranch :: C.MVar Int -> Rules () +ruleSubBranch mv = addRule $ \SubBranchRule _old _mode -> do + r <- liftIO $ C.modifyMVar mv $ \x -> return (x+1, x) + return $ RunResult ChangedRecomputeDiff "" r (return ()) + +data CountRule = CountRule + deriving (Eq, Generic, Hashable, NFData, Show) +type instance RuleResult CountRule = Int diff --git a/hls-graph/test/Main.hs b/hls-graph/test/Main.hs new file mode 100644 index 0000000000..553982775f --- 
/dev/null +++ b/hls-graph/test/Main.hs @@ -0,0 +1,7 @@ +import qualified Spec +import Test.Tasty +import Test.Tasty.Hspec +import Test.Tasty.Ingredients.Rerun (defaultMainWithRerun) + +main :: IO () +main = testSpecs Spec.spec >>= defaultMainWithRerun . testGroup "tactics" diff --git a/hls-graph/test/RulesSpec.hs b/hls-graph/test/RulesSpec.hs new file mode 100644 index 0000000000..bb9253fb26 --- /dev/null +++ b/hls-graph/test/RulesSpec.hs @@ -0,0 +1,8 @@ +module RulesSpec where + +import Test.Hspec + +spec :: Spec +spec = do + describe "" $ do + pure () diff --git a/hls-graph/test/Spec.hs b/hls-graph/test/Spec.hs new file mode 100644 index 0000000000..5416ef6a86 --- /dev/null +++ b/hls-graph/test/Spec.hs @@ -0,0 +1 @@ +{-# OPTIONS_GHC -F -pgmF hspec-discover -optF --module-name=Spec #-} diff --git a/hls-plugin-api/LICENSE b/hls-plugin-api/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/hls-plugin-api/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/hls-plugin-api/bench/Main.hs b/hls-plugin-api/bench/Main.hs new file mode 100644 index 0000000000..52006af16d --- /dev/null +++ b/hls-plugin-api/bench/Main.hs @@ -0,0 +1,56 @@ +-- A benchmark comparing the performance characteristics of list-based +-- vs RangeMap-based "in-range filtering" approaches +module Main (main) where + +import Control.DeepSeq (force) +import Control.Exception (evaluate) +import Control.Monad (replicateM) +import qualified Criterion +import qualified Criterion.Main +import Data.Random (RVar) +import qualified Data.Random as Fu +import qualified Ide.Plugin.RangeMap as RangeMap +import Language.LSP.Protocol.Types (Position (..), Range (..), UInt, + isSubrangeOf) +import qualified System.Random.Stateful as Random + + +genRangeList :: Int -> RVar [Range] +genRangeList n = replicateM n genRange + +genRange :: RVar Range +genRange = do + x1 <- genPosition + delta <- genRangeLength + let x2 = x1 { _character = _character x1 + delta } + pure $ Range x1 x2 + where + genRangeLength :: RVar UInt + genRangeLength = fromInteger <$> Fu.uniform 5 50 + +genPosition :: RVar Position +genPosition = Position + <$> (fromInteger <$> Fu.uniform 0 10000) + <*> (fromInteger <$> Fu.uniform 0 150) + +filterRangeList :: Range -> [Range] -> [Range] +filterRangeList r = filter (isSubrangeOf r) + +main :: IO () +main = do + rangeLists@[rangeList100, rangeList1000, rangeList10000] + <- traverse (Fu.sampleFrom Random.globalStdGen . 
genRangeList) [100, 1000, 10000] + [rangeMap100, rangeMap1000, rangeMap10000] <- evaluate $ force $ map (RangeMap.fromList id) rangeLists + targetRange <- Fu.sampleFrom Random.globalStdGen genRange + Criterion.Main.defaultMain + [ Criterion.bgroup "List" + [ Criterion.bench "Size 100" $ Criterion.nf (filterRangeList targetRange) rangeList100 + , Criterion.bench "Size 1000" $ Criterion.nf (filterRangeList targetRange) rangeList1000 + , Criterion.bench "Size 10000" $ Criterion.nf (filterRangeList targetRange) rangeList10000 + ] + , Criterion.bgroup "RangeMap" + [ Criterion.bench "Size 100" $ Criterion.nf (RangeMap.filterByRange targetRange) rangeMap100 + , Criterion.bench "Size 1000" $ Criterion.nf (RangeMap.filterByRange targetRange) rangeMap1000 + , Criterion.bench "Size 10000" $ Criterion.nf (RangeMap.filterByRange targetRange) rangeMap10000 + ] + ] diff --git a/hls-plugin-api/hls-plugin-api.cabal b/hls-plugin-api/hls-plugin-api.cabal new file mode 100644 index 0000000000..bad55992bb --- /dev/null +++ b/hls-plugin-api/hls-plugin-api.cabal @@ -0,0 +1,149 @@ +cabal-version: 2.4 +name: hls-plugin-api +version: 2.11.0.0 +synopsis: Haskell Language Server API for plugin communication +description: + Please see the README on GitHub at + +homepage: https://github.com/haskell/haskell-language-server#readme +bug-reports: https://github.com/haskell/haskell-language-server/issues +license: Apache-2.0 +license-file: LICENSE +author: The Haskell IDE Team +maintainer: alan.zimm@gmail.com +copyright: The Haskell IDE Team +category: Development +build-type: Simple + +flag pedantic + description: Enable -Werror + default: False + manual: True + +-- This flag can be used to avoid the dependency on hw-fingertree. +-- We can set this temporarily if we have problems building hw-fingertree +-- for a new version of GHC. 
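-- One illustrative way (unverified, assuming a cabal.project at the repo root)
-- to disable the flag when hw-fingertree does not build:
--
--   constraints: hls-plugin-api -use-fingertree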
+flag use-fingertree + description: Use fingertree implementation of RangeMap + default: True + manual: False + +source-repository head + type: git + location: https://github.com/haskell/haskell-language-server + +common warnings + ghc-options: + -Wall -Wredundant-constraints -Wunused-packages + -Wno-name-shadowing -Wno-unticked-promoted-constructors + +library + import: warnings + exposed-modules: + Ide.Logger + Ide.Plugin.Config + Ide.Plugin.ConfigUtils + Ide.Plugin.Error + Ide.Plugin.HandleRequestTypes + Ide.Plugin.Properties + Ide.Plugin.RangeMap + Ide.Plugin.Resolve + Ide.PluginUtils + Ide.Types + + hs-source-dirs: src + build-depends: + , aeson + , base >=4.12 && <5 + , co-log-core + , containers + , data-default + , dependent-map + , dependent-sum >=0.7 + , Diff ^>=0.5 || ^>=1.0.0 + , dlist + , extra + , filepath + , ghc + , hashable + , hls-graph == 2.11.0.0 + , lens + , lens-aeson + , lsp ^>=2.7 + , megaparsec >=9.0 + , mtl + , opentelemetry >=0.4 + , optparse-applicative + , prettyprinter + , regex-tdfa >=1.3.1.0 + , stm + , text + , time + , transformers + , unliftio + , unordered-containers + + if os(windows) + build-depends: Win32 + + else + build-depends: unix + + if flag(pedantic) + ghc-options: -Werror + + if flag(use-fingertree) + cpp-options: -DUSE_FINGERTREE + build-depends: hw-fingertree + + default-language: GHC2021 + default-extensions: + DataKinds + +test-suite tests + import: warnings + type: exitcode-stdio-1.0 + default-language: GHC2021 + hs-source-dirs: test + main-is: Main.hs + ghc-options: -threaded -rtsopts -with-rtsopts=-N + other-modules: + Ide.PluginUtilsTest + Ide.TypesTests + + build-depends: + , bytestring + , aeson + , base + , containers + , data-default + , hls-plugin-api + , lens + , lsp-types + , tasty + , tasty-golden + , tasty-hunit + , tasty-quickcheck + , tasty-rerun + , text + +benchmark rangemap-benchmark + import: warnings + -- Benchmark doesn't make sense if fingertree implementation + -- is not used. + if !flag(use-fingertree) + buildable: False + + type: exitcode-stdio-1.0 + default-language: GHC2021 + hs-source-dirs: bench + main-is: Main.hs + ghc-options: -threaded + build-depends: + , base + , criterion + , deepseq + , hls-plugin-api + , lsp-types + , random + , random-fu diff --git a/hls-plugin-api/src/Ide/Logger.hs b/hls-plugin-api/src/Ide/Logger.hs new file mode 100644 index 0000000000..d9d1eb95b3 --- /dev/null +++ b/hls-plugin-api/src/Ide/Logger.hs @@ -0,0 +1,283 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +-- | This is a compatibility module that abstracts over the +-- concrete choice of logging framework so users can plug in whatever +-- framework they want to. +module Ide.Logger + ( Priority(..) + , Recorder(..) + , WithPriority(..) + , logWith + , cmap + , cmapIO + , cfilter + , withFileRecorder + , makeDefaultStderrRecorder + , makeDefaultHandleRecorder + , LoggingColumn(..) 
+ , cmapWithPrio + , withBacklog + , lspClientMessageRecorder + , lspClientLogRecorder + , module PrettyPrinterModule + , renderStrict + , toCologActionWithPrio + , defaultLoggingColumns + ) where + +import Colog.Core (LogAction (..), Severity, + WithSeverity (..)) +import qualified Colog.Core as Colog +import Control.Concurrent (myThreadId) +import Control.Concurrent.Extra (Lock, newLock, withLock) +import Control.Concurrent.STM (atomically, flushTBQueue, + isFullTBQueue, newTBQueueIO, + newTVarIO, readTVarIO, + writeTBQueue, writeTVar) +import Control.Exception (IOException) +import Control.Monad (unless, when, (>=>)) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Data.Foldable (for_) +import Data.Functor.Contravariant (Contravariant (contramap)) +import Data.Maybe (fromMaybe) +import Data.Text (Text) +import qualified Data.Text as Text +import qualified Data.Text.IO as Text +import Data.Time (defaultTimeLocale, formatTime, + getCurrentTime) +import GHC.Stack (CallStack, HasCallStack, + SrcLoc (SrcLoc, srcLocModule, srcLocStartCol, srcLocStartLine), + callStack, getCallStack, + withFrozenCallStack) +import Language.LSP.Protocol.Message (SMethod (SMethod_WindowLogMessage, SMethod_WindowShowMessage)) +import Language.LSP.Protocol.Types (LogMessageParams (..), + MessageType (..), + ShowMessageParams (..)) +import Language.LSP.Server +import qualified Language.LSP.Server as LSP +import Prettyprinter as PrettyPrinterModule +import Prettyprinter.Render.Text (renderStrict) +import System.IO (Handle, IOMode (AppendMode), + hClose, hFlush, openFile, + stderr) +import UnliftIO (MonadUnliftIO, finally, try) + +data Priority +-- Don't change the ordering of this type or you will mess up the Ord +-- instance + = Debug -- ^ Verbose debug logging. + | Info -- ^ Useful information in case an error has to be understood. + | Warning + -- ^ These error messages should not occur in a expected usage, and + -- should be investigated. + | Error -- ^ Such log messages must never occur in expected usage. + deriving (Eq, Show, Read, Ord, Enum, Bounded) + +data WithPriority a = WithPriority { priority :: Priority, callStack_ :: CallStack, payload :: a } deriving Functor + +-- | Note that this is logging actions _of the program_, not of the user. +-- You shouldn't call warning/error if the user has caused an error, only +-- if our code has gone wrong and is itself erroneous (e.g. we threw an exception). +newtype Recorder msg = Recorder + { logger_ :: forall m. (MonadIO m) => msg -> m () } + +logWith :: (HasCallStack, MonadIO m) => Recorder (WithPriority msg) -> Priority -> msg -> m () +logWith recorder priority msg = withFrozenCallStack $ logger_ recorder (WithPriority priority callStack msg) + +instance Semigroup (Recorder msg) where + (<>) Recorder{ logger_ = logger_1 } Recorder{ logger_ = logger_2 } = + Recorder + { logger_ = \msg -> logger_1 msg >> logger_2 msg } + +instance Monoid (Recorder msg) where + mempty = + Recorder + { logger_ = \_ -> pure () } + +instance Contravariant Recorder where + contramap f Recorder{ logger_ } = + Recorder + { logger_ = logger_ . f } + +cmap :: (a -> b) -> Recorder b -> Recorder a +cmap = contramap + +cmapWithPrio :: (a -> b) -> Recorder (WithPriority b) -> Recorder (WithPriority a) +cmapWithPrio f = cmap (fmap f) + +cmapIO :: (a -> IO b) -> Recorder b -> Recorder a +cmapIO f Recorder{ logger_ } = + Recorder + { logger_ = (liftIO . 
f) >=> logger_ } + +cfilter :: (a -> Bool) -> Recorder a -> Recorder a +cfilter p Recorder{ logger_ } = + Recorder + { logger_ = \msg -> when (p msg) (logger_ msg) } + +textHandleRecorder :: Handle -> Recorder Text +textHandleRecorder handle = + Recorder + { logger_ = \text -> liftIO $ Text.hPutStrLn handle text *> hFlush handle } + +makeDefaultStderrRecorder :: MonadIO m => Maybe [LoggingColumn] -> m (Recorder (WithPriority (Doc a))) +makeDefaultStderrRecorder columns = do + lock <- liftIO newLock + makeDefaultHandleRecorder columns lock stderr + +withFileRecorder + :: MonadUnliftIO m + => FilePath + -- ^ Log file path. + -> Maybe [LoggingColumn] + -- ^ logging columns to display. `Nothing` uses `defaultLoggingColumns` + -> (Either IOException (Recorder (WithPriority (Doc d))) -> m a) + -- ^ action given a recorder, or the exception if we failed to open the file + -> m a +withFileRecorder path columns action = do + lock <- liftIO newLock + let makeHandleRecorder = makeDefaultHandleRecorder columns lock + fileHandle :: Either IOException Handle <- liftIO $ try (openFile path AppendMode) + case fileHandle of + Left e -> action $ Left e + Right fileHandle -> finally (makeHandleRecorder fileHandle >>= action . Right) (liftIO $ hClose fileHandle) + +makeDefaultHandleRecorder + :: MonadIO m + => Maybe [LoggingColumn] + -- ^ built-in logging columns to display. Nothing uses the default + -> Lock + -- ^ lock to take when outputting to handle + -> Handle + -- ^ handle to output to + -> m (Recorder (WithPriority (Doc a))) +makeDefaultHandleRecorder columns lock handle = do + let Recorder{ logger_ } = textHandleRecorder handle + let threadSafeRecorder = Recorder { logger_ = \msg -> liftIO $ withLock lock (logger_ msg) } + let loggingColumns = fromMaybe defaultLoggingColumns columns + let textWithPriorityRecorder = cmapIO (textWithPriorityToText loggingColumns) threadSafeRecorder + pure (cmap docToText textWithPriorityRecorder) + where + docToText = fmap (renderStrict . layoutPretty defaultLayoutOptions) + +data LoggingColumn + = TimeColumn + | ThreadIdColumn + | PriorityColumn + | DataColumn + | SourceLocColumn + +defaultLoggingColumns :: [LoggingColumn] +defaultLoggingColumns = [TimeColumn, PriorityColumn, DataColumn] + +textWithPriorityToText :: [LoggingColumn] -> WithPriority Text -> IO Text +textWithPriorityToText columns WithPriority{ priority, callStack_, payload } = do + textColumns <- mapM loggingColumnToText columns + pure $ Text.intercalate " | " textColumns + where + showAsText :: Show a => a -> Text + showAsText = Text.pack . show + + utcTimeToText utcTime = Text.pack $ formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%6QZ" utcTime + + priorityToText :: Priority -> Text + priorityToText = showAsText + + threadIdToText = showAsText + + callStackToSrcLoc :: CallStack -> Maybe SrcLoc + callStackToSrcLoc callStack = + case getCallStack callStack of + (_, srcLoc) : _ -> Just srcLoc + _ -> Nothing + + srcLocToText = \case + Nothing -> "" + Just SrcLoc{ srcLocModule, srcLocStartLine, srcLocStartCol } -> + Text.pack srcLocModule <> "#" <> showAsText srcLocStartLine <> ":" <> showAsText srcLocStartCol + + loggingColumnToText :: LoggingColumn -> IO Text + loggingColumnToText = \case + TimeColumn -> do + utcTime <- getCurrentTime + pure (utcTimeToText utcTime) + SourceLocColumn -> pure $ (srcLocToText . 
callStackToSrcLoc) callStack_ + ThreadIdColumn -> do + threadId <- myThreadId + pure (threadIdToText threadId) + PriorityColumn -> pure (priorityToText priority) + DataColumn -> pure payload + +-- | Given a 'Recorder' that requires an argument, produces a 'Recorder' +-- that queues up messages until the argument is provided using the callback, at which +-- point it sends the backlog and begins functioning normally. +withBacklog :: (v -> Recorder a) -> IO (Recorder a, v -> IO ()) +withBacklog recFun = do + -- Arbitrary backlog capacity + backlog <- newTBQueueIO 100 + let backlogRecorder = Recorder $ \it -> liftIO $ atomically $ do + -- If the queue is full just drop the message on the floor. This is most likely + -- to happen if the callback is just never going to be called; in which case + -- we want neither to build up an unbounded backlog in memory, nor block waiting + -- for space! + full <- isFullTBQueue backlog + unless full $ writeTBQueue backlog it + + -- The variable holding the recorder starts out holding the recorder that writes + -- to the backlog. + recVar <- newTVarIO backlogRecorder + -- The callback atomically swaps out the recorder for the final one, and flushes + -- the backlog to it. + let cb arg = do + let recorder = recFun arg + toRecord <- atomically $ writeTVar recVar recorder >> flushTBQueue backlog + for_ toRecord (logger_ recorder) + + -- The recorder we actually return looks in the variable and uses whatever is there. + let varRecorder = Recorder $ \it -> do + r <- liftIO $ readTVarIO recVar + logger_ r it + + pure (varRecorder, cb) + +-- | Creates a recorder that sends logs to the LSP client via @window/showMessage@ notifications. +lspClientMessageRecorder :: LanguageContextEnv config -> Recorder (WithPriority Text) +lspClientMessageRecorder env = Recorder $ \WithPriority {..} -> + liftIO $ LSP.runLspT env $ LSP.sendNotification SMethod_WindowShowMessage + ShowMessageParams + { _type_ = priorityToLsp priority, + _message = payload + } + +-- | Creates a recorder that sends logs to the LSP client via @window/logMessage@ notifications. +lspClientLogRecorder :: LanguageContextEnv config -> Recorder (WithPriority Text) +lspClientLogRecorder env = Recorder $ \WithPriority {..} -> + liftIO $ LSP.runLspT env $ LSP.sendNotification SMethod_WindowLogMessage + LogMessageParams + { _type_ = priorityToLsp priority, + _message = payload + } + +priorityToLsp :: Priority -> MessageType +priorityToLsp = + \case + Debug -> MessageType_Log + Info -> MessageType_Info + Warning -> MessageType_Warning + Error -> MessageType_Error + +toCologActionWithPrio :: (MonadIO m, HasCallStack) => Recorder (WithPriority msg) -> LogAction m (WithSeverity msg) +toCologActionWithPrio (Recorder _logger) = LogAction $ \WithSeverity{..} -> do + let priority = severityToPriority getSeverity + _logger $ WithPriority priority callStack getMsg + where + severityToPriority :: Severity -> Priority + severityToPriority Colog.Debug = Debug + severityToPriority Colog.Info = Info + severityToPriority Colog.Warning = Warning + severityToPriority Colog.Error = Error diff --git a/hls-plugin-api/src/Ide/Plugin/Config.hs b/hls-plugin-api/src/Ide/Plugin/Config.hs new file mode 100644 index 0000000000..4fee92c309 --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/Config.hs @@ -0,0 +1,82 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.Config + ( getConfigFromNotification + , Config(..) 
+ , parseConfig + , PluginConfig(..) + , CheckParents(..) + ) where + +import Control.Lens (preview) +import Data.Aeson hiding (Error) +import qualified Data.Aeson as A +import Data.Aeson.Lens (_String) +import qualified Data.Aeson.Types as A +import Data.Default +import qualified Data.Map.Strict as Map +import Data.Maybe (fromMaybe) +import qualified Data.Text as T +import GHC.Exts (toList) +import Ide.Types + +-- --------------------------------------------------------------------- + +-- | Given a DidChangeConfigurationNotification message, this function returns the parsed +-- Config object if possible. +getConfigFromNotification :: IdePlugins s -> Config -> A.Value -> Either T.Text Config +getConfigFromNotification plugins defaultValue p = + case A.parse (parseConfig plugins defaultValue) p of + A.Success c -> Right c + A.Error err -> Left $ T.pack err + +-- --------------------------------------------------------------------- + +parseConfig :: IdePlugins s -> Config -> Value -> A.Parser Config +parseConfig idePlugins defValue = A.withObject "settings" $ \o -> + Config + <$> o .:? "checkParents" .!= checkParents defValue + <*> o .:? "checkProject" .!= checkProject defValue + <*> o .:? "formattingProvider" .!= formattingProvider defValue + <*> o .:? "cabalFormattingProvider" .!= cabalFormattingProvider defValue + <*> o .:? "maxCompletions" .!= maxCompletions defValue + <*> o .:? "sessionLoading" .!= sessionLoading defValue + <*> A.explicitParseFieldMaybe (parsePlugins idePlugins) o "plugin" .!= plugins defValue + +-- | Parse the 'PluginConfig'. +-- Since we need to fall back to default values if we do not find one in the input, +-- we need the map of plugin-provided defaults, as in 'parseConfig'. +parsePlugins :: IdePlugins s -> Value -> A.Parser (Map.Map PluginId PluginConfig) +parsePlugins (IdePlugins plugins) = A.withObject "Config.plugins" $ \o -> do + let -- parseOne :: Key -> Value -> A.Parser (T.Text, PluginConfig) + parseOne (fmap PluginId . preview _String . toJSON -> Just pId) pConfig = do + let defPluginConfig = fromMaybe def $ lookup pId defValue + pConfig' <- parsePluginConfig defPluginConfig pConfig + return (pId, pConfig') + parseOne _ _ = fail "Expected plugin id to be a string" + defValue = map (\p -> (pluginId p, configInitialGenericConfig (pluginConfigDescriptor p))) plugins + plugins <- mapM (uncurry parseOne) (toList o) + return $ Map.fromList plugins + +-- --------------------------------------------------------------------- + +parsePluginConfig :: PluginConfig -> Value -> A.Parser PluginConfig +parsePluginConfig def = A.withObject "PluginConfig" $ \o -> PluginConfig + <$> o .:? "globalOn" .!= plcGlobalOn def + <*> o .:? "callHierarchyOn" .!= plcCallHierarchyOn def + <*> o .:? "codeActionsOn" .!= plcCodeActionsOn def + <*> o .:? "codeLensOn" .!= plcCodeLensOn def + <*> o .:? "inlayHintsOn" .!= plcInlayHintsOn def + <*> o .:? "diagnosticsOn" .!= plcDiagnosticsOn def -- AZ + <*> o .:? "hoverOn" .!= plcHoverOn def + <*> o .:? "symbolsOn" .!= plcSymbolsOn def + <*> o .:? "completionOn" .!= plcCompletionOn def + <*> o .:? "renameOn" .!= plcRenameOn def + <*> o .:? "selectionRangeOn" .!= plcSelectionRangeOn def + <*> o .:? "foldingRangeOn" .!= plcFoldingRangeOn def + <*> o .:? "semanticTokensOn" .!= plcSemanticTokensOn def + <*> o .:? 
"config" .!= plcConfig def + +-- --------------------------------------------------------------------- diff --git a/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs new file mode 100644 index 0000000000..a7350ab344 --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/ConfigUtils.hs @@ -0,0 +1,242 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.ConfigUtils ( + pluginsToDefaultConfig, + pluginsToVSCodeExtensionSchema, + pluginsCustomConfigToMarkdownTables + ) where + +import Control.Lens (at, (&), (?~)) +import qualified Data.Aeson as A +import Data.Aeson.Lens (_Object) +import qualified Data.Aeson.Types as A +import Data.Default +import qualified Data.Dependent.Map as DMap +import qualified Data.Dependent.Sum as DSum +import Data.List.Extra (nubOrd) +import Data.String (IsString (fromString)) +import qualified Data.Text as T +import GHC.TypeLits (symbolVal) +import Ide.Plugin.Config +import Ide.Plugin.Properties (KeyNameProxy, MetaData (..), + PluginCustomConfig (..), + PluginCustomConfigParam (..), + Properties (..), + SPropertyKey (..), + SomePropertyKeyWithMetaData (..), + toDefaultJSON, + toVSCodeExtensionSchema) +import Ide.Types +import Language.LSP.Protocol.Message + +-- Attention: +-- 'diagnosticsOn' will never be added into the default config or the schema, +-- since diagnostics emit in arbitrary shake rules -- we don't know +-- whether a plugin is capable of producing diagnostics. + +-- | Generates a default 'Config', but remains only effective items +pluginsToDefaultConfig :: IdePlugins a -> A.Value +pluginsToDefaultConfig IdePlugins {..} = + -- Use '_Object' and 'at' to get at the "plugin" key + -- and actually set it. + A.toJSON defaultConfig & _Object . 
at "plugin" ?~ pluginSpecificDefaultConfigs + where + defaultConfig = def :: Config + pluginSpecificDefaultConfigs = A.object $ mconcat $ singlePlugin <$> ipMap + -- Splice genericDefaultConfig and dedicatedDefaultConfig + -- Example: + -- + -- { + -- "plugin-id": { + -- "globalOn": true, + -- "codeActionsOn": true, + -- "codeLensOn": true, + -- "config": { + -- "property1": "foo" + -- } + -- } + -- } + singlePlugin :: PluginDescriptor ideState -> [A.Pair] + singlePlugin PluginDescriptor {pluginConfigDescriptor = ConfigDescriptor {..}, ..} = + let x = genericDefaultConfig <> dedicatedDefaultConfig + in [fromString (T.unpack pId) A..= A.object x | not $ null x] + where + (PluginHandlers (DMap.toList -> handlers)) = pluginHandlers + customConfigToDedicatedDefaultConfig (CustomConfig p) = toDefaultJSON p + -- Example: + -- + -- { + -- "codeActionsOn": true, + -- "codeLensOn": true + -- } + -- + genericDefaultConfig = + let x = ["diagnosticsOn" A..= True | configHasDiagnostics] + <> nubOrd (mconcat + (handlersToGenericDefaultConfig configInitialGenericConfig <$> handlers)) + in case x of + -- If the plugin has only one capability, we produce globalOn instead of the specific one; + -- otherwise we omit globalOn + [_] -> ["globalOn" A..= plcGlobalOn configInitialGenericConfig] + _ -> x + -- Example: + -- + -- { + -- "config": { + -- "property1": "foo" + -- } + --} + dedicatedDefaultConfig = + let x = customConfigToDedicatedDefaultConfig configCustomConfig + in ["config" A..= A.object x | not $ null x] + + (PluginId pId) = pluginId + + -- This function captures ide methods registered by the plugin, and then converts it to kv pairs + handlersToGenericDefaultConfig :: PluginConfig -> DSum.DSum IdeMethod f -> [A.Pair] + handlersToGenericDefaultConfig PluginConfig{..} (IdeMethod m DSum.:=> _) = case m of + SMethod_TextDocumentCodeAction -> ["codeActionsOn" A..= plcCodeActionsOn] + SMethod_TextDocumentCodeLens -> ["codeLensOn" A..= plcCodeLensOn] + SMethod_TextDocumentInlayHint -> ["inlayHintsOn" A..= plcInlayHintsOn] + SMethod_TextDocumentRename -> ["renameOn" A..= plcRenameOn] + SMethod_TextDocumentHover -> ["hoverOn" A..= plcHoverOn] + SMethod_TextDocumentDocumentSymbol -> ["symbolsOn" A..= plcSymbolsOn] + SMethod_TextDocumentCompletion -> ["completionOn" A..= plcCompletionOn] + SMethod_TextDocumentPrepareCallHierarchy -> ["callHierarchyOn" A..= plcCallHierarchyOn] + SMethod_TextDocumentSemanticTokensFull -> ["semanticTokensOn" A..= plcSemanticTokensOn] + SMethod_TextDocumentSemanticTokensFullDelta -> ["semanticTokensOn" A..= plcSemanticTokensOn] + _ -> [] + +-- | Generates json schema used in haskell vscode extension +-- Similar to 'pluginsToDefaultConfig' but simpler, since schema has a flatten structure +pluginsToVSCodeExtensionSchema :: IdePlugins a -> A.Value +pluginsToVSCodeExtensionSchema IdePlugins {..} = A.object $ mconcat $ singlePlugin <$> ipMap + where + singlePlugin PluginDescriptor {pluginConfigDescriptor = ConfigDescriptor {..}, ..} = genericSchema <> dedicatedSchema + where + (PluginHandlers (DMap.toList -> handlers)) = pluginHandlers + customConfigToDedicatedSchema (CustomConfig p) = toVSCodeExtensionSchema (withIdPrefix "config.") p + (PluginId pId) = pluginId + genericSchema = + let x = + [toKey' "diagnosticsOn" A..= schemaEntry "diagnostics" True | configHasDiagnostics] + <> nubOrd (mconcat (handlersToGenericSchema configInitialGenericConfig <$> handlers)) + in case x of + -- If the plugin has only one capability, we produce globalOn instead of the specific one; + -- otherwise 
we don't produce globalOn at all + [_] -> [toKey' "globalOn" A..= schemaEntry "plugin" (plcGlobalOn configInitialGenericConfig)] + _ -> x + dedicatedSchema = customConfigToDedicatedSchema configCustomConfig + handlersToGenericSchema PluginConfig{..} (IdeMethod m DSum.:=> _) = case m of + SMethod_TextDocumentCodeAction -> [toKey' "codeActionsOn" A..= schemaEntry "code actions" plcCodeActionsOn] + SMethod_TextDocumentCodeLens -> [toKey' "codeLensOn" A..= schemaEntry "code lenses" plcCodeLensOn] + SMethod_TextDocumentInlayHint -> [toKey' "inlayHintsOn" A..= schemaEntry "inlay hints" plcInlayHintsOn] + SMethod_TextDocumentRename -> [toKey' "renameOn" A..= schemaEntry "rename" plcRenameOn] + SMethod_TextDocumentHover -> [toKey' "hoverOn" A..= schemaEntry "hover" plcHoverOn] + SMethod_TextDocumentDocumentSymbol -> [toKey' "symbolsOn" A..= schemaEntry "symbols" plcSymbolsOn] + SMethod_TextDocumentCompletion -> [toKey' "completionOn" A..= schemaEntry "completions" plcCompletionOn] + SMethod_TextDocumentPrepareCallHierarchy -> [toKey' "callHierarchyOn" A..= schemaEntry "call hierarchy" plcCallHierarchyOn] + SMethod_TextDocumentSemanticTokensFull -> [toKey' "semanticTokensOn" A..= schemaEntry "semantic tokens" plcSemanticTokensOn] + SMethod_TextDocumentSemanticTokensFullDelta -> [toKey' "semanticTokensOn" A..= schemaEntry "semantic tokens" plcSemanticTokensOn] + _ -> [] + schemaEntry desc defaultVal = + A.object + [ "scope" A..= A.String "resource", + "type" A..= A.String "boolean", + "default" A..= A.Bool defaultVal, + "description" A..= A.String ("Enables " <> pId <> " " <> desc) + ] + withIdPrefix x = "haskell.plugin." <> pId <> "." <> x + toKey' = fromString . T.unpack . withIdPrefix + + +-- | Generates markdown tables for custom config +pluginsCustomConfigToMarkdownTables :: IdePlugins a -> T.Text +pluginsCustomConfigToMarkdownTables IdePlugins {..} = T.unlines + $ map renderCfg + $ filter (\(PluginCustomConfig _ params) -> not $ null params) + $ map toPluginCustomConfig ipMap + where + toPluginCustomConfig :: PluginDescriptor ideState -> PluginCustomConfig + toPluginCustomConfig PluginDescriptor {pluginConfigDescriptor = ConfigDescriptor {configCustomConfig = c}, pluginId = PluginId pId} = + PluginCustomConfig { pcc'Name = pId, pcc'Params = toPluginCustomConfigParams c} + toPluginCustomConfigParams :: CustomConfig -> [PluginCustomConfigParam] + toPluginCustomConfigParams (CustomConfig p) = toPluginCustomConfigParams' p + toPluginCustomConfigParams' :: Properties r -> [PluginCustomConfigParam] + toPluginCustomConfigParams' EmptyProperties = [] + toPluginCustomConfigParams' (ConsProperties (keyNameProxy :: KeyNameProxy s) (k :: SPropertyKey k) (m :: MetaData t) xs) = + toEntry (SomePropertyKeyWithMetaData k m) : toPluginCustomConfigParams' xs + where + toEntry :: SomePropertyKeyWithMetaData -> PluginCustomConfigParam + toEntry (SomePropertyKeyWithMetaData SNumber MetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = T.pack $ show defaultValue, + pccp'EnumValues = [] + } + toEntry (SomePropertyKeyWithMetaData SInteger MetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = T.pack $ show defaultValue, + pccp'EnumValues = [] + } + toEntry (SomePropertyKeyWithMetaData SString MetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = T.pack $ 
show defaultValue, + pccp'EnumValues = [] + } + toEntry (SomePropertyKeyWithMetaData SBoolean MetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = T.pack $ show defaultValue, + pccp'EnumValues = [] + } + toEntry (SomePropertyKeyWithMetaData (SObject _) MetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = "TODO: nested object", -- T.pack $ show defaultValue, + pccp'EnumValues = [] + } + toEntry (SomePropertyKeyWithMetaData (SArray _) MetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = "TODO: Array values", -- T.pack $ show defaultValue, + pccp'EnumValues = [] + } + toEntry (SomePropertyKeyWithMetaData (SEnum _) EnumMetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = T.pack $ show defaultValue, + pccp'EnumValues = map (T.pack . show) enumValues + } + toEntry (SomePropertyKeyWithMetaData SProperties PropertiesMetaData {..}) = + PluginCustomConfigParam { + pccp'Name = T.pack $ symbolVal keyNameProxy, + pccp'Description = description, + pccp'Default = T.pack $ show defaultValue, + pccp'EnumValues = [] + } + renderCfg :: PluginCustomConfig -> T.Text + renderCfg (PluginCustomConfig pId pccParams) = + T.unlines (pluginHeader : tableHeader : rows pccParams) + where + pluginHeader = "## " <> pId + tableHeader = + "| Property | Description | Default | Allowed values |" <> "\n" <> + "| --- | --- | --- | --- |" + rows = map renderRow + renderRow PluginCustomConfigParam {..} = + "| `" <> pccp'Name <> "` | " <> pccp'Description <> " | `" <> pccp'Default <> "` | " <> renderEnum pccp'EnumValues <> " |" + renderEnum [] = "   " -- Placeholder to prevent missing cells + renderEnum vs = "
      " <> (T.intercalate " " $ map (\x -> "
    • " <> x <> "
    • ") vs) <> "
    " diff --git a/hls-plugin-api/src/Ide/Plugin/Error.hs b/hls-plugin-api/src/Ide/Plugin/Error.hs new file mode 100644 index 0000000000..b323079aff --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/Error.hs @@ -0,0 +1,147 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.Error ( + -- * Plugin Error Handling API + PluginError(..), + toErrorCode, + toPriority, + handleMaybe, + handleMaybeM, + getNormalizedFilePathE, +) where + +import Control.Monad.Extra (maybeM) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except (ExceptT (..), throwE) +import qualified Data.Text as T +import Ide.Logger +import Ide.Plugin.HandleRequestTypes (RejectionReason) +import Language.LSP.Protocol.Types + +-- ---------------------------------------------------------------------------- +-- Plugin Error wrapping +-- ---------------------------------------------------------------------------- + +-- |Each PluginError corresponds to either a specific ResponseError we want to +-- return or a specific way we want to log the error. If the currently present +-- ones are insufficient for the needs of your plugin, please feel free to add +-- a new one. +-- +-- Currently the PluginErrors we provide can be broken up into several groups. +-- First is PluginInternalError, which is the most serious of the errors, and +-- also the "default" error that is used for things such as uncaught exceptions. +-- Then we have PluginInvalidParams, which along with PluginInternalError map +-- to a corresponding ResponseError. +-- +-- Next we have PluginRuleFailed and PluginInvalidUserState, with the only +-- difference being PluginRuleFailed is specific to Shake rules and +-- PluginInvalidUserState can be used for everything else. Both of these are +-- "non-errors", and happen whenever the user's code is in a state where the +-- plugin is unable to provide a answer to the users request. PluginStaleResolve +-- is similar to the above two Error types, but is specific to resolve plugins, +-- and is used only when the data provided by the resolve request is stale, +-- preventing the proper resolution of it. +-- +-- Finally we have the outlier, PluginRequestRefused, where we allow a handler +-- to preform "pluginEnabled" checks inside the handler, and reject the request +-- after viewing it. The behavior of only one handler passing `pluginEnabled` +-- and then returning PluginRequestRefused should be the same as if no plugins +-- passed the `pluginEnabled` stage. +data PluginError + = -- |PluginInternalError should be used if an error has occurred. This + -- should only rarely be returned. As it's logged with Error, it will be + -- shown by the client to the user via `showWindow`. All uncaught exceptions + -- will be caught and converted to this error. + -- + -- This error will be be converted into an InternalError response code. It + -- will be logged with Error and takes the highest precedence (1) in being + -- returned as a response to the client. + PluginInternalError T.Text + -- |PluginInvalidParams should be used if the parameters of the request are + -- invalid. This error means that there is a bug in the client's code + -- (otherwise they wouldn't be sending you requests with invalid + -- parameters). + -- + -- This error will be will be converted into a InvalidParams response code. + -- It will be logged with Warning and takes medium precedence (2) in being + -- returned as a response to the client. 
+ | PluginInvalidParams T.Text + -- |PluginInvalidUserState should be thrown when a function that your plugin + -- depends on fails. This should only be used when the function fails + -- because the user's code is in an invalid state. + -- + -- This error takes the name of the function that failed. Prefer to catch + -- this error as close to the source as possible. + -- + -- This error will be logged with Debug, and will be converted into a + -- RequestFailed response. It takes a low precedence (3) in being returned + -- as a response to the client. + | PluginInvalidUserState T.Text + -- |PluginRequestRefused allows your handler to inspect a request before + -- rejecting it. In effect it allows your plugin to act make a secondary + -- `handlesRequest` decision after receiving the request. This should only be + -- used if the decision to accept the request can not be made in + -- `handlesRequest`. + -- + -- This error will be with Debug. If it's the only response to a request, + -- HLS will respond as if no plugins passed the `handlesRequest` stage. + | PluginRequestRefused RejectionReason + -- |PluginRuleFailed should be thrown when a Rule your response depends on + -- fails. + -- + -- This error takes the name of the Rule that failed. + -- + -- This error will be logged with Debug, and will be converted into a + -- RequestFailed response code. It takes a low precedence (3) in being + -- returned as a response to the client. + | PluginRuleFailed T.Text + -- |PluginStaleResolve should be thrown when your resolve request is + -- provided with data it can no longer resolve. + -- + -- This error will be logged with Debug, and will be converted into a + -- ContentModified response. It takes a low precedence (3) in being returned + -- as a response to the client. + | PluginStaleResolve + +instance Pretty PluginError where + pretty = \case + PluginInternalError msg -> "Internal Error:" <+> pretty msg + PluginStaleResolve -> "Stale Resolve" + PluginRuleFailed rule -> "Rule Failed:" <+> pretty rule + PluginInvalidParams text -> "Invalid Params:" <+> pretty text + PluginInvalidUserState text -> "Invalid User State:" <+> pretty text + PluginRequestRefused msg -> "Request Refused: " <+> pretty msg + +-- |Converts to ErrorCode used in LSP ResponseErrors +toErrorCode :: PluginError -> (LSPErrorCodes |? ErrorCodes) +toErrorCode (PluginInternalError _) = InR ErrorCodes_InternalError +toErrorCode (PluginInvalidParams _) = InR ErrorCodes_InvalidParams +toErrorCode (PluginInvalidUserState _) = InL LSPErrorCodes_RequestFailed +-- PluginRequestRefused should never be a argument to `toResponseError`, as +-- it should be dealt with in `extensiblePlugins`, but this is here to make +-- this function complete +toErrorCode (PluginRequestRefused _) = InR ErrorCodes_MethodNotFound +toErrorCode (PluginRuleFailed _) = InL LSPErrorCodes_RequestFailed +toErrorCode PluginStaleResolve = InL LSPErrorCodes_ContentModified + +-- |Converts to a logging priority. In addition to being used by the logger, +-- `combineResponses` currently uses this to choose which response to return, +-- so care should be taken in changing it. 
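 -- For example (illustrative, following the precedence numbers documented on
 -- the constructors above): if one handler fails with 'PluginInternalError'
 -- (priority 'Error', precedence 1) and another with 'PluginRuleFailed'
 -- (priority 'Debug', precedence 3), the 'PluginInternalError' is the
 -- response the client ultimately sees.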
+toPriority :: PluginError -> Priority +toPriority (PluginInternalError _) = Error +toPriority (PluginInvalidParams _) = Warning +toPriority (PluginInvalidUserState _) = Debug +toPriority (PluginRequestRefused _) = Debug +toPriority (PluginRuleFailed _) = Debug +toPriority PluginStaleResolve = Debug + +handleMaybe :: Monad m => e -> Maybe b -> ExceptT e m b +handleMaybe msg = maybe (throwE msg) return + +handleMaybeM :: Monad m => e -> m (Maybe b) -> ExceptT e m b +handleMaybeM msg act = maybeM (throwE msg) return $ lift act + +getNormalizedFilePathE :: Monad m => Uri -> ExceptT PluginError m NormalizedFilePath +getNormalizedFilePathE uri = handleMaybe (PluginInvalidParams (T.pack $ "uriToNormalizedFile failed. Uri:" <> show uri)) + $ uriToNormalizedFilePath + $ toNormalizedUri uri diff --git a/hls-plugin-api/src/Ide/Plugin/HandleRequestTypes.hs b/hls-plugin-api/src/Ide/Plugin/HandleRequestTypes.hs new file mode 100644 index 0000000000..20b81efa2d --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/HandleRequestTypes.hs @@ -0,0 +1,46 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.HandleRequestTypes where + +import Data.Text +import Prettyprinter + +-- | Reasons why a plugin could reject a specific request. +data RejectionReason = + -- | The resolve request is not meant for this plugin or handler. The text + -- field should contain the identifier for the plugin who owns this resolve + -- request. + NotResolveOwner Text + -- | The plugin is disabled globally in the users config. + | DisabledGlobally + -- | The feature in the plugin that responds to this request is disabled in + -- the users config + | FeatureDisabled + -- | This plugin is not the formatting provider selected in the users config. + -- The text should be the formatting provider in your config. + | NotFormattingProvider Text + -- | This plugin does not support the file type. The text field here should + -- contain the filetype of the rejected request. + | DoesNotSupportFileType Text + deriving (Eq) + +-- | Whether a plugin will handle a request or not. +data HandleRequestResult = HandlesRequest | DoesNotHandleRequest RejectionReason + deriving (Eq) + +instance Pretty HandleRequestResult where + pretty HandlesRequest = "handles this request" + pretty (DoesNotHandleRequest reason) = pretty reason + +instance Pretty RejectionReason where + pretty (NotResolveOwner s) = "does not handle resolve requests for " <> pretty s <> ")." + pretty DisabledGlobally = "is disabled globally in your config." + pretty FeatureDisabled = "'s feature that handles this request is disabled in your config." + pretty (NotFormattingProvider s) = "is not the formatting provider ("<> pretty s<>") you chose in your config." + pretty (DoesNotSupportFileType s) = "does not support " <> pretty s <> " filetypes)." 
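-- A usage sketch (illustrative, not part of the module): these renderings are
-- written to complete a sentence that begins with the plugin's name, e.g.
--
-- >>> pretty ("fourmolu" :: Text) <+> pretty (DoesNotHandleRequest (NotFormattingProvider "ormolu"))
-- fourmolu is not the formatting provider (ormolu) you chose in your config.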
+ +-- We always want to keep the leftmost disabled reason +instance Semigroup HandleRequestResult where + HandlesRequest <> HandlesRequest = HandlesRequest + DoesNotHandleRequest r <> _ = DoesNotHandleRequest r + _ <> DoesNotHandleRequest r = DoesNotHandleRequest r diff --git a/hls-plugin-api/src/Ide/Plugin/Properties.hs b/hls-plugin-api/src/Ide/Plugin/Properties.hs new file mode 100644 index 0000000000..49a45721b4 --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/Properties.hs @@ -0,0 +1,533 @@ +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE UndecidableInstances #-} + + +module Ide.Plugin.Properties + ( PropertyType (..), + ToHsType, + NotElem, + MetaData (..), + PropertyKey (..), + SPropertyKey (..), + SomePropertyKeyWithMetaData (..), + KeyNameProxy (..), + KeyNamePath (..), + Properties(..), + HasProperty, + HasPropertyByPath, + emptyProperties, + defineNumberProperty, + defineIntegerProperty, + defineStringProperty, + defineBooleanProperty, + defineObjectProperty, + defineArrayProperty, + defineEnumProperty, + definePropertiesProperty, + toDefaultJSON, + toVSCodeExtensionSchema, + usePropertyEither, + useProperty, + usePropertyByPathEither, + usePropertyByPath, + (&), + PluginCustomConfig(..), + PluginCustomConfigParam(..), + ) +where + +import Control.Arrow (first) +import qualified Data.Aeson as A +import qualified Data.Aeson.Types as A +import Data.Either (fromRight) +import Data.Function ((&)) +import Data.Kind (Constraint, Type) +import Data.Proxy (Proxy (..)) +import Data.String (IsString (fromString)) +import qualified Data.Text as T +import GHC.OverloadedLabels (IsLabel (..)) +import GHC.TypeLits + + +-- | Types properties may have +data PropertyType + = TNumber + | TInteger + | TString + | TBoolean + | TObject Type + | TArray Type + | TEnum Type + | TProperties [PropertyKey] -- ^ A typed TObject, defined in a recursive manner + +type family ToHsType (t :: PropertyType) where + ToHsType 'TNumber = Double -- in js, there are no distinct types for integers and floating-point values + ToHsType 'TInteger = Int -- so here we use Double for Number, Int for Integer + ToHsType 'TString = T.Text + ToHsType 'TBoolean = Bool + ToHsType ('TObject a) = a + ToHsType ('TArray a) = [a] + ToHsType ('TEnum a) = a + ToHsType ('TProperties _) = A.Object + +-- --------------------------------------------------------------------- + +-- | Metadata of a property +data MetaData (t :: PropertyType) where + MetaData :: + (IsTEnum t ~ 'False, IsProperties t ~ 'False) => + { defaultValue :: ToHsType t, + description :: T.Text + } -> + MetaData t + EnumMetaData :: + (IsTEnum t ~ 'True) => + { defaultValue :: ToHsType t, + description :: T.Text, + enumValues :: [ToHsType t], + enumDescriptions :: [T.Text] + } -> + MetaData t + PropertiesMetaData :: + (t ~ TProperties rs) => + { + defaultValue :: ToHsType t + , description :: T.Text + , childrenProperties :: Properties rs + } -> + MetaData t + + +-- | Used at type level for name-type mapping in 'Properties' +data PropertyKey = PropertyKey Symbol PropertyType + +-- | Singleton type of 'PropertyKey' +data SPropertyKey (k :: PropertyKey) where + SNumber :: SPropertyKey 
('PropertyKey s 'TNumber) + SInteger :: SPropertyKey ('PropertyKey s 'TInteger) + SString :: SPropertyKey ('PropertyKey s 'TString) + SBoolean :: SPropertyKey ('PropertyKey s 'TBoolean) + SObject :: (A.ToJSON a, A.FromJSON a) => Proxy a -> SPropertyKey ('PropertyKey s ('TObject a)) + SArray :: (A.ToJSON a, A.FromJSON a) => Proxy a -> SPropertyKey ('PropertyKey s ('TArray a)) + SEnum :: (A.ToJSON a, A.FromJSON a, Eq a, Show a) => Proxy a -> SPropertyKey ('PropertyKey s ('TEnum a)) + SProperties :: SPropertyKey ('PropertyKey s ('TProperties pp)) + +-- | Existential wrapper of 'SPropertyKey', with an extra 'MetaData' +data SomePropertyKeyWithMetaData + = forall k s t. + (k ~ 'PropertyKey s t) => + SomePropertyKeyWithMetaData (SPropertyKey k) (MetaData t) + +-- | 'Properties' is a partial implementation of json schema, without supporting union types and validation. +-- In hls, it defines a set of properties used in dedicated configuration of a plugin. +-- A property is an immediate child of the json object in each plugin's "config" section. +-- It was designed to be compatible with vscode's settings UI. +-- Use 'emptyProperties' and 'useProperty' to create and consume 'Properties'. +data Properties (r :: [PropertyKey]) where + ConsProperties :: (k ~ 'PropertyKey s t, KnownSymbol s, NotElem s ks) + => KeyNameProxy s -> (SPropertyKey k) -> (MetaData t) -> Properties ks -> Properties (k : ks) + EmptyProperties :: Properties '[] + +-- | A proxy type in order to allow overloaded labels as properties' names at the call site +data KeyNameProxy (s :: Symbol) = KnownSymbol s => KeyNameProxy + +instance (KnownSymbol s', s ~ s') => IsLabel s (KeyNameProxy s') where + fromLabel = KeyNameProxy + +data NonEmptyList a = + a :| NonEmptyList a | NE a + +-- | a path to a property in a json object +data KeyNamePath (r :: NonEmptyList Symbol) where + SingleKey :: KeyNameProxy s -> KeyNamePath (NE s) + ConsKeysPath :: KeyNameProxy s1 -> KeyNamePath ss -> KeyNamePath (s1 :| ss) + +class ParsePropertyPath (rs :: [PropertyKey]) (r :: NonEmptyList Symbol) where + usePropertyByPathEither :: KeyNamePath r -> Properties rs -> A.Object -> Either String (ToHsType (FindByKeyPath r rs)) + useDefault :: KeyNamePath r -> Properties rs -> ToHsType (FindByKeyPath r rs) + usePropertyByPath :: KeyNamePath r -> Properties rs -> A.Object -> ToHsType (FindByKeyPath r rs) + usePropertyByPath p ps x = fromRight (useDefault p ps) $ usePropertyByPathEither p ps x + +instance (HasProperty s k t r) => ParsePropertyPath r (NE s) where + usePropertyByPathEither (SingleKey kn) sm x = parseProperty kn (find kn sm) x + useDefault (SingleKey kn) sm = defaultValue metadata + where (_, metadata) = find kn sm + +instance ( ToHsType (FindByKeyPath ss r2) ~ ToHsType (FindByKeyPath (s :| ss) r) + ,HasProperty s ('PropertyKey s ('TProperties r2)) t2 r + , ParsePropertyPath r2 ss) + => ParsePropertyPath r (s :| ss) where + usePropertyByPathEither (ConsKeysPath kn p) sm x = do + let (key, meta) = find kn sm + interMedia <- parseProperty kn (key, meta) x + case meta of + PropertiesMetaData {..} + -> usePropertyByPathEither p childrenProperties interMedia + useDefault (ConsKeysPath kn p) sm = case find kn sm of + (_, PropertiesMetaData {..}) -> useDefault p childrenProperties + +-- --------------------------------------------------------------------- + +type family IsProperties (t :: PropertyType) :: Bool where + IsProperties ('TProperties pp) = 'True + IsProperties _ = 'False + +type family IsTEnum (t :: PropertyType) :: Bool where + IsTEnum ('TEnum _) = 
'True + IsTEnum _ = 'False + +type family FindByKeyPath (ne :: NonEmptyList Symbol) (r :: [PropertyKey]) :: PropertyType where + FindByKeyPath (s :| xs) ('PropertyKey s ('TProperties rs) ': _) = FindByKeyPath xs rs + FindByKeyPath (s :| xs) (_ ': ys) = FindByKeyPath (s :| xs) ys + FindByKeyPath (NE s) ys = FindByKeyName s ys + +type family FindByKeyName (s :: Symbol) (r :: [PropertyKey]) :: PropertyType where + FindByKeyName s ('PropertyKey s t ': _) = t + FindByKeyName s (_ ': xs) = FindByKeyName s xs + +type family IsPropertySymbol (s :: Symbol) (r :: PropertyKey) :: Bool where + IsPropertySymbol s ('PropertyKey s _) = 'True + IsPropertySymbol s _ = 'False + +type family Elem (s :: Symbol) (r :: [PropertyKey]) :: Constraint where + Elem s ('PropertyKey s _ ': _) = () + Elem s (_ ': xs) = Elem s xs + Elem s '[] = TypeError ('Text "The key ‘" ':<>: 'Text s ':<>: 'Text "’ is missing") + +type family NotElem (s :: Symbol) (r :: [PropertyKey]) :: Constraint where + NotElem s ('PropertyKey s _ ': _) = TypeError ('Text "The key ‘" ':<>: 'Text s ':<>: 'Text "’ is already defined") + NotElem s (_ ': xs) = NotElem s xs + NotElem s '[] = () + + +-- | In row @r@, there is a 'PropertyKey' @k@, which has name @s@ and carries haskell type @t@ +type HasProperty s k t r = (k ~ 'PropertyKey s t, Elem s r, FindByKeyPath (NE s) r ~ t, FindByKeyName s r ~ t, KnownSymbol s, FindPropertyMeta s r t) +-- similar to HasProperty, but the path is given as a type-level list of symbols +type HasPropertyByPath props path t = (t ~ FindByKeyPath path props, ParsePropertyPath props path) +class FindPropertyMeta (s :: Symbol) (r :: [PropertyKey]) t where + findSomePropertyKeyWithMetaData :: KeyNameProxy s -> Properties r -> (SPropertyKey ('PropertyKey s t), MetaData t) +instance (FindPropertyMetaIf (IsPropertySymbol symbol k) symbol k ks t) => FindPropertyMeta symbol (k : ks) t where + findSomePropertyKeyWithMetaData = findSomePropertyKeyWithMetaDataIf +class (bool ~ IsPropertySymbol symbol k) => FindPropertyMetaIf bool symbol k ks t where + findSomePropertyKeyWithMetaDataIf :: KeyNameProxy symbol -> Properties (k : ks) -> (SPropertyKey ('PropertyKey symbol t), MetaData t) +instance (k ~ 'PropertyKey s t) => FindPropertyMetaIf 'True s k ks t where + findSomePropertyKeyWithMetaDataIf _ (ConsProperties _ k m _) = (k, m) +instance ('False ~ IsPropertySymbol s k, FindPropertyMeta s ks t) => FindPropertyMetaIf 'False s k ks t where + findSomePropertyKeyWithMetaDataIf s (ConsProperties _ _ _ ks) = findSomePropertyKeyWithMetaData s ks + +-- --------------------------------------------------------------------- + +-- | Creates a 'Properties' that defines no property +-- +-- Useful to start a definitions chain, for example: +-- @ +-- properties = +-- emptyProperties +-- & defineStringProperty +-- #exampleString +-- "Description of exampleString" +-- "Foo" +-- & defineNumberProperty +-- #exampleNumber +-- "Description of exampleNumber" +-- 233 +-- @ + +emptyProperties :: Properties '[] +emptyProperties = EmptyProperties + +insert :: + (k ~ 'PropertyKey s t, NotElem s r, KnownSymbol s) => + KeyNameProxy s -> + SPropertyKey k -> + MetaData t -> + Properties r -> + Properties (k ': r) +insert = ConsProperties + +find :: + (HasProperty s k t r) => + KeyNameProxy s -> + Properties r -> + (SPropertyKey k, MetaData t) +find = findSomePropertyKeyWithMetaData + +-- --------------------------------------------------------------------- + +-- | Given the name of a defined property, generates a JSON parser of 'plcConfig' +usePropertyEither :: + 
(HasProperty s k t r) => + KeyNameProxy s -> + Properties r -> + A.Object -> + Either String (ToHsType t) +usePropertyEither kn p = parseProperty kn (find kn p) + +-- | Like 'usePropertyEither' but returns 'defaultValue' on parse error +useProperty :: + (HasProperty s k t r) => + KeyNameProxy s -> + Properties r -> + A.Object -> + ToHsType t +useProperty kn p = fromRight (defaultValue metadata) . usePropertyEither kn p + where + (_, metadata) = find kn p + +parseProperty :: + (k ~ 'PropertyKey s t, KnownSymbol s) => + KeyNameProxy s -> + (SPropertyKey k, MetaData t) -> + A.Object -> + Either String (ToHsType t) +parseProperty kn k x = case k of + (SProperties, _) -> parseEither + (SNumber, _) -> parseEither + (SInteger, _) -> parseEither + (SString, _) -> parseEither + (SBoolean, _) -> parseEither + (SObject _, _) -> parseEither + (SArray _, _) -> parseEither + (SEnum _, EnumMetaData {..}) -> + A.parseEither + ( \o -> do + txt <- o A..: key + if txt `elem` enumValues + then pure txt + else + fail $ + "invalid enum member: " + <> show txt + <> ". Expected one of " + <> show enumValues + ) + x + where + key = fromString $ symbolVal kn + parseEither :: forall a. A.FromJSON a => Either String a + parseEither = A.parseEither (A..: key) x + +-- --------------------------------------------------------------------- + +-- | Defines a number property +defineNumberProperty :: + (KnownSymbol s, NotElem s r) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | default value + Double -> + Properties r -> + Properties ('PropertyKey s 'TNumber : r) +defineNumberProperty kn description defaultValue = + insert kn SNumber MetaData {..} + +-- | Defines an integer property +defineIntegerProperty :: + (KnownSymbol s, NotElem s r) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | default value + Int -> + Properties r -> + Properties ('PropertyKey s 'TInteger : r) +defineIntegerProperty kn description defaultValue = + insert kn SInteger MetaData {..} + +-- | Defines a string property +defineStringProperty :: + (KnownSymbol s, NotElem s r) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | default value + T.Text -> + Properties r -> + Properties ('PropertyKey s 'TString : r) +defineStringProperty kn description defaultValue = + insert kn SString MetaData {..} + +-- | Defines a boolean property +defineBooleanProperty :: + (KnownSymbol s, NotElem s r) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | default value + Bool -> + Properties r -> + Properties ('PropertyKey s 'TBoolean : r) +defineBooleanProperty kn description defaultValue = + insert kn SBoolean MetaData {..} + +-- | Defines an object property +defineObjectProperty :: + (KnownSymbol s, NotElem s r, A.ToJSON a, A.FromJSON a) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | default value + a -> + Properties r -> + Properties ('PropertyKey s ('TObject a) : r) +defineObjectProperty kn description defaultValue = + insert kn (SObject Proxy) MetaData {..} + +-- | Defines an array property +defineArrayProperty :: + (KnownSymbol s, NotElem s r, A.ToJSON a, A.FromJSON a) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | default value + [a] -> + Properties r -> + Properties ('PropertyKey s ('TArray a) : r) +defineArrayProperty kn description defaultValue = + insert kn (SArray Proxy) MetaData {..} + +-- | Defines an enum property +defineEnumProperty :: + (KnownSymbol s, NotElem s r, A.ToJSON a, A.FromJSON a, Eq a, Show a) => + KeyNameProxy s -> + -- | description + T.Text -> + -- | valid enum members 
with each of description + [(a, T.Text)] -> + a -> + Properties r -> + Properties ('PropertyKey s ('TEnum a) : r) +defineEnumProperty kn description enums defaultValue = + insert kn (SEnum Proxy) $ EnumMetaData defaultValue description (fst <$> enums) (snd <$> enums) + +definePropertiesProperty :: + (KnownSymbol s, NotElem s r) => + KeyNameProxy s -> + T.Text -> + Properties childrenProps -> + Properties r -> + Properties ('PropertyKey s ('TProperties childrenProps) : r) +definePropertiesProperty kn description ps rs = + insert kn SProperties (PropertiesMetaData mempty description ps) rs + +-- --------------------------------------------------------------------- + +-- | Converts a properties definition into kv pairs with default values from 'MetaData' +toDefaultJSON :: Properties r -> [A.Pair] +toDefaultJSON pr = case pr of + EmptyProperties -> [] + ConsProperties keyNameProxy k m xs -> + toEntry (symbolVal keyNameProxy) (SomePropertyKeyWithMetaData k m) : toDefaultJSON xs + where + toEntry :: String -> SomePropertyKeyWithMetaData -> A.Pair + toEntry s = \case + (SomePropertyKeyWithMetaData SNumber MetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData SInteger MetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData SString MetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData SBoolean MetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData (SObject _) MetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData (SArray _) MetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData (SEnum _) EnumMetaData {..}) -> + fromString s A..= defaultValue + (SomePropertyKeyWithMetaData SProperties PropertiesMetaData {..}) -> + fromString s A..= A.object (toDefaultJSON childrenProperties) + +-- | Converts a properties definition into kv pairs as vscode schema +toVSCodeExtensionSchema :: T.Text -> Properties r -> [A.Pair] +toVSCodeExtensionSchema prefix p = [fromString (T.unpack prefix <> fromString k) A..= v | (k, v) <- toVSCodeExtensionSchema' p] +toVSCodeExtensionSchema' :: Properties r -> [(String, A.Value)] +toVSCodeExtensionSchema' ps = case ps of + EmptyProperties -> [] + ConsProperties (keyNameProxy :: KeyNameProxy s) (k :: SPropertyKey k) (m :: MetaData t) xs -> + [(symbolVal keyNameProxy <> maybe "" ((<>) ".") k1, v) + | (k1, v) <- toEntry (SomePropertyKeyWithMetaData k m) ] + ++ toVSCodeExtensionSchema' xs + where + wrapEmpty :: A.Value -> [(Maybe String, A.Value)] + wrapEmpty v = [(Nothing, v)] + toEntry :: SomePropertyKeyWithMetaData -> [(Maybe String, A.Value)] + toEntry = \case + (SomePropertyKeyWithMetaData SNumber MetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "number", + "markdownDescription" A..= description, + "default" A..= defaultValue, + "scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData SInteger MetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "integer", + "markdownDescription" A..= description, + "default" A..= defaultValue, + "scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData SString MetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "string", + "markdownDescription" A..= description, + "default" A..= defaultValue, + "scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData SBoolean MetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "boolean", + "markdownDescription" A..= description, + "default" A..= defaultValue, + 
"scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData (SObject _) MetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "object", + "markdownDescription" A..= description, + "default" A..= defaultValue, + "scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData (SArray _) MetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "array", + "markdownDescription" A..= description, + "default" A..= defaultValue, + "scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData (SEnum _) EnumMetaData {..}) -> + wrapEmpty $ A.object + [ "type" A..= A.String "string", + "description" A..= description, + "enum" A..= enumValues, + "enumDescriptions" A..= enumDescriptions, + "default" A..= defaultValue, + "scope" A..= A.String "resource" + ] + (SomePropertyKeyWithMetaData SProperties PropertiesMetaData {..}) -> + map (first Just) $ toVSCodeExtensionSchema' childrenProperties + +data PluginCustomConfig = PluginCustomConfig { + pcc'Name :: T.Text, + pcc'Params :: [PluginCustomConfigParam] +} +data PluginCustomConfigParam = PluginCustomConfigParam { + pccp'Name :: T.Text, + pccp'Description :: T.Text, + pccp'Default :: T.Text, + pccp'EnumValues :: [T.Text] +} + diff --git a/hls-plugin-api/src/Ide/Plugin/RangeMap.hs b/hls-plugin-api/src/Ide/Plugin/RangeMap.hs new file mode 100644 index 0000000000..6c4b4041c9 --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/RangeMap.hs @@ -0,0 +1,93 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DerivingStrategies #-} + +-- | A map that allows fast \"in-range\" filtering. 'RangeMap' is meant +-- to be constructed once and cached as part of a Shake rule. If +-- not, the map will be rebuilt upon each invocation, yielding slower +-- results compared to the list-based approach! +-- +-- Note that 'RangeMap' falls back to the list-based approach if +-- `use-fingertree` flag of `hls-plugin-api` is set to false. +module Ide.Plugin.RangeMap + ( RangeMap(..), + fromList, + fromList', + filterByRange, + elementsInRange, + ) where + +import Development.IDE.Graph.Classes (NFData) + +#ifdef USE_FINGERTREE +import Data.Bifunctor (first) +import qualified HaskellWorks.Data.IntervalMap.FingerTree as IM +import Language.LSP.Protocol.Types (Position, + Range (Range)) +#else +import Language.LSP.Protocol.Types (Range, isSubrangeOf) +#endif + +#if USE_FINGERTREE && !MIN_VERSION_base(4,20,0) +import Data.List (foldl') +#endif + +-- | A map from code ranges to values. +#ifdef USE_FINGERTREE +newtype RangeMap a = RangeMap + { unRangeMap :: IM.IntervalMap Position a + -- ^ 'IM.Interval' of 'Position' corresponds to a 'Range' + } + deriving newtype (NFData, Semigroup, Monoid) + deriving stock (Functor, Foldable, Traversable) +#else +newtype RangeMap a = RangeMap + { unRangeMap :: [(Range, a)] } + deriving newtype (NFData, Semigroup, Monoid) + deriving stock (Functor, Foldable, Traversable) +#endif + +-- | Construct a 'RangeMap' from a 'Range' accessor and a list of values. +fromList :: (a -> Range) -> [a] -> RangeMap a +fromList extractRange = fromList' . map (\x -> (extractRange x, x)) + +fromList' :: [(Range, a)] -> RangeMap a +#ifdef USE_FINGERTREE +fromList' = RangeMap . toIntervalMap . map (first rangeToInterval) + where + toIntervalMap :: Ord v => [(IM.Interval v, a)] -> IM.IntervalMap v a + toIntervalMap = foldl' (\m (i, v) -> IM.insert i v m) IM.empty +#else +fromList' = RangeMap +#endif + +-- | Filter a 'RangeMap' by a given 'Range'. +filterByRange :: Range -> RangeMap a -> [a] +#ifdef USE_FINGERTREE +filterByRange range = map snd . 
IM.dominators (rangeToInterval range) . unRangeMap +#else +filterByRange range = map snd . filter (isSubrangeOf range . fst) . unRangeMap +#endif + +-- | Extracts all elements from a 'RangeMap' that fall within a given 'Range'. +elementsInRange :: Range -> RangeMap a -> [a] +#ifdef USE_FINGERTREE +elementsInRange range = map snd . IM.intersections (rangeToInterval range) . unRangeMap +#else +elementsInRange range = map snd . filter (flip isSubrangeOf range . fst) . unRangeMap +#endif + +#ifdef USE_FINGERTREE +-- NOTE(ozkutuk): In itself, this conversion is wrong. As Michael put it: +-- "LSP Ranges have exclusive upper bounds, whereas the intervals here are +-- supposed to be closed (i.e. inclusive at both ends)" +-- However, in our use-case this turns out not to be an issue (supported +-- by the accompanying property test). I think the reason for this is, +-- even if rangeToInterval isn't a correct 1:1 conversion by itself, it +-- is used for both the construction of the RangeMap and during the actual +-- filtering (filterByRange), so it still behaves identical to the list +-- approach. +-- This definition isn't exported from the module, therefore we need not +-- worry about other uses where it potentially makes a difference. +rangeToInterval :: Range -> IM.Interval Position +rangeToInterval (Range s e) = IM.Interval s e +#endif diff --git a/hls-plugin-api/src/Ide/Plugin/Resolve.hs b/hls-plugin-api/src/Ide/Plugin/Resolve.hs new file mode 100644 index 0000000000..36c61baaff --- /dev/null +++ b/hls-plugin-api/src/Ide/Plugin/Resolve.hs @@ -0,0 +1,211 @@ +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-| This module currently includes helper functions to provide fallback support +to code actions that use resolve in HLS. The difference between the two +functions for code actions that don't support resolve is that +mkCodeActionHandlerWithResolve will immediately resolve your code action before +sending it on to the client, while mkCodeActionWithResolveAndCommand will turn +your resolve into a command. + +General support for resolve in HLS can be used with mkResolveHandler from +Ide.Types. Resolve theoretically should allow us to delay computation of parts +of the request till the client needs it, allowing us to answer requests faster +and with less resource usage. +-} +module Ide.Plugin.Resolve +(mkCodeActionHandlerWithResolve, +mkCodeActionWithResolveAndCommand) where + +import Control.Lens (_Just, (&), (.~), (?~), (^.), + (^?)) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except (ExceptT (..)) + +import qualified Data.Aeson as A +import Data.Maybe (catMaybes) +import qualified Data.Text as T +import GHC.Generics (Generic) +import Ide.Logger +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +data Log + = DoesNotSupportResolve T.Text + | forall m . 
A.ToJSON (ErrorData m) => ApplyWorkspaceEditFailed (TResponseError m) +instance Pretty Log where + pretty = \case + DoesNotSupportResolve fallback-> + "Client does not support resolve," <+> pretty fallback + ApplyWorkspaceEditFailed err -> + "ApplyWorkspaceEditFailed:" <+> pretty err + +-- |When provided with both a codeAction provider and an affiliated codeAction +-- resolve provider, this function creates a handler that automatically uses +-- your resolve provider to fill out you original codeAction if the client doesn't +-- have codeAction resolve support. This means you don't have to check whether +-- the client supports resolve and act accordingly in your own providers. +mkCodeActionHandlerWithResolve + :: forall ideState a. (A.FromJSON a) => + Recorder (WithPriority Log) + -> PluginMethodHandler ideState 'Method_TextDocumentCodeAction + -> ResolveFunction ideState a 'Method_CodeActionResolve + -> PluginHandlers ideState +mkCodeActionHandlerWithResolve recorder codeActionMethod codeResolveMethod = + let newCodeActionMethod ideState pid params = + do codeActionReturn <- codeActionMethod ideState pid params + caps <- lift pluginGetClientCapabilities + case codeActionReturn of + r@(InR Null) -> pure r + (InL ls) | -- We don't need to do anything if the client supports + -- resolve + supportsCodeActionResolve caps -> pure $ InL ls + --This is the actual part where we call resolveCodeAction which fills in the edit data for the client + | otherwise -> do + logWith recorder Debug (DoesNotSupportResolve "filling in the code action") + InL <$> traverse (resolveCodeAction (params ^. L.textDocument . L.uri) ideState pid) ls + in (mkPluginHandler SMethod_TextDocumentCodeAction newCodeActionMethod + <> mkResolveHandler SMethod_CodeActionResolve codeResolveMethod) + where dropData :: CodeAction -> CodeAction + dropData ca = ca & L.data_ .~ Nothing + resolveCodeAction :: Uri -> ideState -> PluginId -> (Command |? CodeAction) -> ExceptT PluginError (HandlerM Config) (Command |? CodeAction) + resolveCodeAction _uri _ideState _plId c@(InL _) = pure c + resolveCodeAction uri ideState pid (InR codeAction@CodeAction{_data_=Just value}) = do + case A.fromJSON value of + A.Error err -> throwError $ parseError (Just value) (T.pack err) + A.Success innerValueDecoded -> do + resolveResult <- codeResolveMethod ideState pid codeAction uri innerValueDecoded + case resolveResult of + CodeAction {_edit = Just _ } -> do + pure $ InR $ dropData resolveResult + _ -> throwError $ invalidParamsError "Returned CodeAction has no data field" + resolveCodeAction _ _ _ (InR CodeAction{_data_=Nothing}) = throwError $ invalidParamsError "CodeAction has no data field" + + +-- |When provided with both a codeAction provider with a data field and a resolve +-- provider, this function creates a handler that creates a command that uses +-- your resolve if the client doesn't have code action resolve support. This means +-- you don't have to check whether the client supports resolve and act +-- accordingly in your own providers. see Note [Code action resolve fallback to commands] +-- Also: This helper only works with workspace edits, not commands. Any command set +-- either in the original code action or in the resolve will be ignored. +mkCodeActionWithResolveAndCommand + :: forall ideState a. 
(A.FromJSON a) => + Recorder (WithPriority Log) + -> PluginId + -> PluginMethodHandler ideState 'Method_TextDocumentCodeAction + -> ResolveFunction ideState a 'Method_CodeActionResolve + -> ([PluginCommand ideState], PluginHandlers ideState) +mkCodeActionWithResolveAndCommand recorder plId codeActionMethod codeResolveMethod = + let newCodeActionMethod ideState pid params = + do codeActionReturn <- codeActionMethod ideState pid params + caps <- lift pluginGetClientCapabilities + case codeActionReturn of + r@(InR Null) -> pure r + (InL ls) | -- We don't need to do anything if the client supports + -- resolve + supportsCodeActionResolve caps -> pure $ InL ls + -- If they do not we will drop the data field, in addition we will populate the command + -- field with our command to execute the resolve, with the whole code action as it's argument. + | otherwise -> do + logWith recorder Debug (DoesNotSupportResolve "rewriting the code action to use commands") + pure $ InL $ moveDataToCommand (params ^. L.textDocument . L.uri) <$> ls + in ([PluginCommand "codeActionResolve" "Executes resolve for code action" (executeResolveCmd codeResolveMethod)], + mkPluginHandler SMethod_TextDocumentCodeAction newCodeActionMethod + <> mkResolveHandler SMethod_CodeActionResolve codeResolveMethod) + where moveDataToCommand :: Uri -> Command |? CodeAction -> Command |? CodeAction + moveDataToCommand uri ca = + let dat = A.toJSON . wrapWithURI uri <$> ca ^? _R -- We need to take the whole codeAction + -- And put it in the argument for the Command, that way we can later + -- pass it to the resolve handler (which expects a whole code action) + -- It should be noted that mkLspCommand already specifies the command + -- to the plugin, so we don't need to do that here. + cmd = mkLspCommand plId (CommandId "codeActionResolve") "Execute Code Action" (pure <$> dat) + in ca + & _R . L.data_ .~ Nothing -- Set the data field to nothing + & _R . L.command ?~ cmd -- And set the command to our previously created command + wrapWithURI :: Uri -> CodeAction -> CodeAction + wrapWithURI uri codeAction = + codeAction & L.data_ .~ (A.toJSON .WithURI uri <$> data_) + where data_ = codeAction ^? L.data_ . 
_Just + executeResolveCmd :: ResolveFunction ideState a 'Method_CodeActionResolve -> CommandFunction ideState CodeAction + executeResolveCmd resolveProvider ideState _token ca@CodeAction{_data_=Just value} = do + case A.fromJSON value of + A.Error err -> throwError $ parseError (Just value) (T.pack err) + A.Success (WithURI uri innerValue) -> do + case A.fromJSON innerValue of + A.Error err -> throwError $ parseError (Just value) (T.pack err) + A.Success innerValueDecoded -> do + resolveResult <- resolveProvider ideState plId ca uri innerValueDecoded + case resolveResult of + ca2@CodeAction {_edit = Just wedits } | diffCodeActions ca ca2 == ["edit"] -> do + _ <- ExceptT $ Right <$> pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wedits) handleWEditCallback + pure $ InR Null + ca2@CodeAction {_edit = Just _ } -> + throwError $ internalError $ + "The resolve provider unexpectedly returned a code action with the following differing fields: " + <> (T.pack $ show $ diffCodeActions ca ca2) + _ -> throwError $ internalError "The resolve provider unexpectedly returned a result with no data field" + executeResolveCmd _ _ _ CodeAction{_data_= value} = throwError $ invalidParamsError ("The code action to resolve has an illegal data field: " <> (T.pack $ show value)) + handleWEditCallback (Left err ) = do + logWith recorder Warning (ApplyWorkspaceEditFailed err) + pure () + handleWEditCallback _ = pure () + +-- TODO: Remove once provided by lsp-types +-- |Compares two CodeActions and returns a list of fields that are not equal +diffCodeActions :: CodeAction -> CodeAction -> [T.Text] +diffCodeActions ca ca2 = + let titleDiff = if ca ^. L.title == ca2 ^. L.title then Nothing else Just "title" + kindDiff = if ca ^. L.kind == ca2 ^. L.kind then Nothing else Just "kind" + diagnosticsDiff = if ca ^. L.diagnostics == ca2 ^. L.diagnostics then Nothing else Just "diagnostics" + commandDiff = if ca ^. L.command == ca2 ^. L.command then Nothing else Just "diagnostics" + isPreferredDiff = if ca ^. L.isPreferred == ca2 ^. L.isPreferred then Nothing else Just "isPreferred" + dataDiff = if ca ^. L.data_ == ca2 ^. L.data_ then Nothing else Just "data" + disabledDiff = if ca ^. L.disabled == ca2 ^. L.disabled then Nothing else Just "disabled" + editDiff = if ca ^. L.edit == ca2 ^. L.edit then Nothing else Just "edit" + in catMaybes [titleDiff, kindDiff, diagnosticsDiff, commandDiff, isPreferredDiff, dataDiff, disabledDiff, editDiff] + +-- |To execute the resolve provider as a command, we need to additionally store +-- the URI that was provided to the original code action. +data WithURI = WithURI { + _uri :: Uri +, _value :: A.Value +} deriving (Generic, Show) +instance A.ToJSON WithURI +instance A.FromJSON WithURI + +-- |Checks if the the client supports resolve for code action. We currently only check +-- whether resolve for the edit field is supported, because that's the only one we care +-- about at the moment. +supportsCodeActionResolve :: ClientCapabilities -> Bool +supportsCodeActionResolve caps = + caps ^? L.textDocument . _Just . L.codeAction . _Just . L.dataSupport . _Just == Just True + && case caps ^? L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport . 
_Just of + Just ClientCodeActionResolveOptions{_properties} -> "edit" `elem` _properties + _ -> False + +internalError :: T.Text -> PluginError +internalError msg = PluginInternalError ("Ide.Plugin.Resolve: " <> msg) + +invalidParamsError :: T.Text -> PluginError +invalidParamsError msg = PluginInvalidParams ("Ide.Plugin.Resolve: : " <> msg) + +parseError :: Maybe A.Value -> T.Text -> PluginError +parseError value errMsg = PluginInternalError ("Ide.Plugin.Resolve: Error parsing value:"<> (T.pack $ show value) <> " Error: "<> errMsg) + +{- Note [Code action resolve fallback to commands] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + To make supporting code action resolve easy for plugins, we want to let them + provide one implementation that can be used both when clients support + resolve, and when they don't. + The way we do this is to have them always implement a resolve handler. + Then, if the client doesn't support resolve, we instead install the resolve + handler as a _command_ handler, passing the code action literal itself + as the command argument. This allows the command handler to have + the same interface as the resolve handler! + -} diff --git a/hls-plugin-api/src/Ide/PluginUtils.hs b/hls-plugin-api/src/Ide/PluginUtils.hs new file mode 100644 index 0000000000..e34d19f8b0 --- /dev/null +++ b/hls-plugin-api/src/Ide/PluginUtils.hs @@ -0,0 +1,346 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.PluginUtils + ( -- * LSP Range manipulation functions + normalize, + extendNextLine, + extendLineStart, + extendToFullLines, + WithDeletions(..), + getProcessID, + makeDiffTextEdit, + makeDiffTextEditAdditive, + diffText, + diffText', + pluginDescToIdePlugins, + idePluginsToPluginDesc, + getClientConfig, + getPluginConfig, + configForPlugin, + handlesRequest, + extractTextInRange, + fullRange, + mkLspCommand, + mkLspCmdId, + getPid, + allLspCmdIds, + allLspCmdIds', + installSigUsr1Handler, + subRange, + rangesOverlap, + positionInRange, + usePropertyLsp, + -- * Escape + unescape, + -- * toAbsolute + toAbsolute + ) +where + +import Control.Arrow ((&&&)) +import Control.Lens (_head, _last, re, (%~), (^.)) +import Data.Algorithm.Diff +import Data.Algorithm.DiffOutput +import Data.Char (isPrint, showLitChar) +import Data.Functor (void) +import qualified Data.Map as M +import qualified Data.Text as T +import Data.Void (Void) +import Ide.Plugin.Config +import Ide.Plugin.Properties +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types +import Language.LSP.Server +import System.FilePath (()) +import qualified Text.Megaparsec as P +import qualified Text.Megaparsec.Char as P +import qualified Text.Megaparsec.Char.Lexer as P + +-- --------------------------------------------------------------------- + +-- | Extend to the line below and above to replace newline character. +-- +-- >>> normalize (Range (Position 5 5) (Position 5 10)) +-- Range (Position 5 0) (Position 6 0) +normalize :: Range -> Range +normalize = extendLineStart . extendNextLine + +-- | Extend 'Range' to the start of the next line. +-- +-- >>> extendNextLine (Range (Position 5 5) (Position 5 10)) +-- Range (Position 5 5) (Position 6 0) +extendNextLine :: Range -> Range +extendNextLine (Range s (Position el _)) = + Range s (Position (el + 1) 0) + +-- | Extend 'Range' to the start of the current line. 
+-- +-- >>> extendLineStart (Range (Position 5 5) (Position 5 10)) +-- Range (Position 5 0) (Position 5 10) +extendLineStart :: Range -> Range +extendLineStart (Range (Position sl _) e) = + Range (Position sl 0) e + +-- | Extend 'Range' to include the start of the first line and start of the next line of the last line. +-- +-- Caveat: It always extend the last line to the beginning of next line, even when the last position is at column 0. +-- This is to keep the compatibility with the implementation of old function @extractRange@. +-- +-- >>> extendToFullLines (Range (Position 5 5) (Position 5 10)) +-- Range (Position 5 0) (Position 6 0) +-- +-- >>> extendToFullLines (Range (Position 5 5) (Position 7 2)) +-- Range (Position 5 0) (Position 8 0) +-- +-- >>> extendToFullLines (Range (Position 5 5) (Position 7 0)) +-- Range (Position 5 0) (Position 8 0) +extendToFullLines :: Range -> Range +extendToFullLines = extendLineStart . extendNextLine + + +-- --------------------------------------------------------------------- + +data WithDeletions = IncludeDeletions | SkipDeletions + deriving (Eq) + +-- | Generate a 'WorkspaceEdit' value from a pair of source Text +diffText :: ClientCapabilities -> (VersionedTextDocumentIdentifier, T.Text) -> T.Text -> WithDeletions -> WorkspaceEdit +diffText clientCaps old new withDeletions = + let supports = clientSupportsDocumentChanges clientCaps + in diffText' supports old new withDeletions + +makeDiffTextEdit :: T.Text -> T.Text -> [TextEdit] +makeDiffTextEdit f1 f2 = diffTextEdit f1 f2 IncludeDeletions + +makeDiffTextEditAdditive :: T.Text -> T.Text -> [TextEdit] +makeDiffTextEditAdditive f1 f2 = diffTextEdit f1 f2 SkipDeletions + +diffTextEdit :: T.Text -> T.Text -> WithDeletions -> [TextEdit] +diffTextEdit fText f2Text withDeletions = r + where + r = map diffOperationToTextEdit diffOps + d = getGroupedDiff (lines $ T.unpack fText) (lines $ T.unpack f2Text) + + diffOps = + filter + (\x -> (withDeletions == IncludeDeletions) || not (isDeletion x)) + (diffToLineRanges d) + + isDeletion (Deletion _ _) = True + isDeletion _ = False + + diffOperationToTextEdit :: DiffOperation LineRange -> TextEdit + diffOperationToTextEdit (Change fm to) = TextEdit range nt + where + range = calcRange fm + nt = T.pack $ init $ unlines $ lrContents to + + {- + In order to replace everything including newline characters, + the end range should extend below the last line. 
From the specification: + "If you want to specify a range that contains a line including + the line ending character(s) then use an end position denoting + the start of the next line" + -} + diffOperationToTextEdit (Deletion (LineRange (sl, el) _) _) = TextEdit range "" + where + range = + Range + (Position (fromIntegral $ sl - 1) 0) + (Position (fromIntegral el) 0) + diffOperationToTextEdit (Addition fm l) = TextEdit range nt + where + -- fm has a range wrt to the changed file, which starts in the current file at l + 1 + -- So the range has to be shifted to start at l + 1 + + range = + Range + (Position (fromIntegral l) 0) + (Position (fromIntegral l) 0) + nt = T.pack $ unlines $ lrContents fm + + calcRange fm = Range s e + where + sl = fst $ lrNumbers fm + sc = 0 + s = Position (fromIntegral $ sl - 1) sc -- Note: zero-based lines + el = snd $ lrNumbers fm + ec = fromIntegral $ length $ last $ lrContents fm + e = Position (fromIntegral $ el - 1) ec -- Note: zero-based lines + +-- | A pure version of 'diffText' for testing +diffText' :: Bool -> (VersionedTextDocumentIdentifier, T.Text) -> T.Text -> WithDeletions -> WorkspaceEdit +diffText' supports (verTxtDocId, fText) f2Text withDeletions = + if supports + then WorkspaceEdit Nothing (Just docChanges) Nothing + else WorkspaceEdit (Just h) Nothing Nothing + where + diff = diffTextEdit fText f2Text withDeletions + h = M.singleton (verTxtDocId ^. L.uri) diff + docChanges = [InL docEdit] + docEdit = TextDocumentEdit (verTxtDocId ^. re _versionedTextDocumentIdentifier) $ fmap InL diff + +-- --------------------------------------------------------------------- + +clientSupportsDocumentChanges :: ClientCapabilities -> Bool +clientSupportsDocumentChanges caps = + let ClientCapabilities mwCaps _ _ _ _ _ = caps + supports = do + wCaps <- mwCaps + WorkspaceEditClientCapabilities mDc _ _ _ _ <- _workspaceEdit wCaps + mDc + in Just True == supports + +-- --------------------------------------------------------------------- + +pluginDescToIdePlugins :: [PluginDescriptor ideState] -> IdePlugins ideState +pluginDescToIdePlugins = IdePlugins + +idePluginsToPluginDesc :: IdePlugins ideState -> [PluginDescriptor ideState] +idePluginsToPluginDesc (IdePlugins pp) = pp + +-- --------------------------------------------------------------------- + +-- | Returns the current client configuration. It is not wise to permanently +-- cache the returned value of this function, as clients can at runtime change +-- their configuration. +getClientConfig :: (MonadLsp Config m) => m Config +getClientConfig = getConfig + +-- --------------------------------------------------------------------- + +-- | Returns the current plugin configuration. It is not wise to permanently +-- cache the returned value of this function, as clients can change their +-- configuration at runtime. +getPluginConfig :: (MonadLsp Config m) => PluginDescriptor c -> m PluginConfig +getPluginConfig plugin = do + config <- getClientConfig + return $ configForPlugin config plugin + +-- --------------------------------------------------------------------- + +-- | Returns the value of a property defined by the current plugin. +usePropertyLsp :: + (HasProperty s k t r, MonadLsp Config m) => + KeyNameProxy s -> + PluginDescriptor c -> + Properties r -> + m (ToHsType t) +usePropertyLsp kn pId p = do + config <- getPluginConfig pId + return $ useProperty kn p $ plcConfig config + +-- --------------------------------------------------------------------- + +-- | Extracts exact matching text in the range. 
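-- A small illustrative example of the expected behaviour:
--
-- >>> extractTextInRange (Range (Position 0 2) (Position 0 5)) "hello world"
-- "llo"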
+extractTextInRange :: Range -> T.Text -> T.Text +extractTextInRange (Range (Position sl sc) (Position el ec)) s = newS + where + focusLines = + T.lines s + -- NOTE: Always append an empty line to the end to ensure there are + -- sufficient lines to take from. + -- + -- There is a situation that when the end position is placed at the line + -- below the last line, if we simply do `drop` and then `take`, there + -- will be `el - sl` lines left, not `el - sl + 1` lines. And then + -- the last line of code will be emptied unexpectedly. + -- + -- For details, see https://github.com/haskell/haskell-language-server/issues/3847 + & (++ [""]) + & drop (fromIntegral sl) + & take (fromIntegral $ el - sl + 1) + -- NOTE: We have to trim the last line first to handle the single-line case + newS = + focusLines + & _last %~ T.take (fromIntegral ec) + & _head %~ T.drop (fromIntegral sc) + -- NOTE: We cannot use unlines here, because we don't want to add trailing newline! + & T.intercalate "\n" + +-- | Gets the range that covers the entire text +fullRange :: T.Text -> Range +fullRange s = Range startPos endPos + where + startPos = Position 0 0 + endPos = Position lastLine 0 + {- + In order to replace everything including newline characters, + the end range should extend below the last line. From the specification: + "If you want to specify a range that contains a line including + the line ending character(s) then use an end position denoting + the start of the next line" + -} + lastLine = fromIntegral $ length $ T.lines s + +subRange :: Range -> Range -> Bool +subRange = isSubrangeOf + + +-- | Check whether the two 'Range's overlap in any way. +-- +-- >>> rangesOverlap (mkRange 1 0 1 4) (mkRange 1 2 1 5) +-- True +-- >>> rangesOverlap (mkRange 1 2 1 5) (mkRange 1 0 1 4) +-- True +-- >>> rangesOverlap (mkRange 1 0 1 6) (mkRange 1 2 1 4) +-- True +-- >>> rangesOverlap (mkRange 1 2 1 4) (mkRange 1 0 1 6) +-- True +rangesOverlap :: Range -> Range -> Bool +rangesOverlap r1 r2 = + r1 ^. L.start <= r2 ^. L.end && r2 ^. L.start <= r1 ^. L.end + +-- --------------------------------------------------------------------- + +allLspCmdIds' :: T.Text -> IdePlugins ideState -> [T.Text] +allLspCmdIds' pid (IdePlugins ls) = + allLspCmdIds pid $ map (pluginId &&& pluginCommands) ls + +allLspCmdIds :: T.Text -> [(PluginId, [PluginCommand ideState])] -> [T.Text] +allLspCmdIds pid commands = concatMap go commands + where + go (plid, cmds) = map (mkLspCmdId pid plid . commandId) cmds + +-- --------------------------------------------------------------------- + + +type TextParser = P.Parsec Void T.Text + +-- | Unescape printable escape sequences within double quotes. +-- This is useful if you have to call 'show' indirectly, and it escapes some characters which you would prefer to +-- display as is. +unescape :: T.Text -> T.Text +unescape input = + case P.runParser escapedTextParser "inline" input of + Left _ -> input + Right strs -> T.pack strs + +-- | Parser for a string that contains double quotes. Returns unescaped string. 
+escapedTextParser :: TextParser String +escapedTextParser = concat <$> P.many (outsideStringLiteral P.<|> stringLiteral) + where + outsideStringLiteral :: TextParser String + outsideStringLiteral = P.someTill (P.anySingleBut '"') (P.lookAhead (void (P.char '"') P.<|> P.eof)) + + stringLiteral :: TextParser String + stringLiteral = do + inside <- P.char '"' >> P.manyTill P.charLiteral (P.char '"') + let f '"' = "\\\"" -- double quote should still be escaped + -- Despite the docs, 'showLitChar' and 'showLitString' from 'Data.Char' DOES ESCAPE unicode printable + -- characters. So we need to call 'isPrint' from 'Data.Char' manually. + f ch = if isPrint ch then [ch] else showLitChar ch "" + inside' = concatMap f inside + + pure $ "\"" <> inside' <> "\"" + +-- --------------------------------------------------------------------- + +-- | toAbsolute +-- use `toAbsolute` to state our intention that we are actually make a path absolute +-- the first argument should be the root directory +-- the second argument should be the relative path +toAbsolute :: FilePath -> FilePath -> FilePath +toAbsolute = () diff --git a/hls-plugin-api/src/Ide/Types.hs b/hls-plugin-api/src/Ide/Types.hs new file mode 100644 index 0000000000..3a06656a77 --- /dev/null +++ b/hls-plugin-api/src/Ide/Types.hs @@ -0,0 +1,1295 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE CUSKs #-} +{-# LANGUAGE DefaultSignatures #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE MonadComprehensions #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Types +( PluginDescriptor(..), defaultPluginDescriptor, defaultCabalPluginDescriptor +, defaultPluginPriority +, describePlugin +, IdeCommand(..) +, IdeMethod(..) +, IdeNotification(..) +, IdePlugins(IdePlugins, ipMap) +, DynFlagsModifications(..) +, Config(..), PluginConfig(..), CheckParents(..), SessionLoadingPreferenceConfig(..) +, ConfigDescriptor(..), defaultConfigDescriptor, configForPlugin +, CustomConfig(..), mkCustomConfig +, FallbackCodeActionParams(..) +, FormattingType(..), FormattingMethod, FormattingHandler +, HasTracing(..) +, PluginCommand(..), CommandId(..), CommandFunction, mkLspCommand, mkLspCmdId +, PluginId(..) +, PluginHandler(..), mkPluginHandler +, HandlerM, runHandlerM, pluginGetClientCapabilities, pluginSendNotification, pluginSendRequest, pluginWithIndefiniteProgress +, PluginHandlers(..) +, PluginMethod(..) +, PluginMethodHandler +, PluginNotificationHandler(..), mkPluginNotificationHandler +, PluginNotificationHandlers(..) +, PluginRequestMethod(..) 
+, getProcessID, getPid +, installSigUsr1Handler +, lookupCommandProvider +, ResolveFunction +, mkResolveHandler +) + where + +#ifdef mingw32_HOST_OS + +import qualified System.Win32.Process as P (getCurrentProcessId) + +#else + +import qualified System.Posix.Process as P (getProcessID) +import System.Posix.Signals + +#endif + +import Control.Applicative ((<|>)) +import Control.Arrow ((&&&)) +import Control.Lens (_Just, view, (.~), (?~), (^.), + (^?)) +import Control.Monad (void) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Except (ExceptT, runExceptT) +import Data.Aeson hiding (Null, defaultOptions) +import qualified Data.Aeson.Types as A +import Data.Default +import Data.Dependent.Map (DMap) +import qualified Data.Dependent.Map as DMap +import qualified Data.DList as DList +import Data.GADT.Compare +import Data.Hashable (Hashable) +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Kind (Type) +import Data.List.Extra (find, sortOn) +import Data.List.NonEmpty (NonEmpty (..), toList) +import qualified Data.Map as Map +import Data.Maybe +import Data.Ord +import Data.Semigroup +import Data.String +import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) +import Development.IDE.Graph +import GHC (DynFlags) +import GHC.Generics +import Ide.Plugin.Error +import Ide.Plugin.HandleRequestTypes +import Ide.Plugin.Properties +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Language.LSP.Server +import Language.LSP.VFS +import Numeric.Natural +import OpenTelemetry.Eventlog +import Options.Applicative (ParserInfo) +import Prettyprinter as PP +import System.FilePath +import System.IO.Unsafe +import Text.Regex.TDFA.Text () +import UnliftIO (MonadUnliftIO) + +#if !MIN_VERSION_base(4,20,0) +import Data.Foldable (foldl') +#endif + +-- --------------------------------------------------------------------- + +data IdePlugins ideState = IdePlugins_ + { ipMap_ :: HashMap PluginId (PluginDescriptor ideState) + , lookupCommandProvider :: CommandId -> Maybe PluginId + } + +-- | Smart constructor that deduplicates plugins +pattern IdePlugins :: [PluginDescriptor ideState] -> IdePlugins ideState +pattern IdePlugins{ipMap} <- IdePlugins_ (sortOn (Down . pluginPriority) . HashMap.elems -> ipMap) _ + where + IdePlugins ipMap = IdePlugins_{ipMap_ = HashMap.fromList $ (pluginId &&& id) <$> ipMap + , lookupCommandProvider = lookupPluginId ipMap + } +{-# COMPLETE IdePlugins #-} + +instance Semigroup (IdePlugins a) where + (IdePlugins_ a f) <> (IdePlugins_ b g) = IdePlugins_ (a <> b) (\x -> f x <|> g x) + +instance Monoid (IdePlugins a) where + mempty = IdePlugins_ mempty (const Nothing) + +-- | Lookup the plugin that exposes a particular command +lookupPluginId :: [PluginDescriptor a] -> CommandId -> Maybe PluginId +lookupPluginId ls cmd = pluginId <$> find go ls + where + go desc = cmd `elem` map commandId (pluginCommands desc) + +-- | Hooks for modifying the 'DynFlags' at different times of the compilation +-- process. Plugins can install a 'DynFlagsModifications' via +-- 'pluginModifyDynflags' in their 'PluginDescriptor'. +data DynFlagsModifications = + DynFlagsModifications + { -- | Invoked immediately at the package level. Changes to the 'DynFlags' + -- made in 'dynFlagsModifyGlobal' are guaranteed to be seen everywhere in + -- the compilation pipeline. 
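As a hedged sketch of how a plugin might use these hooks once the record below is in place: `xopt_set` and the `LambdaCase` extension constructor are GHC API names (their module locations vary by GHC version), and the descriptor field `pluginModifyDynflags` is the one mentioned in the Haddock above. None of this snippet is part of the patch.

```haskell
-- Hypothetical plugin: enable LambdaCase only while parsing.
import GHC.Driver.Session (xopt_set)               -- assumed GHC >= 9.x module layout
import GHC.LanguageExtensions.Type (Extension (LambdaCase))

parserOnlyLambdaCase :: DynFlagsModifications
parserOnlyLambdaCase =
  mempty { dynFlagsModifyParser = \df -> xopt_set df LambdaCase }

-- which a descriptor would install roughly as:
--   myDescriptor { pluginModifyDynflags = parserOnlyLambdaCase }
```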
+ dynFlagsModifyGlobal :: DynFlags -> DynFlags + -- | Invoked just before the parsing step, and reset immediately + -- afterwards. 'dynFlagsModifyParser' allows plugins to enable language + -- extensions only during parsing. for example, to let them enable + -- certain pieces of syntax. + , dynFlagsModifyParser :: DynFlags -> DynFlags + } + +instance Semigroup DynFlagsModifications where + DynFlagsModifications g1 p1 <> DynFlagsModifications g2 p2 = + DynFlagsModifications (g2 . g1) (p2 . p1) + +instance Monoid DynFlagsModifications where + mempty = DynFlagsModifications id id + +-- --------------------------------------------------------------------- + +newtype IdeCommand state = IdeCommand (state -> IO ()) +instance Show (IdeCommand st) where show _ = "" + +-- --------------------------------------------------------------------- + +-- | We (initially anyway) mirror the hie configuration, so that existing +-- clients can simply switch executable and not have any nasty surprises. There +-- will initially be surprises relating to config options being ignored though. +data Config = + Config + { checkParents :: CheckParents + , checkProject :: !Bool + , formattingProvider :: !T.Text + , cabalFormattingProvider :: !T.Text + , maxCompletions :: !Int + , sessionLoading :: !SessionLoadingPreferenceConfig + , plugins :: !(Map.Map PluginId PluginConfig) + } deriving (Show,Eq) + +instance ToJSON Config where + toJSON Config{..} = + object [ "checkParents" .= checkParents + , "checkProject" .= checkProject + , "formattingProvider" .= formattingProvider + , "cabalFormattingProvider" .= cabalFormattingProvider + , "maxCompletions" .= maxCompletions + , "sessionLoading" .= sessionLoading + , "plugin" .= Map.mapKeysMonotonic (\(PluginId p) -> p) plugins + ] + +instance Default Config where + def = Config + { checkParents = CheckOnSave + , checkProject = True + , formattingProvider = "ormolu" + -- , formattingProvider = "floskell" + -- , formattingProvider = "stylish-haskell" + , cabalFormattingProvider = "cabal-gild" + -- , cabalFormattingProvider = "cabal-fmt" + -- this string value needs to kept in sync with the value provided in HlsPlugins + , maxCompletions = 40 + , sessionLoading = PreferSingleComponentLoading + , plugins = mempty + } + +data CheckParents + -- Note that ordering of constructors is meaningful and must be monotonically + -- increasing in the scenarios where parents are checked + = NeverCheck + | CheckOnSave + | AlwaysCheck + deriving stock (Eq, Ord, Show, Generic) + deriving anyclass (FromJSON, ToJSON) + + +data SessionLoadingPreferenceConfig + = PreferSingleComponentLoading + -- ^ Always load only a singleComponent when a new component + -- is discovered. + | PreferMultiComponentLoading + -- ^ Always prefer loading multiple components in the cradle + -- at once. This might not be always possible, if the tool doesn't + -- support multiple components loading. + -- + -- The cradle can decide how to handle these situations, and whether + -- to honour the preference at all. 
+ deriving stock (Eq, Ord, Show, Generic) + +instance Pretty SessionLoadingPreferenceConfig where + pretty PreferSingleComponentLoading = "Prefer Single Component Loading" + pretty PreferMultiComponentLoading = "Prefer Multiple Components Loading" + +instance ToJSON SessionLoadingPreferenceConfig where + toJSON PreferSingleComponentLoading = + String "singleComponent" + toJSON PreferMultiComponentLoading = + String "multipleComponents" + +instance FromJSON SessionLoadingPreferenceConfig where + parseJSON (String val) = case val of + "singleComponent" -> pure PreferSingleComponentLoading + "multipleComponents" -> pure PreferMultiComponentLoading + _ -> A.prependFailure "parsing SessionLoadingPreferenceConfig failed, " + (A.parseFail $ "Expected one of \"singleComponent\" or \"multipleComponents\" but got " <> T.unpack val ) + parseJSON o = A.prependFailure "parsing SessionLoadingPreferenceConfig failed, " + (A.typeMismatch "String" o) + +-- | A PluginConfig is a generic configuration for a given HLS plugin. It +-- provides a "big switch" to turn it on or off as a whole, as well as small +-- switches per feature, and a slot for custom config. +-- This provides a regular naming scheme for all plugin config. +data PluginConfig = + PluginConfig + { plcGlobalOn :: !Bool + , plcCallHierarchyOn :: !Bool + , plcCodeActionsOn :: !Bool + , plcCodeLensOn :: !Bool + , plcInlayHintsOn :: !Bool + , plcDiagnosticsOn :: !Bool + , plcHoverOn :: !Bool + , plcSymbolsOn :: !Bool + , plcCompletionOn :: !Bool + , plcRenameOn :: !Bool + , plcSelectionRangeOn :: !Bool + , plcFoldingRangeOn :: !Bool + , plcSemanticTokensOn :: !Bool + , plcConfig :: !Object + } deriving (Show,Eq) + +instance Default PluginConfig where + def = PluginConfig + { plcGlobalOn = True + , plcCallHierarchyOn = True + , plcCodeActionsOn = True + , plcCodeLensOn = True + , plcInlayHintsOn = True + , plcDiagnosticsOn = True + , plcHoverOn = True + , plcSymbolsOn = True + , plcCompletionOn = True + , plcRenameOn = True + , plcSelectionRangeOn = True + , plcFoldingRangeOn = True + , plcSemanticTokensOn = True + , plcConfig = mempty + } + +instance ToJSON PluginConfig where + toJSON (PluginConfig g ch ca ih cl d h s c rn sr fr st cfg) = r + where + r = object [ "globalOn" .= g + , "callHierarchyOn" .= ch + , "codeActionsOn" .= ca + , "codeLensOn" .= cl + , "inlayHintsOn" .= ih + , "diagnosticsOn" .= d + , "hoverOn" .= h + , "symbolsOn" .= s + , "completionOn" .= c + , "renameOn" .= rn + , "selectionRangeOn" .= sr + , "foldingRangeOn" .= fr + , "semanticTokensOn" .= st + , "config" .= cfg + ] + +-- --------------------------------------------------------------------- + +data PluginDescriptor (ideState :: Type) = + PluginDescriptor { pluginId :: !PluginId + , pluginDescription :: !T.Text + -- ^ Unique identifier of the plugin. + , pluginPriority :: Natural + -- ^ Plugin handlers are called in priority order, higher priority first + , pluginRules :: !(Rules ()) + , pluginCommands :: ![PluginCommand ideState] + , pluginHandlers :: PluginHandlers ideState + , pluginConfigDescriptor :: ConfigDescriptor + , pluginNotificationHandlers :: PluginNotificationHandlers ideState + , pluginModifyDynflags :: DynFlagsModifications + , pluginCli :: Maybe (ParserInfo (IdeCommand ideState)) + , pluginFileType :: [T.Text] + -- ^ File extension of the files the plugin is responsible for. + -- The plugin is only allowed to handle files with these extensions. + -- When writing handlers, etc. for this plugin it can be assumed that all handled files are of this type. 
+ -- The file extension must have a leading '.'. + } + +describePlugin :: PluginDescriptor c -> Doc ann +describePlugin p = + let + PluginId pid = pluginId p + pdesc = pluginDescription p + in pretty pid <> ":" <> nest 4 (PP.line <> pretty pdesc) + + +-- | An existential wrapper of 'Properties' +data CustomConfig = forall r. CustomConfig (Properties r) + +-- | Describes the configuration of a plugin. +-- A plugin may be configurable as can be seen below: +-- +-- @ +-- { +-- "plugin-id": { +-- "globalOn": true, +-- "codeActionsOn": true, +-- "codeLensOn": true, +-- "config": { +-- "property1": "foo" +-- } +-- } +-- } +-- @ +-- +-- @globalOn@, @codeActionsOn@, and @codeLensOn@ etc. are called generic configs +-- which can be inferred from handlers registered by the plugin. +-- @config@ is called custom config, which is defined using 'Properties'. +data ConfigDescriptor = ConfigDescriptor { + -- | Initial values for the generic config + configInitialGenericConfig :: PluginConfig, + -- | Whether or not to generate @diagnosticsOn@ config. + -- Diagnostics emit in arbitrary shake rules, + -- so we can't know statically if the plugin produces diagnostics + configHasDiagnostics :: Bool, + -- | Custom config. + configCustomConfig :: CustomConfig +} + +mkCustomConfig :: Properties r -> CustomConfig +mkCustomConfig = CustomConfig + +defaultConfigDescriptor :: ConfigDescriptor +defaultConfigDescriptor = + ConfigDescriptor Data.Default.def False (mkCustomConfig emptyProperties) + +-- | Lookup the current config for a plugin +configForPlugin :: Config -> PluginDescriptor c -> PluginConfig +configForPlugin config PluginDescriptor{..} + = Map.findWithDefault (configInitialGenericConfig pluginConfigDescriptor) pluginId (plugins config) + +-- | Checks that a specific plugin is globally enabled in order to respond to +-- requests +pluginEnabledGlobally :: PluginDescriptor c -> Config -> HandleRequestResult +pluginEnabledGlobally desc conf = if plcGlobalOn (configForPlugin conf desc) + then HandlesRequest + else DoesNotHandleRequest DisabledGlobally + +-- | Checks that a specific feature for a given plugin is enabled in order +-- to respond to requests +pluginFeatureEnabled :: (PluginConfig -> Bool) -> PluginDescriptor c -> Config -> HandleRequestResult +pluginFeatureEnabled f desc conf = if f (configForPlugin conf desc) + then HandlesRequest + else DoesNotHandleRequest FeatureDisabled + +-- |Determine whether this request should be routed to the plugin. Fails closed +-- if we can't determine which plugin it should be routed to. +pluginResolverResponsible :: L.HasData_ m (Maybe Value) => m -> PluginDescriptor c -> HandleRequestResult +pluginResolverResponsible + (view L.data_ -> (Just (fromJSON -> (Success (PluginResolveData o@(PluginId ot) _ _))))) + pluginDesc = + if pluginId pluginDesc == o + then HandlesRequest + else DoesNotHandleRequest $ NotResolveOwner ot +-- If we can't determine who this request belongs to, then we don't want any plugin +-- to handle it. +pluginResolverResponsible _ _ = DoesNotHandleRequest $ NotResolveOwner "(unable to determine resolve owner)" + +-- | Check whether the given plugin descriptor supports the file with +-- the given path. Compares the file extension from the msgParams with the +-- file extension the plugin is responsible for. +-- We are passing the msgParams here even though we only need the URI URI here. +-- If in the future we need to be able to provide only an URI it can be +-- separated again. 
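To illustrate how a plugin wires a custom configuration into its descriptor, here is a hypothetical sketch: the `#greeting` property is invented, while `emptyProperties`, `defineStringProperty` and the `(&)`-style chaining mirror the property tests added later in this patch (`(&)` is `Data.Function.(&)`, and `OverloadedLabels` is needed for the key).

```haskell
-- Hypothetical plugin configuration: one custom string property plus diagnostics.
myConfigDescriptor :: ConfigDescriptor
myConfigDescriptor = defaultConfigDescriptor
  { configHasDiagnostics = True
  , configCustomConfig = mkCustomConfig $
      emptyProperties
        & defineStringProperty #greeting
            "Greeting used by the demo code action" "Hello from HLS"
  }
```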
+pluginSupportsFileType :: (L.HasTextDocument m doc, L.HasUri doc Uri) => m -> PluginDescriptor c -> HandleRequestResult +pluginSupportsFileType msgParams pluginDesc = + case mfp of + Just fp | T.pack (takeExtension fp) `elem` pluginFileType pluginDesc -> HandlesRequest + _ -> DoesNotHandleRequest $ DoesNotSupportFileType (maybe "(unable to determine file type)" (T.pack . takeExtension) mfp) + where + mfp = uriToFilePath uri + uri = msgParams ^. L.textDocument . L.uri + +-- | Methods that can be handled by plugins. +-- 'ExtraParams' captures any extra data the IDE passes to the handlers for this method +-- Only methods for which we know how to combine responses can be instances of 'PluginMethod' +class HasTracing (MessageParams m) => PluginMethod (k :: MessageKind) (m :: Method ClientToServer k) where + + -- | Parse the configuration to check if this plugin is globally enabled, and + -- if the feature which handles this method is enabled. Perform sanity checks + -- on the message to see whether the plugin handles this message in particular. + -- This class is only used to determine whether a plugin can handle a specific + -- request. Commands and rules do not use this logic to determine whether or + -- not they are run. + -- + -- + -- A common reason why a plugin won't handle a request even though it is enabled: + -- * The plugin cannot handle requests associated with the specific URI + -- * Since the implementation of [cabal plugins](https://github.com/haskell/haskell-language-server/issues/2940) + -- HLS knows plugins specific to Haskell and specific to [Cabal file descriptions](https://cabal.readthedocs.io/en/3.6/cabal-package.html) + -- * The resolve request is not routed to that specific plugin. Each resolve + -- request needs to be routed to only one plugin. + -- + -- Strictly speaking, we are conflating two concepts here: + -- * Dynamically enabled (e.g. on a per-message basis) + -- * Statically enabled (e.g. by configuration in the lsp-client) + -- * Strictly speaking, this might also change dynamically + -- + -- But there is no use to split it up into two different methods for now. + handlesRequest + :: SMethod m + -- ^ Method type. + -> MessageParams m + -- ^ Whether a plugin is enabled might depend on the message parameters + -- e.g. 'pluginFileType' specifies which file extensions a plugin is allowed to handle + -> PluginDescriptor c + -- ^ Contains meta information such as PluginId and which file types this + -- plugin is able to handle. + -> Config + -- ^ Generic config description, expected to contain 'PluginConfig' configuration + -- for this plugin + -> HandleRequestResult + -- ^ Is this plugin enabled and allowed to respond to the given request + -- with the given parameters? 
+ + default handlesRequest :: (L.HasTextDocument (MessageParams m) doc, L.HasUri doc Uri) + => SMethod m -> MessageParams m -> PluginDescriptor c -> Config -> HandleRequestResult + handlesRequest _ params desc conf = + pluginEnabledGlobally desc conf <> pluginSupportsFileType params desc + +-- | Check if a plugin is enabled, if one of it's specific config's is enabled, +-- and if it supports the file +pluginEnabledWithFeature :: (L.HasTextDocument (MessageParams m) doc, L.HasUri doc Uri) + => (PluginConfig -> Bool) -> SMethod m -> MessageParams m + -> PluginDescriptor c -> Config -> HandleRequestResult +pluginEnabledWithFeature feature _ msgParams pluginDesc config = + pluginEnabledGlobally pluginDesc config + <> pluginFeatureEnabled feature pluginDesc config + <> pluginSupportsFileType msgParams pluginDesc + +-- | Check if a plugin is enabled, if one of it's specific configs is enabled, +-- and if it's the plugin responsible for a resolve request. +pluginEnabledResolve :: L.HasData_ s (Maybe Value) => (PluginConfig -> Bool) -> p -> s -> PluginDescriptor c -> Config -> HandleRequestResult +pluginEnabledResolve feature _ msgParams pluginDesc config = + pluginEnabledGlobally pluginDesc config + <> pluginFeatureEnabled feature pluginDesc config + <> pluginResolverResponsible msgParams pluginDesc + +instance PluginMethod Request Method_TextDocumentCodeAction where + handlesRequest = pluginEnabledWithFeature plcCodeActionsOn + +instance PluginMethod Request Method_CodeActionResolve where + -- See Note [Resolve in PluginHandlers] + handlesRequest = pluginEnabledResolve plcCodeActionsOn + +instance PluginMethod Request Method_TextDocumentDefinition where + handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc + +instance PluginMethod Request Method_TextDocumentTypeDefinition where + handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc + +instance PluginMethod Request Method_TextDocumentImplementation where + handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc + +instance PluginMethod Request Method_TextDocumentDocumentHighlight where + handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc + +instance PluginMethod Request Method_TextDocumentReferences where + handlesRequest _ msgParams pluginDesc _ = pluginSupportsFileType msgParams pluginDesc + +instance PluginMethod Request Method_WorkspaceSymbol where + -- Unconditionally enabled, but should it really be? 
+ handlesRequest _ _ _ _ = HandlesRequest + +instance PluginMethod Request Method_TextDocumentInlayHint where + handlesRequest = pluginEnabledWithFeature plcInlayHintsOn + +instance PluginMethod Request Method_InlayHintResolve where + handlesRequest = pluginEnabledResolve plcInlayHintsOn + +instance PluginMethod Request Method_TextDocumentCodeLens where + handlesRequest = pluginEnabledWithFeature plcCodeLensOn + +instance PluginMethod Request Method_CodeLensResolve where + -- See Note [Resolve in PluginHandlers] + handlesRequest = pluginEnabledResolve plcCodeLensOn + +instance PluginMethod Request Method_TextDocumentRename where + handlesRequest = pluginEnabledWithFeature plcRenameOn + +instance PluginMethod Request Method_TextDocumentPrepareRename where + handlesRequest = pluginEnabledWithFeature plcRenameOn + +instance PluginMethod Request Method_TextDocumentHover where + handlesRequest = pluginEnabledWithFeature plcHoverOn + +instance PluginMethod Request Method_TextDocumentDocumentSymbol where + handlesRequest = pluginEnabledWithFeature plcSymbolsOn + +instance PluginMethod Request Method_CompletionItemResolve where + -- See Note [Resolve in PluginHandlers] + handlesRequest = pluginEnabledResolve plcCompletionOn + +instance PluginMethod Request Method_TextDocumentCompletion where + handlesRequest = pluginEnabledWithFeature plcCompletionOn + +instance PluginMethod Request Method_TextDocumentFormatting where + handlesRequest _ msgParams pluginDesc conf = + (if PluginId (formattingProvider conf) == pid + || PluginId (cabalFormattingProvider conf) == pid + then HandlesRequest + else DoesNotHandleRequest (NotFormattingProvider (formattingProvider conf)) ) + <> pluginSupportsFileType msgParams pluginDesc + where + pid = pluginId pluginDesc + +instance PluginMethod Request Method_TextDocumentRangeFormatting where + handlesRequest _ msgParams pluginDesc conf = + (if PluginId (formattingProvider conf) == pid + || PluginId (cabalFormattingProvider conf) == pid + then HandlesRequest + else DoesNotHandleRequest (NotFormattingProvider (formattingProvider conf))) + <> pluginSupportsFileType msgParams pluginDesc + where + pid = pluginId pluginDesc + +instance PluginMethod Request Method_TextDocumentSemanticTokensFull where + handlesRequest = pluginEnabledWithFeature plcSemanticTokensOn + +instance PluginMethod Request Method_TextDocumentSemanticTokensFullDelta where + handlesRequest = pluginEnabledWithFeature plcSemanticTokensOn + +instance PluginMethod Request Method_TextDocumentPrepareCallHierarchy where + handlesRequest = pluginEnabledWithFeature plcCallHierarchyOn + +instance PluginMethod Request Method_TextDocumentSelectionRange where + handlesRequest = pluginEnabledWithFeature plcSelectionRangeOn + +instance PluginMethod Request Method_TextDocumentFoldingRange where + handlesRequest = pluginEnabledWithFeature plcFoldingRangeOn + +instance PluginMethod Request Method_CallHierarchyIncomingCalls where + -- This method has no URI parameter, thus no call to 'pluginResponsible' + handlesRequest _ _ pluginDesc conf = + pluginEnabledGlobally pluginDesc conf + <> pluginFeatureEnabled plcCallHierarchyOn pluginDesc conf + +instance PluginMethod Request Method_CallHierarchyOutgoingCalls where + -- This method has no URI parameter, thus no call to 'pluginResponsible' + handlesRequest _ _ pluginDesc conf = + pluginEnabledGlobally pluginDesc conf + <> pluginFeatureEnabled plcCallHierarchyOn pluginDesc conf + +instance PluginMethod Request Method_WorkspaceExecuteCommand where + handlesRequest _ _ _ _= 
HandlesRequest + +instance PluginMethod Request (Method_CustomMethod m) where + handlesRequest _ _ _ _ = HandlesRequest + +-- Plugin Notifications + +instance PluginMethod Notification Method_TextDocumentDidOpen where + +instance PluginMethod Notification Method_TextDocumentDidChange where + +instance PluginMethod Notification Method_TextDocumentDidSave where + +instance PluginMethod Notification Method_TextDocumentDidClose where + +instance PluginMethod Notification Method_WorkspaceDidChangeWatchedFiles where + -- This method has no URI parameter, thus no call to 'pluginResponsible'. + handlesRequest _ _ desc conf = pluginEnabledGlobally desc conf + +instance PluginMethod Notification Method_WorkspaceDidChangeWorkspaceFolders where + -- This method has no URI parameter, thus no call to 'pluginResponsible'. + handlesRequest _ _ desc conf = pluginEnabledGlobally desc conf + +instance PluginMethod Notification Method_WorkspaceDidChangeConfiguration where + -- This method has no URI parameter, thus no call to 'pluginResponsible'. + handlesRequest _ _ desc conf = pluginEnabledGlobally desc conf + +instance PluginMethod Notification Method_Initialized where + -- This method has no URI parameter, thus no call to 'pluginResponsible'. + handlesRequest _ _ desc conf = pluginEnabledGlobally desc conf + + +-- --------------------------------------------------------------------- +-- Plugin Requests +-- --------------------------------------------------------------------- + +class PluginMethod Request m => PluginRequestMethod (m :: Method ClientToServer Request) where + -- | How to combine responses from different plugins. + -- + -- For example, for Hover requests, we might have multiple producers of + -- Hover information. We do not want to decide which one to display to the user + -- but instead allow to define how to merge two hover request responses into one + -- glorious hover box. + -- + -- However, as sometimes only one handler of a request can realistically exist + -- (such as TextDocumentFormatting), it is safe to just unconditionally report + -- back one arbitrary result (arbitrary since it should only be one anyway). + combineResponses + :: SMethod m + -> Config -- ^ IDE Configuration + -> ClientCapabilities + -> MessageParams m + -> NonEmpty (MessageResult m) -> MessageResult m + + default combineResponses :: Semigroup (MessageResult m) + => SMethod m -> Config -> ClientCapabilities -> MessageParams m -> NonEmpty (MessageResult m) -> MessageResult m + combineResponses _method _config _caps _params = sconcat + + + +--- +instance PluginRequestMethod Method_TextDocumentCodeAction where + combineResponses _method _config (ClientCapabilities _ textDocCaps _ _ _ _) (CodeActionParams _ _ _ _ context) resps = + InL $ fmap compat $ concatMap (filter wasRequested) $ mapMaybe nullToMaybe $ toList resps + where + compat :: (Command |? CodeAction) -> (Command |? CodeAction) + compat x@(InL _) = x + compat x@(InR action) + | Just _ <- textDocCaps >>= _codeAction >>= _codeActionLiteralSupport + = x + | otherwise = InL cmd + where + cmd = mkLspCommand "hls" "fallbackCodeAction" (action ^. L.title) (Just cmdParams) + cmdParams = [toJSON (FallbackCodeActionParams (action ^. L.edit) (action ^. L.command))] + + wasRequested :: (Command |? 
CodeAction) -> Bool + wasRequested (InL _) = True + wasRequested (InR ca) + | Nothing <- _only context = True + | Just allowed <- _only context + -- See https://github.com/microsoft/language-server-protocol/issues/970 + -- This is somewhat vague, but due to the hierarchical nature of action kinds, we + -- should check whether the requested kind is a *prefix* of the action kind. + -- That means, for example, we will return actions with kinds `quickfix.import` and + -- `quickfix.somethingElse` if the requested kind is `quickfix`. + , Just caKind <- ca ^. L.kind = any (`codeActionKindSubsumes` caKind) allowed + | otherwise = False + +instance PluginRequestMethod Method_CodeActionResolve where + -- A resolve request should only have one response. + -- See Note [Resolve in PluginHandlers]. + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentDefinition where + combineResponses _ _ caps _ (x :| xs) + | Just (Just True) <- caps ^? (L.textDocument . _Just . L.definition . _Just . L.linkSupport) = foldl' mergeDefinitions x xs + | otherwise = downgradeLinks $ foldl' mergeDefinitions x xs + +instance PluginRequestMethod Method_TextDocumentTypeDefinition where + combineResponses _ _ caps _ (x :| xs) + | Just (Just True) <- caps ^? (L.textDocument . _Just . L.typeDefinition . _Just . L.linkSupport) = foldl' mergeDefinitions x xs + | otherwise = downgradeLinks $ foldl' mergeDefinitions x xs + +instance PluginRequestMethod Method_TextDocumentImplementation where + combineResponses _ _ caps _ (x :| xs) + | Just (Just True) <- caps ^? (L.textDocument . _Just . L.implementation . _Just . L.linkSupport) = foldl' mergeDefinitions x xs + | otherwise = downgradeLinks $ foldl' mergeDefinitions x xs + +instance PluginRequestMethod Method_TextDocumentDocumentHighlight where + +instance PluginRequestMethod Method_TextDocumentReferences where + +instance PluginRequestMethod Method_WorkspaceSymbol where + -- TODO: combine WorkspaceSymbol. Currently all WorkspaceSymbols are dumped + -- as it is new of lsp-types 2.0.0.0 + combineResponses _ _ _ _ xs = InL $ mconcat $ takeLefts $ toList xs + +instance PluginRequestMethod Method_TextDocumentCodeLens where + +instance PluginRequestMethod Method_CodeLensResolve where + -- A resolve request should only ever get one response. + -- See note Note [Resolve in PluginHandlers] + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentRename where + +instance PluginRequestMethod Method_TextDocumentPrepareRename where + -- TODO more intelligent combining? + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentHover where + combineResponses _ _ _ _ (mapMaybe nullToMaybe . toList -> (hs :: [Hover])) = + if null hs + then InR Null + else InL $ Hover (InL mcontent) r + where + r = listToMaybe $ mapMaybe (^. L.range) hs + -- We are only taking MarkupContent here, because MarkedStrings have been + -- deprecated for a while and don't occur in the hls codebase + mcontent :: MarkupContent + mcontent = mconcat $ takeLefts $ map (^. L.contents) hs + +instance PluginRequestMethod Method_TextDocumentDocumentSymbol where + combineResponses _ _ (ClientCapabilities _ tdc _ _ _ _) params xs = res + where + uri' = params ^. L.textDocument . 
L.uri + supportsHierarchy = Just True == (tdc >>= _documentSymbol >>= _hierarchicalDocumentSymbolSupport) + dsOrSi :: [Either [SymbolInformation] [DocumentSymbol]] + dsOrSi = toEither <$> mapMaybe nullToMaybe' (toList xs) + res :: [SymbolInformation] |? ([DocumentSymbol] |? Null) + res + | supportsHierarchy = InR $ InL $ concatMap (either (fmap siToDs) id) dsOrSi + | otherwise = InL $ concatMap (either id ( concatMap dsToSi)) dsOrSi + -- Is this actually a good conversion? It's what there was before, but some + -- things such as tags are getting lost + siToDs :: SymbolInformation -> DocumentSymbol + siToDs (SymbolInformation name kind _tags cont dep (Location _uri range) ) + = DocumentSymbol name cont kind Nothing dep range range Nothing + dsToSi = go Nothing + go :: Maybe T.Text -> DocumentSymbol -> [SymbolInformation] + go parent ds = + let children' :: [SymbolInformation] + children' = concatMap (go (Just name')) (fromMaybe mempty (ds ^. L.children)) + loc = Location uri' (ds ^. L.range) + name' = ds ^. L.name + si = SymbolInformation name' (ds ^. L.kind) Nothing parent (ds ^. L.deprecated) loc + in [si] <> children' + +instance PluginRequestMethod Method_CompletionItemResolve where + -- A resolve request should only have one response. + -- See Note [Resolve in PluginHandlers] + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentCompletion where + combineResponses _ conf _ _ (toList -> xs) = snd $ consumeCompletionResponse limit $ combine xs + where + limit = maxCompletions conf + combine :: [[CompletionItem] |? (CompletionList |? Null)] -> ([CompletionItem] |? (CompletionList |? Null)) + combine cs = go True mempty cs + + go :: Bool -> DList.DList CompletionItem -> [[CompletionItem] |? (CompletionList |? Null)] -> ([CompletionItem] |? (CompletionList |? Null)) + go !comp acc [] = + InR (InL (CompletionList comp Nothing ( DList.toList acc))) + go comp acc ((InL ls) : rest) = + go comp (acc <> DList.fromList ls) rest + go comp acc ( (InR (InL (CompletionList comp' _ ls))) : rest) = + go (comp && comp') (acc <> DList.fromList ls) rest + go comp acc ( (InR (InR Null)) : rest) = + go comp acc rest + -- boolean disambiguators + isCompleteResponse, isIncompleteResponse :: Bool + isIncompleteResponse = True + isCompleteResponse = False + consumeCompletionResponse :: Int -> ([CompletionItem] |? (CompletionList |? Null)) -> (Int, [CompletionItem] |? (CompletionList |? 
Null)) + consumeCompletionResponse limit it@(InR (InL (CompletionList _ _ xx))) = + case splitAt limit xx of + -- consumed all the items, return the result as is + (_, []) -> (limit - length xx, it) + -- need to crop the response, set the 'isIncomplete' flag + (xx', _) -> (0, InR (InL (CompletionList isIncompleteResponse Nothing xx'))) + consumeCompletionResponse n (InL xx) = + consumeCompletionResponse n (InR (InL (CompletionList isCompleteResponse Nothing xx))) + consumeCompletionResponse n (InR (InR Null)) = (n, InR (InR Null)) +instance PluginRequestMethod Method_TextDocumentFormatting where + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentRangeFormatting where + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentPrepareCallHierarchy where + +instance PluginRequestMethod Method_TextDocumentSelectionRange where + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentFoldingRange where + combineResponses _ _ _ _ x = sconcat x + +instance PluginRequestMethod Method_CallHierarchyIncomingCalls where + +instance PluginRequestMethod Method_CallHierarchyOutgoingCalls where + +instance PluginRequestMethod (Method_CustomMethod m) where + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentSemanticTokensFull where + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentSemanticTokensFullDelta where + combineResponses _ _ _ _ (x :| _) = x + +instance PluginRequestMethod Method_TextDocumentInlayHint where + combineResponses _ _ _ _ x = sconcat x + +takeLefts :: [a |? b] -> [a] +takeLefts = mapMaybe (\x -> [res | (InL res) <- Just x]) + +nullToMaybe' :: (a |? (b |? Null)) -> Maybe (a |? b) +nullToMaybe' (InL x) = Just $ InL x +nullToMaybe' (InR (InL x)) = Just $ InR x +nullToMaybe' (InR (InR _)) = Nothing + +type Definitions = (Definition |? ([DefinitionLink] |? Null)) + +-- | Merges two definition responses (TextDocumentDefinition | TextDocumentTypeDefinition) +-- into one preserving all locations and their order (including order of the responses). +-- Upgrades Location(s) into LocationLink(s) when one of the responses is LocationLink(s). With following fields: +-- * LocationLink.originSelectionRange = Nothing +-- * LocationLink.targetUri = Location.Uri +-- * LocationLink.targetRange = Location.Range +-- * LocationLink.targetSelectionRange = Location.Range +-- Ignores Null responses. 
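A concrete case may make the merge easier to follow. The values below are invented, `mkRange` is the lsp-types helper already used in the doctests above, and `mergeDefinitions` is the function defined just below (assumed to be in scope within this module).

```haskell
-- Sketch: a plain Location answer merged with a LocationLink answer.
-- The Location is upgraded to a LocationLink and response order is preserved.
demoMerge :: Definitions
demoMerge = mergeDefinitions defsA defsB
  where
    uriA = Uri "file:///A.hs"
    uriB = Uri "file:///B.hs"
    defsA = InL (Definition (InL (Location uriA (mkRange 3 0 3 5))))
    defsB = InR (InL [DefinitionLink (LocationLink Nothing uriB (mkRange 10 0 10 7) (mkRange 10 0 10 7))])
-- demoMerge ==
--   InR (InL [ DefinitionLink (LocationLink Nothing uriA (mkRange 3 0 3 5) (mkRange 3 0 3 5))
--            , DefinitionLink (LocationLink Nothing uriB (mkRange 10 0 10 7) (mkRange 10 0 10 7)) ])
```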
+mergeDefinitions :: Definitions -> Definitions -> Definitions +mergeDefinitions definitions1 definitions2 = case (definitions1, definitions2) of + (InR (InR Null), def2) -> def2 + (def1, InR (InR Null)) -> def1 + (InL def1, InL def2) -> InL $ mergeDefs def1 def2 + (InL def1, InR (InL links)) -> InR $ InL (defToLinks def1 ++ links) + (InR (InL links), InL def2) -> InR $ InL (links ++ defToLinks def2) + (InR (InL links1), InR (InL links2)) -> InR $ InL (links1 ++ links2) + where + defToLinks :: Definition -> [DefinitionLink] + defToLinks (Definition (InL location)) = [locationToDefinitionLink location] + defToLinks (Definition (InR locations)) = map locationToDefinitionLink locations + + locationToDefinitionLink :: Location -> DefinitionLink + locationToDefinitionLink Location{_uri, _range} = DefinitionLink LocationLink{_originSelectionRange = Nothing, _targetUri = _uri, _targetRange = _range, _targetSelectionRange = _range} + + mergeDefs :: Definition -> Definition -> Definition + mergeDefs (Definition (InL loc1)) (Definition (InL loc2)) = Definition $ InR [loc1, loc2] + mergeDefs (Definition (InR locs1)) (Definition (InL loc2)) = Definition $ InR (locs1 ++ [loc2]) + mergeDefs (Definition (InL loc1)) (Definition (InR locs2)) = Definition $ InR (loc1 : locs2) + mergeDefs (Definition (InR locs1)) (Definition (InR locs2)) = Definition $ InR (locs1 ++ locs2) + +downgradeLinks :: Definitions -> Definitions +downgradeLinks (InR (InL links)) = InL . Definition . InR . map linkToLocation $ links + where + linkToLocation :: DefinitionLink -> Location + linkToLocation (DefinitionLink LocationLink{_targetUri, _targetRange}) = Location {_uri = _targetUri, _range = _targetRange} +downgradeLinks defs = defs +-- --------------------------------------------------------------------- +-- Plugin Notifications +-- --------------------------------------------------------------------- + +-- | Plugin Notification methods. No specific methods at the moment, but +-- might contain more in the future. +class PluginMethod Notification m => PluginNotificationMethod (m :: Method ClientToServer Notification) where + + +instance PluginNotificationMethod Method_TextDocumentDidOpen where + +instance PluginNotificationMethod Method_TextDocumentDidChange where + +instance PluginNotificationMethod Method_TextDocumentDidSave where + +instance PluginNotificationMethod Method_TextDocumentDidClose where + +instance PluginNotificationMethod Method_WorkspaceDidChangeWatchedFiles where + +instance PluginNotificationMethod Method_WorkspaceDidChangeWorkspaceFolders where + +instance PluginNotificationMethod Method_WorkspaceDidChangeConfiguration where + +instance PluginNotificationMethod Method_Initialized where + +-- --------------------------------------------------------------------- + +-- | Methods which have a PluginMethod instance +data IdeMethod (m :: Method ClientToServer Request) = PluginRequestMethod m => IdeMethod (SMethod m) +instance GEq IdeMethod where + geq (IdeMethod a) (IdeMethod b) = geq a b +instance GCompare IdeMethod where + gcompare (IdeMethod a) (IdeMethod b) = gcompare a b + +-- | Methods which have a PluginMethod instance +data IdeNotification (m :: Method ClientToServer Notification) = PluginNotificationMethod m => IdeNotification (SMethod m) +instance GEq IdeNotification where + geq (IdeNotification a) (IdeNotification b) = geq a b +instance GCompare IdeNotification where + gcompare (IdeNotification a) (IdeNotification b) = gcompare a b + +-- | Restricted version of 'LspM' specific to plugins. 
+-- +-- We use this monad for running plugins instead of 'LspM', since there are +-- parts of the LSP server state which plugins should not access directly, but +-- instead only via the build system. +newtype HandlerM config a = HandlerM { _runHandlerM :: LspM config a } + deriving newtype (Applicative, Functor, Monad, MonadIO, MonadUnliftIO) + +runHandlerM :: HandlerM config a -> LspM config a +runHandlerM = _runHandlerM + +-- | Wrapper of 'getClientCapabilities' for HandlerM +pluginGetClientCapabilities :: HandlerM config ClientCapabilities +pluginGetClientCapabilities = HandlerM getClientCapabilities + +-- | Wrapper of 'sendNotification for HandlerM +-- +-- TODO: Return notification in result instead of calling `sendNotification` directly +pluginSendNotification :: forall (m :: Method ServerToClient Notification) config. SServerMethod m -> MessageParams m -> HandlerM config () +pluginSendNotification smethod params = HandlerM $ sendNotification smethod params + +-- | Wrapper of 'sendRequest' for HandlerM +-- +-- TODO: Return request in result instead of calling `sendRequest` directly +pluginSendRequest :: forall (m :: Method ServerToClient Request) config. SServerMethod m -> MessageParams m -> (Either (TResponseError m) (MessageResult m) -> HandlerM config ()) -> HandlerM config (LspId m) +pluginSendRequest smethod params action = HandlerM $ sendRequest smethod params (runHandlerM . action) + +-- | Wrapper of 'withIndefiniteProgress' for HandlerM +pluginWithIndefiniteProgress :: T.Text -> Maybe ProgressToken -> ProgressCancellable -> ((T.Text -> HandlerM config ()) -> HandlerM config a) -> HandlerM config a +pluginWithIndefiniteProgress title progressToken cancellable updateAction = + HandlerM $ + withIndefiniteProgress title progressToken cancellable $ \putUpdate -> + runHandlerM $ updateAction (HandlerM . putUpdate) + +-- | Combine handlers for the +newtype PluginHandler a (m :: Method ClientToServer Request) + = PluginHandler (PluginId -> a -> MessageParams m -> HandlerM Config (NonEmpty (Either PluginError (MessageResult m)))) + +newtype PluginNotificationHandler a (m :: Method ClientToServer Notification) + = PluginNotificationHandler (PluginId -> a -> VFS -> MessageParams m -> LspM Config ()) + +newtype PluginHandlers a = PluginHandlers (DMap IdeMethod (PluginHandler a)) +newtype PluginNotificationHandlers a = PluginNotificationHandlers (DMap IdeNotification (PluginNotificationHandler a)) +instance Semigroup (PluginHandlers a) where + (PluginHandlers a) <> (PluginHandlers b) = PluginHandlers $ DMap.unionWithKey go a b + where + go _ (PluginHandler f) (PluginHandler g) = PluginHandler $ \pid ide params -> + (<>) <$> f pid ide params <*> g pid ide params + +instance Monoid (PluginHandlers a) where + mempty = PluginHandlers mempty + +instance Semigroup (PluginNotificationHandlers a) where + (PluginNotificationHandlers a) <> (PluginNotificationHandlers b) = PluginNotificationHandlers $ DMap.unionWithKey go a b + where + go _ (PluginNotificationHandler f) (PluginNotificationHandler g) = PluginNotificationHandler $ \pid ide vfs params -> + f pid ide vfs params >> g pid ide vfs params + +instance Monoid (PluginNotificationHandlers a) where + mempty = PluginNotificationHandlers mempty + +type PluginMethodHandler a m = a -> PluginId -> MessageParams m -> ExceptT PluginError (HandlerM Config) (MessageResult m) + +type PluginNotificationMethodHandler a m = a -> VFS -> PluginId -> MessageParams m -> LspM Config () + +-- | Make a handler for plugins. 
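As a sketch of how these wrappers are meant to be used from inside a handler (the handler below is hypothetical and returns no hover information; `lift` comes from `Control.Monad.Trans.Class`):

```haskell
-- Hypothetical hover handler that wraps its (here trivial) work in an
-- indefinite progress report sent to the client.
demoHover :: PluginMethodHandler ideState Method_TextDocumentHover
demoHover _ide _plId _params =
  lift $ pluginWithIndefiniteProgress "demo: hovering" Nothing NotCancellable $ \update -> do
    update "looking things up"
    pure (InR Null)  -- no hover information in this sketch
```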
For how resolve works with this see +-- Note [Resolve in PluginHandlers] +mkPluginHandler + :: forall ideState m. PluginRequestMethod m + => SClientMethod m + -> PluginMethodHandler ideState m + -> PluginHandlers ideState +mkPluginHandler m f = PluginHandlers $ DMap.singleton (IdeMethod m) (PluginHandler (f' m)) + where + f' :: SMethod m -> PluginId -> ideState -> MessageParams m -> HandlerM Config (NonEmpty (Either PluginError (MessageResult m))) + -- We need to have separate functions for each method that supports resolve, so far we only support CodeActions + -- CodeLens, and Completion methods. + f' SMethod_TextDocumentCodeAction pid ide params@CodeActionParams{_textDocument=TextDocumentIdentifier {_uri}} = + pure . fmap (wrapCodeActions pid _uri) <$> runExceptT (f ide pid params) + f' SMethod_TextDocumentCodeLens pid ide params@CodeLensParams{_textDocument=TextDocumentIdentifier {_uri}} = + pure . fmap (wrapCodeLenses pid _uri) <$> runExceptT (f ide pid params) + f' SMethod_TextDocumentCompletion pid ide params@CompletionParams{_textDocument=TextDocumentIdentifier {_uri}} = + pure . fmap (wrapCompletions pid _uri) <$> runExceptT (f ide pid params) + + -- This is the default case for all other methods + f' _ pid ide params = pure <$> runExceptT (f ide pid params) + + -- Todo: use fancy pancy lenses to make this a few lines + wrapCodeActions pid uri (InL ls) = + let wrapCodeActionItem pid uri (InR c) = InR $ wrapResolveData pid uri c + wrapCodeActionItem _ _ command@(InL _) = command + in InL $ wrapCodeActionItem pid uri <$> ls + wrapCodeActions _ _ (InR r) = InR r + + wrapCodeLenses pid uri (InL ls) = InL $ wrapResolveData pid uri <$> ls + wrapCodeLenses _ _ (InR r) = InR r + + wrapCompletions pid uri (InL ls) = InL $ wrapResolveData pid uri <$> ls + wrapCompletions pid uri (InR (InL cl@(CompletionList{_items}))) = + InR $ InL $ cl & L.items .~ (wrapResolveData pid uri <$> _items) + wrapCompletions _ _ (InR (InR r)) = InR $ InR r + +-- | Make a handler for plugins with no extra data +mkPluginNotificationHandler + :: PluginNotificationMethod m + => SClientMethod (m :: Method ClientToServer Notification) + -> PluginNotificationMethodHandler ideState m + -> PluginNotificationHandlers ideState +mkPluginNotificationHandler m f + = PluginNotificationHandlers $ DMap.singleton (IdeNotification m) (PluginNotificationHandler f') + where + f' pid ide vfs = f ide vfs pid + +defaultPluginPriority :: Natural +defaultPluginPriority = 1000 + +-- | Set up a plugin descriptor, initialized with default values. +-- This plugin descriptor is prepared for @haskell@ files, such as +-- +-- * @.hs@ +-- * @.lhs@ +-- * @.hs-boot@ +-- +-- and handlers will be enabled for files with the appropriate file +-- extensions. +defaultPluginDescriptor :: PluginId -> T.Text -> PluginDescriptor ideState +defaultPluginDescriptor plId desc = + PluginDescriptor + plId + desc + defaultPluginPriority + mempty + mempty + mempty + defaultConfigDescriptor + mempty + mempty + Nothing + [".hs", ".lhs", ".hs-boot"] + +-- | Set up a plugin descriptor, initialized with default values. +-- This plugin descriptor is prepared for @.cabal@ files and as such, +-- will only respond / run when @.cabal@ files are currently in scope. 
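Putting the pieces together, a minimal and purely illustrative plugin built from `defaultPluginDescriptor` and `mkPluginHandler` could look like the sketch below; the plugin id, description and hover text are invented and not part of the patch.

```haskell
-- Hypothetical example of a complete (if not very useful) plugin:
-- a single hover handler registered on the default descriptor.
demoPlugin :: PluginDescriptor ideState
demoPlugin = (defaultPluginDescriptor "demo" "Shows a static hover")
  { pluginHandlers = mkPluginHandler SMethod_TextDocumentHover hover
  }
  where
    hover :: PluginMethodHandler ideState Method_TextDocumentHover
    hover _ide _plId _params =
      pure $ InL (Hover (InL (MarkupContent MarkupKind_PlainText "Hello from the demo plugin")) Nothing)
```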
+-- +-- Handles files with the following extensions: +-- * @.cabal@ +defaultCabalPluginDescriptor :: PluginId -> T.Text -> PluginDescriptor ideState +defaultCabalPluginDescriptor plId desc = + PluginDescriptor + plId + desc + defaultPluginPriority + mempty + mempty + mempty + defaultConfigDescriptor + mempty + mempty + Nothing + [".cabal"] + +newtype CommandId = CommandId T.Text + deriving (Show, Read, Eq, Ord) +instance IsString CommandId where + fromString = CommandId . T.pack + +data PluginCommand ideState = forall a. (FromJSON a) => + PluginCommand { commandId :: CommandId + , commandDesc :: T.Text + , commandFunc :: CommandFunction ideState a + } + +-- --------------------------------------------------------------------- + +type CommandFunction ideState a + = ideState + -> Maybe ProgressToken + -> a + -> ExceptT PluginError (HandlerM Config) (Value |? Null) + +-- --------------------------------------------------------------------- + +type ResolveFunction ideState a (m :: Method ClientToServer Request) = + ideState + -> PluginId + -> MessageParams m + -> Uri + -> a + -> ExceptT PluginError (HandlerM Config) (MessageResult m) + +-- | Make a handler for resolve methods. In here we take your provided ResolveFunction +-- and turn it into a PluginHandlers. See Note [Resolve in PluginHandlers] +mkResolveHandler + :: forall ideState a m. (FromJSON a, PluginRequestMethod m, L.HasData_ (MessageParams m) (Maybe Value)) + => SClientMethod m + -> ResolveFunction ideState a m + -> PluginHandlers ideState +mkResolveHandler m f = mkPluginHandler m $ \ideState plId params -> do + case fromJSON <$> (params ^. L.data_) of + (Just (Success (PluginResolveData owner@(PluginId ownerName) uri value) )) -> do + if owner == plId + then + case fromJSON value of + Success decodedValue -> + let newParams = params & L.data_ ?~ value + in f ideState plId newParams uri decodedValue + Error msg -> + -- We are assuming that if we can't decode the data, that this + -- request belongs to another resolve handler for this plugin. + throwError (PluginRequestRefused + (NotResolveOwner (ownerName <> ": error decoding payload:" <> T.pack msg))) + -- If we are getting an owner that isn't us, this means that there is an + -- error, as we filter these our in `pluginEnabled` + else throwError $ PluginInternalError invalidRequest + -- If we are getting params without a decodable data field, this means that + -- there is an error, as we filter these our in `pluginEnabled` + (Just (Error err)) -> throwError $ PluginInternalError (parseError (params ^. L.data_) err) + -- If there are no params at all, this also means that there is an error, + -- as this is filtered out in `pluginEnabled` + _ -> throwError $ PluginInternalError invalidRequest + where invalidRequest = "The resolve request incorrectly got routed to the wrong resolve handler!" + parseError value err = "Unable to decode: " <> T.pack (show value) <> ". Error: " <> T.pack (show err) + +wrapResolveData :: L.HasData_ a (Maybe Value) => PluginId -> Uri -> a -> a +wrapResolveData pid uri hasData = + hasData & L.data_ .~ (toJSON . PluginResolveData pid uri <$> data_) + where data_ = hasData ^? L.data_ . _Just + +-- |Allow plugins to "own" resolve data, allowing only them to be queried for +-- the resolve action. 
This design has added flexibility at the cost of nested +-- Value types +data PluginResolveData = PluginResolveData { + resolvePlugin :: PluginId +, resolveURI :: Uri +, resolveValue :: Value +} + deriving (Generic, Show) + deriving anyclass (ToJSON, FromJSON) + +newtype PluginId = PluginId T.Text + deriving (Show, Read, Eq, Ord) + deriving newtype (ToJSON, FromJSON, Hashable) + +instance IsString PluginId where + fromString = PluginId . T.pack + + +-- --------------------------------------------------------------------- + +-- | Format the given Text as a whole or only a @Range@ of it. +-- Range must be relative to the text to format. +-- To format the whole document, read the Text from the file and use 'FormatText' +-- as the FormattingType. +data FormattingType = FormatText + | FormatRange Range + + +type FormattingMethod m = + ( L.HasOptions (MessageParams m) FormattingOptions + , L.HasTextDocument (MessageParams m) TextDocumentIdentifier + , MessageResult m ~ ([TextEdit] |? Null) + ) + +type FormattingHandler a + = a + -> Maybe ProgressToken + -> FormattingType + -> T.Text + -> NormalizedFilePath + -> FormattingOptions + -> ExceptT PluginError (HandlerM Config) ([TextEdit] |? Null) + +-- --------------------------------------------------------------------- + +data FallbackCodeActionParams = + FallbackCodeActionParams + { fallbackWorkspaceEdit :: Maybe WorkspaceEdit + , fallbackCommand :: Maybe Command + } + deriving (Generic, ToJSON, FromJSON) + +-- --------------------------------------------------------------------- + +otSetUri :: SpanInFlight -> Uri -> IO () +otSetUri sp (Uri t) = setTag sp "uri" (encodeUtf8 t) + +class HasTracing a where + traceWithSpan :: SpanInFlight -> a -> IO () + traceWithSpan _ _ = pure () + +instance {-# OVERLAPPABLE #-} (L.HasTextDocument a doc, L.HasUri doc Uri) => HasTracing a where + traceWithSpan sp a = otSetUri sp (a ^. L.textDocument . L.uri) + +instance HasTracing Value +instance HasTracing ExecuteCommandParams +instance HasTracing DidChangeWatchedFilesParams where + traceWithSpan sp DidChangeWatchedFilesParams{_changes} = + setTag sp "changes" (encodeUtf8 $ fromString $ show _changes) +instance HasTracing DidChangeWorkspaceFoldersParams +instance HasTracing DidChangeConfigurationParams +instance HasTracing InitializeParams +instance HasTracing InitializedParams +instance HasTracing WorkspaceSymbolParams where + traceWithSpan sp (WorkspaceSymbolParams _ _ query) = setTag sp "query" (encodeUtf8 query) +instance HasTracing CallHierarchyIncomingCallsParams +instance HasTracing CallHierarchyOutgoingCallsParams + +-- Instances for resolve types +instance HasTracing CodeAction +instance HasTracing CodeLens +instance HasTracing CompletionItem +instance HasTracing DocumentLink +instance HasTracing InlayHint +instance HasTracing WorkspaceSymbol +-- --------------------------------------------------------------------- +--Experimental resolve refactoring +{-# NOINLINE pROCESS_ID #-} +pROCESS_ID :: T.Text +pROCESS_ID = unsafePerformIO getPid + +mkLspCommand :: PluginId -> CommandId -> T.Text -> Maybe [Value] -> Command +mkLspCommand plid cn title args = Command title cmdId args + where + cmdId = mkLspCmdId pROCESS_ID plid cn + +mkLspCmdId :: T.Text -> PluginId -> CommandId -> T.Text +mkLspCmdId pid (PluginId plid) (CommandId cid) + = pid <> ":" <> plid <> ":" <> cid + +-- | Get the operating system process id for the running server +-- instance. 
This should be the same for the lifetime of the instance, +-- and different from that of any other currently running instance. +getPid :: IO T.Text +getPid = T.pack . show <$> getProcessID + +getProcessID :: IO Int +installSigUsr1Handler :: IO () -> IO () + +#ifdef mingw32_HOST_OS +getProcessID = fromIntegral <$> P.getCurrentProcessId +installSigUsr1Handler _ = return () + +#else +getProcessID = fromIntegral <$> P.getProcessID + +installSigUsr1Handler h = void $ installHandler sigUSR1 (Catch h) Nothing +#endif + +{- Note [Resolve in PluginHandlers] + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Resolve methods have a few guarantees that need to be made by HLS, + specifically they need to only be called once, as neither their errors nor + their responses can be easily combined. Whereas commands, which similarly have + the same requirements have their own codepaths for execution, for resolve + methods we are relying on the standard PluginHandlers codepath. + That isn't a problem, but it does mean we need to do some things extra for + these methods. + - First of all, whenever a handler that can be resolved sets the data_ field + in their response, we need to intercept it, and wrap it in a data type + PluginResolveData that allows us to route the future resolve request to the + specific plugin which is responsible for it. (We also throw in the URI for + convenience, because everyone needs that). We do that in mkPluginHandler. + - When we get any resolve requests we check their data field for our + PluginResolveData that will allow us to route the request to the right + plugin. If we can't find out which plugin to route the request to, then we + just don't route it at all. This is done in pluginEnabled, and + pluginResolverResponsible. + - Finally we have mkResolveHandler, which takes the resolve request and + unwraps the plugins data from our PluginResolveData, parses it and passes it + it on to the registered handler. + It should be noted that there are some restrictions with this approach: First, + if a plugin does not set the data_ field, than the request will not be able + to be resolved. This is because we only wrap data_ fields that have been set + with our PluginResolvableData tag. Second, if a plugin were to register two + resolve handlers for the same method, than our assumptions that we never have + two responses break, and behavior is undefined. 
+ -} diff --git a/hls-plugin-api/test/Ide/PluginUtilsTest.hs b/hls-plugin-api/test/Ide/PluginUtilsTest.hs new file mode 100644 index 0000000000..1fa1ace39b --- /dev/null +++ b/hls-plugin-api/test/Ide/PluginUtilsTest.hs @@ -0,0 +1,205 @@ +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +module Ide.PluginUtilsTest + ( tests + ) where + +import qualified Data.Aeson as A +import qualified Data.Aeson.Types as A +import Data.ByteString.Lazy (ByteString) +import Data.Function ((&)) +import qualified Data.Set as Set +import qualified Data.Text as T +import Ide.Plugin.Properties (KeyNamePath (..), + definePropertiesProperty, + defineStringProperty, + emptyProperties, toDefaultJSON, + toVSCodeExtensionSchema, + usePropertyByPath, + usePropertyByPathEither) +import qualified Ide.Plugin.RangeMap as RangeMap +import Ide.PluginUtils (extractTextInRange, unescape) +import Language.LSP.Protocol.Types (Position (..), Range (Range), + UInt, isSubrangeOf) +import Test.Tasty +import Test.Tasty.Golden (goldenVsStringDiff) +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck + +tests :: TestTree +tests = testGroup "PluginUtils" + [ unescapeTest + , extractTextInRangeTest + , localOption (QuickCheckMaxSize 10000) $ + testProperty "RangeMap-List filtering identical" $ + prop_rangemapListEq @Int + , propertyTest + ] + +unescapeTest :: TestTree +unescapeTest = testGroup "unescape" + [ testCase "no double quote" $ + unescape "hello世界" @?= "hello世界" + , testCase "whole string quoted" $ + unescape "\"hello\\19990\\30028\"" @?= "\"hello世界\"" + , testCase "text before quotes should not be unescaped" $ + unescape "\\19990a\"hello\\30028\"" @?= "\\19990a\"hello界\"" + , testCase "some text after quotes" $ + unescape "\"hello\\19990\\30028\"abc" @?= "\"hello世界\"abc" + , testCase "many pairs of quote" $ + unescape "oo\"hello\\19990\\30028\"abc\"\1087\1088\1080\1074\1077\1090\"hh" @?= "oo\"hello世界\"abc\"привет\"hh" + , testCase "double quote itself should not be unescaped" $ + unescape "\"\\\"\\19990o\"" @?= "\"\\\"世o\"" + , testCase "control characters should not be escaped" $ + unescape "\"\\n\\t\"" @?= "\"\\n\\t\"" + ] + +extractTextInRangeTest :: TestTree +extractTextInRangeTest = testGroup "extractTextInRange" + [ testCase "inline range" $ + extractTextInRange + ( Range (Position 0 3) (Position 3 5) ) + src + @?= T.intercalate "\n" + [ "ule Main where" + , "" + , "main :: IO ()" + , "main " + ] + , testCase "inline range with empty content" $ + extractTextInRange + ( Range (Position 0 0) (Position 0 1) ) + emptySrc + @?= "" + , testCase "multiline range with empty content" $ + extractTextInRange + ( Range (Position 0 0) (Position 1 0) ) + emptySrc + @?= "\n" + , testCase "multiline range" $ + extractTextInRange + ( Range (Position 1 0) (Position 4 0) ) + src + @?= T.unlines + [ "" + , "main :: IO ()" + , "main = do" + ] + , testCase "multiline range with end pos at the line below the last line" $ + extractTextInRange + ( Range (Position 2 0) (Position 5 0) ) + src + @?= T.unlines + [ "main :: IO ()" + , "main = do" + , " putStrLn \"hello, world\"" + ] + ] + where + src = T.unlines + [ "module Main where" + , "" + , "main :: IO ()" + , "main = do" + , " putStrLn \"hello, world\"" + ] + emptySrc = "\n" + +genRange :: Gen Range +genRange = oneof [ genRangeInline, genRangeMultiline ] + +genRangeInline :: Gen Range +genRangeInline = do + x1 <- genPosition + delta <- genRangeLength + let x2 = x1 { _character = _character x1 + delta } + pure $ Range x1 x2 + where + 
genRangeLength :: Gen UInt + genRangeLength = uInt (5, 50) + +genRangeMultiline :: Gen Range +genRangeMultiline = do + x1 <- genPosition + let heightDelta = 1 + secondX <- genSecond + let x2 = x1 { _line = _line x1 + heightDelta + , _character = secondX + } + pure $ Range x1 x2 + where + genSecond :: Gen UInt + genSecond = uInt (0, 10) + +genPosition :: Gen Position +genPosition = Position + <$> uInt (0, 1000) + <*> uInt (0, 150) + +uInt :: (Integer, Integer) -> Gen UInt +uInt (a, b) = fromInteger <$> chooseInteger (a, b) + +instance Arbitrary Range where + arbitrary = genRange + +prop_rangemapListEq :: (Show a, Ord a) => Range -> [(Range, a)] -> Property +prop_rangemapListEq r xs = + let filteredList = (map snd . filter (isSubrangeOf r . fst)) xs + filteredRangeMap = RangeMap.filterByRange r (RangeMap.fromList' xs) + in classify (null filteredList) "no matches" $ + cover 5 (length filteredList == 1) "1 match" $ + cover 2 (length filteredList > 1) ">1 matches" $ + Set.fromList filteredList === Set.fromList filteredRangeMap + + +gitDiff :: FilePath -> FilePath -> [String] +gitDiff fRef fNew = ["git", "-c", "core.fileMode=false", "diff", "-w", "--no-index", "--text", "--exit-code", fRef, fNew] + +goldenGitDiff :: TestName -> FilePath -> IO ByteString -> TestTree +goldenGitDiff name = goldenVsStringDiff name gitDiff + +testDir :: FilePath +testDir = "test/testdata/Property" + +propertyTest :: TestTree +propertyTest = testGroup "property api tests" [ + goldenGitDiff "property toVSCodeExtensionSchema" (testDir <> "/NestedPropertyVscode.json") (return $ A.encode $ A.object $ toVSCodeExtensionSchema "top." nestedPropertiesExample) + , goldenGitDiff "property toDefaultJSON" (testDir <> "/NestedPropertyDefault.json") (return $ A.encode $ A.object $ toDefaultJSON nestedPropertiesExample) + , testCase "parsePropertyPath single key path" $ do + let obj = A.object (toDefaultJSON nestedPropertiesExample) + let key1 = A.parseEither (A.withObject "test parsePropertyPath" $ \o -> do + let key1 = usePropertyByPathEither examplePath1 nestedPropertiesExample o + return key1) obj + key1 @?= Right (Right "baz") + , testCase "parsePropertyPath two key path" $ do + let obj = A.object (toDefaultJSON nestedPropertiesExample) + let key1 = A.parseEither (A.withObject "test parsePropertyPath" $ \o -> do + let key1 = usePropertyByPathEither examplePath2 nestedPropertiesExample o + return key1) obj + key1 @?= Right (Right "foo") + , testCase "parsePropertyPath two key path default" $ do + let obj = A.object [] + let key1 = A.parseEither (A.withObject "test parsePropertyPath" $ \o -> do + let key1 = usePropertyByPath examplePath2 nestedPropertiesExample o + return key1) obj + key1 @?= Right "foo" + , testCase "parsePropertyPath two key path not default" $ do + let obj = A.object (toDefaultJSON nestedPropertiesExample2) + let key1 = A.parseEither (A.withObject "test parsePropertyPath" $ \o -> do + let key1 = usePropertyByPathEither examplePath2 nestedPropertiesExample o + return key1) obj + key1 @?= Right (Right "xxx") + ] + where + nestedPropertiesExample = emptyProperties + & definePropertiesProperty #parent "parent" (emptyProperties & defineStringProperty #foo "foo" "foo" & defineStringProperty #boo "boo" "boo") + & defineStringProperty #baz "baz" "baz" + + nestedPropertiesExample2 = emptyProperties + & definePropertiesProperty #parent "parent" (emptyProperties & defineStringProperty #foo "foo" "xxx") + & defineStringProperty #baz "baz" "baz" + + examplePath1 = SingleKey #baz + examplePath2 = ConsKeysPath #parent 
(SingleKey #foo) diff --git a/hls-plugin-api/test/Ide/TypesTests.hs b/hls-plugin-api/test/Ide/TypesTests.hs new file mode 100644 index 0000000000..07556d625c --- /dev/null +++ b/hls-plugin-api/test/Ide/TypesTests.hs @@ -0,0 +1,249 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.TypesTests + ( tests + ) where +import Control.Lens ((?~), (^?)) +import Data.Default (Default (def)) +import Data.Function ((&)) +import Data.List.NonEmpty (NonEmpty ((:|))) +import Data.Maybe (isJust) +import qualified Data.Text as Text +import Ide.Types (PluginRequestMethod (combineResponses)) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (MessageParams, MessageResult, + SMethod (..)) +import Language.LSP.Protocol.Types (ClientCapabilities, + Definition (Definition), + DefinitionClientCapabilities (DefinitionClientCapabilities, _dynamicRegistration, _linkSupport), + DefinitionLink (DefinitionLink), + DefinitionParams (DefinitionParams, _partialResultToken, _position, _textDocument, _workDoneToken), + Location (Location), + LocationLink (LocationLink), + Null (Null), + Position (Position), + Range (Range), + TextDocumentClientCapabilities, + TextDocumentIdentifier (TextDocumentIdentifier), + TypeDefinitionClientCapabilities (TypeDefinitionClientCapabilities, _dynamicRegistration, _linkSupport), + TypeDefinitionParams (..), + Uri (Uri), _L, _R, _definition, + _typeDefinition, filePathToUri, + type (|?) (..)) +import Test.Tasty (TestTree, testGroup) +import Test.Tasty.HUnit (testCase, (@=?)) +import Test.Tasty.QuickCheck (ASCIIString (ASCIIString), + Arbitrary (arbitrary), Gen, + arbitraryBoundedEnum, cover, + listOf1, oneof, testProperty, + (===)) + +tests :: TestTree +tests = testGroup "PluginTypes" + [ combineResponsesTests ] + +combineResponsesTests :: TestTree +combineResponsesTests = testGroup "combineResponses" + [ combineResponsesTextDocumentDefinitionTests + , combineResponsesTextDocumentTypeDefinitionTests + ] + +combineResponsesTextDocumentDefinitionTests :: TestTree +combineResponsesTextDocumentDefinitionTests = testGroup "TextDocumentDefinition" $ + defAndTypeDefSharedTests SMethod_TextDocumentDefinition definitionParams + +combineResponsesTextDocumentTypeDefinitionTests :: TestTree +combineResponsesTextDocumentTypeDefinitionTests = testGroup "TextDocumentTypeDefinition" $ + defAndTypeDefSharedTests SMethod_TextDocumentTypeDefinition typeDefinitionParams + +defAndTypeDefSharedTests :: + ( MessageResult m ~ (Definition |? ([DefinitionLink] |? Null)) + , PluginRequestMethod m + ) + => SMethod m -> MessageParams m -> [TestTree] +defAndTypeDefSharedTests message params = + [ testCase "merges all single location responses into one response with all locations (without upgrading to links)" $ do + let pluginResponses :: NonEmpty (Definition |? ([DefinitionLink] |? Null)) + pluginResponses = + (InL . Definition . InL . Location testFileUri $ range1) :| + [ InL . Definition . InL . Location testFileUri $ range2 + , InL . Definition . InL . Location testFileUri $ range3 + ] + + result = combineResponses message def supportsLinkInAllDefinitionCaps params pluginResponses + + expectedResult :: Definition |? ([DefinitionLink] |? Null) + expectedResult = InL . Definition . InR $ + [ Location testFileUri range1 + , Location testFileUri range2 + , Location testFileUri range3 + ] + expectedResult @=? 
result + + , testCase "merges all location link responses into one with all links (with link support)" $ do + let pluginResponses :: NonEmpty (Definition |? ([DefinitionLink] |? Null)) + pluginResponses = + (InR . InL $ [DefinitionLink $ LocationLink Nothing testFileUri range1 range1]) :| + [ InR . InL $ + [ DefinitionLink $ LocationLink Nothing testFileUri range2 range2 + , DefinitionLink $ LocationLink Nothing testFileUri range3 range3 + ] + ] + + result = combineResponses message def supportsLinkInAllDefinitionCaps params pluginResponses + + expectedResult :: Definition |? ([DefinitionLink] |? Null) + expectedResult = InR . InL $ + [ DefinitionLink $ LocationLink Nothing testFileUri range1 range1 + , DefinitionLink $ LocationLink Nothing testFileUri range2 range2 + , DefinitionLink $ LocationLink Nothing testFileUri range3 range3 + ] + expectedResult @=? result + + , testCase "merges location responses with link responses into link responses (with link support)" $ do + let pluginResponses :: NonEmpty (Definition |? ([DefinitionLink] |? Null)) + pluginResponses = + (InL . Definition . InL . Location testFileUri $ range1) :| + [ InR . InL $ [ DefinitionLink $ LocationLink Nothing testFileUri range2 range2 ] + , InL . Definition . InR $ [Location testFileUri range3] + ] + + result = combineResponses message def supportsLinkInAllDefinitionCaps params pluginResponses + + expectedResult :: Definition |? ([DefinitionLink] |? Null) + expectedResult = InR . InL $ + [ DefinitionLink $ LocationLink Nothing testFileUri range1 range1 + , DefinitionLink $ LocationLink Nothing testFileUri range2 range2 + , DefinitionLink $ LocationLink Nothing testFileUri range3 range3 + ] + expectedResult @=? result + + , testCase "preserves link-specific data when merging link and location responses (with link support)" $ do + let pluginResponses :: NonEmpty (Definition |? ([DefinitionLink] |? Null)) + pluginResponses = + (InL . Definition . InL . Location testFileUri $ range1) :| + [ InR . InL $ [ DefinitionLink $ LocationLink (Just range1) testFileUri range2 range3 ] ] + + result = combineResponses message def supportsLinkInAllDefinitionCaps params pluginResponses + + expectedResult :: Definition |? ([DefinitionLink] |? Null) + expectedResult = InR . InL $ + [ DefinitionLink $ LocationLink Nothing testFileUri range1 range1 + , DefinitionLink $ LocationLink (Just range1) testFileUri range2 range3 + ] + expectedResult @=? result + + , testCase "ignores Null responses when other responses are available" $ do + let pluginResponses :: NonEmpty (Definition |? ([DefinitionLink] |? Null)) + pluginResponses = + (InL . Definition . InL . Location testFileUri $ range1) :| + [ InR . InR $ Null + , InR . InL $ [DefinitionLink $ LocationLink Nothing testFileUri range3 range3] + ] + + result = combineResponses message def supportsLinkInAllDefinitionCaps params pluginResponses + + expectedResult :: Definition |? ([DefinitionLink] |? Null) + expectedResult = InR . InL $ + [ DefinitionLink $ LocationLink Nothing testFileUri range1 range1 + , DefinitionLink $ LocationLink Nothing testFileUri range3 range3 + ] + expectedResult @=? result + + , testCase "returns Null when all responses are Null" $ do + let pluginResponses :: NonEmpty (Definition |? ([DefinitionLink] |? Null)) + pluginResponses = + (InR . InR $ Null) :| + [ InR . InR $ Null + , InR . InR $ Null + ] + + result = combineResponses message def supportsLinkInAllDefinitionCaps params pluginResponses + + expectedResult :: Definition |? ([DefinitionLink] |? 
Null) + expectedResult = InR . InR $ Null + expectedResult @=? result + + , testProperty "downgrades all locationLinks to locations when missing link support in capabilities" $ \(MkGeneratedNonEmpty responses) -> do + let pluginResponses = fmap (\(MkGeneratedDefinition definition) -> definition) responses + + result = combineResponses message def def params pluginResponses + + cover 70 (any (isJust . (>>= (^? _L)) . (^? _R)) pluginResponses) "Has at least one response with links" $ + cover 10 (any (isJust . (^? _L)) pluginResponses) "Has at least one response with locations" $ + cover 10 (any (isJust . (>>= (^? _R)) . (^? _R)) pluginResponses) "Has at least one response with Null" $ + (isJust (result ^? _L) || isJust (result ^? _R >>= (^? _R))) === True + ] + + +range1, range2, range3 :: Range +range1 = Range (Position 3 0) $ Position 3 5 +range2 = Range (Position 5 7) $ Position 5 13 +range3 = Range (Position 24 30) $ Position 24 40 + +supportsLinkInAllDefinitionCaps :: ClientCapabilities +supportsLinkInAllDefinitionCaps = def & L.textDocument ?~ textDocumentCaps + where + textDocumentCaps :: TextDocumentClientCapabilities + textDocumentCaps = def + { _definition = Just DefinitionClientCapabilities { _linkSupport = Just True, _dynamicRegistration = Nothing } + , _typeDefinition = Just TypeDefinitionClientCapabilities { _linkSupport = Just True, _dynamicRegistration = Nothing } + } + +definitionParams :: DefinitionParams +definitionParams = DefinitionParams + { _textDocument = TextDocumentIdentifier testFileUri + , _position = Position 5 4 + , _workDoneToken = Nothing + , _partialResultToken = Nothing + } + +typeDefinitionParams :: TypeDefinitionParams +typeDefinitionParams = TypeDefinitionParams + { _textDocument = TextDocumentIdentifier testFileUri + , _position = Position 5 4 + , _workDoneToken = Nothing + , _partialResultToken = Nothing + } + +testFileUri :: Uri +testFileUri = filePathToUri "file://tester/Test.hs" + +newtype GeneratedDefinition = MkGeneratedDefinition (Definition |? ([DefinitionLink] |? Null)) deriving newtype (Show) + +instance Arbitrary GeneratedDefinition where + arbitrary = MkGeneratedDefinition <$> oneof + [ InL . Definition . InL <$> generateLocation + , InL . Definition . InR <$> listOf1 generateLocation + , InR . InL . map DefinitionLink <$> listOf1 generateLocationLink + , pure . InR . InR $ Null + ] + where + generateLocation :: Gen Location + generateLocation = do + (LocationLink _ uri range _) <- generateLocationLink + pure $ Location uri range + + generateLocationLink :: Gen LocationLink + generateLocationLink = LocationLink <$> generateMaybe generateRange <*> generateUri <*> generateRange <*> generateRange + + generateMaybe :: Gen a -> Gen (Maybe a) + generateMaybe gen = oneof [Just <$> gen, pure Nothing] + + generateUri :: Gen Uri + generateUri = do + (ASCIIString str) <- arbitrary + pure . Uri . 
Text.pack $ str + + generateRange :: Gen Range + generateRange = Range <$> generatePosition <*> generatePosition + + generatePosition :: Gen Position + generatePosition = Position <$> arbitraryBoundedEnum <*> arbitraryBoundedEnum + +newtype GeneratedNonEmpty a = MkGeneratedNonEmpty (NonEmpty a) deriving newtype (Show) + +instance Arbitrary a => Arbitrary (GeneratedNonEmpty a) where + arbitrary = MkGeneratedNonEmpty <$> ((:|) <$> arbitrary <*> arbitrary) diff --git a/hls-plugin-api/test/Main.hs b/hls-plugin-api/test/Main.hs new file mode 100644 index 0000000000..006052631d --- /dev/null +++ b/hls-plugin-api/test/Main.hs @@ -0,0 +1,15 @@ +module Main where + +import qualified Ide.PluginUtilsTest as PluginUtilsTest +import qualified Ide.TypesTests as PluginTypesTests +import Test.Tasty +import Test.Tasty.Ingredients.Rerun + +main :: IO () +main = defaultMainWithRerun tests + +tests :: TestTree +tests = testGroup "Main" + [ PluginUtilsTest.tests + , PluginTypesTests.tests + ] diff --git a/hls-plugin-api/test/testdata/Property/NestedPropertyDefault.json b/hls-plugin-api/test/testdata/Property/NestedPropertyDefault.json new file mode 100644 index 0000000000..0d8f57656c --- /dev/null +++ b/hls-plugin-api/test/testdata/Property/NestedPropertyDefault.json @@ -0,0 +1 @@ +{"baz":"baz","parent":{"boo":"boo","foo":"foo"}} \ No newline at end of file diff --git a/hls-plugin-api/test/testdata/Property/NestedPropertyVscode.json b/hls-plugin-api/test/testdata/Property/NestedPropertyVscode.json new file mode 100644 index 0000000000..4c9e721c4d --- /dev/null +++ b/hls-plugin-api/test/testdata/Property/NestedPropertyVscode.json @@ -0,0 +1 @@ +{"top.baz":{"default":"baz","markdownDescription":"baz","scope":"resource","type":"string"},"top.parent.boo":{"default":"boo","markdownDescription":"boo","scope":"resource","type":"string"},"top.parent.foo":{"default":"foo","markdownDescription":"foo","scope":"resource","type":"string"}} diff --git a/hls-test-utils/LICENSE b/hls-test-utils/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/hls-test-utils/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/hls-test-utils/hls-test-utils.cabal b/hls-test-utils/hls-test-utils.cabal new file mode 100644 index 0000000000..084de98534 --- /dev/null +++ b/hls-test-utils/hls-test-utils.cabal @@ -0,0 +1,72 @@ +cabal-version: 2.4 +name: hls-test-utils +version: 2.11.0.0 +synopsis: Utilities used in the tests of Haskell Language Server +description: + Please see the README on GitHub at + +homepage: https://github.com/haskell/haskell-language-server#readme +bug-reports: https://github.com/haskell/haskell-language-server/issues +license: Apache-2.0 +license-file: LICENSE +author: The Haskell IDE Team +maintainer: alan.zimm@gmail.com +copyright: The Haskell IDE Team +category: Development +build-type: Simple + +flag pedantic + description: Enable -Werror + default: False + manual: True + +source-repository head + type: git + location: https://github.com/haskell/haskell-language-server + +library + exposed-modules: + Test.Hls + Test.Hls.Util + Test.Hls.FileSystem + Development.IDE.Test + Development.IDE.Test.Diagnostic + + hs-source-dirs: src + build-depends: + , aeson + , async + , base >=4.12 && <5 + , bytestring + , containers + , data-default + , directory + , extra + , filepath + , ghcide == 2.11.0.0 + , hls-plugin-api == 2.11.0.0 + , lens + , lsp + , lsp-test ^>=0.17 + , lsp-types ^>=2.3 + , neat-interpolation + , safe-exceptions + , tasty + , tasty-expected-failure + , tasty-golden + , tasty-hunit + , tasty-rerun + , temporary + , text + , text-rope + + ghc-options: + -Wall + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + + if flag(pedantic) + ghc-options: -Werror + + default-language: GHC2021 diff --git a/hls-test-utils/src/Development/IDE/Test.hs b/hls-test-utils/src/Development/IDE/Test.hs new file mode 100644 index 0000000000..a1bd2dec0e --- /dev/null +++ b/hls-test-utils/src/Development/IDE/Test.hs @@ -0,0 +1,264 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} + +module Development.IDE.Test + ( Cursor + , cursorPosition + , requireDiagnostic + , diagnostic + , expectDiagnostics + , expectDiagnosticsWithTags + , ExpectedDiagnostic + , ExpectedDiagnosticWithTag + , expectNoMoreDiagnostics + , expectMessages + , expectCurrentDiagnostics + , checkDiagnosticsForDoc + , canonicalizeUri + , standardizeQuotes + , flushMessages + , waitForAction + , getInterfaceFilesDir + , garbageCollectDirtyKeys + , getFilesOfInterest + , waitForTypecheck + , waitForBuildQueue + , getStoredKeys + , waitForCustomMessage + , waitForGC + , configureCheckProject + , isReferenceReady + , referenceReady) where + +import Control.Applicative.Combinators +import Control.Lens hiding (List) +import Control.Monad +import Control.Monad.IO.Class +import Data.Aeson (toJSON) +import qualified Data.Aeson as A +import Data.Bifunctor (second) +import Data.Default +import qualified Data.Map.Strict as Map +import Data.Maybe (fromJust) +import Data.Proxy +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE.Plugin.Test (TestRequest (..), + WaitForIdeRuleResult, + ideResultSuccess) +import Development.IDE.Test.Diagnostic +import GHC.TypeLits (symbolVal) +import Ide.Plugin.Config (CheckParents, checkProject) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Language.LSP.Test hiding (message) +import qualified Language.LSP.Test as LspTest +import System.Directory (canonicalizePath) +import System.FilePath (equalFilePath) +import System.Time.Extra +import Test.Tasty.HUnit + +expectedDiagnosticWithNothing :: ExpectedDiagnostic -> ExpectedDiagnosticWithTag +expectedDiagnosticWithNothing (ds, c, t, code) = (ds, c, t, code, Nothing) + +requireDiagnosticM + :: (Foldable f, Show (f Diagnostic), HasCallStack) + => f Diagnostic + -> ExpectedDiagnosticWithTag + -> Assertion +requireDiagnosticM actuals expected = case requireDiagnostic actuals expected of + Nothing -> pure () + Just err -> assertFailure err + +-- |wait for @timeout@ seconds and report an assertion failure +-- if any diagnostic messages arrive in that period +expectNoMoreDiagnostics :: HasCallStack => Seconds -> Session () +expectNoMoreDiagnostics timeout = + expectMessages SMethod_TextDocumentPublishDiagnostics timeout $ \diagsNot -> do + let fileUri = diagsNot ^. L.params . L.uri + actual = diagsNot ^. L.params . L.diagnostics + unless (null actual) $ liftIO $ + assertFailure $ + "Got unexpected diagnostics for " <> show fileUri + <> " got " + <> show actual + +expectMessages :: SMethod m -> Seconds -> (TServerMessage m -> Session ()) -> Session () +expectMessages m timeout handle = do + -- Give any further diagnostic messages time to arrive. + liftIO $ sleep timeout + -- Send a dummy message to provoke a response from the server. + -- This guarantees that we have at least one message to + -- process, so message won't block or timeout. 
+ let cm = SMethod_CustomMethod (Proxy @"test") + i <- sendRequest cm $ A.toJSON GetShakeSessionQueueCount + go cm i + where + go cm i = handleMessages + where + handleMessages = (LspTest.message m >>= handle) <|> (void $ responseForId cm i) <|> ignoreOthers + ignoreOthers = void anyMessage >> handleMessages + +flushMessages :: Session () +flushMessages = do + let cm = SMethod_CustomMethod (Proxy @"non-existent-method") + i <- sendRequest cm A.Null + void (responseForId cm i) <|> ignoreOthers cm i + where + ignoreOthers cm i = skipManyTill anyMessage (responseForId cm i) >> flushMessages + +-- | It is not possible to use 'expectDiagnostics []' to assert the absence of diagnostics, +-- only that existing diagnostics have been cleared. +-- +-- Rather than trying to assert the absence of diagnostics, introduce an +-- expected diagnostic (e.g. a redundant import) and assert the singleton diagnostic. +expectDiagnostics :: HasCallStack => [(FilePath, [ExpectedDiagnostic])] -> Session () +expectDiagnostics + = expectDiagnosticsWithTags + . map (second (map expectedDiagnosticWithNothing)) + +unwrapDiagnostic :: TServerMessage Method_TextDocumentPublishDiagnostics -> (Uri, [Diagnostic]) +unwrapDiagnostic diagsNot = (diagsNot^. L.params . L.uri, diagsNot^. L.params . L.diagnostics) + +expectDiagnosticsWithTags :: HasCallStack => [(String, [ExpectedDiagnosticWithTag])] -> Session () +expectDiagnosticsWithTags expected = do + let toSessionPath = getDocUri >=> liftIO . canonicalizeUri >=> pure . toNormalizedUri + next = unwrapDiagnostic <$> skipManyTill anyMessage diagnostic + expected' <- Map.fromListWith (<>) <$> traverseOf (traverse . _1) toSessionPath expected + expectDiagnosticsWithTags' next expected' + +expectDiagnosticsWithTags' :: + (HasCallStack, MonadIO m) => + m (Uri, [Diagnostic]) -> + Map.Map NormalizedUri [ExpectedDiagnosticWithTag] -> + m () +expectDiagnosticsWithTags' next m | null m = do + (_,actual) <- next + case actual of + [] -> + return () + _ -> + liftIO $ assertFailure $ "Got unexpected diagnostics:" <> show actual + +expectDiagnosticsWithTags' next expected = go expected + where + go m + | Map.null m = pure () + | otherwise = do + (fileUri, actual) <- next + canonUri <- liftIO $ toNormalizedUri <$> canonicalizeUri fileUri + case Map.lookup canonUri m of + Nothing -> do + liftIO $ + assertFailure $ + "Got diagnostics for " <> show fileUri + <> " but only expected diagnostics for " + <> show (Map.keys m) + <> " got " + <> show actual + Just expected -> do + liftIO $ mapM_ (requireDiagnosticM actual) expected + liftIO $ + unless (length expected == length actual) $ + assertFailure $ + "Incorrect number of diagnostics for " <> show fileUri + <> ", expected " + <> show expected + <> " but got " + <> show actual + go $ Map.delete canonUri m + +expectCurrentDiagnostics :: HasCallStack => TextDocumentIdentifier -> [ExpectedDiagnostic] -> Session () +expectCurrentDiagnostics doc expected = do + diags <- getCurrentDiagnostics doc + checkDiagnosticsForDoc doc expected diags + +checkDiagnosticsForDoc :: HasCallStack => TextDocumentIdentifier -> [ExpectedDiagnostic] -> [Diagnostic] -> Session () +checkDiagnosticsForDoc TextDocumentIdentifier {_uri} expected obtained = do + let expected' = Map.singleton nuri (map expectedDiagnosticWithNothing expected) + nuri = toNormalizedUri _uri + expectDiagnosticsWithTags' (return (_uri, obtained)) expected' + +canonicalizeUri :: Uri -> IO Uri +canonicalizeUri uri = filePathToUri <$> canonicalizePath (fromJust (uriToFilePath uri)) + +diagnostic :: 
Session (TNotificationMessage Method_TextDocumentPublishDiagnostics) +diagnostic = LspTest.message SMethod_TextDocumentPublishDiagnostics + +tryCallTestPlugin :: (A.FromJSON b) => TestRequest -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) b) +tryCallTestPlugin cmd = do + let cm = SMethod_CustomMethod (Proxy @"test") + waitId <- sendRequest cm (A.toJSON cmd) + TResponseMessage{_result} <- skipManyTill anyMessage $ responseForId cm waitId + return $ case _result of + Left e -> Left e + Right json -> case A.fromJSON json of + A.Success a -> Right a + A.Error e -> error e + +callTestPlugin :: (A.FromJSON b) => TestRequest -> Session b +callTestPlugin cmd = do + res <- tryCallTestPlugin cmd + case res of + Left (TResponseError t err _) -> error $ show t <> ": " <> T.unpack err + Right a -> pure a + + +waitForAction :: String -> TextDocumentIdentifier -> Session WaitForIdeRuleResult +waitForAction key TextDocumentIdentifier{_uri} = + callTestPlugin (WaitForIdeRule key _uri) + +getInterfaceFilesDir :: TextDocumentIdentifier -> Session FilePath +getInterfaceFilesDir TextDocumentIdentifier{_uri} = callTestPlugin (GetInterfaceFilesDir _uri) + +garbageCollectDirtyKeys :: CheckParents -> Int -> Session [String] +garbageCollectDirtyKeys parents age = callTestPlugin (GarbageCollectDirtyKeys parents age) + +getStoredKeys :: Session [Text] +getStoredKeys = callTestPlugin GetStoredKeys + +waitForTypecheck :: TextDocumentIdentifier -> Session Bool +waitForTypecheck tid = ideResultSuccess <$> waitForAction "typecheck" tid + +waitForBuildQueue :: Session () +waitForBuildQueue = callTestPlugin WaitForShakeQueue + +getFilesOfInterest :: Session [FilePath] +getFilesOfInterest = callTestPlugin GetFilesOfInterest + +waitForCustomMessage :: T.Text -> (A.Value -> Maybe res) -> Session res +waitForCustomMessage msg pred = + skipManyTill anyMessage $ satisfyMaybe $ \case + FromServerMess (SMethod_CustomMethod p) (NotMess TNotificationMessage{_params = value}) + | symbolVal p == T.unpack msg -> pred value + _ -> Nothing + +waitForGC :: Session [T.Text] +waitForGC = waitForCustomMessage "ghcide/GC" $ \v -> + case A.fromJSON v of + A.Success x -> Just x + _ -> Nothing + +configureCheckProject :: Bool -> Session () +configureCheckProject overrideCheckProject = setConfigSection "haskell" (toJSON $ def{checkProject = overrideCheckProject}) + +-- | Pattern match a message from ghcide indicating that a file has been indexed +isReferenceReady :: FilePath -> Session () +isReferenceReady p = void $ referenceReady (equalFilePath p) + +referenceReady :: (FilePath -> Bool) -> Session FilePath +referenceReady pred = satisfyMaybe $ \case + FromServerMess (SMethod_CustomMethod p) (NotMess TNotificationMessage{_params}) + | A.Success fp <- A.fromJSON _params + , pred fp + , symbolVal p == "ghcide/reference/ready" + -> Just fp + _ -> Nothing + diff --git a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs new file mode 100644 index 0000000000..4fa81a2d57 --- /dev/null +++ b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs @@ -0,0 +1,87 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Test.Diagnostic where + +import Control.Lens ((^.)) +import qualified Data.Text as T +import Development.IDE.GHC.Compat (GhcVersion (..), ghcVersion) +import GHC.Stack (HasCallStack) +import Language.LSP.Protocol.Lens +import Language.LSP.Protocol.Types + +-- | (0-based line number, 0-based column number) +type Cursor = (UInt, UInt) + +cursorPosition :: Cursor 
-> Position +cursorPosition (line, col) = Position line col + +type ErrorMsg = String + + +-- | Expected diagnostics have the following components: +-- +-- 1. severity +-- 2. cursor (line and column numbers) +-- 3. infix of the message +-- 4. code (e.g. GHC-87543) +type ExpectedDiagnostic = + ( DiagnosticSeverity + , Cursor + , T.Text + , Maybe T.Text + ) + +-- | Expected diagnostics with a tag have the following components: +-- +-- 1. severity +-- 2. cursor (line and column numbers) +-- 3. infix of the message +-- 4. code (e.g. GHC-87543) +-- 5. tag (unnecessary or deprecated) +type ExpectedDiagnosticWithTag = + ( DiagnosticSeverity + , Cursor + , T.Text + , Maybe T.Text + , Maybe DiagnosticTag + ) + +requireDiagnostic + :: (Foldable f, Show (f Diagnostic), HasCallStack) + => f Diagnostic + -> ExpectedDiagnosticWithTag + -> Maybe ErrorMsg +requireDiagnostic actuals expected@(severity, cursor, expectedMsg, mbExpectedCode, expectedTag) + | any match actuals = Nothing + | otherwise = Just $ + "Could not find " <> show expected <> + " in " <> show actuals + where + match :: Diagnostic -> Bool + match d = + Just severity == _severity d + && cursorPosition cursor == d ^. range . start + && standardizeQuotes (T.toLower expectedMsg) `T.isInfixOf` + standardizeQuotes (T.toLower $ d ^. message) + && hasTag expectedTag (d ^. tags) + && codeMatches d + + codeMatches d + | ghcVersion >= GHC96 = + case (mbExpectedCode, _code d) of + (Nothing, _) -> True + (Just _, Nothing) -> False + (Just expectedCode, Just actualCode) -> InR expectedCode == actualCode + | otherwise = True + + hasTag :: Maybe DiagnosticTag -> Maybe [DiagnosticTag] -> Bool + hasTag Nothing _ = True + hasTag (Just _) Nothing = False + hasTag (Just actualTag) (Just tags) = actualTag `elem` tags + +standardizeQuotes :: T.Text -> T.Text +standardizeQuotes msg = let + repl '‘' = '\'' + repl '’' = '\'' + repl '`' = '\'' + repl c = c + in T.map repl msg diff --git a/hls-test-utils/src/Test/Hls.hs b/hls-test-utils/src/Test/Hls.hs new file mode 100644 index 0000000000..1193b2dd19 --- /dev/null +++ b/hls-test-utils/src/Test/Hls.hs @@ -0,0 +1,918 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +module Test.Hls + ( module Test.Tasty.HUnit, + module Test.Tasty, + module Test.Tasty.ExpectedFailure, + module Test.Hls.Util, + module Language.LSP.Protocol.Types, + module Language.LSP.Protocol.Message, + module Language.LSP.Test, + module Control.Monad.IO.Class, + module Control.Applicative.Combinators, + defaultTestRunner, + goldenGitDiff, + goldenWithHaskellDoc, + goldenWithHaskellDocInTmpDir, + goldenWithHaskellAndCaps, + goldenWithHaskellAndCapsInTmpDir, + goldenWithCabalDoc, + goldenWithHaskellDocFormatter, + goldenWithHaskellDocFormatterInTmpDir, + goldenWithCabalDocFormatter, + goldenWithCabalDocFormatterInTmpDir, + goldenWithTestConfig, + def, + -- * Running HLS for integration tests + runSessionWithServer, + runSessionWithServerInTmpDir, + runSessionWithTestConfig, + -- * Running parameterised tests for a set of test configurations + parameterisedCursorTest, + -- * Helpful re-exports + PluginDescriptor, + IdeState, + -- * Helpers for expected test case failuers + BrokenBehavior(..), + ExpectBroken(..), + unCurrent, + -- * Assertion helper functions + waitForProgressDone, + waitForAllProgressDone, 
+ waitForBuildQueue, + waitForProgressBegin, + waitForTypecheck, + waitForAction, + hlsConfigToClientConfig, + setHlsConfig, + getLastBuildKeys, + waitForKickDone, + waitForKickStart, + -- * Plugin descriptor helper functions for tests + PluginTestDescriptor, + hlsPluginTestRecorder, + mkPluginTestDescriptor, + mkPluginTestDescriptor', + -- * Re-export logger types + -- Avoids slightly annoying ghcide imports when they are unnecessary. + WithPriority(..), + Recorder, + Priority(..), + captureKickDiagnostics, + kick, + TestConfig(..) + ) +where + +import Control.Applicative.Combinators +import Control.Concurrent.Async (async, cancel, wait) +import Control.Concurrent.Extra +import Control.Exception.Safe +import Control.Lens ((^.)) +import Control.Lens.Extras (is) +import Control.Monad (guard, unless, void) +import Control.Monad.Extra (forM) +import Control.Monad.IO.Class +import Data.Aeson (Result (Success), + Value (Null), + fromJSON, toJSON) +import qualified Data.Aeson as A +import Data.ByteString.Lazy (ByteString) +import Data.Default (Default, def) +import qualified Data.Map as M +import Data.Maybe (fromMaybe, mapMaybe) +import Data.Proxy (Proxy (Proxy)) +import qualified Data.Text as T +import qualified Data.Text.Lazy as TL +import qualified Data.Text.Lazy.Encoding as TL +import Development.IDE (IdeState, + LoggingColumn (ThreadIdColumn), + defaultLayoutOptions, + layoutPretty, + renderStrict) +import Development.IDE.Main hiding (Log) +import qualified Development.IDE.Main as IDEMain +import Development.IDE.Plugin.Completions.Types (PosPrefixInfo) +import Development.IDE.Plugin.Test (TestRequest (GetBuildKeysBuilt, WaitForIdeRule, WaitForShakeQueue), + WaitForIdeRuleResult (ideResultSuccess)) +import qualified Development.IDE.Plugin.Test as Test +import Development.IDE.Types.Options +import GHC.IO.Handle +import GHC.TypeLits +import Ide.Logger (Pretty (pretty), + Priority (..), + Recorder, + WithPriority (WithPriority, priority), + cfilter, + cmapWithPrio, + defaultLoggingColumns, + logWith, + makeDefaultStderrRecorder, + (<+>)) +import qualified Ide.Logger as Logger +import Ide.PluginUtils (idePluginsToPluginDesc, + pluginDescToIdePlugins) +import Ide.Types +import Language.LSP.Protocol.Capabilities +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types hiding (Null) +import qualified Language.LSP.Server as LSP +import Language.LSP.Test +import Prelude hiding (log) +import System.Directory (canonicalizePath, + createDirectoryIfMissing, + getCurrentDirectory, + getTemporaryDirectory, + makeAbsolute, + setCurrentDirectory) +import System.Environment (lookupEnv, setEnv) +import System.FilePath +import System.IO.Extra (newTempDirWithin) +import System.IO.Unsafe (unsafePerformIO) +import System.Process.Extra (createPipe) +import System.Time.Extra +import qualified Test.Hls.FileSystem as FS +import Test.Hls.FileSystem +import Test.Hls.Util +import Test.Tasty hiding (Timeout) +import Test.Tasty.ExpectedFailure +import Test.Tasty.Golden +import Test.Tasty.HUnit +import Test.Tasty.Ingredients.Rerun + +data Log + = LogIDEMain IDEMain.Log + | LogTestHarness LogTestHarness + +instance Pretty Log where + pretty = \case + LogIDEMain log -> pretty log + LogTestHarness log -> pretty log + +data LogTestHarness + = LogTestDir FilePath + | LogCleanup + | LogNoCleanup + + +instance Pretty LogTestHarness where + pretty = \case + LogTestDir dir -> "Test Project located in directory:" 
<+> pretty dir + LogCleanup -> "Cleaned up temporary directory" + LogNoCleanup -> "No cleanup of temporary directory" + +data BrokenBehavior = Current | Ideal + +data ExpectBroken (k :: BrokenBehavior) a where + BrokenCurrent :: a -> ExpectBroken 'Current a + BrokenIdeal :: a -> ExpectBroken 'Ideal a + +unCurrent :: ExpectBroken 'Current a -> a +unCurrent (BrokenCurrent a) = a + +-- | Run 'defaultMainWithRerun', limiting each single test case running at most 10 minutes +defaultTestRunner :: TestTree -> IO () +defaultTestRunner = defaultMainWithRerun . adjustOption (const $ mkTimeout 600000000) + +gitDiff :: FilePath -> FilePath -> [String] +gitDiff fRef fNew = ["git", "-c", "core.fileMode=false", "diff", "--no-index", "--text", "--exit-code", fRef, fNew] + +goldenGitDiff :: TestName -> FilePath -> IO ByteString -> TestTree +goldenGitDiff name = goldenVsStringDiff name gitDiff + +goldenWithHaskellDoc + :: Pretty b + => Config + -> PluginTestDescriptor b + -> TestName + -> FilePath + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithHaskellDoc = goldenWithDoc LanguageKind_Haskell + +goldenWithHaskellDocInTmpDir + :: Pretty b + => Config + -> PluginTestDescriptor b + -> TestName + -> VirtualFileTree + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithHaskellDocInTmpDir = goldenWithDocInTmpDir LanguageKind_Haskell + +goldenWithHaskellAndCaps + :: Pretty b + => Config + -> ClientCapabilities + -> PluginTestDescriptor b + -> TestName + -> FilePath + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithHaskellAndCaps config clientCaps plugin title testDataDir path desc ext act = + goldenGitDiff title (testDataDir path <.> desc <.> ext) + $ runSessionWithTestConfig def { + testDirLocation = Left testDataDir, + testConfigCaps = clientCaps, + testLspConfig = config, + testPluginDescriptor = plugin + } + $ const +-- runSessionWithServerAndCaps config plugin clientCaps testDataDir + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "haskell" + void waitForBuildQueue + act doc + documentContents doc + +goldenWithTestConfig + :: Pretty b + => TestConfig b + -> TestName + -> VirtualFileTree + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithTestConfig config title tree path desc ext act = + goldenGitDiff title (vftOriginalRoot tree path <.> desc <.> ext) + $ runSessionWithTestConfig config $ const + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "haskell" + void waitForBuildQueue + act doc + documentContents doc + +goldenWithHaskellAndCapsInTmpDir + :: Pretty b + => Config + -> ClientCapabilities + -> PluginTestDescriptor b + -> TestName + -> VirtualFileTree + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithHaskellAndCapsInTmpDir config clientCaps plugin title tree path desc ext act = + goldenGitDiff title (vftOriginalRoot tree path <.> desc <.> ext) + $ + runSessionWithTestConfig def { + testDirLocation = Right tree, + testConfigCaps = clientCaps, + testLspConfig = config, + testPluginDescriptor = plugin + } $ const + $ TL.encodeUtf8 . 
TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "haskell" + void waitForBuildQueue + act doc + documentContents doc + +goldenWithCabalDoc + :: Pretty b + => Config + -> PluginTestDescriptor b + -> TestName + -> FilePath + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithCabalDoc = goldenWithDoc (LanguageKind_Custom "cabal") + +goldenWithDoc + :: Pretty b + => LanguageKind + -> Config + -> PluginTestDescriptor b + -> TestName + -> FilePath + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithDoc languageKind config plugin title testDataDir path desc ext act = + goldenGitDiff title (testDataDir path <.> desc <.> ext) + $ runSessionWithServer config plugin testDataDir + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) languageKind + void waitForBuildQueue + act doc + documentContents doc + +goldenWithDocInTmpDir + :: Pretty b + => LanguageKind + -> Config + -> PluginTestDescriptor b + -> TestName + -> VirtualFileTree + -> FilePath + -> FilePath + -> FilePath + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithDocInTmpDir languageKind config plugin title tree path desc ext act = + goldenGitDiff title (vftOriginalRoot tree path <.> desc <.> ext) + $ runSessionWithServerInTmpDir config plugin tree + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) languageKind + void waitForBuildQueue + act doc + documentContents doc + +-- | A parameterised test is similar to a normal test case but allows to run +-- the same test case multiple times with different inputs. +-- A 'parameterisedCursorTest' allows to define a test case based on an input file +-- that specifies one or many cursor positions via the identification value '^'. +-- +-- For example: +-- +-- @ +-- parameterisedCursorTest "Cursor Test" [trimming| +-- foo = 2 +-- ^ +-- bar = 3 +-- baz = foo + bar +-- ^ +-- |] +-- ["foo", "baz"] +-- (\input cursor -> findFunctionNameUnderCursor input cursor) +-- @ +-- +-- Assuming a fitting implementation for 'findFunctionNameUnderCursor'. +-- +-- This test definition will run the test case 'findFunctionNameUnderCursor' for +-- each cursor position, each in its own isolated 'testCase'. +-- Cursor positions are identified via the character '^', which points to the +-- above line as the actual cursor position. +-- Lines containing '^' characters, are removed from the final text, that is +-- passed to the testing function. +-- +-- TODO: Many Haskell and Cabal source may contain '^' characters for good reasons. +-- We likely need a way to change the character for certain test cases in the future. +-- +-- The quasi quoter 'trimming' is very helpful to define such tests, as it additionally +-- allows to interpolate haskell values and functions. We reexport this quasi quoter +-- for easier usage. +parameterisedCursorTest :: (Show a, Eq a) => String -> T.Text -> [a] -> (T.Text -> PosPrefixInfo -> IO a) -> TestTree +parameterisedCursorTest title content expectations act + | lenPrefs /= lenExpected = error $ "parameterisedCursorTest: Expected " <> show lenExpected <> " cursors but found: " <> show lenPrefs + | otherwise = testGroup title $ + map singleTest testCaseSpec + where + lenPrefs = length prefInfos + lenExpected = length expectations + (cleanText, prefInfos) = extractCursorPositions content + + testCaseSpec = zip [1 ::Int ..] 
(zip expectations prefInfos) + + singleTest (n, (expected, info)) = testCase (title <> " " <> show n) $ do + actual <- act cleanText info + assertEqual (mkParameterisedLabel info) expected actual + +-- ------------------------------------------------------------ +-- Helper function for initialising plugins under test +-- ------------------------------------------------------------ + +-- | Plugin under test where a fitting recorder is injected. +type PluginTestDescriptor b = Recorder (WithPriority b) -> IdePlugins IdeState + +-- | Wrap a plugin you want to test, and inject a fitting recorder as required. +-- +-- If you want to write the logs to stderr, run your tests with +-- "HLS_TEST_PLUGIN_LOG_STDERR=1", e.g. +-- +-- @ +-- HLS_TEST_PLUGIN_LOG_STDERR=1 cabal test +-- @ +-- +-- +-- To write all logs to stderr, including logs of the server, use: +-- +-- @ +-- HLS_TEST_LOG_STDERR=1 cabal test +-- @ +mkPluginTestDescriptor + :: (Recorder (WithPriority b) -> PluginId -> PluginDescriptor IdeState) + -> PluginId + -> PluginTestDescriptor b +mkPluginTestDescriptor pluginDesc plId recorder = IdePlugins [pluginDesc recorder plId] + +-- | Wrap a plugin you want to test. +-- +-- Ideally, try to migrate this plugin to co-log logger style architecture. +-- Therefore, you should prefer 'mkPluginTestDescriptor' to this if possible. +mkPluginTestDescriptor' + :: (PluginId -> PluginDescriptor IdeState) + -> PluginId + -> PluginTestDescriptor b +mkPluginTestDescriptor' pluginDesc plId _recorder = IdePlugins [pluginDesc plId] + +-- | Initialize a recorder that can be instructed to write to stderr by +-- setting one of the environment variables: +-- +-- * HLS_TEST_HARNESS_STDERR=1 +-- * HLS_TEST_LOG_STDERR=1 +-- +-- "HLS_TEST_LOG_STDERR" is intended to enable all logging for the server and the plugins +-- under test. +hlsHelperTestRecorder :: Pretty a => IO (Recorder (WithPriority a)) +hlsHelperTestRecorder = initializeTestRecorder ["HLS_TEST_HARNESS_STDERR", "HLS_TEST_LOG_STDERR"] + + +-- | Initialize a recorder that can be instructed to write to stderr by +-- setting one of the environment variables: +-- +-- * HLS_TEST_PLUGIN_LOG_STDERR=1 +-- * HLS_TEST_LOG_STDERR=1 +-- +-- before running the tests. +-- +-- "HLS_TEST_LOG_STDERR" is intended to enable all logging for the server and the plugins +-- under test. +-- +-- On the cli, use for example: +-- +-- @ +-- HLS_TEST_PLUGIN_LOG_STDERR=1 cabal test +-- @ +-- +-- To write all logs to stderr, including logs of the server, use: +-- +-- @ +-- HLS_TEST_LOG_STDERR=1 cabal test +-- @ +hlsPluginTestRecorder :: Pretty a => IO (Recorder (WithPriority a)) +hlsPluginTestRecorder = initializeTestRecorder ["HLS_TEST_PLUGIN_LOG_STDERR", "HLS_TEST_LOG_STDERR"] + +-- | Generic recorder initialization for plugins and the HLS server for test-cases. +-- +-- The created recorder writes to stderr if any of the given environment variables +-- have been set to a value different to @0@. +-- We allow multiple values, to make it possible to define a single environment variable +-- that instructs all recorders in the test-suite to write to stderr. +-- +-- We have to return the base logger function for HLS server logging initialisation. +-- See 'runSessionWithServer'' for details. 
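For concreteness, here is a minimal sketch of how a plugin under test is typically wrapped with these helpers. Example.descriptor and Example.Log are hypothetical stand-ins for a real plugin's entry point and log type; they are not defined in this patch, and the PluginId literal assumes OverloadedStrings:

    -- Example.descriptor is assumed to have the shape
    --   Recorder (WithPriority Example.Log) -> PluginId -> PluginDescriptor IdeState
    examplePlugin :: PluginTestDescriptor Example.Log
    examplePlugin = mkPluginTestDescriptor Example.descriptor "example"

The descriptor is only applied to a concrete recorder once a test session is started, which is what lets the harness decide, via the environment variables described above, whether plugin logs are written to stderr.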
+initializeTestRecorder :: Pretty a => [String] -> IO (Recorder (WithPriority a)) +initializeTestRecorder envVars = do + docWithPriorityRecorder <- makeDefaultStderrRecorder (Just $ ThreadIdColumn : defaultLoggingColumns) + -- lspClientLogRecorder + -- There are potentially multiple environment variables that enable this logger + definedEnvVars <- forM envVars (fmap (fromMaybe "0") . lookupEnv) + let logStdErr = any (/= "0") definedEnvVars + + docWithFilteredPriorityRecorder = + if logStdErr then cfilter (\WithPriority{ priority } -> priority >= Debug) docWithPriorityRecorder + else mempty + + pure (cmapWithPrio pretty docWithFilteredPriorityRecorder) + +-- ------------------------------------------------------------ +-- Run an HLS server testing a specific plugin +-- ------------------------------------------------------------ + +runSessionWithServerInTmpDir :: Pretty b => Config -> PluginTestDescriptor b -> VirtualFileTree -> Session a -> IO a +runSessionWithServerInTmpDir config plugin tree act = + runSessionWithTestConfig def + {testLspConfig=config, testPluginDescriptor = plugin, testDirLocation=Right tree} + (const act) + +runWithLockInTempDir :: VirtualFileTree -> (FileSystem -> IO a) -> IO a +runWithLockInTempDir tree act = withLock lockForTempDirs $ do + testRoot <- setupTestEnvironment + helperRecorder <- hlsHelperTestRecorder + -- Do not clean up the temporary directory if this variable is set to anything but '0'. + -- Aids debugging. + cleanupTempDir <- lookupEnv "HLS_TEST_HARNESS_NO_TESTDIR_CLEANUP" + let runTestInDir action = case cleanupTempDir of + Just val | val /= "0" -> do + (tempDir, _) <- newTempDirWithin testRoot + a <- action tempDir + logWith helperRecorder Debug LogNoCleanup + pure a + + _ -> do + (tempDir, cleanup) <- newTempDirWithin testRoot + a <- action tempDir `finally` cleanup + logWith helperRecorder Debug LogCleanup + pure a + runTestInDir $ \tmpDir' -> do + -- we canonicalize the path, so that we do not need to do + -- cannibalization during the test when we compare two paths + tmpDir <- canonicalizePath tmpDir' + logWith helperRecorder Info $ LogTestDir tmpDir + fs <- FS.materialiseVFT tmpDir tree + act fs + +runSessionWithServer :: Pretty b => Config -> PluginTestDescriptor b -> FilePath -> Session a -> IO a +runSessionWithServer config plugin fp act = + runSessionWithTestConfig def { + testLspConfig=config + , testPluginDescriptor=plugin + , testDirLocation = Left fp + } (const act) + + +instance Default (TestConfig b) where + def = TestConfig { + testDirLocation = Right $ VirtualFileTree [] "", + testClientRoot = Nothing, + testServerRoot = Nothing, + testShiftRoot = False, + testDisableKick = False, + testDisableDefaultPlugin = False, + testPluginDescriptor = mempty, + testLspConfig = def, + testConfigSession = def, + testConfigCaps = fullLatestClientCaps, + testCheckProject = False + } + +-- | Setup the test environment for isolated tests. +-- +-- This creates a directory in the temporary directory that will be +-- reused for running isolated tests. +-- It returns the root to the testing directory that tests should use. +-- This directory is not fully cleaned between reruns. +-- However, it is totally safe to delete the directory between runs. +-- +-- Additionally, this overwrites the 'XDG_CACHE_HOME' variable to isolate +-- the tests from existing caches. 'hie-bios' and 'ghcide' honour the +-- 'XDG_CACHE_HOME' environment variable and generate their caches there. 
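As a usage sketch only (the plugin descriptor, test data directory, file name and cursor position below are placeholders, not fixtures shipped with this patch), a test built on these session helpers typically looks something like:

    {-# LANGUAGE OverloadedStrings #-}

    hoverSmokeTest :: TestTree
    hoverSmokeTest = testCase "server answers hover" $
      runSessionWithTestConfig def
        { testDirLocation      = Left "plugins/example/testdata" -- placeholder path
        , testPluginDescriptor = examplePlugin                   -- see the sketch above
        }
        $ \_ -> do
          doc <- openDoc "Main.hs" "haskell"
          _ <- waitForTypecheck doc
          _ <- getHover doc (Position 1 0)
          pure ()

When none of the extra TestConfig knobs are needed, runSessionWithServer config plugin dir act is the shorter entry point for the same thing.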
+setupTestEnvironment :: IO FilePath +setupTestEnvironment = do + tmpDirRoot <- getTemporaryDirectory + let testRoot = tmpDirRoot "hls-test-root" + testCacheDir = testRoot ".cache" + createDirectoryIfMissing True testCacheDir + setEnv "XDG_CACHE_HOME" testCacheDir + pure testRoot + +goldenWithHaskellDocFormatter + :: Pretty b + => Config + -> PluginTestDescriptor b -- ^ Formatter plugin to be used + -> String -- ^ Name of the formatter to be used + -> PluginConfig + -> TestName -- ^ Title of the test + -> FilePath -- ^ Directory of the test data to be used + -> FilePath -- ^ Path to the testdata to be used within the directory + -> FilePath -- ^ Additional suffix to be appended to the output file + -> FilePath -- ^ Extension of the output file + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithHaskellDocFormatter config plugin formatter conf title testDataDir path desc ext act = + let config' = config { formattingProvider = T.pack formatter , plugins = M.singleton (PluginId $ T.pack formatter) conf } + in goldenGitDiff title (testDataDir path <.> desc <.> ext) + $ runSessionWithServer config' plugin testDataDir + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "haskell" + void waitForBuildQueue + act doc + documentContents doc + +goldenWithCabalDocFormatter + :: Pretty b + => Config + -> PluginTestDescriptor b -- ^ Formatter plugin to be used + -> String -- ^ Name of the formatter to be used + -> PluginConfig + -> TestName -- ^ Title of the test + -> FilePath -- ^ Directory of the test data to be used + -> FilePath -- ^ Path to the testdata to be used within the directory + -> FilePath -- ^ Additional suffix to be appended to the output file + -> FilePath -- ^ Extension of the output file + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithCabalDocFormatter config plugin formatter conf title testDataDir path desc ext act = + let config' = config { cabalFormattingProvider = T.pack formatter , plugins = M.singleton (PluginId $ T.pack formatter) conf } + in goldenGitDiff title (testDataDir path <.> desc <.> ext) + $ runSessionWithServer config' plugin testDataDir + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "cabal" + void waitForBuildQueue + act doc + documentContents doc + +goldenWithHaskellDocFormatterInTmpDir + :: Pretty b + => Config + -> PluginTestDescriptor b -- ^ Formatter plugin to be used + -> String -- ^ Name of the formatter to be used + -> PluginConfig + -> TestName -- ^ Title of the test + -> VirtualFileTree -- ^ Virtual representation of the test project + -> FilePath -- ^ Path to the testdata to be used within the directory + -> FilePath -- ^ Additional suffix to be appended to the output file + -> FilePath -- ^ Extension of the output file + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithHaskellDocFormatterInTmpDir config plugin formatter conf title tree path desc ext act = + let config' = config { formattingProvider = T.pack formatter , plugins = M.singleton (PluginId $ T.pack formatter) conf } + in goldenGitDiff title (vftOriginalRoot tree path <.> desc <.> ext) + $ runSessionWithServerInTmpDir config' plugin tree + $ TL.encodeUtf8 . 
TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "haskell" + void waitForBuildQueue + act doc + documentContents doc + +goldenWithCabalDocFormatterInTmpDir + :: Pretty b + => Config + -> PluginTestDescriptor b -- ^ Formatter plugin to be used + -> String -- ^ Name of the formatter to be used + -> PluginConfig + -> TestName -- ^ Title of the test + -> VirtualFileTree -- ^ Virtual representation of the test project + -> FilePath -- ^ Path to the testdata to be used within the directory + -> FilePath -- ^ Additional suffix to be appended to the output file + -> FilePath -- ^ Extension of the output file + -> (TextDocumentIdentifier -> Session ()) + -> TestTree +goldenWithCabalDocFormatterInTmpDir config plugin formatter conf title tree path desc ext act = + let config' = config { cabalFormattingProvider = T.pack formatter , plugins = M.singleton (PluginId $ T.pack formatter) conf } + in goldenGitDiff title (vftOriginalRoot tree path <.> desc <.> ext) + $ runSessionWithServerInTmpDir config' plugin tree + $ TL.encodeUtf8 . TL.fromStrict + <$> do + doc <- openDoc (path <.> ext) "cabal" + void waitForBuildQueue + act doc + documentContents doc + +-- | Restore cwd after running an action +keepCurrentDirectory :: IO a -> IO a +keepCurrentDirectory = bracket getCurrentDirectory setCurrentDirectory . const + +{-# NOINLINE lock #-} +-- | Never run in parallel +lock :: Lock +lock = unsafePerformIO newLock + + +{-# NOINLINE lockForTempDirs #-} +-- | Never run in parallel +lockForTempDirs :: Lock +lockForTempDirs = unsafePerformIO newLock + +data TestConfig b = TestConfig + { + testDirLocation :: Either FilePath VirtualFileTree + -- ^ Client capabilities + -- ^ The file tree to use for the test, either a directory or a virtual file tree + -- if using a virtual file tree, + -- Creates a temporary directory, and materializes the VirtualFileTree + -- in the temporary directory. + -- + -- To debug test cases and verify the file system is correctly set up, + -- you should set the environment variable 'HLS_TEST_HARNESS_NO_TESTDIR_CLEANUP=1'. + -- Further, we log the temporary directory location on startup. To view + -- the logs, set the environment variable 'HLS_TEST_HARNESS_STDERR=1'. + -- Example invocation to debug test cases: + -- + -- @ + -- HLS_TEST_HARNESS_NO_TESTDIR_CLEANUP=1 HLS_TEST_HARNESS_STDERR=1 cabal test + -- @ + -- + -- Don't forget to use 'TASTY_PATTERN' to debug only a subset of tests. + -- + -- For plugin test logs, look at the documentation of 'mkPluginTestDescriptor'. + , testShiftRoot :: Bool + -- ^ Whether to shift the current directory to the root of the project + , testClientRoot :: Maybe FilePath + -- ^ Specify the root of (the client or LSP context), + -- if Nothing it is the same as the testDirLocation + -- if Just, it is subdirectory of the testDirLocation + , testServerRoot :: Maybe FilePath + -- ^ Specify root of the server, in exe, it can be specify in command line --cwd, + -- or just the server start directory + -- if Nothing it is the same as the testDirLocation + -- if Just, it is subdirectory of the testDirLocation + , testDisableKick :: Bool + -- ^ Whether to disable the kick action + , testDisableDefaultPlugin :: Bool + -- ^ Whether to disable the default plugin comes with ghcide + , testCheckProject :: Bool + -- ^ Whether to typecheck check the project after the session is loaded + , testPluginDescriptor :: PluginTestDescriptor b + -- ^ Plugin to load on the server. 
+ , testLspConfig :: Config + -- ^ lsp config for the server + , testConfigSession :: SessionConfig + -- ^ config for the test session + , testConfigCaps :: ClientCapabilities + -- ^ Client capabilities + } + + +wrapClientLogger :: Pretty a => Recorder (WithPriority a) -> + IO (Recorder (WithPriority a), LSP.LanguageContextEnv Config -> IO ()) +wrapClientLogger logger = do + (lspLogRecorder', cb1) <- Logger.withBacklog Logger.lspClientLogRecorder + let lspLogRecorder = cmapWithPrio (renderStrict . layoutPretty defaultLayoutOptions. pretty) lspLogRecorder' + return (lspLogRecorder <> logger, cb1) + +-- | Host a server, and run a test session on it. +-- For setting custom timeout, set the environment variable 'LSP_TIMEOUT' +-- * LSP_TIMEOUT=10 cabal test +-- For more detail of the test configuration, see 'TestConfig' +runSessionWithTestConfig :: Pretty b => TestConfig b -> (FilePath -> Session a) -> IO a +runSessionWithTestConfig TestConfig{..} session = + runSessionInVFS testDirLocation $ \root -> shiftRoot root $ do + (inR, inW) <- createPipe + (outR, outW) <- createPipe + let serverRoot = fromMaybe root testServerRoot + let clientRoot = fromMaybe root testClientRoot + + (recorder, cb1) <- wrapClientLogger =<< hlsPluginTestRecorder + (recorderIde, cb2) <- wrapClientLogger =<< hlsHelperTestRecorder + -- This plugin just installs a handler for the `initialized` notification, which then + -- picks up the LSP environment and feeds it to our recorders + let lspRecorderPlugin = pluginDescToIdePlugins [(defaultPluginDescriptor "LSPRecorderCallback" "Internal plugin") + { pluginNotificationHandlers = mkPluginNotificationHandler LSP.SMethod_Initialized $ \_ _ _ _ -> do + env <- LSP.getLspEnv + liftIO $ (cb1 <> cb2) env + }] + + let plugins = testPluginDescriptor recorder <> lspRecorderPlugin + timeoutOverride <- fmap read <$> lookupEnv "LSP_TIMEOUT" + let sconf' = testConfigSession { lspConfig = hlsConfigToClientConfig testLspConfig, messageTimeout = fromMaybe (messageTimeout defaultConfig) timeoutOverride} + arguments = testingArgs serverRoot recorderIde plugins + server <- async $ + IDEMain.defaultMain (cmapWithPrio LogIDEMain recorderIde) + arguments { argsHandleIn = pure inR , argsHandleOut = pure outW } + result <- runSessionWithHandles inW outR sconf' testConfigCaps clientRoot (session root) + hClose inW + timeout 3 (wait server) >>= \case + Just () -> pure () + Nothing -> do + putStrLn "Server does not exit in 3s, canceling the async task..." 
+ (t, _) <- duration $ cancel server + putStrLn $ "Finishing canceling (took " <> showDuration t <> "s)" + pure result + + where + shiftRoot shiftTarget f = + if testShiftRoot + then withLock lock $ keepCurrentDirectory $ setCurrentDirectory shiftTarget >> f + else f + runSessionInVFS (Left testConfigRoot) act = do + root <- makeAbsolute testConfigRoot + act root + runSessionInVFS (Right vfs) act = runWithLockInTempDir vfs $ \fs -> act (fsRoot fs) + testingArgs prjRoot recorderIde plugins = + let + arguments@Arguments{ argsHlsPlugins, argsIdeOptions, argsLspOptions } = defaultArguments (cmapWithPrio LogIDEMain recorderIde) prjRoot plugins + argsHlsPlugins' = if testDisableDefaultPlugin + then plugins + else argsHlsPlugins + hlsPlugins = pluginDescToIdePlugins $ idePluginsToPluginDesc argsHlsPlugins' + ++ [Test.blockCommandDescriptor "block-command", Test.plugin] + ideOptions config sessionLoader = (argsIdeOptions config sessionLoader){ + optTesting = IdeTesting True + , optCheckProject = pure testCheckProject + } + in + arguments + { argsHlsPlugins = hlsPlugins + , argsIdeOptions = ideOptions + , argsLspOptions = argsLspOptions { LSP.optProgressStartDelay = 0, LSP.optProgressUpdateDelay = 0 } + , argsDefaultHlsConfig = testLspConfig + , argsProjectRoot = prjRoot + , argsDisableKick = testDisableKick + } + +-- | Wait for the next progress begin step +waitForProgressBegin :: Session () +waitForProgressBegin = skipManyTill anyMessage $ satisfyMaybe $ \case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams _ v)) | is _workDoneProgressBegin v-> Just () + _ -> Nothing + +-- | Wait for the next progress end step +waitForProgressDone :: Session () +waitForProgressDone = skipManyTill anyMessage $ satisfyMaybe $ \case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams _ v)) | is _workDoneProgressEnd v-> Just () + _ -> Nothing + +-- | Wait for all progress to be done +-- Needs at least one progress done notification to return +waitForAllProgressDone :: Session () +waitForAllProgressDone = loop + where + loop = do + ~() <- skipManyTill anyMessage $ satisfyMaybe $ \case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams _ v)) | is _workDoneProgressEnd v -> Just () + _ -> Nothing + done <- null <$> getIncompleteProgressSessions + unless done loop + +-- | Wait for the build queue to be empty +waitForBuildQueue :: Session Seconds +waitForBuildQueue = do + let m = SMethod_CustomMethod (Proxy @"test") + waitId <- sendRequest m (toJSON WaitForShakeQueue) + (td, resp) <- duration $ skipManyTill anyMessage $ responseForId m waitId + case resp of + TResponseMessage{_result=Right Null} -> return td + -- assume a ghcide binary lacking the WaitForShakeQueue method + _ -> return 0 + +callTestPlugin :: (A.FromJSON b) => TestRequest -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) b) +callTestPlugin cmd = do + let cm = SMethod_CustomMethod (Proxy @"test") + waitId <- sendRequest cm (A.toJSON cmd) + TResponseMessage{_result} <- skipManyTill anyMessage $ responseForId cm waitId + return $ do + e <- _result + case A.fromJSON e of + A.Error err -> Left $ TResponseError (InR ErrorCodes_InternalError) (T.pack err) Nothing + A.Success a -> pure a + +waitForAction :: String -> TextDocumentIdentifier -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) WaitForIdeRuleResult) +waitForAction key TextDocumentIdentifier{_uri} = + callTestPlugin (WaitForIdeRule key _uri) + +waitForTypecheck :: 
TextDocumentIdentifier -> Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) Bool) +waitForTypecheck tid = fmap ideResultSuccess <$> waitForAction "typecheck" tid + +getLastBuildKeys :: Session (Either (TResponseError @ClientToServer (Method_CustomMethod "test")) [T.Text]) +getLastBuildKeys = callTestPlugin GetBuildKeysBuilt + +hlsConfigToClientConfig :: Config -> A.Object +hlsConfigToClientConfig config = [("haskell", toJSON config)] + +-- | Set the HLS client configuration, and wait for the server to update to use it. +-- Note that this will only work if we are not ignoring configuration requests, you +-- may need to call @setIgnoringConfigurationRequests False@ first. +setHlsConfig :: Config -> Session () +setHlsConfig config = do + setConfig $ hlsConfigToClientConfig config + -- wait until we get the workspace/configuration request from the server, so + -- we know things are settling. This only works if we're not skipping config + -- requests! + skipManyTill anyMessage (void configurationRequest) + +captureKickDiagnostics :: Session () -> Session () -> Session [Diagnostic] +captureKickDiagnostics start done = do + _ <- skipManyTill anyMessage start + messages <- manyTill anyMessage done + pure $ concat $ mapMaybe diagnostics messages + where + diagnostics :: FromServerMessage' a -> Maybe [Diagnostic] + diagnostics = \msg -> case msg of + FromServerMess SMethod_TextDocumentPublishDiagnostics diags -> Just (diags ^. L.params . L.diagnostics) + _ -> Nothing + +waitForKickDone :: Session () +waitForKickDone = void $ skipManyTill anyMessage nonTrivialKickDone + +waitForKickStart :: Session () +waitForKickStart = void $ skipManyTill anyMessage nonTrivialKickStart + +nonTrivialKickDone :: Session () +nonTrivialKickDone = kick (Proxy @"kick/done") >>= guard . not . null + +nonTrivialKickStart :: Session () +nonTrivialKickStart = kick (Proxy @"kick/start") >>= guard . not . null + + +kick :: KnownSymbol k => Proxy k -> Session [FilePath] +kick proxyMsg = do + NotMess TNotificationMessage{_params} <- customNotification proxyMsg + case fromJSON _params of + Success x -> return x + other -> error $ "Failed to parse kick/done details: " <> show other diff --git a/hls-test-utils/src/Test/Hls/FileSystem.hs b/hls-test-utils/src/Test/Hls/FileSystem.hs new file mode 100644 index 0000000000..c93643badd --- /dev/null +++ b/hls-test-utils/src/Test/Hls/FileSystem.hs @@ -0,0 +1,246 @@ +{-# LANGUAGE OverloadedStrings #-} +module Test.Hls.FileSystem + ( FileSystem(..) + , VirtualFileTree(..) 
+ , FileTree + , Content + -- * init + , materialise + , materialiseVFT + -- * Interaction + , readFileFS + , writeFileFS + -- * Test helpers + , mkVirtualFileTree + , toNfp + , toAbsFp + -- * Builders + , file + , copy + , directory + , text + , ref + , copyDir + -- * Cradle helpers + , directCradle + , simpleCabalCradle + -- * Full project setups + , directProject + , directProjectMulti + , simpleCabalProject + , simpleCabalProject' + ) where + +import Data.Foldable (traverse_) +import qualified Data.Text as T +import qualified Data.Text.IO as T +import Development.IDE (NormalizedFilePath) +import Language.LSP.Protocol.Types (toNormalizedFilePath) +import System.Directory +import System.FilePath as FP +import System.Process.Extra (readProcess) + +-- ---------------------------------------------------------------------------- +-- Top Level definitions +-- ---------------------------------------------------------------------------- + +-- | Representation of a 'VirtualFileTree' that has been 'materialise'd to disk. +-- +data FileSystem = + FileSystem + { fsRoot :: FilePath + , fsTree :: [FileTree] + , fsOriginalRoot :: FilePath + } deriving (Eq, Ord, Show) + +-- | Virtual representation of a filesystem tree. +-- +-- Operations of 'vftTree' are relative to 'vftOriginalRoot'. +-- In particular, any 'copy' etc. operation looks for the sources in 'vftOriginalRoot'. +-- +-- To persist a 'VirtualFileTree', look at 'materialise' and 'materialiseVFT'. +data VirtualFileTree = + VirtualFileTree + { vftTree :: [FileTree] + , vftOriginalRoot :: FilePath + } deriving (Eq, Ord, Show) + +data FileTree + = File FilePath Content -- ^ Create a file with the given content. + | Directory FilePath [FileTree] -- ^ Create a directory with the given files. + | CopiedDirectory FilePath -- ^ Copy a directory from the test data dir. + deriving (Show, Eq, Ord) + +data Content + = Inline T.Text + | Ref FilePath + deriving (Show, Eq, Ord) + +-- ---------------------------------------------------------------------------- +-- API with side effects +-- ---------------------------------------------------------------------------- + +readFileFS :: FileSystem -> FilePath -> IO T.Text +readFileFS fs fp = do + T.readFile (fsRoot fs FP.normalise fp) + +writeFileFS :: FileSystem -> FilePath -> Content -> IO () +writeFileFS fs fp content = do + contents <- case content of + Inline txt -> pure txt + Ref path -> T.readFile (fsOriginalRoot fs FP.normalise path) + T.writeFile (fsRoot fs FP.normalise fp) contents + +-- | Materialise a virtual file tree in the 'rootDir' directory. +-- +-- Synopsis: @'materialise' rootDir fileTree testDataDir@ +-- +-- File references in '[FileTree]' are resolved relative to the @testDataDir@. +materialise :: FilePath -> [FileTree] -> FilePath -> IO FileSystem +materialise rootDir' fileTree testDataDir' = do + let testDataDir = FP.normalise testDataDir' + rootDir = FP.normalise rootDir' + + persist :: FilePath -> FileTree -> IO () + persist root (File name cts) = case cts of + Inline txt -> T.writeFile (root name) txt + Ref path -> copyFile (testDataDir FP.normalise path) (root takeFileName name) + persist root (Directory name nodes) = do + createDirectory (root name) + mapM_ (persist (root name)) nodes + persist root (CopiedDirectory name) = do + copyDir' root name + + copyDir' :: FilePath -> FilePath -> IO () + copyDir' root dir = do + files <- fmap FP.normalise . 
lines <$> withCurrentDirectory (testDataDir dir) (readProcess "git" ["ls-files", "--cached", "--modified", "--others"] "") + mapM_ (createDirectoryIfMissing True . ((root ) . takeDirectory)) files + mapM_ (\f -> copyFile (testDataDir dir f) (root f)) files + return () + + traverse_ (persist rootDir) fileTree + pure $ FileSystem rootDir fileTree testDataDir + +-- | Materialise a virtual file tree in the 'rootDir' directory. +-- +-- Synopsis: @'materialiseVFT' rootDir virtualFileTree@ +-- +-- File references in 'virtualFileTree' are resolved relative to the @vftOriginalRoot@. +materialiseVFT :: FilePath -> VirtualFileTree -> IO FileSystem +materialiseVFT root fs = materialise root (vftTree fs) (vftOriginalRoot fs) + +-- ---------------------------------------------------------------------------- +-- Test definition helpers +-- ---------------------------------------------------------------------------- + +mkVirtualFileTree :: FilePath -> [FileTree] -> VirtualFileTree +mkVirtualFileTree testDataDir tree = + VirtualFileTree + { vftTree = tree + , vftOriginalRoot = testDataDir + } + +toAbsFp :: FileSystem -> FilePath -> FilePath +toAbsFp fs fp = fsRoot fs FP.normalise fp + +toNfp :: FileSystem -> FilePath -> NormalizedFilePath +toNfp fs fp = + toNormalizedFilePath $ toAbsFp fs fp + +-- ---------------------------------------------------------------------------- +-- Builders +-- ---------------------------------------------------------------------------- + +-- | Create a file in the test project with some content. +-- +-- Only the filename will be used, and any directory components are *not* +-- reflected in the test project. +file :: FilePath -> Content -> FileTree +file fp cts = File fp cts + +-- | Copy a filepath into a test project. The name of the file is also used +-- in the test project. +-- +-- The filepath is always resolved to the root of the test data dir. +copy :: FilePath -> FileTree +copy fp = File fp (Ref fp) + +-- | Copy a directory into a test project. +-- The filepath is always resolved to the root of the test data dir. +copyDir :: FilePath -> FileTree +copyDir dir = CopiedDirectory dir + +directory :: FilePath -> [FileTree] -> FileTree +directory name nodes = Directory name nodes + +-- | Write the given test directly into a file. +text :: T.Text -> Content +text = Inline + +-- | Read the contents of the given file +-- The filepath is always resolved to the root of the test data dir. +ref :: FilePath -> Content +ref = Ref + +-- ---------------------------------------------------------------------------- +-- Cradle Helpers +-- ---------------------------------------------------------------------------- + +-- | Set up a simple direct cradle. +-- +-- All arguments are added to the direct cradle file. +-- Arguments will not be escaped. +directCradle :: [T.Text] -> FileTree +directCradle args = + file "hie.yaml" + ( Inline $ T.unlines $ + [ "cradle:" + , " direct:" + , " arguments:" + ] <> + [ " - " <> arg | arg <- args] + ) + +-- | Set up a simple cabal cradle. +-- +-- Prefer simple cabal cradle, over custom multi cabal cradles if possible. +simpleCabalCradle :: FileTree +simpleCabalCradle = + file "hie.yaml" + (Inline $ T.unlines + [ "cradle:" + , " cabal:" + ] + ) + + +-- ---------------------------------------------------------------------------- +-- Project setup builders +-- ---------------------------------------------------------------------------- + +-- | Set up a test project with a single haskell file. 
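As a rough sketch of how these builders compose (the paths and file contents are illustrative only), a virtual project with a direct cradle and one inline module could be described as:

    exampleTree :: VirtualFileTree
    exampleTree = mkVirtualFileTree "plugins/example/test/testdata"
      [ directCradle [T.pack "Foo.hs"]
      , file "Foo.hs" (text (T.pack "module Foo where\n\nfoo :: Int\nfoo = 42\n"))
      ]
    -- The tree is materialised on demand, e.g. by 'runSessionWithServerInTmpDir'
    -- or manually via 'materialiseVFT'.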
+directProject :: FilePath -> [FileTree] +directProject fp = + [ directCradle [T.pack fp] + , file fp (Ref fp) + ] + +-- | Set up a test project with multiple haskell files. +-- +directProjectMulti :: [FilePath] -> [FileTree] +directProjectMulti fps = + [ directCradle $ fmap T.pack fps + ] <> fmap copy fps + +-- | Set up a simple cabal cradle project and copy all the given filepaths +-- into the test directory. +simpleCabalProject :: [FilePath] -> [FileTree] +simpleCabalProject fps = + [ simpleCabalCradle + ] <> fmap copy fps + +-- | Set up a simple cabal cradle project. +simpleCabalProject' :: [FileTree] -> [FileTree] +simpleCabalProject' fps = + [ simpleCabalCradle + ] <> fps diff --git a/hls-test-utils/src/Test/Hls/Util.hs b/hls-test-utils/src/Test/Hls/Util.hs new file mode 100644 index 0000000000..98c795f8e0 --- /dev/null +++ b/hls-test-utils/src/Test/Hls/Util.hs @@ -0,0 +1,468 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +module Test.Hls.Util + ( -- * Test Capabilities + codeActionResolveCaps + , codeActionNoResolveCaps + , codeActionNoInlayHintsCaps + , codeActionSupportCaps + , expectCodeAction + -- * Environment specifications + -- for ignoring tests + , ghcVersion, GhcVersion(..) + , hostOS, OS(..) + , matchesCurrentEnv, EnvSpec(..) + , ignoreForGhcVersions + , ignoreInEnv + , onlyRunForGhcVersions + , knownBrokenOnWindows + , knownBrokenForGhcVersions + , knownBrokenInEnv + , knownBrokenInSpecificEnv + , onlyWorkForGhcVersions + -- * Extract code actions + , fromAction + , fromCommand + -- * Session Assertion Helpers + , dontExpectCodeAction + , expectDiagnostic + , expectNoMoreDiagnostics + , failIfSessionTimeout + , getCompletionByLabel + , noLiteralCaps + , inspectCodeAction + , inspectCommand + , inspectDiagnostic + , inspectDiagnosticAny + , waitForDiagnosticsFrom + , waitForDiagnosticsFromSource + , waitForDiagnosticsFromSourceWithTimeout + -- * Temporary directories + , withCurrentDirectoryInTmp + , withCurrentDirectoryInTmp' + , withCanonicalTempDir + -- * Extract positions from input file. 
+ , extractCursorPositions + , mkParameterisedLabel + , trimming + ) +where + +import Control.Applicative.Combinators (skipManyTill, (<|>)) +import Control.Exception (catch, throwIO) +import Control.Lens (_Just, (&), (.~), + (?~), (^.)) +import Control.Monad +import Control.Monad.IO.Class +import qualified Data.Aeson as A +import Data.Bool (bool) +import Data.Default +import Data.List.Extra (find) +import Data.Proxy +import qualified Data.Text as T +import Development.IDE (GhcVersion (..), + ghcVersion) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import qualified Language.LSP.Test as Test +import System.Directory +import System.FilePath +import System.Info.Extra (isMac, isWindows) +import qualified System.IO.Extra +import System.IO.Temp +import System.Time.Extra (Seconds, sleep) +import Test.Tasty (TestTree) +import Test.Tasty.ExpectedFailure (expectFailBecause, + ignoreTestBecause) +import Test.Tasty.HUnit (assertFailure) + +import qualified Data.List as List +import qualified Data.Text.Internal.Search as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Plugin.Completions.Logic (getCompletionPrefixFromRope) +import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (..)) +import NeatInterpolation (trimming) + +noLiteralCaps :: ClientCapabilities +noLiteralCaps = def & L.textDocument ?~ textDocumentCaps + where + textDocumentCaps = def { _codeAction = Just codeActionCaps } + codeActionCaps = CodeActionClientCapabilities (Just True) Nothing Nothing Nothing Nothing Nothing Nothing + +codeActionSupportCaps :: ClientCapabilities +codeActionSupportCaps = def & L.textDocument ?~ textDocumentCaps + where + textDocumentCaps = def { _codeAction = Just codeActionCaps } + codeActionCaps = CodeActionClientCapabilities (Just True) (Just literalSupport) (Just True) Nothing Nothing Nothing Nothing + literalSupport = ClientCodeActionLiteralOptions (ClientCodeActionKindOptions []) + +codeActionResolveCaps :: ClientCapabilities +codeActionResolveCaps = Test.fullLatestClientCaps + & (L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport . _Just) .~ ClientCodeActionResolveOptions {_properties= ["edit"]} + & (L.textDocument . _Just . L.codeAction . _Just . L.dataSupport . _Just) .~ True + +codeActionNoResolveCaps :: ClientCapabilities +codeActionNoResolveCaps = Test.fullLatestClientCaps + & (L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport) .~ Nothing + & (L.textDocument . _Just . L.codeAction . _Just . L.dataSupport . _Just) .~ False + +codeActionNoInlayHintsCaps :: ClientCapabilities +codeActionNoInlayHintsCaps = Test.fullLatestClientCaps + & (L.textDocument . _Just . L.codeAction . _Just . L.resolveSupport) .~ Nothing + & (L.textDocument . _Just . L.codeAction . _Just . L.dataSupport . _Just) .~ False + & (L.textDocument . _Just . 
L.inlayHint) .~ Nothing +-- --------------------------------------------------------------------- +-- Environment specification for ignoring tests +-- --------------------------------------------------------------------- + +data EnvSpec = HostOS OS | GhcVer GhcVersion + deriving (Show, Eq) + +matchesCurrentEnv :: EnvSpec -> Bool +matchesCurrentEnv (HostOS os) = hostOS == os +matchesCurrentEnv (GhcVer ver) = ghcVersion == ver + +data OS = Windows | MacOS | Linux + deriving (Show, Eq) + +hostOS :: OS +hostOS + | isWindows = Windows + | isMac = MacOS + | otherwise = Linux + +-- | Mark as broken if /any/ of the environmental specs matches the current environment. +knownBrokenInEnv :: [EnvSpec] -> String -> TestTree -> TestTree +knownBrokenInEnv envSpecs reason + | any matchesCurrentEnv envSpecs = expectFailBecause reason + | otherwise = id + +-- | Mark as broken if /all/ environmental specs match the current environment. +knownBrokenInSpecificEnv :: [EnvSpec] -> String -> TestTree -> TestTree +knownBrokenInSpecificEnv envSpecs reason + | all matchesCurrentEnv envSpecs = expectFailBecause reason + | otherwise = id + +knownBrokenOnWindows :: String -> TestTree -> TestTree +knownBrokenOnWindows = knownBrokenInEnv [HostOS Windows] + +knownBrokenForGhcVersions :: [GhcVersion] -> String -> TestTree -> TestTree +knownBrokenForGhcVersions vers = knownBrokenInEnv (map GhcVer vers) + +-- | IgnoreTest if /any/ of environmental spec mathces the current environment. +ignoreInEnv :: [EnvSpec] -> String -> TestTree -> TestTree +ignoreInEnv envSpecs reason + | any matchesCurrentEnv envSpecs = ignoreTestBecause reason + | otherwise = id + +ignoreForGhcVersions :: [GhcVersion] -> String -> TestTree -> TestTree +ignoreForGhcVersions vers = ignoreInEnv (map GhcVer vers) + +-- | Mark as broken if GHC does not match only work versions. +onlyWorkForGhcVersions :: (GhcVersion -> Bool) -> String -> TestTree -> TestTree +onlyWorkForGhcVersions p reason = + if p ghcVersion + then id + else expectFailBecause reason + +-- | Ignore the test if GHC does not match only work versions. +onlyRunForGhcVersions :: [GhcVersion] -> String -> TestTree -> TestTree +onlyRunForGhcVersions vers = + if ghcVersion `elem` vers + then const id + else ignoreTestBecause + +-- --------------------------------------------------------------------- + +-- | Like 'withCurrentDirectory', but will copy the directory over to the system +-- temporary directory first to avoid haskell-language-server's source tree from +-- interfering with the cradle. +-- +-- Ignores directories containing build artefacts to avoid interference and +-- provide reproducible test-behaviour. +withCurrentDirectoryInTmp :: FilePath -> IO a -> IO a +withCurrentDirectoryInTmp dir f = + withTempCopy ignored dir $ \newDir -> + withCurrentDirectory newDir f + where + ignored = ["dist", "dist-newstyle", ".stack-work"] + + +-- | Like 'withCurrentDirectory', but will copy the directory over to the system +-- temporary directory first to avoid haskell-language-server's source tree from +-- interfering with the cradle. +-- +-- You may specify directories to ignore, but should be careful to maintain reproducibility. +withCurrentDirectoryInTmp' :: [FilePath] -> FilePath -> IO a -> IO a +withCurrentDirectoryInTmp' ignored dir f = + withTempCopy ignored dir $ \newDir -> + withCurrentDirectory newDir f + +-- | Example call: @withTempCopy ignored src f@ +-- +-- Copy directory 'src' to into a temporary directory ignoring any directories +-- (and files) that are listed in 'ignored'. 
Pass the temporary directory +-- containing the copied sources to the continuation. +withTempCopy :: [FilePath] -> FilePath -> (FilePath -> IO a) -> IO a +withTempCopy ignored srcDir f = do + withSystemTempDirectory "hls-test" $ \newDir -> do + copyDir ignored srcDir newDir + f newDir + +-- | Example call: @copyDir ignored src dst@ +-- +-- Copy directory 'src' to 'dst' ignoring any directories (and files) +-- that are listed in 'ignored'. +copyDir :: [FilePath] -> FilePath -> FilePath -> IO () +copyDir ignored src dst = do + cnts <- listDirectory src + forM_ cnts $ \file -> do + unless (file `elem` ignored) $ do + let srcFp = src file + dstFp = dst file + isDir <- doesDirectoryExist srcFp + if isDir + then createDirectory dstFp >> copyDir ignored srcFp dstFp + else copyFile srcFp dstFp + +fromAction :: (Command |? CodeAction) -> CodeAction +fromAction (InR action) = action +fromAction _ = error "Not a code action" + +fromCommand :: (Command |? CodeAction) -> Command +fromCommand (InL command) = command +fromCommand _ = error "Not a command" + +onMatch :: [a] -> (a -> Bool) -> String -> IO a +onMatch as predicate err = maybe (fail err) return (find predicate as) + +noMatch :: [a] -> (a -> Bool) -> String -> IO () +noMatch [] _ _ = pure () +noMatch as predicate err = bool (pure ()) (fail err) (any predicate as) + +inspectDiagnostic :: [Diagnostic] -> [T.Text] -> IO Diagnostic +inspectDiagnostic diags s = onMatch diags (\ca -> all (`T.isInfixOf` (ca ^. L.message)) s) err + where err = "expected diagnostic matching '" ++ show s ++ "' but did not find one" + +inspectDiagnosticAny :: [Diagnostic] -> [T.Text] -> IO Diagnostic +inspectDiagnosticAny diags s = onMatch diags (\ca -> any (`T.isInfixOf` (ca ^. L.message)) s) err + where err = "expected diagnostic matching one of'" ++ show s ++ "' but did not find one" + +expectDiagnostic :: [Diagnostic] -> [T.Text] -> IO () +expectDiagnostic diags s = void $ inspectDiagnostic diags s + +inspectCodeAction :: [Command |? CodeAction] -> [T.Text] -> IO CodeAction +inspectCodeAction cars s = fromAction <$> onMatch cars predicate err + where predicate (InR ca) = all (`T.isInfixOf` (ca ^. L.title)) s + predicate _ = False + err = "expected code action matching '" ++ show s ++ "' but did not find one" + +expectCodeAction :: [Command |? CodeAction] -> [T.Text] -> IO () +expectCodeAction cars s = void $ inspectCodeAction cars s + +dontExpectCodeAction :: [Command |? CodeAction] -> [T.Text] -> IO () +dontExpectCodeAction cars s = + noMatch cars predicate err + where predicate (InR ca) = all (`T.isInfixOf` (ca ^. L.title)) s + predicate _ = False + err = "didn't expected code action matching '" ++ show s ++ "' but found one anyway" + + +inspectCommand :: [Command |? CodeAction] -> [T.Text] -> IO Command +inspectCommand cars s = fromCommand <$> onMatch cars predicate err + where predicate (InL command) = all (`T.isInfixOf` (command ^. L.title)) s + predicate _ = False + err = "expected code action matching '" ++ show s ++ "' but did not find one" + +waitForDiagnosticsFrom :: TextDocumentIdentifier -> Test.Session [Diagnostic] +waitForDiagnosticsFrom doc = do + diagsNot <- skipManyTill Test.anyMessage (Test.message SMethod_TextDocumentPublishDiagnostics) + let diags = diagsNot ^. L.params . L.diagnostics + if doc ^. L.uri /= diagsNot ^. L.params . 
L.uri + then waitForDiagnosticsFrom doc + else return diags + +waitForDiagnosticsFromSource :: TextDocumentIdentifier -> String -> Test.Session [Diagnostic] +waitForDiagnosticsFromSource = waitForDiagnosticsFromSourceWithTimeout 5 + +-- | wait for @timeout@ seconds and report an assertion failure +-- if any diagnostic messages arrive in that period +expectNoMoreDiagnostics :: Seconds -> TextDocumentIdentifier -> String -> Test.Session () +expectNoMoreDiagnostics timeout doc src = do + diags <- waitForDiagnosticsFromSourceWithTimeout timeout doc src + unless (null diags) $ + liftIO $ assertFailure $ + "Got unexpected diagnostics for " <> show (doc ^. L.uri) <> + " got " <> show diags + +-- | wait for @timeout@ seconds and return diagnostics for the given @document and @source. +-- If timeout is 0 it will wait until the session timeout +waitForDiagnosticsFromSourceWithTimeout :: Seconds -> TextDocumentIdentifier -> String -> Test.Session [Diagnostic] +waitForDiagnosticsFromSourceWithTimeout timeout document source = do + when (timeout > 0) $ + -- Give any further diagnostic messages time to arrive. + liftIO $ sleep timeout + -- Send a dummy message to provoke a response from the server. + -- This guarantees that we have at least one message to + -- process, so message won't block or timeout. + testId <- Test.sendRequest (SMethod_CustomMethod (Proxy @"test")) A.Null + handleMessages testId + where + matches :: Diagnostic -> Bool + matches d = d ^. L.source == Just (T.pack source) + + handleMessages testId = handleDiagnostic testId <|> handleMethod_CustomMethodResponse testId <|> ignoreOthers testId + handleDiagnostic testId = do + diagsNot <- Test.message SMethod_TextDocumentPublishDiagnostics + let fileUri = diagsNot ^. L.params . L.uri + diags = diagsNot ^. L.params . L.diagnostics + res = filter matches diags + if fileUri == document ^. L.uri && not (null res) + then return res else handleMessages testId + handleMethod_CustomMethodResponse testId = do + _ <- Test.responseForId (SMethod_CustomMethod (Proxy @"test")) testId + pure [] + + ignoreOthers testId = void Test.anyMessage >> handleMessages testId + +failIfSessionTimeout :: IO a -> IO a +failIfSessionTimeout action = action `catch` errorHandler + where errorHandler :: Test.SessionException -> IO a + errorHandler e@(Test.Timeout _) = assertFailure $ show e + errorHandler e = throwIO e + +-- --------------------------------------------------------------------- +getCompletionByLabel :: MonadIO m => T.Text -> [CompletionItem] -> m CompletionItem +getCompletionByLabel desiredLabel compls = + case find (\c -> c ^. L.label == desiredLabel) compls of + Just c -> pure c + Nothing -> liftIO . assertFailure $ + "Completion with label " <> show desiredLabel + <> " not found in " <> show (fmap (^. L.label) compls) + +-- --------------------------------------------------------------------- +-- Run with a canonicalized temp dir +withCanonicalTempDir :: (FilePath -> IO a) -> IO a +withCanonicalTempDir f = System.IO.Extra.withTempDir $ \dir -> do + dir' <- canonicalizePath dir + f dir' + +-- ---------------------------------------------------------------------------- +-- Extract Position data from the source file itself. +-- ---------------------------------------------------------------------------- + +-- | Pretty labelling for tests that use the parameterised test helpers. 
+mkParameterisedLabel :: PosPrefixInfo -> String +mkParameterisedLabel posPrefixInfo = unlines + [ "Full Line: \"" <> T.unpack (fullLine posPrefixInfo) <> "\"" + , "Cursor Column: \"" <> replicate (fromIntegral $ cursorPos posPrefixInfo ^. L.character) ' ' ++ "^" <> "\"" + , "Prefix Text: \"" <> T.unpack (prefixText posPrefixInfo) <> "\"" + ] + +-- | Given a in-memory representation of a file, where a user can specify the +-- current cursor position using a '^' in the next line. +-- +-- This function allows to generate multiple tests for a single input file, without +-- the hassle of calculating by hand where there cursor is supposed to be. +-- +-- Example (line number has been added for readability): +-- +-- @ +-- 0: foo = 2 +-- 1: ^ +-- 2: bar = +-- 3: ^ +-- @ +-- +-- This example input file contains two cursor positions (y, x), at +-- +-- * (1, 1), and +-- * (3, 5). +-- +-- 'extractCursorPositions' will search for '^' characters, and determine there are +-- two cursor positions in the text. +-- First, it will normalise the text to: +-- +-- @ +-- 0: foo = 2 +-- 1: bar = +-- @ +-- +-- stripping away the '^' characters. Then, the actual cursor positions are: +-- +-- * (0, 1) and +-- * (2, 5). +-- +extractCursorPositions :: T.Text -> (T.Text, [PosPrefixInfo]) +extractCursorPositions t = + let + textLines = T.lines t + foldState = List.foldl' go emptyFoldState textLines + finalText = foldStateToText foldState + reconstructCompletionPrefix pos = getCompletionPrefixFromRope pos (Rope.fromText finalText) + cursorPositions = reverse . fmap reconstructCompletionPrefix $ foldStatePositions foldState + in + (finalText, cursorPositions) + + where + go foldState l = case T.indices "^" l of + [] -> addTextLine foldState l + xs -> List.foldl' addTextCursor foldState xs + +-- | 'FoldState' is an implementation detail used to parse some file contents, +-- extracting the cursor positions identified by '^' and producing a cleaned +-- representation of the file contents. +data FoldState = FoldState + { foldStateRows :: !Int + -- ^ The row index of the cleaned file contents. + -- + -- For example, the file contents + -- + -- @ + -- 0: foo + -- 1: ^ + -- 2: bar + -- @ + -- will report that 'bar' is actually occurring in line '1', as '^' is + -- a cursor position. + -- Lines containing cursor positions are removed. + , foldStatePositions :: ![Position] + -- ^ List of cursors positions found in the file contents. + -- + -- List is stored in reverse for efficient 'cons'ing + , foldStateFinalText :: ![T.Text] + -- ^ Final file contents with all lines containing cursor positions removed. + -- + -- List is stored in reverse for efficient 'cons'ing + } + +emptyFoldState :: FoldState +emptyFoldState = FoldState + { foldStateRows = 0 + , foldStatePositions = [] + , foldStateFinalText = [] + } + +-- | Produce the final file contents, without any lines containing cursor positions. +foldStateToText :: FoldState -> T.Text +foldStateToText state = T.unlines $ reverse $ foldStateFinalText state + +-- | We found a '^' at some location! Add it to the list of known cursor positions. +-- +-- If the row index is '0', we throw an error, as there can't be a cursor position above the first line. 
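To make the intended workflow concrete, a minimal sketch of extracting cursor positions from an inline source snippet (the snippet itself is made up):

    exampleCursorExtraction :: (T.Text, [PosPrefixInfo])
    exampleCursorExtraction =
      extractCursorPositions $ T.pack $ unlines
        [ "foo = 2"
        , "  ^"
        , "bar ="
        , "     ^"
        ]
    -- fst is the cleaned source ("foo = 2\nbar =\n");
    -- snd contains one PosPrefixInfo per '^' marker, suitable for
    -- 'mkParameterisedLabel' and the parameterised test helpers.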
+addTextCursor :: FoldState -> Int -> FoldState +addTextCursor state col + | foldStateRows state <= 0 = error $ "addTextCursor: Invalid '^' found at: " <> show (col, foldStateRows state) + | otherwise = state + { foldStatePositions = Position (fromIntegral (foldStateRows state) - 1) (fromIntegral col) : foldStatePositions state + } + +addTextLine :: FoldState -> T.Text -> FoldState +addTextLine state l = state + { foldStateFinalText = l : foldStateFinalText state + , foldStateRows = foldStateRows state + 1 + } diff --git a/install.hs b/install.hs deleted file mode 100755 index c836759786..0000000000 --- a/install.hs +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env stack -{- stack - runghc - --stack-yaml=install/stack.yaml - --package hls-install --} -{- cabal: -build-depends: - base - , hls-install --} --- call as: --- * `cabal v2-run install.hs --project-file install/shake.project ` --- * `stack install.hs ` - --- TODO: set `shake.project` in cabal-config above, when supported --- (see https://github.com/haskell/cabal/issues/6353) - -import HlsInstall (defaultMain) - -main = defaultMain diff --git a/install/cabal.project b/install/cabal.project deleted file mode 100644 index a14a803d42..0000000000 --- a/install/cabal.project +++ /dev/null @@ -1,2 +0,0 @@ -packages: - ./ diff --git a/install/hie.yaml.cbl b/install/hie.yaml.cbl deleted file mode 100644 index 8e3e7a02bd..0000000000 --- a/install/hie.yaml.cbl +++ /dev/null @@ -1,3 +0,0 @@ -cradle: - cabal: - component: "lib:hls-install" diff --git a/install/hie.yaml.stack b/install/hie.yaml.stack deleted file mode 100644 index 762c2dff79..0000000000 --- a/install/hie.yaml.stack +++ /dev/null @@ -1,3 +0,0 @@ -cradle: - stack: - component: "hls-install:lib" diff --git a/install/hls-install.cabal b/install/hls-install.cabal deleted file mode 100644 index 197a0f42a8..0000000000 --- a/install/hls-install.cabal +++ /dev/null @@ -1,40 +0,0 @@ -name: hls-install -version: 0.8.0.0 -synopsis: Install the haskell-language-engine -license: BSD3 -author: Many, TBD when we release -maintainer: samuel.pilz@posteo.net -copyright: 2019 -build-type: Simple -cabal-version: >=2.0 - -library - hs-source-dirs: src - exposed-modules: HlsInstall - other-modules: BuildSystem - , Stack - , Version - , Cabal - , Print - , Env - , Help - build-depends: base >= 4.9 && < 5 - , shake >= 0.16.4 && < 0.19 - , directory - , filepath - , extra - , text - default-extensions: LambdaCase - , TupleSections - , RecordWildCards - default-language: Haskell2010 - - if flag(run-from-stack) - cpp-options: -DRUN_FROM_STACK - else - build-depends: cabal-install-parsers - -flag run-from-stack - description: Inform the application that it is run from stack - default: False - manual: True diff --git a/install/shake.project b/install/shake.project deleted file mode 100644 index 43c7e60728..0000000000 --- a/install/shake.project +++ /dev/null @@ -1,2 +0,0 @@ -packages: - install/ diff --git a/install/src/BuildSystem.hs b/install/src/BuildSystem.hs deleted file mode 100644 index e75dc4ce40..0000000000 --- a/install/src/BuildSystem.hs +++ /dev/null @@ -1,17 +0,0 @@ -{-# LANGUAGE CPP #-} - -module BuildSystem where - -buildSystem :: String -buildSystem = -#if RUN_FROM_STACK - "stack" -#else - "cabal" -#endif - -isRunFromStack :: Bool -isRunFromStack = buildSystem == "stack" - -isRunFromCabal :: Bool -isRunFromCabal = buildSystem == "cabal" diff --git a/install/src/Cabal.hs b/install/src/Cabal.hs deleted file mode 100644 index af38070860..0000000000 --- a/install/src/Cabal.hs 
+++ /dev/null @@ -1,144 +0,0 @@ -{-# LANGUAGE CPP #-} -module Cabal where - -import Development.Shake -import Development.Shake.FilePath -import Control.Monad -import System.Directory ( copyFile ) - -import Version -import Print -import Env -#if RUN_FROM_STACK -import Control.Exception ( throwIO ) -#else -import Cabal.Config -import Data.Functor.Identity -#endif - -getInstallDir :: IO FilePath -#if RUN_FROM_STACK --- we should never hit this codepath -getInstallDir = throwIO $ userError "Stack and cabal should never be mixed" -#else -getInstallDir = runIdentity . cfgInstallDir <$> readConfig -#endif - -execCabal :: CmdResult r => [String] -> Action r -execCabal = command [] "cabal" - -execCabal_ :: [String] -> Action () -execCabal_ = execCabal - -cabalBuildData :: [String] -> Action () -cabalBuildData args = do - execCabal_ $ ["v2-build", "hoogle"] ++ args - execCabal_ $ ["v2-exec", "hoogle", "generate"] ++ args - -getGhcPathOfOrThrowError :: VersionNumber -> Action GhcPath -getGhcPathOfOrThrowError versionNumber = - getGhcPathOf versionNumber >>= \case - Nothing -> do - printInStars $ ghcVersionNotFoundFailMsg versionNumber - error (ghcVersionNotFoundFailMsg versionNumber) - Just p -> return p - -cabalInstallHls :: VersionNumber -> [String] -> Action () -cabalInstallHls versionNumber args = do - localBin <- liftIO $ getInstallDir - cabalVersion <- getCabalVersion args - ghcPath <- getGhcPathOfOrThrowError versionNumber - - let isCabal3 = checkVersion [3,0,0,0] cabalVersion - installDirOpt | isCabal3 = "--installdir" - | otherwise = "--symlink-bindir" - installMethod | isWindowsSystem && isCabal3 = ["--install-method=copy"] - | otherwise = [] - - projectFile <- getProjectFile versionNumber - - execCabal_ $ - [ "v2-install" - , "exe:haskell-language-server" - , "exe:haskell-language-server-wrapper" - , "-w", ghcPath - , "--write-ghc-environment-files=never" - , installDirOpt, localBin - , "--max-backjumps=5000" - , "--overwrite-policy=always" - , "--project-file=" ++ projectFile - ] - ++ installMethod - ++ args - - let minorVerExe = "haskell-language-server-" ++ versionNumber <.> exe - majorVerExe = "haskell-language-server-" ++ dropExtension versionNumber <.> exe - - liftIO $ do - copyFile (localBin "haskell-language-server" <.> exe) (localBin minorVerExe) - copyFile (localBin "haskell-language-server" <.> exe) (localBin majorVerExe) - - printLine $ "Copied executables " - ++ ("haskell-language-server-wrapper" <.> exe) ++ ", " - ++ ("haskell-language-server" <.> exe) ++ ", " - ++ majorVerExe ++ " and " - ++ minorVerExe - ++ " to " ++ localBin - -getProjectFile :: VersionNumber -> Action FilePath -getProjectFile ver = do - existFile <- doesFileExist $ "cabal.project-" ++ ver - return $ if existFile - then "cabal.project-" ++ ver - else "cabal.project" - -checkCabal_ :: [String] -> Action () -checkCabal_ args = checkCabal args >> return () - --- | check `cabal` has the required version -checkCabal :: [String] -> Action String -checkCabal args = do - cabalVersion <- getCabalVersion args - unless (checkVersion requiredCabalVersion cabalVersion) $ do - printInStars $ cabalInstallIsOldFailMsg cabalVersion - error $ cabalInstallIsOldFailMsg cabalVersion - return cabalVersion - -getCabalVersion :: [String] -> Action String -getCabalVersion args = trimmedStdout <$> (execCabal $ ["--numeric-version"] ++ args) - --- | Error message when the `cabal` binary is an older version -cabalInstallIsOldFailMsg :: String -> String -cabalInstallIsOldFailMsg cabalVersion = - "The `cabal` executable found in $PATH 
is outdated.\n" - ++ "found version is `" - ++ cabalVersion - ++ "`.\n" - ++ "required version is `" - ++ versionToString requiredCabalVersion - ++ "`." - -requiredCabalVersion :: RequiredVersion -requiredCabalVersion | isWindowsSystem = requiredCabalVersionForWindows - | otherwise = [2, 4, 1, 0] - -requiredCabalVersionForWindows :: RequiredVersion -requiredCabalVersionForWindows = [3, 0, 0, 0] - -getVerbosityArg :: Verbosity -> String -getVerbosityArg v = "-v" ++ cabalVerbosity - where cabalVerbosity = case v of - Silent -> "0" -#if MIN_VERSION_shake(0,18,4) - Error -> "0" - Warn -> "1" - Info -> "1" - Verbose -> "2" -#else - Quiet -> "0" - Normal -> "1" - Loud -> "2" - Chatty -> "2" -#endif - Diagnostic -> "3" - diff --git a/install/src/Env.hs b/install/src/Env.hs deleted file mode 100644 index 6388338639..0000000000 --- a/install/src/Env.hs +++ /dev/null @@ -1,129 +0,0 @@ -module Env where - -import Development.Shake -import Control.Monad.IO.Class -import Control.Monad -import Development.Shake.FilePath -import System.Info ( os ) -import Data.Maybe ( isJust - , mapMaybe - ) -import System.Directory ( findExecutable - , findExecutables - , listDirectory - ) -import Data.Function ( (&) - , on - ) -import Data.List ( sort - , sortBy - , isInfixOf - , nubBy - ) -import Data.Ord ( comparing ) -import Control.Monad.Extra ( mapMaybeM ) - -import qualified Data.Text as T - -import Version -import Print - - -type GhcPath = String - -existsExecutable :: MonadIO m => String -> m Bool -existsExecutable executable = liftIO $ isJust <$> findExecutable executable - - --- | Check if the current system is windows -isWindowsSystem :: Bool -isWindowsSystem = os `elem` ["mingw32", "win32"] - -findInstalledGhcs :: IO [(VersionNumber, GhcPath)] -findInstalledGhcs = do - hlsVersions <- getHlsVersions :: IO [VersionNumber] - knownGhcs <- mapMaybeM - (\version -> getGhcPathOf version >>= \case - Nothing -> return Nothing - Just p -> return $ Just (version, p) - ) - (reverse hlsVersions) - -- filter out not supported ghc versions - availableGhcs <- filter ((`elem` hlsVersions) . fst) <$> getGhcPaths - return - -- sort by version to make it coherent with getHlsVersions - $ sortBy (comparing fst) - -- nub by version. knownGhcs takes precedence. - $ nubBy ((==) `on` fst) - -- filter out stack provided GHCs (assuming that stack programs path is the default one in linux) - $ filter (not . isInfixOf ".stack" . snd) (knownGhcs ++ availableGhcs) - -showInstalledGhcs :: MonadIO m => [(VersionNumber, GhcPath)] -> m () -showInstalledGhcs ghcPaths = do - let msg = "Found the following GHC paths: \n" - ++ unlines - (map (\(version, path) -> "ghc-" ++ version ++ ": " ++ path) - ghcPaths - ) - printInStars msg - -checkInstalledGhcs :: MonadIO m => [(VersionNumber, GhcPath)] -> m () -checkInstalledGhcs ghcPaths = when (null ghcPaths) $ do - let msg = "No ghc installations found in $PATH. \n" - ++ "The script requires at least one ghc in $PATH \n" - ++ " to be able to build haskell-language-server.\n" - printInStars msg - error msg - --- | Get the path to a GHC that has the version specified by `VersionNumber` --- If no such GHC can be found, Nothing is returned. --- First, it is checked whether there is a GHC with the name `ghc-$VersionNumber`. --- If this yields no result, it is checked, whether the numeric-version of the `ghc` --- command fits to the desired version. 
-getGhcPathOf :: MonadIO m => VersionNumber -> m (Maybe GhcPath) -getGhcPathOf ghcVersion = - liftIO $ findExecutable ("ghc-" ++ ghcVersion <.> exe) >>= \case - Nothing -> lookup ghcVersion <$> getGhcPaths - path -> return path - --- | Get a list of GHCs that are available in $PATH -getGhcPaths :: MonadIO m => m [(VersionNumber, GhcPath)] -getGhcPaths = liftIO $ do - paths <- findExecutables "ghc" - forM paths $ \path -> do - Stdout version <- cmd path ["--numeric-version"] - return (trim version, path) - --- | No suitable ghc version has been found. Show a message. -ghcVersionNotFoundFailMsg :: VersionNumber -> String -ghcVersionNotFoundFailMsg versionNumber = - "No GHC with version " - <> versionNumber - <> " has been found.\n" - <> "Either install a fitting GHC, use the stack targets or modify the PATH variable accordingly." - - --- | Defines all different hls versions that are buildable. --- --- The current directory is scanned for `stack-*.yaml` files. -getHlsVersions :: MonadIO m => m [VersionNumber] -getHlsVersions = do - let stackYamlPrefix = T.pack "stack-" - let stackYamlSuffix = T.pack ".yaml" - files <- liftIO $ listDirectory "." - let hlsVersions = - files - & map T.pack - & mapMaybe - (T.stripPrefix stackYamlPrefix >=> T.stripSuffix stackYamlSuffix) - & map T.unpack - -- the following line excludes `8.6.3`, `8.8.1` and `8.8.2` on windows systems - & filter (\p -> not (isWindowsSystem && p `elem` ["8.6.3", "8.8.1", "8.8.2"])) - & sort - return hlsVersions - - --- | Most recent version of hls. --- Shown in the more concise help message. -mostRecentHlsVersion :: MonadIO m => m VersionNumber -mostRecentHlsVersion = last <$> getHlsVersions diff --git a/install/src/Help.hs b/install/src/Help.hs deleted file mode 100644 index f831f33f1f..0000000000 --- a/install/src/Help.hs +++ /dev/null @@ -1,131 +0,0 @@ --- |Module for Help messages and traget descriptions -module Help where - -import Development.Shake -import Data.List ( intercalate ) - -import Env -import Print -import Version -import BuildSystem - -stackCommand :: TargetDescription -> String -stackCommand target = "stack install.hs " ++ fst target ++ " [options]" - -cabalCommand :: TargetDescription -> String -cabalCommand target = "cabal v2-run install.hs --project-file install/shake.project -- " ++ fst target ++ " [options]" - -buildCommand :: TargetDescription -> String -buildCommand | isRunFromCabal = cabalCommand - | otherwise = stackCommand - -printUsage :: Action () -printUsage = do - printLine "" - printLine "Usage:" - printLineIndented (stackCommand templateTarget) - printLineIndented "or" - printLineIndented (cabalCommand templateTarget) - --- | short help message is printed by default -shortHelpMessage :: Action () -shortHelpMessage = do - hlsVersions <- getHlsVersions - printUsage - printLine "" - printLine "Targets:" - mapM_ (printLineIndented . showHelpItem (spaces hlsVersions)) (targets hlsVersions) - printLine "" - where - spaces hlsVersions = space (targets hlsVersions) - targets hlsVersions = - [ ("help", "Show help message including all targets") - , emptyTarget - , buildTarget - , buildLatestTarget - , hlsTarget $ last hlsVersions - , buildDataTarget - , cabalGhcsTarget - ] - --- | A record that specifies for each build system which versions of @haskell-language-server@ can be built. 
-data BuildableVersions = BuildableVersions - { stackVersions :: [VersionNumber] - , cabalVersions :: [VersionNumber] - } - -getDefaultBuildSystemVersions :: BuildableVersions -> [VersionNumber] -getDefaultBuildSystemVersions BuildableVersions {..} - | isRunFromStack = stackVersions - | isRunFromCabal = cabalVersions - | otherwise = error $ "unknown build system: " ++ buildSystem - -helpMessage :: BuildableVersions -> Action () -helpMessage versions@BuildableVersions {..} = do - printUsage - printLine "" - printLine "Targets:" - mapM_ (printLineIndented . showHelpItem spaces) targets - printLine "" - printLine "Options:" - mapM_ (printLineIndented . showHelpItem spaces) options - printLine "" - where - spaces = space targets - -- All targets the shake file supports - targets :: [(String, String)] - targets = intercalate - [emptyTarget] - [ generalTargets - , defaultTargets - , if isRunFromCabal then [cabalGhcsTarget] else [stackDevTarget] - , [macosIcuTarget] - ] - options = [ ("-s, --silent", "Don't print anything.") - , ("-q, --quiet", "Print less (pass repeatedly for even less).") - , ("-V, --verbose", "Print more (pass repeatedly for even more).") - ] - - -- All targets with their respective help message. - generalTargets = [helpTarget] - - defaultTargets = [buildTarget, buildLatestTarget, buildDataTarget] - ++ map hlsTarget (getDefaultBuildSystemVersions versions) - --- | Empty target. Purpose is to introduce a newline between the targets -emptyTarget :: (String, String) -emptyTarget = ("", "") - -templateTarget :: (String, String) -templateTarget = ("", "") - -hlsTarget :: String -> TargetDescription -hlsTarget version = - ("hls-" ++ version, "Install haskell-language-server for GHC version " ++ version) - -buildTarget :: TargetDescription -buildTarget = ("hls", "Install haskell-language-server with the latest available GHC and the data files") - -buildLatestTarget :: TargetDescription -buildLatestTarget = ("latest", "Install haskell-language-server with the latest available GHC") - -buildDataTarget :: TargetDescription -buildDataTarget = - ("data", "Get the required data-files for `haskell-language-server` (Hoogle DB)") - --- special targets - -macosIcuTarget :: TargetDescription -macosIcuTarget = ("icu-macos-fix", "Fixes icu related problems in MacOS") - -helpTarget :: TargetDescription -helpTarget = ("help", "Show help message including all targets") - -cabalGhcsTarget :: TargetDescription -cabalGhcsTarget = - ( "ghcs" - , "Show all GHC versions that can be installed via `cabal-build`." 
- ) - -stackDevTarget :: TargetDescription -stackDevTarget = ("dev", "Install haskell-language-server with the default stack.yaml") diff --git a/install/src/HlsInstall.hs b/install/src/HlsInstall.hs deleted file mode 100644 index 13396763da..0000000000 --- a/install/src/HlsInstall.hs +++ /dev/null @@ -1,112 +0,0 @@ -module HlsInstall where - -import Development.Shake -import Control.Monad -import System.Environment ( unsetEnv ) - -import BuildSystem -import Stack -import Cabal -import Version -import Env -import Help - -defaultMain :: IO () -defaultMain = do - -- unset GHC_PACKAGE_PATH for cabal - unsetEnv "GHC_PACKAGE_PATH" - - -- used for cabal-based targets - ghcPaths <- findInstalledGhcs - let cabalVersions = map fst ghcPaths - - -- used for stack-based targets - stackVersions <- getHlsVersions - - let versions = if isRunFromStack then stackVersions else cabalVersions - - let toolsVersions = BuildableVersions stackVersions cabalVersions - - let latestVersion = last versions - - shakeArgs shakeOptions { shakeFiles = "_build" } $ do - - shakeOptionsRules <- getShakeOptionsRules - - let verbosityArg = if isRunFromStack then Stack.getVerbosityArg else Cabal.getVerbosityArg - - let args = [verbosityArg (shakeVerbosity shakeOptionsRules)] - - phony "show-options" $ do - putNormal $ "Options:" - putNormal $ " Verbosity level: " ++ show (shakeVerbosity shakeOptionsRules) - - want ["short-help"] - -- general purpose targets - phony "submodules" updateSubmodules - phony "short-help" shortHelpMessage - phony "help" (helpMessage toolsVersions) - - phony "check" (if isRunFromStack then checkStack args else checkCabal_ args) - - phony "data" $ do - need ["show-options"] - need ["submodules"] - need ["check"] - liftIO $ putStrLn "Generation of hoogle data files is disabled for now." 
- -- if isRunFromStack then stackBuildData args else cabalBuildData args - - forM_ - versions - (\version -> phony ("hls-" ++ version) $ do - need ["show-options"] - need ["submodules"] - need ["check"] - if isRunFromStack then - stackInstallHlsWithErrMsg (Just version) args - else - cabalInstallHls version args - ) - - unless (null versions) $ do - phony "latest" (need ["hls-" ++ latestVersion]) - phony "hls" (need ["data", "latest"]) - - -- stack specific targets - -- Default `stack.yaml` uses ghc-8.8.2 and we can't build hls in windows - -- TODO: Enable for windows when it uses ghc-8.8.3 - when isRunFromStack $ - phony "dev" $ do - need ["show-options"] - stackInstallHlsWithErrMsg Nothing args - - -- cabal specific targets - when isRunFromCabal $ do - -- It throws an error if there is no ghc in $PATH - checkInstalledGhcs ghcPaths - phony "ghcs" $ showInstalledGhcs ghcPaths - - -- macos specific targets - phony "icu-macos-fix" $ do - need ["show-options"] - need ["icu-macos-fix-install"] - need ["icu-macos-fix-build"] - - phony "icu-macos-fix-install" (command_ [] "brew" ["install", "icu4c"]) - phony "icu-macos-fix-build" $ mapM_ (flip buildIcuMacosFix $ args) versions - - -buildIcuMacosFix :: VersionNumber -> [String] -> Action () -buildIcuMacosFix version args = execStackWithGhc_ - version $ - [ "build" - , "text-icu" - , "--extra-lib-dirs=/usr/local/opt/icu4c/lib" - , "--extra-include-dirs=/usr/local/opt/icu4c/include" - ] ++ args - --- | update the submodules that the project is in the state as required by the `stack.yaml` files -updateSubmodules :: Action () -updateSubmodules = do - command_ [] "git" ["submodule", "sync"] - command_ [] "git" ["submodule", "update", "--init"] diff --git a/install/src/Print.hs b/install/src/Print.hs deleted file mode 100644 index 063525e7ec..0000000000 --- a/install/src/Print.hs +++ /dev/null @@ -1,45 +0,0 @@ -module Print where - -import Development.Shake -import Control.Monad.IO.Class -import Data.List ( dropWhileEnd - ) -import Data.Char ( isSpace ) - --- | lift putStrLn to MonadIO -printLine :: MonadIO m => String -> m () -printLine = liftIO . putStrLn - --- | print a line prepended with 4 spaces -printLineIndented :: MonadIO m => String -> m () -printLineIndented = printLine . (" " ++) - -embedInStars :: String -> String -embedInStars str = - let starsLine = "\n" <> replicate 80 '*' <> "\n" - in starsLine <> str <> starsLine - -printInStars :: MonadIO m => String -> m () -printInStars = liftIO . putStrLn . embedInStars - - --- | Trim whitespace of both ends of a string -trim :: String -> String -trim = dropWhileEnd isSpace . dropWhile isSpace - --- | Trim the whitespace of the stdout of a command -trimmedStdout :: Stdout String -> String -trimmedStdout (Stdout s) = trim s - -type TargetDescription = (String, String) - --- | Number of spaces the target name including whitespace should have. --- At least twenty, maybe more if target names are long. At most the length of the longest target plus five. -space :: [(String,String)] -> Int -space helpItems = maximum (20 : map ((+ 5) . length . fst) helpItems) - --- | Show a target. --- Concatenates the target with its help message and inserts whitespace between them. 
-showHelpItem :: Int -> (String,String) -> String -showHelpItem spaces (helpItemKey, msg) = - helpItemKey ++ replicate (spaces - length helpItemKey) ' ' ++ msg diff --git a/install/src/Stack.hs b/install/src/Stack.hs deleted file mode 100644 index 0d5faac1c0..0000000000 --- a/install/src/Stack.hs +++ /dev/null @@ -1,135 +0,0 @@ -{-# LANGUAGE CPP #-} -module Stack where - -import Development.Shake -import Development.Shake.FilePath -import Control.Monad -import System.Directory ( copyFile ) --- import System.FilePath ( () ) -import Version -import Print - -stackInstallHlsWithErrMsg :: Maybe VersionNumber -> [String] -> Action () -stackInstallHlsWithErrMsg mbVersionNumber args = - stackInstallHls mbVersionNumber args - `actionOnException` liftIO (putStrLn stackBuildFailMsg) - --- | copy the built binaries into the localBinDir -stackInstallHls :: Maybe VersionNumber -> [String] -> Action () -stackInstallHls mbVersionNumber args = do - let args' = [ "install" - , ":haskell-language-server-wrapper" - , ":haskell-language-server" - ] ++ args - versionNumber <- - case mbVersionNumber of - Nothing -> do - execStackWithCfgFile_ "stack.yaml" args' - getGhcVersionOfCfgFile "stack.yaml" args - Just vn -> do - execStackWithGhc_ vn args' - return vn - - localBinDir <- getLocalBin args - let hls = "haskell-language-server" <.> exe - liftIO $ do - copyFile (localBinDir hls) - (localBinDir "haskell-language-server-" ++ versionNumber <.> exe) - copyFile (localBinDir hls) - (localBinDir "haskell-language-server-" ++ dropExtension versionNumber <.> exe) - -getGhcVersionOfCfgFile :: String -> [String] -> Action VersionNumber -getGhcVersionOfCfgFile stackFile args = do - Stdout ghcVersion <- - execStackWithCfgFile stackFile $ ["exec", "ghc"] ++ args ++ ["--", "--numeric-version"] - return $ trim ghcVersion - --- | check `stack` has the required version -checkStack :: [String] -> Action () -checkStack args = do - stackVersion <- trimmedStdout <$> (execStackShake $ ["--numeric-version"] ++ args) - unless (checkVersion requiredStackVersion stackVersion) $ do - printInStars $ stackExeIsOldFailMsg stackVersion - error $ stackExeIsOldFailMsg stackVersion - --- | Get the local binary path of stack. 
--- Equal to the command `stack path --local-bin` -getLocalBin :: [String] -> Action FilePath -getLocalBin args = do - Stdout stackLocalDir' <- execStackShake $ ["path", "--local-bin"] ++ args - return $ trim stackLocalDir' - -stackBuildData :: [String] -> Action () -stackBuildData args = do - execStackShake_ $ ["build", "hoogle"] ++ args - execStackShake_ $ ["exec", "hoogle", "generate"] ++ args - --- | Execute a stack command for a specified ghc, discarding the output -execStackWithGhc_ :: VersionNumber -> [String] -> Action () -execStackWithGhc_ = execStackWithGhc - --- | Execute a stack command for a specified ghc -execStackWithGhc :: CmdResult r => VersionNumber -> [String] -> Action r -execStackWithGhc versionNumber args = do - let stackFile = "stack-" ++ versionNumber ++ ".yaml" - execStackWithCfgFile stackFile args - --- | Execute a stack command for a specified stack.yaml file, discarding the output -execStackWithCfgFile_ :: String -> [String] -> Action () -execStackWithCfgFile_ = execStackWithCfgFile - --- | Execute a stack command for a specified stack.yaml file -execStackWithCfgFile :: CmdResult r => String -> [String] -> Action r -execStackWithCfgFile stackFile args = - command [] "stack" (("--stack-yaml=" ++ stackFile) : args) - --- | Execute a stack command with the same resolver as the build script -execStackShake :: CmdResult r => [String] -> Action r -execStackShake args = command [] "stack" ("--stack-yaml=install/stack.yaml" : args) - --- | Execute a stack command with the same resolver as the build script, discarding the output -execStackShake_ :: [String] -> Action () -execStackShake_ = execStackShake - - --- | Error message when the `stack` binary is an older version -stackExeIsOldFailMsg :: String -> String -stackExeIsOldFailMsg stackVersion = - "The `stack` executable is outdated.\n" - ++ "found version is `" - ++ stackVersion - ++ "`.\n" - ++ "required version is `" - ++ versionToString requiredStackVersion - ++ "`.\n" - ++ "Please run `stack upgrade` to upgrade your stack installation" - -requiredStackVersion :: RequiredVersion -requiredStackVersion = [2, 1, 1] - --- |Stack build fails message -stackBuildFailMsg :: String -stackBuildFailMsg = - embedInStars - $ "Building failed, " - ++ "Try running `stack clean` and restart the build\n" - ++ "If this does not work, open an issue at \n" - ++ "\thttps://github.com/haskell/haskell-language-engine" - -getVerbosityArg :: Verbosity -> String -getVerbosityArg v = "--verbosity=" ++ stackVerbosity - where stackVerbosity = case v of - Silent -> "silent" -#if MIN_VERSION_shake(0,18,4) - Error -> "error" - Warn -> "warn" - Info -> "info" - Verbose -> "info" -#else - Quiet -> "error" - Normal -> "warn" - Loud -> "info" - Chatty -> "info" -#endif - - Diagnostic -> "debug" diff --git a/install/src/Version.hs b/install/src/Version.hs deleted file mode 100644 index 4647004145..0000000000 --- a/install/src/Version.hs +++ /dev/null @@ -1,23 +0,0 @@ -module Version where - -import Data.Version ( Version - , parseVersion - , makeVersion - , showVersion - ) -import Text.ParserCombinators.ReadP ( readP_to_S ) - - -type VersionNumber = String -type RequiredVersion = [Int] - -versionToString :: RequiredVersion -> String -versionToString = showVersion . makeVersion - --- | Parse a version-string into a version. Fails if the version-string is not valid -parseVersionEx :: String -> Version -parseVersionEx = fst . head . filter (("" ==) . snd) . 
readP_to_S parseVersion - --- | Check that a given version-string is not smaller than the required version -checkVersion :: RequiredVersion -> String -> Bool -checkVersion required given = parseVersionEx given >= makeVersion required diff --git a/install/stack.yaml b/install/stack.yaml deleted file mode 100644 index 23026bccea..0000000000 --- a/install/stack.yaml +++ /dev/null @@ -1,17 +0,0 @@ -resolver: lts-14.27 # Last 8.6.5 - -packages: - - . - -extra-deps: - - cabal-install-parsers-0.3.0.1 - - Cabal-3.2.0.0 - - binary-instances-1.0.0.1 - - lukko-0.1.1.2 - -flags: - hls-install: - run-from-stack: true - -nix: - packages: [ icu libcxx zlib ] diff --git a/plugins/hls-alternate-number-format-plugin/HLSAll.gif b/plugins/hls-alternate-number-format-plugin/HLSAll.gif new file mode 100644 index 0000000000..d0ba7dbd49 Binary files /dev/null and b/plugins/hls-alternate-number-format-plugin/HLSAll.gif differ diff --git a/plugins/hls-alternate-number-format-plugin/README.md b/plugins/hls-alternate-number-format-plugin/README.md new file mode 100644 index 0000000000..d399878516 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/README.md @@ -0,0 +1,46 @@ +# Alternate Number Format Plugin + +The alternate number format plugin provides alternative formatting for Numeric Literals in source code. +These can be any numeric literal such as `123`, `0x45` or any of the other numeric formats. +The Code Action will provide all possible formatting suggestions (and when required insert the associated Language Extension) + +## Setup + +The plugin requires no extra setup to work. Simply place your cursor on top of a literal and invoke the `codeAction` command for your editor. + +## Demo + +![Alternate format suggestions](HLSAll.gif) + +### Currently Supported GHC Extensions: +- `BinaryLiterals` +- `HexFloatLiterals` +- `NumDecimalLiterals` + +## Design + +The plugin is relatively simple, it traverses a files source contents using the GHC API. As it encounters Literals (of the type `HsExpr` with the constructor of either `HsLit` or `HsOverLit`), it will construct an internal `Literal` datatype that has additional information for use to generate suggestions. +Currently, the traversal is done in the file, `Literal.hs`, using the package [SYB](https://hackage.haskell.org/package/syb) for most of the heavy lifting. + +To generate suggestions, the plugin leverages the `Numeric` package which provides a multitude of conversion functions to and from strings/numerics. + +### Known Quirks +- Anything that produces a bad Source Span (i.e. can't be easily replaced by an edit) is ignored as well. 
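The conversion layer sketched in the Design section above is essentially a thin wrapper over `Numeric` from `base`. As a rough, standalone illustration (this is not the plugin's actual code; the real implementation appears in `Conversion.hs` later in this diff), the snippet below shows the kind of calls involved for the decimal-to-hex, decimal-to-octal and decimal-to-exponent rewrites:

```haskell
import Numeric (showEFloat, showHex, showOct)

-- Minimal sketch of the rewrites the plugin offers.
-- showHex/showOct render an integral value in the target base;
-- showEFloat renders a floating value in exponent notation.
main :: IO ()
main = do
  putStrLn $ "0x" <> showHex (12 :: Integer) ""        -- "0xc"
  putStrLn $ "0o" <> showOct (12 :: Integer) ""        -- "0o14"
  putStrLn $ showEFloat Nothing (123.45 :: Double) ""  -- "1.2345e2"
```

The plugin additionally upper-cases the digits and chooses the prefix from the detected source format, as `toBase` in `Conversion.hs` below shows.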
+ +## Changelog +### 1.0.0.0 +- First Release + +### 1.0.1.0 +- Dependency upgrades + +### 1.0.1.1 +- Buildable with GHC 9.2 + +### 1.0.2.0 +- Test Suite upgraded for 9.2 semantics (GHC2021) +- Fix SYB parsing with GHC 9.2 + +### 1.1.0.0 +- Provide ALL possible formats as suggestions +- Insert Language Extensions when needed diff --git a/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/AlternateNumberFormat.hs b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/AlternateNumberFormat.hs new file mode 100644 index 0000000000..3b00d79d1b --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/AlternateNumberFormat.hs @@ -0,0 +1,133 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TypeFamilies #-} +module Ide.Plugin.AlternateNumberFormat (descriptor, Log(..)) where + +import Control.Lens ((^.)) +import Control.Monad.Except (ExceptT) +import Control.Monad.IO.Class (MonadIO) +import qualified Data.Map as Map +import Data.Text (Text, unpack) +import qualified Data.Text as T +import Development.IDE (GetParsedModule (GetParsedModule), + IdeState, RuleResult, Rules, + define, realSrcSpanToRange, + use) +import Development.IDE.Core.PluginUtils +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat hiding (getSrcSpan) +import Development.IDE.GHC.Util (getExtensions) +import Development.IDE.Graph.Classes (Hashable, NFData, rnf) +import Development.IDE.Spans.Pragmas (NextPragmaInfo, + getFirstPragma, + insertNewPragma) +import GHC.Generics (Generic) +import Ide.Logger as Logger +import Ide.Plugin.Conversion (AlternateFormat, + ExtensionNeeded (NeedsExtension, NoExtension), + alternateFormat) +import Ide.Plugin.Error +import Ide.Plugin.Literals +import Ide.Plugin.RangeMap (RangeMap) +import qualified Ide.Plugin.RangeMap as RangeMap +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +newtype Log = LogShake Shake.Log deriving Show + +instance Pretty Log where + pretty = \case + LogShake msg -> pretty msg + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder pId = (defaultPluginDescriptor pId "Provides code actions to convert numeric literals to different formats") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction codeActionHandler + , pluginRules = collectLiteralsRule recorder + } + +data CollectLiterals = CollectLiterals + deriving (Show, Eq, Generic) + +instance Hashable CollectLiterals +instance NFData CollectLiterals + +type instance RuleResult CollectLiterals = CollectLiteralsResult + +data CollectLiteralsResult = CLR + { literals :: RangeMap Literal + , enabledExtensions :: [GhcExtension] + } deriving (Generic) + +newtype GhcExtension = GhcExtension { unExt :: Extension } + +instance NFData GhcExtension where + rnf x = x `seq` () + +instance Show CollectLiteralsResult where + show _ = "" + +instance NFData CollectLiteralsResult + +collectLiteralsRule :: Recorder (WithPriority Log) -> Rules () +collectLiteralsRule recorder = define (cmapWithPrio LogShake recorder) $ \CollectLiterals nfp -> do + pm <- use GetParsedModule nfp + -- get the current extensions active and transform them into FormatTypes + let exts = map GhcExtension . getExtensions <$> pm + -- collect all the literals for a file + lits = collectLiterals . pm_parsed_source <$> pm + litMap = RangeMap.fromList (realSrcSpanToRange . 
getSrcSpan) <$> lits + pure ([], CLR <$> litMap <*> exts) + +codeActionHandler :: PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionHandler state pId (CodeActionParams _ _ docId currRange _) = do + nfp <- getNormalizedFilePathE (docId ^. L.uri) + CLR{..} <- requestLiterals pId state nfp + pragma <- getFirstPragma pId state nfp + -- remove any invalid literals (see validTarget comment) + let litsInRange = RangeMap.filterByRange currRange literals + -- generate alternateFormats and zip with the literal that generated the alternates + literalPairs = map (\lit -> (lit, alternateFormat lit)) litsInRange + -- make a code action for every literal and its' alternates (then flatten the result) + actions = concatMap (\(lit, alts) -> map (mkCodeAction nfp lit enabledExtensions pragma) alts) literalPairs + pure $ InL actions + where + mkCodeAction :: NormalizedFilePath -> Literal -> [GhcExtension] -> NextPragmaInfo -> AlternateFormat -> Command |? CodeAction + mkCodeAction nfp lit enabled npi af@(alt, ext) = InR CodeAction { + _title = mkCodeActionTitle lit af enabled + , _kind = Just $ CodeActionKind_Custom "quickfix.literals.style" + , _diagnostics = Nothing + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Just $ mkWorkspaceEdit nfp edits + , _command = Nothing + , _data_ = Nothing + } + where + edits = [TextEdit (realSrcSpanToRange $ getSrcSpan lit) alt] <> pragmaEdit + pragmaEdit = case ext of + NeedsExtension ext' -> [insertNewPragma npi ext' | needsExtension ext' enabled] + NoExtension -> [] + + mkWorkspaceEdit :: NormalizedFilePath -> [TextEdit] -> WorkspaceEdit + mkWorkspaceEdit nfp edits = WorkspaceEdit changes Nothing Nothing + where + changes = Just $ Map.singleton (filePathToUri $ fromNormalizedFilePath nfp) edits + +mkCodeActionTitle :: Literal -> AlternateFormat -> [GhcExtension] -> Text +mkCodeActionTitle lit (alt, ext) ghcExts + | (NeedsExtension ext') <- ext + , needsExtension ext' ghcExts = title <> " (needs extension: " <> T.pack (show ext') <> ")" + | otherwise = title + where + title = "Convert " <> getSrcText lit <> " into " <> alt + + +-- | Checks whether the extension given is already enabled +needsExtension :: Extension -> [GhcExtension] -> Bool +needsExtension ext ghcExts = ext `notElem` map unExt ghcExts + +requestLiterals :: MonadIO m => PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError m CollectLiteralsResult +requestLiterals (PluginId pId) state = + runActionE (unpack pId <> ".CollectLiterals") state + . useE CollectLiterals diff --git a/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Conversion.hs b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Conversion.hs new file mode 100644 index 0000000000..cbfaa30140 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Conversion.hs @@ -0,0 +1,187 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.Conversion ( + alternateFormat + , hexRegex + , hexFloatRegex + , binaryRegex + , octalRegex + , decimalRegex + , numDecimalRegex + , matchLineRegex + , toOctal + , toDecimal + , toBinary + , toHex + , toFloatDecimal + , toFloatExpDecimal + , toHexFloat + , AlternateFormat + , ExtensionNeeded(..) 
+) where + +import Data.List (delete) +import Data.List.Extra (enumerate, upper) +import Data.Maybe (mapMaybe) +import Data.Ratio (denominator, numerator) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE.Graph.Classes (NFData) +import GHC.Generics (Generic) +import GHC.LanguageExtensions.Type (Extension (..)) +import GHC.Show (intToDigit) +import Ide.Plugin.Literals (Literal (..), getSrcText) +import Numeric +import Text.Regex.TDFA ((=~)) + +data FormatType = IntFormat IntFormatType + | FracFormat FracFormatType + | NoFormat + deriving (Show, Eq, Generic) + +instance NFData FormatType + +data IntFormatType = IntDecimalFormat + | HexFormat + | OctalFormat + | BinaryFormat + | NumDecimalFormat + deriving (Show, Eq, Generic, Bounded, Enum) + +instance NFData IntFormatType + +data FracFormatType = FracDecimalFormat + | HexFloatFormat + | ExponentFormat + deriving (Show, Eq, Generic, Bounded, Enum) + +instance NFData FracFormatType + +data ExtensionNeeded = NoExtension + | NeedsExtension Extension + +type AlternateFormat = (Text, ExtensionNeeded) + +-- | Generate alternate formats for a single Literal based on FormatType's given. +alternateFormat :: Literal -> [AlternateFormat] +alternateFormat lit = case lit of + IntLiteral _ _ val -> map (alternateIntFormat val) (removeCurrentFormatInt lit) + FracLiteral _ _ val -> if denominator val == 1 -- floats that can be integers we can represent as ints + then map (alternateIntFormat (numerator val)) (removeCurrentFormatInt lit) + else map (alternateFracFormat val) (removeCurrentFormatFrac lit) + +alternateIntFormat :: Integer -> IntFormatType -> AlternateFormat +alternateIntFormat val = \case + IntDecimalFormat -> (T.pack $ toDecimal val, NoExtension) + HexFormat -> (T.pack $ toHex val, NoExtension) + OctalFormat -> (T.pack $ toOctal val, NoExtension) + BinaryFormat -> (T.pack $ toBinary val, NeedsExtension BinaryLiterals) + NumDecimalFormat -> (T.pack $ toFloatExpDecimal (fromInteger @Double val), NeedsExtension NumDecimals) + +alternateFracFormat :: Rational -> FracFormatType -> AlternateFormat +alternateFracFormat val = \case + FracDecimalFormat -> (T.pack $ toFloatDecimal (fromRational @Double val), NoExtension) + ExponentFormat -> (T.pack $ toFloatExpDecimal (fromRational @Double val), NoExtension) + HexFloatFormat -> (T.pack $ toHexFloat (fromRational @Double val), NeedsExtension HexFloatLiterals) + +-- given a Literal compute it's current Format and delete it from the list of available formats +removeCurrentFormat :: (Foldable t, Eq a) => [a] -> t a -> [a] +removeCurrentFormat fmts toRemove = foldl (flip delete) fmts toRemove + +removeCurrentFormatInt :: Literal -> [IntFormatType] +removeCurrentFormatInt (getSrcText -> srcText) = removeCurrentFormat intFormats (filterIntFormats $ sourceToFormatType srcText) + +removeCurrentFormatFrac :: Literal -> [FracFormatType] +removeCurrentFormatFrac (getSrcText -> srcText) = removeCurrentFormat fracFormats (filterFracFormats $ sourceToFormatType srcText) + +filterIntFormats :: [FormatType] -> [IntFormatType] +filterIntFormats = mapMaybe getIntFormat + where + getIntFormat (IntFormat f) = Just f + getIntFormat _ = Nothing + +filterFracFormats :: [FormatType] -> [FracFormatType] +filterFracFormats = mapMaybe getFracFormat + where + getFracFormat (FracFormat f) = Just f + getFracFormat _ = Nothing + +intFormats :: [IntFormatType] +intFormats = enumerate + +fracFormats :: [FracFormatType] +fracFormats = enumerate + +-- | Regex to match a Haskell Hex Literal +hexRegex :: Text 
+hexRegex = "0[xX][a-fA-F0-9]+" + +-- | Regex to match a Haskell Hex Float Literal +hexFloatRegex :: Text +hexFloatRegex = "0[xX][a-fA-F0-9]+(\\.)?[a-fA-F0-9]*(p[+-]?[0-9]+)?" + +-- | Regex to match a Haskell Binary Literal +binaryRegex :: Text +binaryRegex = "0[bB][0|1]+" + +-- | Regex to match a Haskell Octal Literal +octalRegex :: Text +octalRegex = "0[oO][0-8]+" + +-- | Regex to match a Haskell Decimal Literal (no decimal points) +decimalRegex :: Text +decimalRegex = "[0-9]+(\\.[0-9]+)?" + +-- | Regex to match a Haskell Literal with an explicit exponent +numDecimalRegex :: Text +numDecimalRegex = "[0-9]+\\.[0-9]+[eE][+-]?[0-9]+" + +-- we want to be explicit in our matches +-- so we need to match the beginning/end of the source text +-- | Wraps a Regex with a beginning ("^") and end ("$") token +matchLineRegex :: Text -> Text +matchLineRegex regex = "^" <> regex <> "$" + +sourceToFormatType :: Text -> [FormatType] +sourceToFormatType srcText + | srcText =~ matchLineRegex hexRegex = [IntFormat HexFormat] + | srcText =~ matchLineRegex hexFloatRegex = [FracFormat HexFloatFormat] + | srcText =~ matchLineRegex octalRegex = [IntFormat OctalFormat] + | srcText =~ matchLineRegex binaryRegex = [IntFormat BinaryFormat] + -- can either be a NumDecimal or just a regular Fractional with an exponent + -- otherwise we wouldn't need to return a list + | srcText =~ matchLineRegex numDecimalRegex = [IntFormat NumDecimalFormat, FracFormat ExponentFormat] + -- just assume we are in base 10 with no decimals + | otherwise = [IntFormat IntDecimalFormat, FracFormat FracDecimalFormat] + +toBase :: (Num a, Ord a) => (a -> ShowS) -> String -> a -> String +toBase conv header n + | n < 0 = '-' : header <> upper (conv (abs n) "") + | otherwise = header <> upper (conv n "") + +#if MIN_VERSION_base(4,17,0) +toOctal, toBinary, toHex :: Integral a => a -> String +#else +toOctal, toBinary, toHex:: (Integral a, Show a) => a -> String +#endif + +toBinary = toBase showBin_ "0b" + where + -- this is not defined in base < 4.16 + showBin_ = showIntAtBase 2 intToDigit + +toOctal = toBase showOct "0o" + +toHex = toBase showHex "0x" + +toDecimal :: Integral a => a -> String +toDecimal = toBase showInt "" + +toFloatDecimal :: RealFloat a => a -> String +toFloatDecimal val = showFFloat Nothing val "" + +toFloatExpDecimal :: RealFloat a => a -> String +toFloatExpDecimal val = showEFloat Nothing val "" + +toHexFloat :: RealFloat a => a -> String +toHexFloat val = showHFloat val "" diff --git a/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs new file mode 100644 index 0000000000..c26227d933 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/src/Ide/Plugin/Literals.hs @@ -0,0 +1,102 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.Literals ( + collectLiterals + , Literal(..) + , getSrcText + , getSrcSpan +) where + +import Data.Maybe (maybeToList) +import Data.Text (Text) +#if __GLASGOW_HASKELL__ >= 908 +import qualified Data.Text.Encoding as T +#else +import qualified Data.Text as T +#endif +import Development.IDE.GHC.Compat hiding (getSrcSpan) +import Development.IDE.Graph.Classes (NFData (rnf)) +import Generics.SYB (Data, everything, extQ) +import qualified GHC.Generics as GHC + +-- data type to capture what type of literal we are dealing with +-- provides location and possibly source text (for OverLits) as well as it's value +-- we currently don't have any use for PrimLiterals. 
They never have source text so we always drop them +-- | Captures a Numeric Literals Location, Source Text, and Value. +data Literal = IntLiteral LiteralSrcSpan Text Integer + | FracLiteral LiteralSrcSpan Text Rational + deriving (GHC.Generic, Show, Ord, Eq, Data) + +newtype LiteralSrcSpan = LiteralSrcSpan { unLit :: RealSrcSpan } + deriving (GHC.Generic, Show, Ord, Eq, Data) + +instance NFData LiteralSrcSpan where + rnf x = x `seq` () + +instance NFData Literal + + +-- | Return a Literal's Source representation +getSrcText :: Literal -> Text +getSrcText = \case + IntLiteral _ txt _ -> txt + FracLiteral _ txt _ -> txt + +-- | Return a Literal's Real Source location +getSrcSpan :: Literal -> RealSrcSpan +getSrcSpan = \case + IntLiteral ss _ _ -> unLit ss + FracLiteral ss _ _ -> unLit ss + +-- | Find all literals in a Parsed Source File +collectLiterals :: Data ast => ast -> [Literal] +collectLiterals = everything (<>) (maybeToList . (const Nothing `extQ` getLiteral `extQ` getPattern)) + + +-- | Translate from HsLit and HsOverLit Types to our Literal Type +getLiteral :: LHsExpr GhcPs -> Maybe Literal +getLiteral (L (locA -> (RealSrcSpan sSpan _)) expr) = case expr of + HsLit _ lit -> fromLit lit sSpan + HsOverLit _ overLit -> fromOverLit overLit sSpan + _ -> Nothing +getLiteral _ = Nothing + +-- | Destructure Patterns to unwrap any Literals +getPattern :: LPat GhcPs -> Maybe Literal +getPattern (L (locA -> (RealSrcSpan patSpan _)) pat) = case pat of + LitPat _ lit -> case lit of + HsInt _ val -> fromIntegralLit patSpan val + HsRat _ val _ -> fromFractionalLit patSpan val + _ -> Nothing + NPat _ (L (locA -> (RealSrcSpan sSpan _)) overLit) _ _ -> fromOverLit overLit sSpan + NPlusKPat _ _ (L (locA -> (RealSrcSpan sSpan _)) overLit1) _ _ _ -> fromOverLit overLit1 sSpan + _ -> Nothing +getPattern _ = Nothing + +fromLit :: HsLit p -> RealSrcSpan -> Maybe Literal +fromLit lit sSpan = case lit of + HsInt _ val -> fromIntegralLit sSpan val + HsRat _ val _ -> fromFractionalLit sSpan val + _ -> Nothing + +fromOverLit :: HsOverLit p -> RealSrcSpan -> Maybe Literal +fromOverLit OverLit{..} sSpan = case ol_val of + HsIntegral il -> fromIntegralLit sSpan il + HsFractional fl -> fromFractionalLit sSpan fl + _ -> Nothing +fromOverLit _ _ = Nothing + +fromIntegralLit :: RealSrcSpan -> IntegralLit -> Maybe Literal +fromIntegralLit s IL{..} = fmap (\txt' -> IntLiteral (LiteralSrcSpan s) txt' il_value) (fromSourceText il_text) + +fromFractionalLit :: RealSrcSpan -> FractionalLit -> Maybe Literal +fromFractionalLit s fl@FL{fl_text} = fmap (\txt' -> FracLiteral (LiteralSrcSpan s) txt' (rationalFromFractionalLit fl)) (fromSourceText fl_text) + +fromSourceText :: SourceText -> Maybe Text +fromSourceText = \case +#if __GLASGOW_HASKELL__ >= 908 + SourceText s -> Just $ T.decodeUtf8 $ bytesFS s +#else + SourceText s -> Just $ T.pack s +#endif + NoSourceText -> Nothing diff --git a/plugins/hls-alternate-number-format-plugin/test/Main.hs b/plugins/hls-alternate-number-format-plugin/test/Main.hs new file mode 100644 index 0000000000..3a5f205e5a --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/Main.hs @@ -0,0 +1,148 @@ +{-# LANGUAGE ViewPatterns #-} +module Main ( main ) where + +import Data.Either (rights) +import Data.List (find) +import Data.Text (Text) +import qualified Data.Text as T +import qualified Ide.Plugin.AlternateNumberFormat as AlternateNumberFormat +import qualified Ide.Plugin.Conversion as Conversion +import Properties.Conversion (conversions) +import System.FilePath ((<.>), ()) +import 
Test.Hls +import Text.Regex.TDFA ((=~)) + +main :: IO () +main = defaultTestRunner test + +alternateNumberFormatPlugin :: PluginTestDescriptor AlternateNumberFormat.Log +alternateNumberFormatPlugin = mkPluginTestDescriptor AlternateNumberFormat.descriptor "alternateNumberFormat" + +-- NOTE: For whatever reason, this plugin does not play nice with creating Code Actions on time. +-- As a result tests will mostly pass if `import Prelude` is added at the top. We (mostly fendor) surmise this has something +-- to do with how +test :: TestTree +test = testGroup "alternateNumberFormat" [ + codeActionHex "TIntDtoH" 3 13 + , codeActionOctal "TIntDtoO" 3 13 + , codeActionBinary "TIntDtoB" 4 13 + , codeActionNumDecimal "TIntDtoND" 5 13 + , codeActionFracExp "TFracDtoE" 3 13 + , codeActionFloatHex "TFracDtoHF" 4 13 + , codeActionDecimal "TIntHtoD" 3 13 + , codeActionDecimal "TFracHFtoD" 4 13 + -- to test we don't duplicate pragmas + , codeActionFloatHex "TFracDtoHFWithPragma" 4 13 + , codeActionProperties "TFindLiteralIntPattern" [(4, 25), (5,25)] $ \actions -> do + liftIO $ length actions @?= 8 + , codeActionProperties "TFindLiteralIntCase" [(4, 29)] $ \actions -> do + liftIO $ length actions @?= 4 + , codeActionProperties "TFindLiteralIntCase2" [(5, 21)] $ \actions -> do + liftIO $ length actions @?= 4 + , codeActionProperties "TFindLiteralDoReturn" [(6, 10)] $ \actions -> do + liftIO $ length actions @?= 4 + , codeActionProperties "TFindLiteralDoLet" [(6, 13), (7, 13)] $ \actions -> do + liftIO $ length actions @?= 8 + , codeActionProperties "TFindLiteralList" [(4, 28)] $ \actions -> do + liftIO $ length actions @?= 4 + , conversions + ] + +codeActionProperties :: TestName -> [(Int, Int)] -> ([CodeAction] -> Session ()) -> TestTree +codeActionProperties fp locs assertions = testCase fp $ do + runSessionWithServer def alternateNumberFormatPlugin testDataDir $ do + openDoc (fp <.> ".hs") "haskell" >>= codeActionsFromLocs >>= findAlternateNumberActions >>= assertions + where + -- similar to codeActionTest + codeActionsFromLocs doc = concat <$> mapM (getCodeActions doc . uncurry pointRange) locs + +findAlternateNumberActions :: [Command |? CodeAction] -> Session [CodeAction] +findAlternateNumberActions = pure . filter isAlternateNumberCodeAction . rights . map toEither + where + isAlternateNumberCodeAction CodeAction{_kind} = case _kind of + Nothing -> False + Just kind -> case kind of + CodeActionKind_Custom txt -> txt == "quickfix.literals.style" + _ -> False + +-- most helpers derived from explicit-imports-plugin Main Test file + +testDataDir :: FilePath +testDataDir = "plugins" "hls-alternate-number-format-plugin" "test" "testdata" + +goldenAlternateFormat :: FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenAlternateFormat fp = goldenWithHaskellDoc def alternateNumberFormatPlugin (fp <> " (golden)") testDataDir fp "expected" "hs" + +codeActionTest :: (Maybe Text -> Bool) -> FilePath -> Int -> Int -> TestTree +codeActionTest filter' fp line col = goldenAlternateFormat fp $ \doc -> do + actions <- getCodeActions doc (pointRange line col) + -- can't generate code actions? + case find (filter' . 
codeActionTitle) actions of + Just (InR x) -> executeCodeAction x + _ -> liftIO $ assertFailure "Unable to find CodeAction" + +codeActionDecimal :: FilePath -> Int -> Int -> TestTree +codeActionDecimal = codeActionTest isDecimalCodeAction + +codeActionHex :: FilePath -> Int -> Int -> TestTree +codeActionHex = codeActionTest isHexCodeAction + +codeActionOctal :: FilePath -> Int -> Int -> TestTree +codeActionOctal = codeActionTest isOctalCodeAction + +codeActionBinary :: FilePath -> Int -> Int -> TestTree +codeActionBinary = codeActionTest isBinaryCodeAction + +codeActionNumDecimal :: FilePath -> Int -> Int -> TestTree +codeActionNumDecimal = codeActionTest isNumDecimalCodeAction + +codeActionFracExp :: FilePath -> Int -> Int -> TestTree +codeActionFracExp = codeActionTest isNumDecimalCodeAction + +codeActionFloatHex :: FilePath -> Int -> Int -> TestTree +codeActionFloatHex = codeActionTest isHexFloatCodeAction + +codeActionTitle :: (Command |? CodeAction) -> Maybe Text +codeActionTitle (InR CodeAction {_title}) = Just _title +codeActionTitle _ = Nothing + +pointRange :: Int -> Int -> Range +pointRange + (subtract 1 -> fromIntegral -> line) + (subtract 1 -> fromIntegral -> col) = + Range (Position line col) (Position line $ col + 1) + +convertPrefix, intoInfix, maybeExtension, hexRegex, hexFloatRegex, binaryRegex, octalRegex, numDecimalRegex, decimalRegex :: Text +convertPrefix = "Convert (" <> T.intercalate "|" [Conversion.hexRegex, Conversion.hexFloatRegex, Conversion.binaryRegex, Conversion.octalRegex, Conversion.numDecimalRegex, Conversion.decimalRegex] <> ")" +intoInfix = " into " +maybeExtension = "( \\(needs extension: .*)?" +hexRegex = intoInfix <> Conversion.hexRegex <> maybeExtension +hexFloatRegex = intoInfix <> Conversion.hexFloatRegex <> maybeExtension +binaryRegex = intoInfix <> Conversion.binaryRegex <> maybeExtension +octalRegex = intoInfix <> Conversion.octalRegex <> maybeExtension +numDecimalRegex = intoInfix <> Conversion.numDecimalRegex <> maybeExtension +decimalRegex = intoInfix <> Conversion.decimalRegex <> maybeExtension + +isCodeAction :: Text -> Maybe Text -> Bool +isCodeAction userRegex (Just txt) = txt =~ Conversion.matchLineRegex (convertPrefix <> userRegex) +isCodeAction _ _ = False + +isHexCodeAction :: Maybe Text -> Bool +isHexCodeAction = isCodeAction hexRegex + +isHexFloatCodeAction :: Maybe Text -> Bool +isHexFloatCodeAction = isCodeAction hexFloatRegex + +isBinaryCodeAction :: Maybe Text -> Bool +isBinaryCodeAction = isCodeAction binaryRegex + +isOctalCodeAction :: Maybe Text -> Bool +isOctalCodeAction = isCodeAction octalRegex + +-- This can match EITHER an integer as NumDecimal extension or a Fractional +-- as in 1.23e-3 (so anything with an exponent really) +isNumDecimalCodeAction :: Maybe Text -> Bool +isNumDecimalCodeAction = isCodeAction numDecimalRegex + +isDecimalCodeAction :: Maybe Text -> Bool +isDecimalCodeAction = isCodeAction decimalRegex diff --git a/plugins/hls-alternate-number-format-plugin/test/Properties/Conversion.hs b/plugins/hls-alternate-number-format-plugin/test/Properties/Conversion.hs new file mode 100644 index 0000000000..07e4617bde --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/Properties/Conversion.hs @@ -0,0 +1,42 @@ +module Properties.Conversion where + +import Ide.Plugin.Conversion +import Test.Hls (TestTree, testGroup) +import Test.Tasty.QuickCheck (testProperty) +import Text.Regex.TDFA ((=~)) + +conversions :: TestTree +conversions = testGroup "Conversions" $ + map (uncurry testProperty) + [ ("Match 
NumDecimal", prop_regexMatchesNumDecimal) + , ("Match Hex", prop_regexMatchesHex) + , ("Match Octal", prop_regexMatchesOctal) + , ("Match Binary", prop_regexMatchesBinary) + ] + <> + map (uncurry testProperty) + [ ("Match HexFloat", prop_regexMatchesHexFloat) + , ("Match FloatDecimal", prop_regexMatchesFloatDecimal) + , ("Match FloatExpDecimal", prop_regexMatchesFloatExpDecimal) + ] + +prop_regexMatchesNumDecimal :: Integer -> Bool +prop_regexMatchesNumDecimal = (=~ numDecimalRegex) . toFloatExpDecimal @Double . fromInteger + +prop_regexMatchesHex :: Integer -> Bool +prop_regexMatchesHex = (=~ hexRegex ) . toHex + +prop_regexMatchesOctal :: Integer -> Bool +prop_regexMatchesOctal = (=~ octalRegex) . toOctal + +prop_regexMatchesBinary :: Integer -> Bool +prop_regexMatchesBinary = (=~ binaryRegex) . toBinary + +prop_regexMatchesHexFloat :: Double -> Bool +prop_regexMatchesHexFloat = (=~ hexFloatRegex) . toHexFloat + +prop_regexMatchesFloatDecimal :: Double -> Bool +prop_regexMatchesFloatDecimal = (=~ decimalRegex ) . toFloatDecimal + +prop_regexMatchesFloatExpDecimal :: Double -> Bool +prop_regexMatchesFloatExpDecimal = (=~ numDecimalRegex ) . toFloatExpDecimal diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralDoLet.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralDoLet.hs new file mode 100644 index 0000000000..074a06b968 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralDoLet.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE NoBinaryLiterals #-} +module TFindLiteralDoLet where + +doLet :: IO () +doLet = do + let x = 199 + y = 144 + pure () diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralDoReturn.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralDoReturn.hs new file mode 100644 index 0000000000..1954a09348 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralDoReturn.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE NoBinaryLiterals #-} +module TFindLiteralDoReturn where + +doReturn :: IO Integer +doReturn = do + pure 54 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntCase.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntCase.hs new file mode 100644 index 0000000000..8b8d82ce85 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntCase.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE NoBinaryLiterals #-} +module TFindLiteralIntCase where + +caseExpression x = case x + 34 of + _ -> "testing if we find a literal in the case statement" diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntCase2.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntCase2.hs new file mode 100644 index 0000000000..e267ab69d0 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntCase2.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE NoBinaryLiterals #-} +module TFindLiteralIntCase where + +caseExpression x = case x of + 57 -> "testing to find literals in matching cases" + _ -> "" diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntPattern.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntPattern.hs new file mode 100644 index 0000000000..46c0ea23bf --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralIntPattern.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE NoBinaryLiterals #-} +module TFindLiteralIntPattern where + 
+patternMatchingFunction 1 = "one" +patternMatchingFunction 2 = "two" +patternMatchingFunction _ = "the rest of the numbers" diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralList.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralList.hs new file mode 100644 index 0000000000..42d5f8be96 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFindLiteralList.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE NoBinaryLiterals #-} +module TFindLiteralList where + +listTest = [reverse $ show 57] diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoE.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoE.expected.hs new file mode 100644 index 0000000000..b0b934cc0e --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoE.expected.hs @@ -0,0 +1,3 @@ +module TFracDtoE where + +convertMe = 1.2345e2 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoE.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoE.hs new file mode 100644 index 0000000000..ed82675f31 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoE.hs @@ -0,0 +1,3 @@ +module TFracDtoE where + +convertMe = 123.45 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHF.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHF.expected.hs new file mode 100644 index 0000000000..3933935095 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHF.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE HexFloatLiterals #-} +module TFracDtoHF where + +convertMe = 0x1.edccccccccccdp6 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHF.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHF.hs new file mode 100644 index 0000000000..b6c080b0bb --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHF.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE Haskell2010 #-} +module TFracDtoHF where + +convertMe = 123.45 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHFWithPragma.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHFWithPragma.expected.hs new file mode 100644 index 0000000000..e5c3e5018f --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHFWithPragma.expected.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE HexFloatLiterals #-} +module TFracDtoHFWithPragma where + +convertMe = 0x1.edccccccccccdp6 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHFWithPragma.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHFWithPragma.hs new file mode 100644 index 0000000000..43baf93ebc --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracDtoHFWithPragma.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE HexFloatLiterals #-} +module TFracDtoHFWithPragma where + +convertMe = 123.45 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracHFtoD.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracHFtoD.expected.hs new file mode 100644 index 0000000000..07def4e222 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracHFtoD.expected.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE HexFloatLiterals #-} +module TFracDtoHF where + +convertMe = 123.45 diff --git 
a/plugins/hls-alternate-number-format-plugin/test/testdata/TFracHFtoD.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracHFtoD.hs new file mode 100644 index 0000000000..3a5ce2845d --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TFracHFtoD.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE HexFloatLiterals #-} +module TFracDtoHF where + +convertMe = 0x1.edccccccccccdp6 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoB.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoB.expected.hs new file mode 100644 index 0000000000..bf40fd913f --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoB.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE BinaryLiterals #-} +module TIntDtoB where + +convertMe = 0b1100 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoB.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoB.hs new file mode 100644 index 0000000000..1e5ff08378 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoB.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE Haskell2010 #-} +module TIntDtoB where + +convertMe = 12 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoH.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoH.expected.hs new file mode 100644 index 0000000000..b974c16d98 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoH.expected.hs @@ -0,0 +1,3 @@ +module TIntDtoH where + +convertMe = 0xC diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoH.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoH.hs new file mode 100644 index 0000000000..07fef56ab7 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoH.hs @@ -0,0 +1,3 @@ +module TIntDtoH where + +convertMe = 12 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoND.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoND.expected.hs new file mode 100644 index 0000000000..7f2bfd4cca --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoND.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE NumDecimals #-} +module TIntDtoND where + +convertMe :: Integer +convertMe = 1.25345e5 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoND.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoND.hs new file mode 100644 index 0000000000..f8e9dc9481 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoND.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE Haskell2010 #-} +module TIntDtoND where + +convertMe :: Integer +convertMe = 125345 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoO.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoO.expected.hs new file mode 100644 index 0000000000..7089e85ef7 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoO.expected.hs @@ -0,0 +1,3 @@ +module TIntDtoO where + +convertMe = 0o14 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoO.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoO.hs new file mode 100644 index 0000000000..d3db0c599b --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntDtoO.hs @@ -0,0 +1,3 @@ +module TIntDtoO where + +convertMe = 12 
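The `TIntDtoND` golden pair above rewrites the integer literal `125345` into `1.25345e5` and inserts the `NumDecimals` pragma. Under that extension a literal in exponent syntax whose value is a whole number may be used at an integral type, so the rewrite is value-preserving. A quick standalone check (not part of the test suite, only an illustration of the equivalence the golden test relies on):

```haskell
{-# LANGUAGE NumDecimals #-}

-- With NumDecimals enabled, 1.25345e5 is accepted as the Integer 125345.
main :: IO ()
main = print (1.25345e5 == (125345 :: Integer))  -- prints True
```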
diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntHtoD.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntHtoD.expected.hs new file mode 100644 index 0000000000..07fef56ab7 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntHtoD.expected.hs @@ -0,0 +1,3 @@ +module TIntDtoH where + +convertMe = 12 diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/TIntHtoD.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntHtoD.hs new file mode 100644 index 0000000000..b974c16d98 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/TIntHtoD.hs @@ -0,0 +1,3 @@ +module TIntDtoH where + +convertMe = 0xC diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/hie.yaml b/plugins/hls-alternate-number-format-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..84cd94dcf1 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/hie.yaml @@ -0,0 +1,25 @@ +cradle: + direct: + arguments: + - -i + - -i. + - TIntDtoH + - TIntDtoB + - TIntDtoO + - TIntDtoND + - TFracDtoE + - TFracDtoHF + - TFracDtoHFWithPragma + - TIntHtoD + - TFracHFtoD + - TFindLiteralIntPattern + - TFindLiteralIntCase + - TFindLiteralIntCase2 + - TFindLiteralDoReturn + - TFindLiteralDoLet + - TFindLiteralList + - TExpectNoBinaryFormat + - TExpectBinaryFormat + - TExpectNoHexFloatFormat + - TExpectHexFloatFormat + - -Wall diff --git a/plugins/hls-alternate-number-format-plugin/test/testdata/test/testdata/TIntDtoND.expected.hs b/plugins/hls-alternate-number-format-plugin/test/testdata/test/testdata/TIntDtoND.expected.hs new file mode 100644 index 0000000000..b9624b6418 --- /dev/null +++ b/plugins/hls-alternate-number-format-plugin/test/testdata/test/testdata/TIntDtoND.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE NumDecimals #-} +module TIntDtoND where + +convertMe :: Integer +convertMe = 125.345e3 diff --git a/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs b/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs new file mode 100644 index 0000000000..8c49f379d7 --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/src/Ide/Plugin/CabalFmt.hs @@ -0,0 +1,93 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.CabalFmt where + +import Control.Lens +import Control.Monad.Except (throwError) +import Control.Monad.IO.Class +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Ide.Plugin.Error (PluginError (PluginInternalError, PluginInvalidParams)) +import Ide.Plugin.Properties +import Ide.PluginUtils +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types +import Prelude hiding (log) +import System.Directory +import System.Exit +import System.FilePath +import System.Process.ListLike +import qualified System.Process.Text as Process + +data Log + = LogProcessInvocationFailure Int + | LogReadCreateProcessInfo T.Text [String] + | LogInvalidInvocationInfo + | LogFormatterBinNotFound FilePath + deriving (Show) + +instance Pretty Log where + pretty = \case + LogProcessInvocationFailure exitCode -> "Invocation of cabal-fmt failed with code" <+> pretty exitCode + LogReadCreateProcessInfo stdErrorOut args -> + vcat $ + ["Invocation of cabal-fmt with arguments" <+> pretty args] + ++ ["failed with standard error:" <+> pretty stdErrorOut | not 
(T.null stdErrorOut)] + LogInvalidInvocationInfo -> "Invocation of cabal-fmt with range was called but is not supported." + LogFormatterBinNotFound fp -> "Couldn't find formatter executable 'cabal-fmt' at:" <+> pretty fp + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultCabalPluginDescriptor plId "Provides formatting of cabal files with cabal-fmt") + { pluginHandlers = mkFormattingHandlers (provider recorder plId) + , pluginConfigDescriptor = defaultConfigDescriptor{configCustomConfig = mkCustomConfig properties} + } + +properties :: Properties '[ 'PropertyKey "path" 'TString] +properties = + emptyProperties + & defineStringProperty + #path + "Set path to 'cabal-fmt' executable" + "cabal-fmt" + +-- | Formatter provider of cabal fmt. +-- Formats the given source in either a given Range or the whole Document. +-- If the provider fails an error is returned that can be displayed to the user. +provider :: Recorder (WithPriority Log) -> PluginId -> FormattingHandler IdeState +provider recorder _ _ _ (FormatRange _) _ _ _ = do + logWith recorder Info LogInvalidInvocationInfo + throwError $ PluginInvalidParams "You cannot format a text-range using cabal-fmt." +provider recorder plId ideState _ FormatText contents nfp opts = do + let cabalFmtArgs = [ "--indent", show tabularSize] + cabalFmtExePath <- fmap T.unpack $ liftIO $ runAction "cabal-fmt" ideState $ usePropertyAction #path plId properties + x <- liftIO $ findExecutable cabalFmtExePath + case x of + Just _ -> do + (exitCode, out, err) <- + liftIO $ Process.readCreateProcessWithExitCode + ( proc cabalFmtExePath cabalFmtArgs + ) + { cwd = Just $ takeDirectory fp + } + contents + log Debug $ LogReadCreateProcessInfo err cabalFmtArgs + case exitCode of + ExitFailure code -> do + log Error $ LogProcessInvocationFailure code + throwError (PluginInternalError "Failed to invoke cabal-fmt") + ExitSuccess -> do + let fmtDiff = makeDiffTextEdit contents out + pure $ InL fmtDiff + Nothing -> do + log Error $ LogFormatterBinNotFound cabalFmtExePath + throwError (PluginInternalError "No installation of cabal-fmt could be found. Please install it globally, or provide the full path to the executable") + where + fp = fromNormalizedFilePath nfp + tabularSize = opts ^. 
L.tabSize + log = logWith recorder diff --git a/plugins/hls-cabal-fmt-plugin/test/Main.hs b/plugins/hls-cabal-fmt-plugin/test/Main.hs new file mode 100644 index 0000000000..0e458b2163 --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/Main.hs @@ -0,0 +1,76 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import Ide.Logger +import qualified Ide.Plugin.Cabal as Cabal +import qualified Ide.Plugin.CabalFmt as CabalFmt +import System.Directory (findExecutable) +import System.FilePath +import Test.Hls + +data TestLog + = LogCabalFmt CabalFmt.Log + | LogCabal Cabal.Log + +instance Pretty TestLog where + pretty = \case + LogCabalFmt msg -> pretty msg + LogCabal msg -> pretty msg + +data CabalFmtFound = Found | NotFound + +isTestIsolated :: Bool +#if hls_isolate_cabalfmt_tests +isTestIsolated = True +#else +isTestIsolated = False +#endif + +isCabalFmtFound :: IO CabalFmtFound +isCabalFmtFound = case isTestIsolated of + True -> pure Found + False -> do + cabalFmt <- findExecutable "cabal-fmt" + pure $ maybe NotFound (const Found) cabalFmt + +main :: IO () +main = do + foundCabalFmt <- isCabalFmtFound + defaultTestRunner (tests foundCabalFmt) + +cabalFmtPlugin :: PluginTestDescriptor TestLog +cabalFmtPlugin = mconcat + [ mkPluginTestDescriptor (CabalFmt.descriptor . cmapWithPrio LogCabalFmt) "cabal-fmt" + , mkPluginTestDescriptor (Cabal.descriptor . cmapWithPrio LogCabal) "cabal" + ] + +tests :: CabalFmtFound -> TestTree +tests found = testGroup "cabal-fmt" + [ knownBrokenOnWindows "Eats newlines between comments" $ + cabalFmtGolden found "formats a simple document" "simple_testdata" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + + -- TODO: cabal-fmt can't expand modules if .cabal file is read from stdin. Tracking + -- issue: https://github.com/phadej/cabal-fmt/pull/82 + , cabalFmtGolden found "formats a document with expand:src comment" "commented_testdata" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + + , cabalFmtGolden found "formats a document with lib information" "lib_testdata" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 10 True Nothing Nothing Nothing) + ] + +cabalFmtGolden :: CabalFmtFound -> TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +cabalFmtGolden NotFound title _ _ _ = + testCase title $ + assertFailure $ "Couldn't find cabal-fmt on PATH or this is not an isolated run. " + <> "Use cabal flag 'isolateCabalFmtTests' to make it isolated or install cabal-fmt locally." 
+cabalFmtGolden Found title path desc act = goldenWithCabalDocFormatter def cabalFmtPlugin "cabal-fmt" conf title testDataDir path desc "cabal" act + where + conf = def + +testDataDir :: FilePath +testDataDir = "plugins" "hls-cabal-fmt-plugin" "test" "testdata" diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.cabal new file mode 100644 index 0000000000..ae7bcf6590 --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.cabal @@ -0,0 +1,12 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Banana +extra-source-files: CHANGELOG.md + +library + -- cabal-fmt: expand src + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal new file mode 100644 index 0000000000..933669a483 --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/commented_testdata.formatted_document.cabal @@ -0,0 +1,12 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Banana +extra-source-files: CHANGELOG.md + +library + -- cabal-fmt: expand src + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/hie.yaml b/plugins/hls-cabal-fmt-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/lib_testdata.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/lib_testdata.cabal new file mode 100644 index 0000000000..0f07af1d70 --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/lib_testdata.cabal @@ -0,0 +1,19 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Gregg +extra-source-files: CHANGELOG.md + +library + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 + +executable testdata + main-is: Main.hs + build-depends: + base ^>=4.14.1.0,testdata + hs-source-dirs: app + default-language: + Haskell2010 diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/lib_testdata.formatted_document.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/lib_testdata.formatted_document.cabal new file mode 100644 index 0000000000..4df43f1b8f --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/lib_testdata.formatted_document.cabal @@ -0,0 +1,20 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Gregg +extra-source-files: CHANGELOG.md + +library + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 + +executable testdata + main-is: Main.hs + build-depends: + , base ^>=4.14.1.0 + , testdata + + hs-source-dirs: app + default-language: Haskell2010 diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/simple_testdata.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/simple_testdata.cabal new file mode 100644 index 0000000000..0421a27ddb --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/simple_testdata.cabal @@ -0,0 +1,36 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 + +-- A short (one-line) description of the package. 
+-- synopsis: + +-- A longer description of the package. +-- description: + +-- A URL where users can report bugs. +-- bug-reports: + +-- The license under which the package is released. +-- license: +author: Milky + +-- An email address to which users can send suggestions, bug reports, and patches. +-- maintainer: + +-- A copyright notice. +-- copyright: +-- category: +extra-source-files: CHANGELOG.md + +executable testdata + main-is: Main.hs + + -- Modules included in this executable, other than Main. + -- other-modules: + + -- LANGUAGE extensions used by modules in this package. + -- other-extensions: + build-depends: base ^>=4.14.1.0 + hs-source-dirs: app + default-language: Haskell2010 diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/simple_testdata.formatted_document.cabal b/plugins/hls-cabal-fmt-plugin/test/testdata/simple_testdata.formatted_document.cabal new file mode 100644 index 0000000000..993cef832d --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/simple_testdata.formatted_document.cabal @@ -0,0 +1,36 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 + +-- A short (one-line) description of the package. +-- synopsis: + +-- A longer description of the package. +-- description: + +-- A URL where users can report bugs. +-- bug-reports: + +-- The license under which the package is released. +-- license: +author: Milky + +-- An email address to which users can send suggestions, bug reports, and patches. +-- maintainer: + +-- A copyright notice. +-- copyright: +-- category: +extra-source-files: CHANGELOG.md + +executable testdata + main-is: Main.hs + + -- Modules included in this executable, other than Main. + -- other-modules: + + -- LANGUAGE extensions used by modules in this package. + -- other-extensions: + build-depends: base ^>=4.14.1.0 + hs-source-dirs: app + default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/mono-repo/A/MyLib.hs b/plugins/hls-cabal-fmt-plugin/test/testdata/src/MyLib.hs similarity index 100% rename from test/testdata/cabal-helper/mono-repo/A/MyLib.hs rename to plugins/hls-cabal-fmt-plugin/test/testdata/src/MyLib.hs diff --git a/plugins/hls-cabal-fmt-plugin/test/testdata/src/MyOtherLib.hs b/plugins/hls-cabal-fmt-plugin/test/testdata/src/MyOtherLib.hs new file mode 100644 index 0000000000..15450b43b3 --- /dev/null +++ b/plugins/hls-cabal-fmt-plugin/test/testdata/src/MyOtherLib.hs @@ -0,0 +1,3 @@ +module MyOtherLib where + +bar = 2 diff --git a/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs b/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs new file mode 100644 index 0000000000..1d698d637b --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/src/Ide/Plugin/CabalGild.hs @@ -0,0 +1,93 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.CabalGild where + +import Control.Monad.Except (throwError) +import Control.Monad.IO.Class +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Ide.Plugin.Error (PluginError (PluginInternalError, PluginInvalidParams)) +import Ide.Plugin.Properties +import Ide.PluginUtils +import Ide.Types +import Language.LSP.Protocol.Types +import Prelude hiding (log) +import System.Directory +import System.Exit +import System.FilePath +import System.Process.ListLike +import qualified System.Process.Text as Process + +data Log + = LogProcessInvocationFailure Int T.Text + | 
LogReadCreateProcessInfo [String] + | LogInvalidInvocationInfo + | LogFormatterBinNotFound FilePath + deriving (Show) + +instance Pretty Log where + pretty = \case + LogProcessInvocationFailure exitCode err -> + vcat + [ "Invocation of cabal-gild failed with code" <+> pretty exitCode + , "Stderr:" <+> pretty err + ] + LogReadCreateProcessInfo args -> + "Formatter invocation: cabal-gild " <+> pretty args + LogInvalidInvocationInfo -> "Invocation of cabal-gild with range was called but is not supported." + LogFormatterBinNotFound fp -> "Couldn't find formatter executable 'cabal-gild' at:" <+> pretty fp + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultCabalPluginDescriptor plId "Provides formatting of cabal files with cabal-gild") + { pluginHandlers = mkFormattingHandlers (provider recorder plId) + , pluginConfigDescriptor = defaultConfigDescriptor{configCustomConfig = mkCustomConfig properties} + } + +properties :: Properties '[ 'PropertyKey "path" 'TString] +properties = + emptyProperties + & defineStringProperty + #path + "Set path to 'cabal-gild' executable" + "cabal-gild" + +-- | Formatter provider of cabal gild. +-- Formats the given source in either a given Range or the whole Document. +-- If the provider fails an error is returned that can be displayed to the user. +provider :: Recorder (WithPriority Log) -> PluginId -> FormattingHandler IdeState +provider recorder _ _ _ (FormatRange _) _ _ _ = do + logWith recorder Info LogInvalidInvocationInfo + throwError $ PluginInvalidParams "You cannot format a text-range using cabal-gild." +provider recorder plId ideState _ FormatText contents nfp _ = do + let cabalGildArgs = ["--stdin=" <> fp, "--input=-"] -- < Read from stdin + + cabalGildExePath <- fmap T.unpack $ liftIO $ runAction "cabal-gild" ideState $ usePropertyAction #path plId properties + x <- liftIO $ findExecutable cabalGildExePath + case x of + Just _ -> do + log Debug $ LogReadCreateProcessInfo cabalGildArgs + (exitCode, out, err) <- + liftIO $ Process.readCreateProcessWithExitCode + ( proc cabalGildExePath cabalGildArgs + ) + { cwd = Just $ takeDirectory fp + } + contents + case exitCode of + ExitFailure code -> do + log Error $ LogProcessInvocationFailure code err + throwError (PluginInternalError "Failed to invoke cabal-gild") + ExitSuccess -> do + let fmtDiff = makeDiffTextEdit contents out + pure $ InL fmtDiff + Nothing -> do + log Error $ LogFormatterBinNotFound cabalGildExePath + throwError (PluginInternalError "No installation of cabal-gild could be found. 
Please install it globally, or provide the full path to the executable.") + where + fp = fromNormalizedFilePath nfp + log = logWith recorder diff --git a/plugins/hls-cabal-gild-plugin/test/Main.hs b/plugins/hls-cabal-gild-plugin/test/Main.hs new file mode 100644 index 0000000000..5aa5ba9fba --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/Main.hs @@ -0,0 +1,73 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import Ide.Logger +import qualified Ide.Plugin.Cabal as Cabal +import qualified Ide.Plugin.CabalGild as CabalGild +import System.Directory (findExecutable) +import System.FilePath +import Test.Hls + +data TestLog + = LogCabalGild CabalGild.Log + | LogCabal Cabal.Log + +instance Pretty TestLog where + pretty = \case + LogCabalGild msg -> pretty msg + LogCabal msg -> pretty msg + +data CabalGildFound = Found | NotFound + +isTestIsolated :: Bool +#if hls_isolate_cabalgild_tests +isTestIsolated = True +#else +isTestIsolated = False +#endif + +isCabalFmtFound :: IO CabalGildFound +isCabalFmtFound = case isTestIsolated of + True -> pure Found + False -> do + cabalGild <- findExecutable "cabal-gild" + pure $ maybe NotFound (const Found) cabalGild + +main :: IO () +main = do + foundCabalFmt <- isCabalFmtFound + defaultTestRunner (tests foundCabalFmt) + +cabalGildPlugin :: PluginTestDescriptor TestLog +cabalGildPlugin = mconcat + [ mkPluginTestDescriptor (CabalGild.descriptor . cmapWithPrio LogCabalGild) "cabal-gild" + , mkPluginTestDescriptor (Cabal.descriptor . cmapWithPrio LogCabal) "cabal" + ] + +tests :: CabalGildFound -> TestTree +tests found = testGroup "cabal-gild" + [ cabalGildGolden found "formats a simple document" "simple_testdata" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + + , cabalGildGolden found "formats a document with expand:src comment" "commented_testdata" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + + , cabalGildGolden found "formats a document with lib information" "lib_testdata" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 10 True Nothing Nothing Nothing) + ] + +cabalGildGolden :: CabalGildFound -> TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +cabalGildGolden NotFound title _ _ _ = + testCase title $ + assertFailure $ "Couldn't find cabal-gild on PATH or this is not an isolated run. " + <> "Use cabal flag 'isolateCabalGildTests' to make it isolated or install cabal-gild locally." 
+cabalGildGolden Found title path desc act = goldenWithCabalDocFormatter def cabalGildPlugin "cabal-gild" conf title testDataDir path desc "cabal" act + where + conf = def + +testDataDir :: FilePath +testDataDir = "plugins" "hls-cabal-gild-plugin" "test" "testdata" diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/commented_testdata.cabal b/plugins/hls-cabal-gild-plugin/test/testdata/commented_testdata.cabal new file mode 100644 index 0000000000..ed2f1d701e --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/commented_testdata.cabal @@ -0,0 +1,12 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Banana +extra-source-files: CHANGELOG.md + +library + -- cabal-gild: discover src + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/commented_testdata.formatted_document.cabal b/plugins/hls-cabal-gild-plugin/test/testdata/commented_testdata.formatted_document.cabal new file mode 100644 index 0000000000..3c88b4a823 --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/commented_testdata.formatted_document.cabal @@ -0,0 +1,15 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Banana +extra-source-files: CHANGELOG.md + +library + -- cabal-gild: discover src + exposed-modules: + MyLib + MyOtherLib + + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/hie.yaml b/plugins/hls-cabal-gild-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/lib_testdata.cabal b/plugins/hls-cabal-gild-plugin/test/testdata/lib_testdata.cabal new file mode 100644 index 0000000000..0f07af1d70 --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/lib_testdata.cabal @@ -0,0 +1,19 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Gregg +extra-source-files: CHANGELOG.md + +library + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 + +executable testdata + main-is: Main.hs + build-depends: + base ^>=4.14.1.0,testdata + hs-source-dirs: app + default-language: + Haskell2010 diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/lib_testdata.formatted_document.cabal b/plugins/hls-cabal-gild-plugin/test/testdata/lib_testdata.formatted_document.cabal new file mode 100644 index 0000000000..a29e590238 --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/lib_testdata.formatted_document.cabal @@ -0,0 +1,21 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +author: Gregg +extra-source-files: CHANGELOG.md + +library + exposed-modules: MyLib + build-depends: base ^>=4.14.1.0 + hs-source-dirs: src + default-language: Haskell2010 + +executable testdata + main-is: Main.hs + build-depends: + base ^>=4.14.1.0, + testdata, + + hs-source-dirs: app + default-language: + Haskell2010 diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/simple_testdata.cabal b/plugins/hls-cabal-gild-plugin/test/testdata/simple_testdata.cabal new file mode 100644 index 0000000000..0421a27ddb --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/simple_testdata.cabal @@ -0,0 +1,36 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 + +-- A short (one-line) description of the 
package. +-- synopsis: + +-- A longer description of the package. +-- description: + +-- A URL where users can report bugs. +-- bug-reports: + +-- The license under which the package is released. +-- license: +author: Milky + +-- An email address to which users can send suggestions, bug reports, and patches. +-- maintainer: + +-- A copyright notice. +-- copyright: +-- category: +extra-source-files: CHANGELOG.md + +executable testdata + main-is: Main.hs + + -- Modules included in this executable, other than Main. + -- other-modules: + + -- LANGUAGE extensions used by modules in this package. + -- other-extensions: + build-depends: base ^>=4.14.1.0 + hs-source-dirs: app + default-language: Haskell2010 diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/simple_testdata.formatted_document.cabal b/plugins/hls-cabal-gild-plugin/test/testdata/simple_testdata.formatted_document.cabal new file mode 100644 index 0000000000..f79cba396e --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/simple_testdata.formatted_document.cabal @@ -0,0 +1,28 @@ +cabal-version: 2.4 +name: testdata +version: 0.1.0.0 +-- A short (one-line) description of the package. +-- synopsis: +-- A longer description of the package. +-- description: +-- A URL where users can report bugs. +-- bug-reports: +-- The license under which the package is released. +-- license: +author: Milky +-- An email address to which users can send suggestions, bug reports, and patches. +-- maintainer: +-- A copyright notice. +-- copyright: +-- category: +extra-source-files: CHANGELOG.md + +executable testdata + main-is: Main.hs + -- Modules included in this executable, other than Main. + -- other-modules: + -- LANGUAGE extensions used by modules in this package. + -- other-extensions: + build-depends: base ^>=4.14.1.0 + hs-source-dirs: app + default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/mono-repo/B/MyLib.hs b/plugins/hls-cabal-gild-plugin/test/testdata/src/MyLib.hs similarity index 100% rename from test/testdata/cabal-helper/mono-repo/B/MyLib.hs rename to plugins/hls-cabal-gild-plugin/test/testdata/src/MyLib.hs diff --git a/plugins/hls-cabal-gild-plugin/test/testdata/src/MyOtherLib.hs b/plugins/hls-cabal-gild-plugin/test/testdata/src/MyOtherLib.hs new file mode 100644 index 0000000000..15450b43b3 --- /dev/null +++ b/plugins/hls-cabal-gild-plugin/test/testdata/src/MyOtherLib.hs @@ -0,0 +1,3 @@ +module MyOtherLib where + +bar = 2 diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs new file mode 100644 index 0000000000..7a2c53ee25 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs @@ -0,0 +1,399 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal (descriptor, haskellInteractionDescriptor, Log (..)) where + +import Control.Lens ((^.)) +import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Maybe (runMaybeT) +import Data.HashMap.Strict (HashMap) +import qualified Data.List as List +import qualified Data.Maybe as Maybe +import qualified Data.Text () +import qualified Data.Text as T +import Development.IDE as D +import Development.IDE.Core.FileStore (getVersionedTextDoc) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.Shake (restartShakeSession) +import Development.IDE.Graph 
(Key) +import Development.IDE.LSP.HoverDefinition (foundHover) +import qualified Development.IDE.Plugin.Completions.Logic as Ghcide +import Development.IDE.Types.Shake (toKey) +import qualified Distribution.Fields as Syntax +import Distribution.Package (Dependency) +import Distribution.PackageDescription (allBuildDepends, + depPkgName, + unPackageName) +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Parsec.Position as Syntax +import qualified Ide.Plugin.Cabal.CabalAdd.CodeAction as CabalAdd +import qualified Ide.Plugin.Cabal.CabalAdd.Command as CabalAdd +import Ide.Plugin.Cabal.Completion.CabalFields as CabalFields +import qualified Ide.Plugin.Cabal.Completion.Completer.Types as CompleterTypes +import qualified Ide.Plugin.Cabal.Completion.Completions as Completions +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Definition (gotoDefinition) +import qualified Ide.Plugin.Cabal.FieldSuggest as FieldSuggest +import qualified Ide.Plugin.Cabal.Files as CabalAdd +import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest +import qualified Ide.Plugin.Cabal.OfInterest as OfInterest +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Cabal.Outline +import qualified Ide.Plugin.Cabal.Rules as Rules +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as JL +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types +import qualified Language.LSP.VFS as VFS +import Text.Regex.TDFA + +data Log + = LogModificationTime NormalizedFilePath FileVersion + | LogRule Rules.Log + | LogOfInterest OfInterest.Log + | LogDocOpened Uri + | LogDocModified Uri + | LogDocSaved Uri + | LogDocClosed Uri + | LogFOI (HashMap NormalizedFilePath FileOfInterestStatus) + | LogCompletionContext Types.Context Position + | LogCompletions Types.Log + | LogCabalAdd CabalAdd.Log + deriving (Show) + +instance Pretty Log where + pretty = \case + LogRule log' -> pretty log' + LogOfInterest log' -> pretty log' + LogModificationTime nfp modTime -> + "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime) + LogDocOpened uri -> + "Opened text document:" <+> pretty (getUri uri) + LogDocModified uri -> + "Modified text document:" <+> pretty (getUri uri) + LogDocSaved uri -> + "Saved text document:" <+> pretty (getUri uri) + LogDocClosed uri -> + "Closed text document:" <+> pretty (getUri uri) + LogFOI files -> + "Set files of interest to:" <+> viaShow files + LogCompletionContext context position -> + "Determined completion context:" + <+> pretty context + <+> "for cursor position:" + <+> pretty position + LogCompletions logs -> pretty logs + LogCabalAdd logs -> pretty logs + +{- | Some actions in cabal files can be triggered from haskell files. +This descriptor allows us to hook into the diagnostics of haskell source files and +allows us to provide code actions and commands that interact with `.cabal` files. 
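+
+For example, a GHC "hidden package" diagnostic in a Haskell file can offer an
+"Add dependency" code action, and an unknown-module diagnostic can offer to add the
+module to a stanza of the corresponding `.cabal` file (see the code action handlers
+and cabal-add commands registered below).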
+-} +haskellInteractionDescriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +haskellInteractionDescriptor recorder plId = + (defaultPluginDescriptor plId "Provides the cabal-add code action in haskell files") + { pluginHandlers = + mconcat + [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ cabalAddDependencyCodeAction recorder + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ cabalAddModuleCodeAction recorder + ] + , pluginCommands = + [ PluginCommand CabalAdd.cabalAddDependencyCommandId "add a dependency to a cabal file" (CabalAdd.addDependencyCommand cabalAddRecorder) + , PluginCommand CabalAdd.cabalAddModuleCommandId "add a module to a cabal file" (CabalAdd.addModuleCommand cabalAddRecorder) + ] + } + where + cabalAddRecorder = cmapWithPrio LogCabalAdd recorder + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultCabalPluginDescriptor plId "Provides a variety of IDE features in cabal files") + { pluginRules = Rules.cabalRules ruleRecorder plId + , pluginHandlers = + mconcat + [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction licenseSuggestCodeAction + , mkPluginHandler LSP.SMethod_TextDocumentCompletion $ completion recorder + , mkPluginHandler LSP.SMethod_TextDocumentDocumentSymbol moduleOutline + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ fieldSuggestCodeAction recorder + , mkPluginHandler LSP.SMethod_TextDocumentDefinition gotoDefinition + , mkPluginHandler LSP.SMethod_TextDocumentHover hover + ] + , pluginNotificationHandlers = + mconcat + [ mkPluginNotificationHandler LSP.SMethod_TextDocumentDidOpen $ + \ide vfs _ (DidOpenTextDocumentParams TextDocumentItem{_uri, _version}) -> liftIO $ do + whenUriFile _uri $ \file -> do + log' Debug $ LogDocOpened _uri + restartCabalShakeSession (shakeExtras ide) vfs file "(opened)" $ + OfInterest.addFileOfInterest ofInterestRecorder ide file Modified{firstOpen = True} + , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidChange $ + \ide vfs _ (DidChangeTextDocumentParams VersionedTextDocumentIdentifier{_uri} _) -> liftIO $ do + whenUriFile _uri $ \file -> do + log' Debug $ LogDocModified _uri + restartCabalShakeSession (shakeExtras ide) vfs file "(changed)" $ + OfInterest.addFileOfInterest ofInterestRecorder ide file Modified{firstOpen = False} + , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidSave $ + \ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do + whenUriFile _uri $ \file -> do + log' Debug $ LogDocSaved _uri + restartCabalShakeSessionPhysical (shakeExtras ide) vfs file "(saved)" $ + OfInterest.addFileOfInterest ofInterestRecorder ide file OnDisk + , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidClose $ + \ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do + whenUriFile _uri $ \file -> do + log' Debug $ LogDocClosed _uri + restartCabalShakeSession (shakeExtras ide) vfs file "(closed)" $ + OfInterest.deleteFileOfInterest ofInterestRecorder ide file + ] + , pluginConfigDescriptor = + defaultConfigDescriptor + { configHasDiagnostics = True + } + } + where + log' = logWith recorder + ruleRecorder = cmapWithPrio LogRule recorder + ofInterestRecorder = cmapWithPrio LogOfInterest recorder + + whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO () + whenUriFile uri act = whenJust (uriToFilePath uri) $ act . toNormalizedFilePath' + +{- | Helper function to restart the shake session, specifically for modifying .cabal files. 
+No special logic, just group up a bunch of functions you need for the base +Notification Handlers. + +To make sure diagnostics are up to date, we need to tell shake that the file was touched and +needs to be re-parsed. That's what we do when we record the dirty key that our parsing +rule depends on. +Then we restart the shake session, so that changes to our virtual files are actually picked up. +-} +restartCabalShakeSession :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> String -> IO [Key] -> IO () +restartCabalShakeSession shakeExtras vfs file actionMsg actionBetweenSession = do + restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) [] $ do + keys <- actionBetweenSession + return (toKey GetModificationTime file:keys) + +-- | Just like 'restartCabalShakeSession', but records that the 'file' has been changed on disk. +-- So, any action that can only work with on-disk modifications may depend on the 'GetPhysicalModificationTime' +-- rule to get re-run if the file changes on disk. +restartCabalShakeSessionPhysical :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> String -> IO [Key] -> IO () +restartCabalShakeSessionPhysical shakeExtras vfs file actionMsg actionBetweenSession = do + restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) [] $ do + keys <- actionBetweenSession + return (toKey GetModificationTime file:toKey GetPhysicalModificationTime file:keys) + +-- ---------------------------------------------------------------- +-- Code Actions +-- ---------------------------------------------------------------- + +licenseSuggestCodeAction :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +licenseSuggestCodeAction ideState _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics = diags}) = do + maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal-plugin.suggestLicense" ideState getClientConfigAction + pure $ InL $ diags >>= (fmap InR . LicenseSuggest.licenseErrorAction maxCompls uri) + +{- | CodeActions for correcting field names with typos in them. + +Provides CodeActions that fix typos in both stanzas and top-level field names. +The suggestions are computed based on the completion context, where we "move" a fake cursor +to the end of the field name and trigger cabal file completions. The completions are then +suggested to the user. + +TODO: Relying on completions here often does not produce the desired results, we should +use some sort of fuzzy matching in the future, see issue #4357. +-} +fieldSuggestCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = do + mContents <- liftIO $ runAction "cabal-plugin.getUriContents" ide $ getUriContents $ toNormalizedUri uri + case (,) <$> mContents <*> uriToFilePath' uri of + Nothing -> pure $ InL [] + Just (fileContents, path) -> do + -- We decide on `useWithStale` here, since `useWithStaleFast` often leads to the wrong completions being suggested. + -- In case it fails, we still will get some completion results instead of an error. 
+ mFields <- liftIO $ runAction "cabal-plugin.fields" ide $ useWithStale ParseCabalFields $ toNormalizedFilePath path + case mFields of + Nothing -> + pure $ InL [] + Just (cabalFields, _) -> do + let fields = Maybe.mapMaybe FieldSuggest.fieldErrorName diags + results <- forM fields (getSuggestion fileContents path cabalFields) + pure $ InL $ map InR $ concat results + where + getSuggestion fileContents fp cabalFields (fieldName, Diagnostic{_range = _range@(Range (Position lineNr col) _)}) = do + let + -- Compute where we would anticipate the cursor to be. + fakeLspCursorPosition = Position lineNr (col + fromIntegral (T.length fieldName)) + lspPrefixInfo = Ghcide.getCompletionPrefixFromRope fakeLspCursorPosition fileContents + cabalPrefixInfo = Completions.getCabalPrefixInfo fp lspPrefixInfo + completions <- liftIO $ computeCompletionsAt recorder ide cabalPrefixInfo fp cabalFields + let completionTexts = fmap (^. JL.label) completions + pure $ FieldSuggest.fieldErrorAction uri fieldName completionTexts _range + +cabalAddDependencyCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddDependencyCodeAction _ state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = do + let suggestions = concatMap CabalAdd.hiddenPackageSuggestion diags + case suggestions of + [] -> pure $ InL [] + _ -> do + haskellFilePath <- uriToFilePathE uri + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of + Nothing -> pure $ InL [] + Just cabalFilePath -> do + verTxtDocId <- + runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ + getVersionedTextDoc $ + TextDocumentIdentifier (filePathToUri cabalFilePath) + mbGPD <- liftIO $ runAction "cabal.cabal-add" state $ useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + case mbGPD of + Nothing -> pure $ InL [] + Just (gpd, _) -> do + actions <- + liftIO $ + CabalAdd.addDependencySuggestCodeAction + plId + verTxtDocId + suggestions + haskellFilePath + cabalFilePath + gpd + pure $ InL $ fmap InR actions + +cabalAddModuleCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddModuleCodeAction recorder state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = + case List.find CabalAdd.isUnknownModuleDiagnostic diags of + Just diag -> + do + haskellFilePath <- uriToFilePathE uri + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of + Nothing -> pure $ InL [] + Just cabalFilePath -> do + verTextDocId <- + runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ + getVersionedTextDoc $ + TextDocumentIdentifier (filePathToUri cabalFilePath) + (gpd, _) <- runActionE "cabal.cabal-add" state $ useWithStaleE ParseCabalFile $ toNormalizedFilePath cabalFilePath + actions <- + CabalAdd.collectModuleInsertionOptions + (cmapWithPrio LogCabalAdd recorder) + plId + verTextDocId + diag + cabalFilePath + gpd + uri + pure $ InL $ fmap InR actions + Nothing -> pure $ InL [] + +{- | Handler for hover messages. + +If the cursor is hovering on a dependency, add a documentation link to that dependency. 
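+
+For example, hovering over @text@ in a @build-depends@ field is expected to produce a
+hover containing a "[Documentation](https://hackage.haskell.org/package/text)" link
+(the package name here is only illustrative; see 'documentationText' below).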
+-} +hover :: PluginMethodHandler IdeState LSP.Method_TextDocumentHover +hover ide _ msgParam = do + nfp <- getNormalizedFilePathE uri + cabalFields <- runActionE "cabal.cabal-hover" ide $ useE ParseCabalFields nfp + case CabalFields.findTextWord cursor cabalFields of + Nothing -> + pure $ InR Null + Just cursorText -> do + gpd <- runActionE "cabal.GPD" ide $ useE ParseCabalFile nfp + let depsNames = map dependencyName $ allBuildDepends $ flattenPackageDescription gpd + case filterVersion cursorText of + Nothing -> pure $ InR Null + Just txt -> + if txt `elem` depsNames + then pure $ foundHover (Nothing, [txt <> "\n", documentationText txt]) + else pure $ InR Null + where + cursor = Types.lspPositionToCabalPosition (msgParam ^. JL.position) + uri = msgParam ^. JL.textDocument . JL.uri + + dependencyName :: Dependency -> T.Text + dependencyName dep = T.pack $ unPackageName $ depPkgName dep + + -- \| Removes version requirements like + -- `==1.0.0.0`, `>= 2.1.1` that could be included in + -- hover message. Assumes that the dependency consists + -- of alphanums with dashes in between. Ends with an alphanum. + -- + -- Examples: + -- >>> filterVersion "imp-deps>=2.1.1" + -- "imp-deps" + filterVersion :: T.Text -> Maybe T.Text + filterVersion msg = getMatch (msg =~ regex) + where + regex :: T.Text + regex = "([a-zA-Z0-9-]*[a-zA-Z0-9])" + + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> Maybe T.Text + getMatch (_, _, _, [dependency]) = Just dependency + getMatch (_, _, _, _) = Nothing -- impossible case + + documentationText :: T.Text -> T.Text + documentationText package = "[Documentation](https://hackage.haskell.org/package/" <> package <> ")" + +-- ---------------------------------------------------------------- +-- Completion +-- ---------------------------------------------------------------- + +completion :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCompletion +completion recorder ide _ complParams = do + let TextDocumentIdentifier uri = complParams ^. JL.textDocument + position = complParams ^. JL.position + mContents <- liftIO $ runAction "cabal-plugin.getUriContents" ide $ getUriContents $ toNormalizedUri uri + case (,) <$> mContents <*> uriToFilePath' uri of + Just (cnts, path) -> do + -- We decide on `useWithStale` here, since `useWithStaleFast` often leads to the wrong completions being suggested. + -- In case it fails, we still will get some completion results instead of an error. + mFields <- liftIO $ runAction "cabal-plugin.fields" ide $ useWithStale ParseCabalFields $ toNormalizedFilePath path + case mFields of + Nothing -> + pure . InR $ InR Null + Just (fields, _) -> do + let lspPrefInfo = Ghcide.getCompletionPrefixFromRope position cnts + cabalPrefInfo = Completions.getCabalPrefixInfo path lspPrefInfo + let res = computeCompletionsAt recorder ide cabalPrefInfo path fields + liftIO $ fmap InL res + Nothing -> pure . 
InR $ InR Null + +computeCompletionsAt :: Recorder (WithPriority Log) -> IdeState -> Types.CabalPrefixInfo -> FilePath -> [Syntax.Field Syntax.Position] -> IO [CompletionItem] +computeCompletionsAt recorder ide prefInfo fp fields = do + runMaybeT (context fields) >>= \case + Nothing -> pure [] + Just ctx -> do + logWith recorder Debug $ LogCompletionContext ctx pos + let completer = Completions.contextToCompleter ctx + let completerData = + CompleterTypes.CompleterData + { getLatestGPD = do + -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, + -- thus, a quick response gives us the desired result most of the time. + -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. + mGPD <- runAction "cabal-plugin.modulesCompleter.gpd" ide $ useWithStale ParseCabalFile $ toNormalizedFilePath fp + pure $ fmap fst mGPD + , getCabalCommonSections = runAction "cabal-plugin.commonSections" ide $ use ParseCabalCommonSections $ toNormalizedFilePath fp + , cabalPrefixInfo = prefInfo + , stanzaName = + case fst ctx of + Types.Stanza _ name -> name + _ -> Nothing + } + completions <- completer completerRecorder completerData + pure completions + where + pos = Types.completionCursorPosition prefInfo + context fields = Completions.getContext completerRecorder prefInfo fields + completerRecorder = cmapWithPrio LogCompletions recorder diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs new file mode 100644 index 0000000000..d72ad290fd --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs @@ -0,0 +1,343 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.CodeAction where + +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.Aeson.Types (toJSON) +import Data.Foldable (asum) +import Data.Maybe (mapMaybe) +import qualified Data.Text as T +import Development.IDE.Core.PluginUtils (uriToFilePathE) +import Development.IDE.Types.Location (Uri) +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Pretty as CabalPretty +import Distribution.Simple.BuildTarget (BuildTarget, + buildTargetComponentName, + readBuildTargets) +import Distribution.Utils.Path (getSymbolicPath) +import Distribution.Verbosity (silent, + verboseNoStderr) +import Ide.Logger +import Ide.Plugin.Cabal.CabalAdd.Types +import Ide.Plugin.Cabal.Completion.Completer.Module (fpToExposedModulePath) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (mkLspCommand) +import Ide.Types (CommandId (CommandId), + PluginId) + +import Control.Lens ((^.)) +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Types (CodeActionKind (..), + VersionedTextDocumentIdentifier) +import qualified Language.LSP.Protocol.Types as J +import System.FilePath +import Text.PrettyPrint (render) +import Text.Regex.TDFA + +-------------------------------------------- +-- Add module to cabal file +-------------------------------------------- + +{- | Takes a path to a cabal file, a module path in 
exposed module syntax + and the contents of the cabal file and generates all possible + code actions for inserting the module into the cabal file + with the given contents. +-} +collectModuleInsertionOptions :: + (MonadIO m) => + Recorder (WithPriority Log) -> + PluginId -> + VersionedTextDocumentIdentifier -> + J.Diagnostic -> + -- | The file path of the cabal file to insert the new module into + FilePath -> + -- | The generic package description of the cabal file to insert the new module into. + GenericPackageDescription -> + -- | The URI of the unknown haskell file/new module to insert into the cabal file. + Uri -> + ExceptT PluginError m [J.CodeAction] +collectModuleInsertionOptions _ plId txtDocIdentifier diag cabalFilePath gpd haskellFilePathURI = do + haskellFilePath <- uriToFilePathE haskellFilePathURI + let configs = concatMap (mkModuleInsertionConfig txtDocIdentifier cabalFilePath haskellFilePath) (makeStanzaItems gpd) + pure $ map (mkCodeActionForModulePath plId diag) configs + where + makeStanzaItems :: GenericPackageDescription -> [StanzaItem] + makeStanzaItems gpd = + mainLibItem pd + ++ libItems pd + ++ executableItems pd + ++ testSuiteItems pd + ++ benchmarkItems pd + where + pd = flattenPackageDescription gpd + +{- | Takes a buildInfo of a cabal file component as defined in the generic package description, + and translates it to filepaths of the component's hsSourceDirs, + to be processed for adding modules to exposed-, or other-modules fields in a cabal file. +-} +buildInfoToHsSourceDirs :: BuildInfo -> [FilePath] +buildInfoToHsSourceDirs buildInfo = map getSymbolicPath hsSourceDirs' + where + hsSourceDirs' = hsSourceDirs buildInfo + +{- | Takes the path to the cabal file to insert the module into, + the module path to be inserted, and a stanza representation. + + Returns a list of module insertion configs, where each config + represents a possible place to insert the module. +-} +mkModuleInsertionConfig :: VersionedTextDocumentIdentifier -> FilePath -> FilePath -> StanzaItem -> [ModuleInsertionConfig] +mkModuleInsertionConfig txtDocIdentifier cabalFilePath haskellFilePath (StanzaItem{..}) = do + case mkRelativeModulePathM siHsSourceDirs cabalFilePath haskellFilePath of + Just processedModPath -> + [modInsertItem processedModPath "other-modules"] + ++ [modInsertItem processedModPath "exposed-modules" | CLibName _ <- [siComponent]] + _ -> [] + where + modInsertItem :: T.Text -> T.Text -> ModuleInsertionConfig + modInsertItem modPath label = + ModuleInsertionConfig + { targetFile = cabalFilePath + , moduleToInsert = modPath + , modVerTxtDocId = txtDocIdentifier + , insertionStanza = siComponent + , insertionLabel = label + } + +mkCodeActionForModulePath :: PluginId -> J.Diagnostic -> ModuleInsertionConfig -> J.CodeAction +mkCodeActionForModulePath plId diag insertionConfig = + J.CodeAction + { _title = "Add to " <> label <> " as " <> fieldName + , _kind = Just CodeActionKind_Refactor + , _diagnostics = Just [diag] + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Just command + , _data_ = Nothing + } + where + fieldName = insertionLabel insertionConfig + command = mkLspCommand plId (CommandId cabalAddModuleCommandId) "Add missing module" (Just [toJSON insertionConfig]) + label = T.pack $ CabalPretty.prettyShow $ insertionStanza insertionConfig + +{- | Takes a list of source subdirectories, a cabal source path and a haskell filepath + and returns a path to the module in exposed module syntax. 
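+
+ For instance (an illustrative call; the paths and module name are made up):
+
+ > mkRelativeModulePathM ["src"] "/repo/pkg/pkg.cabal" "/repo/pkg/src/Data/Foo.hs"
+
+ is expected to yield @Just "Data.Foo"@.
+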
+ The path will be relative to one of the subdirectories, in case the module is contained within one of them.
+-}
+mkRelativeModulePathM :: [FilePath] -> FilePath -> FilePath -> Maybe T.Text
+mkRelativeModulePathM hsSourceDirs cabalSrcPath' haskellFilePath =
+  asum $
+    map
+      ( \srcDir -> do
+          let relMP = makeRelative (normalise (cabalSrcPath </> srcDir)) haskellFilePath
+          if relMP == haskellFilePath then Nothing else Just $ fpToExposedModulePath cabalSrcPath relMP
+      )
+      hsSourceDirs
+ where
+  cabalSrcPath = takeDirectory cabalSrcPath'
+
+isUnknownModuleDiagnostic :: J.Diagnostic -> Bool
+isUnknownModuleDiagnostic diag = (msg =~ regex)
+ where
+  msg :: T.Text
+  msg = diag ^. JL.message
+  regex :: T.Text
+  regex = "Loading the module [\8216'][^\8217']*[\8217'] failed."
+
+--------------------------
+-- Below are several utility functions which create a StanzaItem for each of the possible Stanzas,
+-- these all have specific constructors we need to match, so we can't generalise this process well.
+--------------------------
+
+benchmarkItems :: PackageDescription -> [StanzaItem]
+benchmarkItems pd =
+  map
+    ( \benchmark ->
+        StanzaItem
+          { siComponent = CBenchName $ benchmarkName benchmark
+          , siHsSourceDirs = buildInfoToHsSourceDirs $ benchmarkBuildInfo benchmark
+          }
+    )
+    (benchmarks pd)
+
+testSuiteItems :: PackageDescription -> [StanzaItem]
+testSuiteItems pd =
+  map
+    ( \testSuite ->
+        StanzaItem
+          { siComponent = CTestName $ testName testSuite
+          , siHsSourceDirs = buildInfoToHsSourceDirs $ testBuildInfo testSuite
+          }
+    )
+    (testSuites pd)
+
+executableItems :: PackageDescription -> [StanzaItem]
+executableItems pd =
+  map
+    ( \executable ->
+        StanzaItem
+          { siComponent = CExeName $ exeName executable
+          , siHsSourceDirs = buildInfoToHsSourceDirs $ buildInfo executable
+          }
+    )
+    (executables pd)
+
+libItems :: PackageDescription -> [StanzaItem]
+libItems pd =
+  mapMaybe
+    ( \subLib ->
+        case libName subLib of
+          LSubLibName compName ->
+            Just
+              StanzaItem
+                { siComponent = CLibName $ LSubLibName compName
+                , siHsSourceDirs = buildInfoToHsSourceDirs $ libBuildInfo subLib
+                }
+          _ -> Nothing
+    )
+    (subLibraries pd)
+
+mainLibItem :: PackageDescription -> [StanzaItem]
+mainLibItem pd =
+  case library pd of
+    Just lib ->
+      [ StanzaItem
+          { siComponent = CLibName LMainLibName
+          , siHsSourceDirs = buildInfoToHsSourceDirs $ libBuildInfo lib
+          }
+      ]
+    Nothing -> []
+
+--------------------------------------------
+-- Add dependency to a cabal file
+--------------------------------------------
+
+{- | Creates a code action that calls the `cabalAddCommand`,
+  using dependency-version suggestion pairs as input.
+
+  Returns disabled action if no cabal files given.
+
+  Takes haskell and cabal file paths to create a relative path
+  to the haskell file, which is used to get a `BuildTarget`.
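+
+  For example (illustrative): for a suggestion @("split", "0.2.5")@ and a build target
+  rendered as @exe:foo@, the resulting code action is titled
+  "Add dependency split-0.2.5 at exe:foo".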
+-} +addDependencySuggestCodeAction :: + PluginId -> + -- | Cabal's versioned text identifier + VersionedTextDocumentIdentifier -> + -- | A dependency-version suggestion pairs + [(T.Text, T.Text)] -> + -- | Path to the haskell file (source of diagnostics) + FilePath -> + -- | Path to the cabal file (that will be edited) + FilePath -> + GenericPackageDescription -> + IO [J.CodeAction] +addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath cabalFilePath gpd = do + buildTargets <- liftIO $ getBuildTargets gpd cabalFilePath haskellFilePath + case buildTargets of + -- If there are no build targets found, run the `cabal-add` command with default behaviour + [] -> pure $ mkCodeActionForDependency cabalFilePath Nothing <$> suggestions + -- Otherwise provide actions for all found targets + targets -> + pure $ + concat + [ mkCodeActionForDependency cabalFilePath (Just $ buildTargetToStringRepr target) + <$> suggestions + | target <- targets + ] + where + {- | Note the use of the `pretty` function. + It converts the `BuildTarget` to an acceptable string representation. + It will be used as the input for `cabal-add`'s `executeConfig`. + -} + buildTargetToStringRepr target = render $ CabalPretty.pretty $ buildTargetComponentName target + + {- | Finds the build targets that are used in `cabal-add`. + Note the unorthodox usage of `readBuildTargets`: + If the relative path to the haskell file is provided, + `readBuildTargets` will return the build targets, this + module is mentioned in (either exposed-modules or other-modules). + -} + getBuildTargets :: GenericPackageDescription -> FilePath -> FilePath -> IO [BuildTarget] + getBuildTargets gpd cabalFilePath haskellFilePath = do + let haskellFileRelativePath = makeRelative (dropFileName cabalFilePath) haskellFilePath + readBuildTargets (verboseNoStderr silent) (flattenPackageDescription gpd) [haskellFileRelativePath] + + mkCodeActionForDependency :: FilePath -> Maybe String -> (T.Text, T.Text) -> J.CodeAction + mkCodeActionForDependency cabalFilePath target (suggestedDep, suggestedVersion) = + let + versionTitle = if T.null suggestedVersion then T.empty else "-" <> suggestedVersion + targetTitle = case target of + Nothing -> T.empty + Just t -> " at " <> T.pack t + title = "Add dependency " <> suggestedDep <> versionTitle <> targetTitle + version = if T.null suggestedVersion then Nothing else Just suggestedVersion + + params = + CabalAddDependencyCommandParams + { depCabalPath = cabalFilePath + , depVerTxtDocId = verTxtDocId + , depBuildTarget = target + , depDependency = suggestedDep + , depVersion = version + } + command = mkLspCommand plId (CommandId cabalAddDependencyCommandId) "Add dependency" (Just [toJSON params]) + in + J.CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing Nothing (Just command) Nothing + +{- | Gives a mentioned number of @(dependency, version)@ pairs +found in the "hidden package" diagnostic message. + +For example, if a ghc error looks like this: + +> "Could not load module ‘Data.List.Split’ +> It is a member of the hidden package ‘split-0.2.5’. +> Perhaps you need to add ‘split’ to the build-depends in your .cabal file." + +or this if PackageImports extension is used: + +> "Could not find module ‘Data.List.Split’ +> Perhaps you meant +> Data.List.Split (needs flag -package-id split-0.2.5)" + +It extracts mentioned package names and version numbers. +In this example, it will be @[("split", "0.2.5")]@ + +Also supports messages without a version. 
+ +> "Perhaps you need to add ‘split’ to the build-depends in your .cabal file." + +Will turn into @[("split", "")]@ +-} +hiddenPackageSuggestion :: J.Diagnostic -> [(T.Text, T.Text)] +hiddenPackageSuggestion diag = getMatch (msg =~ regex) + where + msg :: T.Text + msg = diag ^. JL.message + regex :: T.Text + regex = + let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?" + in "It is a member of the hidden package [\8216']" + <> regex' + <> "[\8217']" + <> "|" + <> "needs flag -package-id " + <> regex' + -- Have to do this matching because `Regex.TDFA` doesn't(?) support + -- not-capturing groups like (?:message) + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)] + getMatch (_, _, _, []) = [] + getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)] + getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)] + getMatch (_, _, _, _) = [] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs new file mode 100644 index 0000000000..83554c6a82 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs @@ -0,0 +1,232 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.Command ( + cabalAddDependencyCommandId, + cabalAddModuleCommandId, + addDependencyCommand, + addModuleCommand, + Log, +) +where + +import Control.Monad (void) +import Control.Monad.Except (modifyError) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import Data.List.NonEmpty (singleton) +import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) +import qualified Data.Text.Encoding as T +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getFileContents) +import Development.IDE.Core.Rules (IdeState) +import Development.IDE.Core.Service (runAction) +import Development.IDE.Core.Shake (useWithStale) +import Distribution.Client.Add as Add +import Distribution.Fields (Field) +import Distribution.PackageDescription +import Distribution.Parsec.Position (Position) +import qualified Distribution.Pretty as CabalPretty +import Ide.Logger +import Ide.Plugin.Cabal.CabalAdd.Types +import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), + ParseCabalFile (..)) +import Ide.Plugin.Cabal.Files +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (WithDeletions (SkipDeletions), + diffText) +import Ide.Types (CommandFunction, + pluginGetClientCapabilities, + pluginSendRequest) +import Language.LSP.Protocol.Message (SMethod (SMethod_WorkspaceApplyEdit)) +import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), + ClientCapabilities, + Null (Null), + VersionedTextDocumentIdentifier, + WorkspaceEdit, + toNormalizedFilePath, + type (|?) 
(InR)) + +-------------------------------------------- +-- Add module to cabal file +-------------------------------------------- + +addModuleCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState ModuleInsertionConfig +addModuleCommand recorder state _ params@(ModuleInsertionConfig{..}) = do + logWith recorder Debug $ LogCalledCabalAddModuleCommand params + caps <- lift pluginGetClientCapabilities + let env = (state, caps, modVerTxtDocId) + edit <- getModuleEdit recorder env targetFile insertionStanza (T.unpack insertionLabel) (T.unpack moduleToInsert) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +{- | Constructs prerequisites for the @executeConfig@ + and runs it, given path to the cabal file and a dependency message. + Given the new contents of the cabal file constructs and returns the @edit@. + + Inspired by @main@ in cabal-add, Distribution.Client.Main +-} +getModuleEdit :: + forall m. + (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + -- | The cabal file to edit. + FilePath -> + -- | The component to add the module to. + ComponentName -> + -- | The specific field in the component to add the module to. + String -> + -- | The module to add. + String -> + ExceptT PluginError m WorkspaceEdit +getModuleEdit recorder env cabalFilePath stanza targetFieldStr modulePath = + mkCabalAddConfig + recorder + env + cabalFilePath + mkConfig + where + mkConfig :: (ByteString -> [Field Position] -> GenericPackageDescription -> ExceptT PluginError m AddConfig) + mkConfig cnfOrigContents fields packDescr = do + compName <- + case Add.resolveComponent cabalFilePath (fields, packDescr) $ Just $ CabalPretty.prettyShow stanza of + Right x -> pure x + Left err -> do + logWith recorder Info $ LogFailedToResolveComponent err + throwE $ PluginInternalError $ T.pack err + pure $ + AddConfig + { cnfOrigContents = cnfOrigContents + , cnfFields = fields + , cnfComponent = compName + , cnfTargetField = if targetFieldStr == "exposed-modules" then ExposedModules else OtherModules + , cnfAdditions = singleton $ B.pack modulePath + } + +-------------------------------------------- +-- Add build dependency to cabal file +-------------------------------------------- + +addDependencyCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddDependencyCommandParams +addDependencyCommand recorder state _ params@(CabalAddDependencyCommandParams{..}) = do + logWith recorder Debug $ LogCalledCabalAddDependencyCommand params + let specifiedDep = case depVersion of + Nothing -> depDependency + Just ver -> depDependency <> " ^>=" <> ver + caps <- lift pluginGetClientCapabilities + let env = (state, caps, depVerTxtDocId) + edit <- getDependencyEdit recorder env depCabalPath depBuildTarget (T.unpack specifiedDep) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +{- | Constructs prerequisites for the @executeConfig@ + and runs it, given path to the cabal file and a dependency message. + Given the new contents of the cabal file constructs and returns the @edit@. + Inspired by @main@ in cabal-add, + Distribution.Client.Main +-} +getDependencyEdit :: + forall m. 
+ (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + FilePath -> + Maybe String -> + String -> + ExceptT PluginError m WorkspaceEdit +getDependencyEdit recorder env cabalFilePath buildTarget dependency = + mkCabalAddConfig recorder env cabalFilePath mkConfig + where + mkConfig :: (ByteString -> [Field Position] -> GenericPackageDescription -> ExceptT PluginError m AddConfig) + mkConfig cnfOrigContents fields packDescr = do + let specVer = specVersion $ packageDescription packDescr + (deps, compName) <- + modifyError (\t -> PluginInternalError $ T.pack t) $ do + deps <- validateDependency specVer dependency + compName <- resolveComponent cabalFilePath (fields, packDescr) buildTarget + pure (deps, compName) + pure $ + AddConfig + { cnfOrigContents = cnfOrigContents + , cnfFields = fields + , cnfComponent = compName + , cnfTargetField = BuildDepends + , cnfAdditions = singleton deps + } + +-------------------------------------------- +-- Shared Functions +-------------------------------------------- + +mkCabalAddConfig :: + (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + -- | The cabal file to edit + FilePath -> + -- | Callback to allow configuration of 'AddConfig' to be used by `cabal-add` + ( ByteString -> + [Field Position] -> + GenericPackageDescription -> + ExceptT PluginError m AddConfig + ) -> + ExceptT PluginError m WorkspaceEdit +mkCabalAddConfig recorder env cabalFilePath mkConfig = do + let (state, caps, verTxtDocId) = env + (mbCnfOrigContents, mbFields, mbPackDescr) <- liftIO $ runAction "cabal.cabal-add" state $ do + contents <- getFileContents $ toNormalizedFilePath cabalFilePath + inFields <- useWithStale ParseCabalFields $ toNormalizedFilePath cabalFilePath + inPackDescr <- useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + let mbCnfOrigContents = case contents of + (Just txt) -> Just $ encodeUtf8 $ Rope.toText txt + _ -> Nothing + let mbFields = fst <$> inFields + let mbPackDescr = fst <$> inPackDescr + pure (mbCnfOrigContents, mbFields, mbPackDescr) + + -- Check if required info was received, + -- otherwise fall back on other options. 
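+  -- Specifically: prefer the in-memory (VFS) contents and the fields/package description
+  -- already produced by the Shake rules; if any of them are missing, read the .cabal file
+  -- from disk and re-parse it.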
+ (cnfOrigContents, fields, packDescr) <- do + cnfOrigContents <- case mbCnfOrigContents of + (Just cnfOrigContents) -> pure cnfOrigContents + Nothing -> readCabalFile cabalFilePath + (fields, packDescr) <- case (mbFields, mbPackDescr) of + (Just fields, Just packDescr) -> pure (fields, packDescr) + (_, _) -> case parseCabalFile cabalFilePath cnfOrigContents of + Left err -> throwE $ PluginInternalError $ T.pack err + Right (f, gpd) -> pure (f, gpd) + pure (cnfOrigContents, fields, packDescr) + + cabalAddConfig <- mkConfig cnfOrigContents fields packDescr + + case executeAddConfig (validateChanges packDescr) cabalAddConfig of + Nothing -> + throwE $ + PluginInternalError $ + T.pack $ + "Cannot extend " + ++ show (cnfTargetField cabalAddConfig) + ++ " of " + ++ case (cnfComponent cabalAddConfig) of + Right compName -> showComponentName compName + Left commonStanza -> show commonStanza + ++ " in " + ++ cabalFilePath + Just newContents -> do + let edit = diffText caps (verTxtDocId, T.decodeUtf8 cnfOrigContents) (T.decodeUtf8 newContents) SkipDeletions + logWith recorder Debug $ LogCreatedEdit edit + pure edit diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs new file mode 100644 index 0000000000..62d6b7a7d3 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs @@ -0,0 +1,104 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.Types where + +import Data.Aeson.Types (FromJSON, ToJSON) +import Data.String (IsString) +import qualified Data.Text as T +import Distribution.Compat.Prelude (Generic) +import Distribution.PackageDescription +import Ide.Logger +import Ide.Plugin.Cabal.Orphans () +import Language.LSP.Protocol.Types + +data Log + = LogFoundResponsibleCabalFile FilePath + | LogCalledCabalAddDependencyCommand CabalAddDependencyCommandParams + | LogCalledCabalAddModuleCommand ModuleInsertionConfig + | LogCreatedEdit WorkspaceEdit + | LogExecutedCommand + | LogFailedToResolveComponent String + deriving (Show) + +instance Pretty Log where + pretty = \case + LogFoundResponsibleCabalFile fp -> "Located the responsible cabal file at " <+> pretty fp + LogCalledCabalAddDependencyCommand params -> "Called CabalAddDependency command with:\n" <+> pretty params + LogCalledCabalAddModuleCommand params -> "Called CabalAddModule command with:\n" <+> pretty params + LogCreatedEdit edit -> "Created inplace edit:\n" <+> pretty edit + LogExecutedCommand -> "Executed CabalAdd command" + LogFailedToResolveComponent cS -> "Failed to resolve component in CabalAdd with error:" <+> viaShow cS + +cabalAddDependencyCommandId :: (IsString p) => p +cabalAddDependencyCommandId = "cabalAddDependency" + +cabalAddModuleCommandId :: (IsString p) => p +cabalAddModuleCommandId = "cabalAddModule" + +-- | Relevant data needed to add a module to a cabal file. +-- +-- This will be sent as json to the client with a code action we offer to add this dependency to a cabal file. +-- If the user decides to execute the corresponding code action, the client sends us this data again, and we then +-- use it to execute the `CabalAddDependencyCommand`. 
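+--
+-- An illustrative value (all field values are made up; @verTxtDocId@ stands for the
+-- versioned identifier of the cabal document):
+--
+-- > ModuleInsertionConfig
+-- >   { targetFile = "pkg.cabal"
+-- >   , moduleToInsert = "Data.Foo"
+-- >   , modVerTxtDocId = verTxtDocId
+-- >   , insertionStanza = CLibName LMainLibName
+-- >   , insertionLabel = "exposed-modules"
+-- >   }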
+data ModuleInsertionConfig = ModuleInsertionConfig + { targetFile :: FilePath + -- ^ The file we want to insert information about the new module into. + , moduleToInsert :: T.Text + -- ^ The module name of the module to be inserted into the targetFile at the insertionPosition. + , modVerTxtDocId :: VersionedTextDocumentIdentifier + , insertionStanza :: ComponentName + -- ^ Which stanza the module will be inserted into. + , insertionLabel :: T.Text + -- ^ A label which describes which field the module will be inserted into. + } + deriving (Show, Eq, Ord, Generic) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty ModuleInsertionConfig where + pretty ModuleInsertionConfig{..} = + "CabalAddModule parameters:" + <+> vcat + [ "cabal path:" <+> pretty targetFile + , "target:" <+> pretty moduleToInsert + , "stanza:" <+> viaShow insertionStanza + , "label:" <+> pretty insertionLabel + ] + +-- | Contains all source directories of a stanza with the name of the first parameter. +data StanzaItem = StanzaItem + { siComponent :: ComponentName + , siHsSourceDirs :: [FilePath] + } + deriving (Show) + +-- | Relevant data needed to add a dependency to a cabal file. +-- +-- This will be sent as json to the client with a code action we offer to add this dependency to a cabal file. +-- If the user decides to execute the corresponding code action, the client sends us this data again, and we then +-- use it to execute the `CabalAddDependencyCommand`. +data CabalAddDependencyCommandParams = CabalAddDependencyCommandParams + { depCabalPath :: FilePath + , depVerTxtDocId :: VersionedTextDocumentIdentifier + , depBuildTarget :: Maybe String + , depDependency :: T.Text + , depVersion :: Maybe T.Text + } + deriving (Generic, Show) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty CabalAddDependencyCommandParams where + pretty CabalAddDependencyCommandParams{..} = + "CabalAddDependency parameters:" + <+> vcat + [ "cabal path:" <+> pretty depCabalPath + , "target:" <+> pretty depBuildTarget + , "dependendency:" <+> pretty depDependency + , "version:" <+> pretty depVersion + ] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/CabalFields.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/CabalFields.hs new file mode 100644 index 0000000000..b8cb7ce0d6 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/CabalFields.hs @@ -0,0 +1,303 @@ +module Ide.Plugin.Cabal.Completion.CabalFields + ( findStanzaForColumn + , getModulesNames + , getFieldLSPRange + , findFieldSection + , findTextWord + , findFieldLine + , getOptionalSectionName + , getAnnotation + , getFieldName + , onelineSectionArgs + , getFieldEndPosition + , getSectionArgEndPosition + , getNameEndPosition + , getFieldLineEndPosition + ) + where + +import qualified Data.ByteString as BS +import Data.List (find) +import Data.List.Extra (groupSort) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NE +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import Data.Tuple (swap) +import qualified Distribution.Fields as Syntax +import qualified Distribution.Parsec.Position as Syntax +import Ide.Plugin.Cabal.Completion.Types +import qualified Language.LSP.Protocol.Types as LSP + +-- ---------------------------------------------------------------- +-- Cabal-syntax utilities I don't really want to write myself +-- ---------------------------------------------------------------- + +-- | Determine the context of a cursor position within a stack of stanza 
contexts.
+--
+-- If the cursor is indented more than one of the stanzas in the stack,
+-- the respective stanza is returned; if this is never the case, the toplevel stanza
+-- in the stack is returned.
+findStanzaForColumn :: Int -> NonEmpty (Int, StanzaContext) -> (StanzaContext, FieldContext)
+findStanzaForColumn col ctx = case NE.uncons ctx of
+  ((_, stanza), Nothing) -> (stanza, None)
+  ((indentation, stanza), Just res)
+    | col < indentation -> findStanzaForColumn col res
+    | otherwise -> (stanza, None)
+
+-- | Determine the field the cursor is currently a part of.
+--
+-- The result is said field and its starting position
+-- or Nothing if the passed list of fields is empty.
+--
+-- This only looks at the row of the cursor and not at the cursor's
+-- position within the row.
+--
+-- TODO: we do not handle braces correctly. Add more tests!
+findFieldSection :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe (Syntax.Field Syntax.Position)
+findFieldSection _cursor [] = Nothing
+findFieldSection _cursor [x] =
+  -- Last field. We decide later, whether we are starting
+  -- a new section.
+  Just x
+findFieldSection cursor (x:y:ys)
+  | Syntax.positionRow (getAnnotation x) <= cursorLine && cursorLine < Syntax.positionRow (getAnnotation y)
+  = Just x
+  | otherwise = findFieldSection cursor (y:ys)
+  where
+    cursorLine = Syntax.positionRow cursor
+
+-- | Determine the field line the cursor is currently a part of.
+--
+-- The result is said field line and its starting position
+-- or Nothing if the passed list of fields is empty.
+--
+-- This function assumes that elements in a field's @FieldLine@ list
+-- do not share the same row.
+findFieldLine :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe (Syntax.FieldLine Syntax.Position)
+findFieldLine _cursor [] = Nothing
+findFieldLine cursor fields =
+  case findFieldSection cursor fields of
+    Nothing -> Nothing
+    Just (Syntax.Field _ fieldLines) -> find filterLineFields fieldLines
+    Just (Syntax.Section _ _ fields) -> findFieldLine cursor fields
+  where
+    cursorLine = Syntax.positionRow cursor
+    -- In contrast to `Field` or `Section`, `FieldLine` must have the exact
+    -- same line position as the cursor.
+    filterLineFields (Syntax.FieldLine pos _) = Syntax.positionRow pos == cursorLine
+
+-- | Determine the exact word at the current cursor position.
+--
+-- The result is said word or Nothing if the passed list is empty
+-- or the cursor position is not next to or on a word.
+-- For this function, a word is a sequence of consecutive characters
+-- that are not a space or comma.
+--
+-- This function currently only considers words inside of a @FieldLine@.
+findTextWord :: Syntax.Position -> [Syntax.Field Syntax.Position] -> Maybe T.Text
+findTextWord _cursor [] = Nothing
+findTextWord cursor fields =
+  case findFieldLine cursor fields of
+    Nothing -> Nothing
+    Just (Syntax.FieldLine pos byteString) ->
+      let decodedText = T.decodeUtf8 byteString
+          lineFieldCol = Syntax.positionCol pos
+          lineFieldLen = T.length decodedText
+          offset = cursorCol - lineFieldCol in
+      -- Range check if cursor is inside of or next to the found line.
+      -- The latter comparison includes the length of the line as offset,
+      -- which is done to also include cursors that are at the end of a line.
+      -- e.g. "foo,bar|"
+      --              ^
+      --            cursor
+      --
+      -- Having an offset which is outside of the line is possible because of `splitAt`.
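+      -- For illustration: with the field line "foo,bar" and the cursor placed
+      -- right after the 'b', the offset is 5, so `splitAt` yields ("foo,b", "ar")
+      -- and the reconstructed word is "bar".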
+ if offset >= 0 && lineFieldLen >= offset + then + let (lhs, rhs) = T.splitAt offset decodedText + strippedLhs = T.takeWhileEnd isAllowedChar lhs + strippedRhs = T.takeWhile isAllowedChar rhs + resultText = T.concat [strippedLhs, strippedRhs] in + -- It could be possible that the cursor was in-between separators, in this + -- case the resulting text would be empty, which should result in `Nothing`. + -- e.g. " foo ,| bar" + -- ^ + -- cursor + if not $ T.null resultText then Just resultText else Nothing + else + Nothing + where + cursorCol = Syntax.positionCol cursor + separators = [',', ' '] + isAllowedChar = (`notElem` separators) + +type FieldName = T.Text + +getAnnotation :: Syntax.Field ann -> ann +getAnnotation (Syntax.Field (Syntax.Name ann _) _) = ann +getAnnotation (Syntax.Section (Syntax.Name ann _) _ _) = ann + +getFieldName :: Syntax.Field ann -> FieldName +getFieldName (Syntax.Field (Syntax.Name _ fn) _) = T.decodeUtf8 fn +getFieldName (Syntax.Section (Syntax.Name _ fn) _ _) = T.decodeUtf8 fn + +getFieldLineName :: Syntax.FieldLine ann -> FieldName +getFieldLineName (Syntax.FieldLine _ fn) = T.decodeUtf8 fn + +-- | Returns the name of a section if it has a name. +-- +-- This assumes that the given section args belong to named stanza +-- in which case the stanza name is returned. +getOptionalSectionName :: [Syntax.SectionArg ann] -> Maybe T.Text +getOptionalSectionName [] = Nothing +getOptionalSectionName (x:xs) = case x of + Syntax.SecArgName _ name -> Just (T.decodeUtf8 name) + _ -> getOptionalSectionName xs + +type BuildTargetName = T.Text +type ModuleName = T.Text + +-- | Given a cabal AST returns pairs of all respective target names +-- and the module name bound to them. If a target is a main library gives +-- @Nothing@, otherwise @Just target-name@ +-- +-- Examples of input cabal files and the outputs: +-- +-- * Target is a main library module: +-- +-- > library +-- > exposed-modules: +-- > MyLib +-- +-- * @getModulesNames@ output: +-- +-- > [([Nothing], "MyLib")] +-- +-- * Same module names in different targets: +-- +-- > test-suite first-target +-- > other-modules: +-- > Config +-- > test-suite second-target +-- > other-modules: +-- > Config +-- +-- * @getModulesNames@ output: +-- +-- > [([Just "first-target", Just "second-target"], "Config")] +getModulesNames :: [Syntax.Field any] -> [([Maybe BuildTargetName], ModuleName)] +getModulesNames fields = map swap $ groupSort rawModuleTargetPairs + where + rawModuleTargetPairs = concatMap getSectionModuleNames sections + sections = getSectionsWithModules fields + + getSectionModuleNames :: Syntax.Field any -> [(ModuleName, Maybe BuildTargetName)] + getSectionModuleNames (Syntax.Section _ secArgs fields) = map (, getArgsName secArgs) $ concatMap getFieldModuleNames fields + getSectionModuleNames _ = [] + + getArgsName [Syntax.SecArgName _ name] = Just $ T.decodeUtf8 name + getArgsName _ = Nothing -- Can be only a main library, that has no name + -- since it's impossible to have multiple names for a build target + + getFieldModuleNames field@(Syntax.Field _ modules) = if getFieldName field == T.pack "exposed-modules" || + getFieldName field == T.pack "other-modules" + then map getFieldLineName modules + else [] + getFieldModuleNames _ = [] + +-- | Trims a given cabal AST leaving only targets and their +-- @exposed-modules@ and @other-modules@ sections. +-- +-- For example: +-- +-- * Given a cabal file like this: +-- +-- > library +-- > import: extra +-- > hs-source-dirs: source/directory +-- > ... 
+-- > exposed-modules: +-- > Important.Exposed.Module +-- > other-modules: +-- > Important.Other.Module +-- > +-- > test-suite tests +-- > type: type +-- > build-tool-depends: tool +-- > other-modules: +-- > Important.Other.Module +-- +-- * @getSectionsWithModules@ gives output: +-- +-- > library +-- > exposed-modules: +-- > Important.Exposed.Module +-- > other-modules: +-- > Important.Other.Module +-- > test-suite tests +-- > other-modules: +-- > Important.Other.Module +getSectionsWithModules :: [Syntax.Field any] -> [Syntax.Field any] +getSectionsWithModules fields = concatMap go fields + where + go :: Syntax.Field any -> [Syntax.Field any] + go (Syntax.Field _ _) = [] + go section@(Syntax.Section _ _ fields) = concatMap onlySectionsWithModules (section:fields) + + onlySectionsWithModules :: Syntax.Field any -> [Syntax.Field any] + onlySectionsWithModules (Syntax.Field _ _) = [] + onlySectionsWithModules (Syntax.Section name secArgs fields) + | (not . null) newFields = [Syntax.Section name secArgs newFields] + | otherwise = [] + where newFields = filter subfieldHasModule fields + + subfieldHasModule :: Syntax.Field any -> Bool + subfieldHasModule field@(Syntax.Field _ _) = getFieldName field == T.pack "exposed-modules" || + getFieldName field == T.pack "other-modules" + subfieldHasModule (Syntax.Section _ _ _) = False + +-- | Makes a single text line out of multiple +-- @SectionArg@s. Allows to display conditions, +-- flags, etc in one line, which is easier to read. +-- +-- For example, @flag@ @(@ @pedantic@ @)@ will be joined in +-- one line, instead of four @SectionArg@s separately. +onelineSectionArgs :: [Syntax.SectionArg ann] -> T.Text +onelineSectionArgs sectionArgs = joinedName + where + joinedName = T.unwords $ map getName sectionArgs + + getName :: Syntax.SectionArg ann -> T.Text + getName (Syntax.SecArgName _ identifier) = T.decodeUtf8 identifier + getName (Syntax.SecArgStr _ quotedString) = T.decodeUtf8 quotedString + getName (Syntax.SecArgOther _ string) = T.decodeUtf8 string + +-- | Returns the end position of a provided field +getFieldEndPosition :: Syntax.Field Syntax.Position -> Syntax.Position +getFieldEndPosition (Syntax.Field name []) = getNameEndPosition name +getFieldEndPosition (Syntax.Field _ (x:xs)) = getFieldLineEndPosition $ NE.last (x NE.:| xs) +getFieldEndPosition (Syntax.Section name [] []) = getNameEndPosition name +getFieldEndPosition (Syntax.Section _ (x:xs) []) = getSectionArgEndPosition $ NE.last (x NE.:| xs) +getFieldEndPosition (Syntax.Section _ _ (x:xs)) = getFieldEndPosition $ NE.last (x NE.:| xs) + +-- | Returns the end position of a provided section arg +getSectionArgEndPosition :: Syntax.SectionArg Syntax.Position -> Syntax.Position +getSectionArgEndPosition (Syntax.SecArgName (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) +getSectionArgEndPosition (Syntax.SecArgStr (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) +getSectionArgEndPosition (Syntax.SecArgOther (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) + +-- | Returns the end position of a provided name +getNameEndPosition :: Syntax.Name Syntax.Position -> Syntax.Position +getNameEndPosition (Syntax.Name (Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) + +-- | Returns the end position of a provided field line +getFieldLineEndPosition :: Syntax.FieldLine Syntax.Position -> Syntax.Position +getFieldLineEndPosition (Syntax.FieldLine 
(Syntax.Position row col) byteString) = Syntax.Position row (col + BS.length byteString) + +-- | Returns an LSP compatible range for a provided field +getFieldLSPRange :: Syntax.Field Syntax.Position -> LSP.Range +getFieldLSPRange field = LSP.Range startLSPPos endLSPPos + where + startLSPPos = cabalPositionToLSPPosition $ getAnnotation field + endLSPPos = cabalPositionToLSPPosition $ getFieldEndPosition field diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/FilePath.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/FilePath.hs new file mode 100644 index 0000000000..a63777416b --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/FilePath.hs @@ -0,0 +1,187 @@ +{-# LANGUAGE LambdaCase #-} + +module Ide.Plugin.Cabal.Completion.Completer.FilePath where + +import Control.Exception (evaluate, try) +import Control.Monad (filterM) +import Control.Monad.Extra (concatForM, forM) +import qualified Data.Text as T +import Distribution.PackageDescription (GenericPackageDescription) +import Ide.Logger +import Ide.Plugin.Cabal.Completion.Completer.Paths +import Ide.Plugin.Cabal.Completion.Completer.Simple +import Ide.Plugin.Cabal.Completion.Completer.Types +import Ide.Plugin.Cabal.Completion.Types +import System.Directory (doesDirectoryExist, + doesFileExist, + listDirectory) +import qualified System.FilePath as FP +import qualified System.FilePath.Posix as Posix +import qualified Text.Fuzzy.Parallel as Fuzzy + +-- | Completer to be used when a file path can be completed for a field. +-- Completes file paths as well as directories. +filePathCompleter :: Completer +filePathCompleter recorder cData = do + let prefInfo = cabalPrefixInfo cData + complInfo = pathCompletionInfoFromCabalPrefixInfo "" prefInfo + filePathCompletions <- listFileCompletions recorder complInfo + let scored = + Fuzzy.simpleFilter + Fuzzy.defChunkSize + Fuzzy.defMaxResults + (pathSegment complInfo) + (map T.pack filePathCompletions) + forM + scored + ( \compl' -> do + let compl = Fuzzy.original compl' + fullFilePath <- mkFilePathCompletion complInfo compl + pure $ mkCompletionItem (completionRange prefInfo) fullFilePath fullFilePath + ) + +mainIsCompleter :: (Maybe StanzaName -> GenericPackageDescription -> [FilePath]) -> Completer +mainIsCompleter extractionFunction recorder cData = do + mGPD <- getLatestGPD cData + case mGPD of + Just gpd -> do + let srcDirs = extractionFunction sName gpd + concatForM srcDirs + (\dir' -> do + let dir = FP.normalise dir' + let pathInfo = pathCompletionInfoFromCabalPrefixInfo dir prefInfo + completions <- listFileCompletions recorder pathInfo + let scored = Fuzzy.simpleFilter + Fuzzy.defChunkSize + Fuzzy.defMaxResults + (pathSegment pathInfo) + (map T.pack completions) + forM + scored + ( \compl' -> do + let compl = Fuzzy.original compl' + fullFilePath <- mkFilePathCompletion pathInfo compl + pure $ mkCompletionItem (completionRange prefInfo) fullFilePath fullFilePath + ) + ) + Nothing -> do + logWith recorder Debug LogUseWithStaleFastNoResult + pure [] + where + sName = stanzaName cData + prefInfo = cabalPrefixInfo cData + + +-- | Completer to be used when a directory can be completed for the field. +-- Only completes directories. 
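+-- Used, for instance, for fields such as @hs-source-dirs:@ or @data-dir:@
+-- which only accept directory values.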
+directoryCompleter :: Completer
+directoryCompleter recorder cData = do
+  let prefInfo = cabalPrefixInfo cData
+      complInfo = pathCompletionInfoFromCabalPrefixInfo "" prefInfo
+  directoryCompletions <- listDirectoryCompletions recorder complInfo
+  let scored =
+        Fuzzy.simpleFilter
+          Fuzzy.defChunkSize
+          Fuzzy.defMaxResults
+          (pathSegment complInfo)
+          (map T.pack directoryCompletions)
+  forM
+    scored
+    ( \compl' -> do
+        let compl = Fuzzy.original compl'
+        let fullDirPath = mkPathCompletionDir complInfo compl
+        pure $ mkCompletionItem (completionRange prefInfo) fullDirPath fullDirPath
+    )
+
+{- Note [Using correct file path separators]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+  Since cabal files only allow for posix style file paths
+  we need to be careful to use the correct path separators
+  whenever we work with file paths in cabal files.
+
+  Thus we are using two different kinds of imports.
+  We use "FP" for platform-compatible file paths with which
+  we can query files independently of the platform.
+  We use "Posix" for the posix syntax paths which need to
+  be used for file path completions to be written to the cabal file.
+-}
+
+-- | Takes a PathCompletionInfo and returns the list of files and directories
+-- in the directory which match the path completion info in posix style.
+--
+-- The directories end with a posix trailing path separator.
+-- Since this is used for completions to be written to the cabal file,
+-- we use posix separators here.
+-- See Note [Using correct file path separators].
+listFileCompletions :: Recorder (WithPriority Log) -> PathCompletionInfo -> IO [FilePath]
+listFileCompletions recorder complInfo = do
+  let complDir = mkCompletionDirectory complInfo
+  try (evaluate =<< listDirectory complDir) >>= \case
+    Right dirs -> do
+      forM dirs $ \d -> do
+        isDir <- doesDirectoryExist $ mkDirFromCWD complInfo d
+        pure $ if isDir then Posix.addTrailingPathSeparator d else d
+    Left (err :: IOError) -> do
+      logWith recorder Warning $ LogFilePathCompleterIOError complDir err
+      pure []
+
+-- | Returns a list of all (and only) directories in the
+-- directory described by path completion info.
+listDirectoryCompletions :: Recorder (WithPriority Log) -> PathCompletionInfo -> IO [FilePath]
+listDirectoryCompletions recorder complInfo = do
+  filepaths <- listFileCompletions recorder complInfo
+  filterM (doesDirectoryExist . mkDirFromCWD complInfo) filepaths
+
+-- | Returns the directory where files and directories can be queried from
+-- for the passed PathCompletionInfo.
+--
+-- Returns the full path to the directory pointed to by the path prefix
+-- by combining it with the working directory.
+--
+-- Since this is used for querying paths we use platform
+-- compatible separators here.
+-- See Note [Using correct file path separators].
+mkCompletionDirectory :: PathCompletionInfo -> FilePath
+mkCompletionDirectory complInfo =
+  FP.addTrailingPathSeparator $
+    workingDirectory complInfo FP.</> (FP.normalise $ queryDirectory complInfo)
+
+-- | Returns the full path for the given path segment
+-- by combining the working directory with the path prefix
+-- and the path segment.
+--
+-- Since this is used for querying paths we use platform
+-- compatible separators here.
+-- See Note [Using correct file path separators].
+mkDirFromCWD :: PathCompletionInfo -> FilePath -> FilePath
+mkDirFromCWD complInfo fp =
+  FP.addTrailingPathSeparator $
+    mkCompletionDirectory complInfo FP.</> FP.normalise fp
+
+-- | Takes a PathCompletionInfo and a directory and
+-- returns the complete cabal path to be written on completion action
+-- by combining the previously written path prefix and the completed
+-- path segment.
+--
+-- Since this is used for completions we use posix separators here.
+-- See Note [Using correct file path separators].
+mkPathCompletionDir :: PathCompletionInfo -> T.Text -> T.Text
+mkPathCompletionDir complInfo completion =
+  T.pack $
+    queryDirectory complInfo Posix.</> T.unpack completion
+
+-- | Takes a PathCompletionInfo and a completed path segment and
+-- generates the whole filepath to be completed.
+--
+-- The returned text combines the completion with a relative path
+-- generated from a possible previously written path prefix and
+-- is relative to the cabal file location.
+--
+-- If the completion results in a filepath, we know this is a
+-- completed path and can thus apply wrapping of apostrophes if needed.
+mkFilePathCompletion :: PathCompletionInfo -> T.Text -> IO T.Text
+mkFilePathCompletion complInfo completion = do
+  let combinedPath = mkPathCompletionDir complInfo completion
+  isFilePath <- doesFileExist $ T.unpack combinedPath
+  let completedPath = if isFilePath then applyStringNotation (isStringNotationPath complInfo) combinedPath else combinedPath
+  pure completedPath
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Module.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Module.hs
new file mode 100644
index 0000000000..21dfbb9e1f
--- /dev/null
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Module.hs
@@ -0,0 +1,124 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Ide.Plugin.Cabal.Completion.Completer.Module where
+
+import Control.Monad (filterM)
+import Control.Monad.Extra (concatForM,
+                            forM)
+import Data.List (stripPrefix)
+import Data.Maybe (fromMaybe)
+import qualified Data.Text as T
+import Distribution.PackageDescription (GenericPackageDescription)
+import Ide.Logger (Priority (..),
+                   Recorder,
+                   WithPriority,
+                   logWith)
+import Ide.Plugin.Cabal.Completion.Completer.FilePath (listFileCompletions,
+                                                       mkCompletionDirectory)
+import Ide.Plugin.Cabal.Completion.Completer.Paths
+import Ide.Plugin.Cabal.Completion.Completer.Simple
+import Ide.Plugin.Cabal.Completion.Completer.Types
+import Ide.Plugin.Cabal.Completion.Types
+import System.Directory (doesFileExist)
+import qualified System.FilePath as FP
+import qualified Text.Fuzzy.Parallel as Fuzzy
+
+-- | Completer to be used when module paths can be completed for the field.
+--
+-- Takes an extraction function which extracts the source directories
+-- to be used by the completer.
+modulesCompleter :: (Maybe StanzaName -> GenericPackageDescription -> [FilePath]) -> Completer
+modulesCompleter extractionFunction recorder cData = do
+  mGPD <- getLatestGPD cData
+  case mGPD of
+    Just gpd -> do
+      let sourceDirs = extractionFunction sName gpd
+      filePathCompletions <-
+        filePathsForExposedModules recorder sourceDirs prefInfo
+      pure $ map (\compl -> mkSimpleCompletionItem (completionRange prefInfo) compl) filePathCompletions
+    Nothing -> do
+      logWith recorder Debug LogUseWithStaleFastNoResult
+      pure []
+  where
+    sName = stanzaName cData
+    prefInfo = cabalPrefixInfo cData
+
+-- | Takes a list of source directories and returns a list of path completions
+-- relative to any of the passed source directories which fit the passed prefix info.
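+--
+-- The completions are returned in exposed module syntax, e.g. a file
+-- @src/Foo/Bar.hs@ under the source directory @src@ may be offered as @Foo.Bar@.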
+filePathsForExposedModules :: Recorder (WithPriority Log) -> [FilePath] -> CabalPrefixInfo -> IO [T.Text]
+filePathsForExposedModules recorder srcDirs prefInfo = do
+  concatForM
+    srcDirs
+    ( \dir' -> do
+        let dir = FP.normalise dir'
+            pathInfo = pathCompletionInfoFromCabalPrefixInfo dir modPrefInfo
+        completions <- listFileCompletions recorder pathInfo
+        validExposedCompletions <- filterM (isValidExposedModulePath pathInfo) completions
+        let toMatch = pathSegment pathInfo
+            scored = Fuzzy.simpleFilter
+                       Fuzzy.defChunkSize
+                       Fuzzy.defMaxResults
+                       toMatch
+                       (map T.pack validExposedCompletions)
+        forM
+          scored
+          ( \compl' -> do
+              let compl = Fuzzy.original compl'
+              fullFilePath <- mkExposedModulePathCompletion pathInfo $ T.unpack compl
+              pure fullFilePath
+          )
+    )
+  where
+    prefix =
+      T.pack $ exposedModulePathToFp $
+        completionPrefix prefInfo
+    -- build completion info relative to the source dir,
+    -- we overwrite the prefix written in the cabal file with its translation
+    -- to filepath syntax, since it is in exposed module syntax
+    modPrefInfo = prefInfo{completionPrefix=prefix}
+
+    -- Takes a PathCompletionInfo and a path segment and checks whether
+    -- the path segment can be completed for an exposed module.
+    --
+    -- This is the case if the segment represents either a directory or a Haskell file.
+    isValidExposedModulePath :: PathCompletionInfo -> FilePath -> IO Bool
+    isValidExposedModulePath pInfo path = do
+      let dir = mkCompletionDirectory pInfo
+      fileExists <- doesFileExist (dir FP.</> path)
+      pure $ not fileExists || FP.takeExtension path `elem` [".hs", ".lhs"]
+
+-- | Takes a pathCompletionInfo and a path segment and generates the whole
+-- filepath to be written on completion including a possibly already written prefix;
+-- using the cabal syntax for exposed modules.
+--
+-- Examples:
+-- When the partial directory path `Dir1.Dir2.` is stored in the PathCompletionInfo
+-- and the completed file `HaskellFile.hs` is passed along with that PathCompletionInfo,
+-- the result would be `Dir1.Dir2.HaskellFile`
+--
+-- When the partial directory path `Dir1.` is stored in the PathCompletionInfo
+-- and the completed directory `Dir2` is passed along with that PathCompletionInfo,
+-- the result would be `Dir1.Dir2.`
+mkExposedModulePathCompletion :: PathCompletionInfo -> FilePath -> IO T.Text
+mkExposedModulePathCompletion complInfo completion = do
+  let combinedPath = queryDirectory complInfo FP.</> completion
+  isFilePath <- doesFileExist (workingDirectory complInfo FP.</> combinedPath)
+  let addTrailingDot modPath = if isFilePath then modPath else modPath <> "."
+  let exposedPath = FP.makeRelative "." combinedPath
+  pure $ addTrailingDot $ fpToExposedModulePath "" exposedPath
+
+-- | Takes a source directory path and a module path and returns
+-- the module path relative to the source directory
+-- in exposed module syntax where the separators are '.'
+-- and the file ending is removed.
+--
+-- Synopsis: @'fpToExposedModulePath' sourceDir modPath@.
+fpToExposedModulePath :: FilePath -> FilePath -> T.Text
+fpToExposedModulePath sourceDir modPath =
+  T.intercalate "." $ fmap T.pack $ FP.splitDirectories $ FP.dropExtension fp
+  where
+    fp = fromMaybe modPath $ stripPrefix sourceDir modPath
+
+-- | Takes a path in the exposed module syntax and translates it to a platform-compatible file path.
+exposedModulePathToFp :: T.Text -> FilePath
+exposedModulePathToFp fp = T.unpack $ T.replace "." (T.singleton FP.pathSeparator) fp
diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs
new file mode 100644
index 0000000000..0e1053453b
--- /dev/null
+++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Paths.hs
@@ -0,0 +1,150 @@
+module Ide.Plugin.Cabal.Completion.Completer.Paths where
+
+import qualified Data.List as List
+import Data.List.Extra (dropPrefix)
+import qualified Data.Text as T
+import Distribution.PackageDescription (Benchmark (..),
+                                        BuildInfo (..),
+                                        CondTree (condTreeData),
+                                        Executable (..),
+                                        ForeignLib (..),
+                                        GenericPackageDescription (..),
+                                        Library (..),
+                                        UnqualComponentName,
+                                        mkUnqualComponentName,
+                                        testBuildInfo)
+import Distribution.Utils.Path (getSymbolicPath)
+import Ide.Plugin.Cabal.Completion.Types
+import qualified System.FilePath as FP
+import qualified System.FilePath.Posix as Posix
+
+
+{- | Information used to query and build path completions.
+
+  Note that pathSegment combined with queryDirectory results in
+  the original prefix.
+
+  Example:
+  When given the written prefix, @dir1\/dir2\/fi@, the
+  resulting PathCompletionInfo would be:
+
+  @
+  pathSegment = "fi"
+  queryDirectory = "dir1\/dir2\/"
+  ...
+  @
+-}
+data PathCompletionInfo = PathCompletionInfo
+  { pathSegment :: T.Text,
+    -- ^ Partly written segment of the next part of the path.
+    queryDirectory :: FilePath,
+    -- ^ Written part of path, in posix format.
+    workingDirectory :: FilePath,
+    -- ^ Directory relative to which relative paths are interpreted, platform dependent.
+    isStringNotationPath :: Maybe Apostrophe
+    -- ^ Did the completion happen in the context of a string notation,
+    -- if yes, contains the state of the string notation.
+  }
+  deriving (Eq, Show)
+
+
+{- | Posix.splitFileName modification, that drops trailing ./ if
+  it wasn't present in the original path.
+
+  Fix for the issue #3774
+  Examples:
+
+  >>> splitFileNameNoTrailingSlash ""
+  ("", "")
+  >>> splitFileNameNoTrailingSlash "./"
+  ("./", "")
+  >>> splitFileNameNoTrailingSlash "dir"
+  ("", "dir")
+  >>> splitFileNameNoTrailingSlash "./dir"
+  ("./", "dir")
+  >>> splitFileNameNoTrailingSlash "dir1/dir2"
+  ("dir1/","dir2")
+  >>> splitFileNameNoTrailingSlash "./dir1/dir2"
+  ("./dir1/","dir2")
+-}
+splitFileNameNoTrailingSlash :: FilePath -> (String, String)
+splitFileNameNoTrailingSlash prefix = rmTrailingSlash ("./" `List.isPrefixOf` prefix) (Posix.splitFileName prefix)
+  where rmTrailingSlash hadTrailingSlash (queryDirectory', pathSegment')
+          | hadTrailingSlash = (queryDirectory', pathSegment')
+          | otherwise = ("./" `dropPrefix` queryDirectory', pathSegment')
+
+{- | Takes an optional source subdirectory and a prefix info
+  and creates a path completion info accordingly.
+
+  The source directory represents some subdirectory of the working directory such as a
+  path from the field @hs-source-dirs@.
+
+  If the source subdirectory is empty, then the working directory is simply set to
+  the currently handled cabal file's directory.
+-}
+pathCompletionInfoFromCabalPrefixInfo :: FilePath -> CabalPrefixInfo -> PathCompletionInfo
+pathCompletionInfoFromCabalPrefixInfo srcDir prefInfo =
+  PathCompletionInfo
+    { pathSegment = T.pack pathSegment',
+      queryDirectory = queryDirectory',
+      workingDirectory = completionWorkingDir prefInfo FP.</>
srcDir, + isStringNotationPath = isStringNotation prefInfo + } + where + prefix = T.unpack $ completionPrefix prefInfo + (queryDirectory', pathSegment') = splitFileNameNoTrailingSlash prefix + +-- | Extracts the source directories of the library stanza. +sourceDirsExtractionLibrary :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] +sourceDirsExtractionLibrary Nothing gpd = + -- we use condLibrary to get the information contained in the library stanza + -- since the library in PackageDescription is not populated by us + case libM of + Just lib -> do + map getSymbolicPath $ hsSourceDirs $ libBuildInfo $ condTreeData lib + Nothing -> [] + where + libM = condLibrary gpd +sourceDirsExtractionLibrary name gpd = extractRelativeDirsFromStanza name gpd condSubLibraries libBuildInfo + +-- | Extracts the source directories of the executable stanza with the given name. +sourceDirsExtractionExecutable :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] +sourceDirsExtractionExecutable name gpd = extractRelativeDirsFromStanza name gpd condExecutables buildInfo + +-- | Extracts the source directories of the test suite stanza with the given name. +sourceDirsExtractionTestSuite :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] +sourceDirsExtractionTestSuite name gpd = extractRelativeDirsFromStanza name gpd condTestSuites testBuildInfo + +-- | Extracts the source directories of benchmark stanza with the given name. +sourceDirsExtractionBenchmark :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] +sourceDirsExtractionBenchmark name gpd = extractRelativeDirsFromStanza name gpd condBenchmarks benchmarkBuildInfo + +-- | Extracts the source directories of foreign-lib stanza with the given name. +sourceDirsExtractionForeignLib :: Maybe StanzaName -> GenericPackageDescription -> [FilePath] +sourceDirsExtractionForeignLib name gpd = extractRelativeDirsFromStanza name gpd condForeignLibs foreignLibBuildInfo + +{- | Takes a possible stanza name, a GenericPackageDescription, + a function to access the stanza information we are interested in + and a function to access the build info from the specific stanza. + + Returns a list of relative source directory paths specified for the extracted stanza. +-} +extractRelativeDirsFromStanza :: + Maybe StanzaName -> + GenericPackageDescription -> + (GenericPackageDescription -> [(UnqualComponentName, CondTree b c a)]) -> + (a -> BuildInfo) -> + [FilePath] +extractRelativeDirsFromStanza Nothing _ _ _ = [] +extractRelativeDirsFromStanza (Just name) gpd getStanza getBuildInfo + | Just stanza <- stanzaM = map getSymbolicPath $ hsSourceDirs $ getBuildInfo stanza + | otherwise = [] + where + stanzaM = fmap (condTreeData . 
snd) res + allStanzasM = getStanza gpd + res = + List.find + ( \(n, _) -> + n == mkUnqualComponentName (T.unpack name) + ) + allStanzasM diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs new file mode 100644 index 0000000000..b097af5cd2 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Simple.hs @@ -0,0 +1,157 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Cabal.Completion.Completer.Simple where + +import Control.Lens ((?~)) +import Data.Function ((&)) +import qualified Data.List as List +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Maybe (fromMaybe, + mapMaybe) +import Data.Ord (Down (Down)) +import qualified Data.Text as T +import qualified Distribution.Fields as Syntax +import Ide.Logger (Priority (..), + logWith) +import Ide.Plugin.Cabal.Completion.CabalFields +import Ide.Plugin.Cabal.Completion.Completer.Types +import Ide.Plugin.Cabal.Completion.Types (CabalPrefixInfo (..), + Log) +import qualified Language.LSP.Protocol.Lens as JL +import qualified Language.LSP.Protocol.Types as Compls (CompletionItem (..)) +import qualified Language.LSP.Protocol.Types as LSP +import qualified Text.Fuzzy.Parallel as Fuzzy + +-- | Completer to be used when no completion suggestions +-- are implemented for the field +noopCompleter :: Completer +noopCompleter _ _ = pure [] + +-- | Completer to be used when no completion suggestions +-- are implemented for the field and a log message should be emitted. +errorNoopCompleter :: Log -> Completer +errorNoopCompleter l recorder _ = do + logWith recorder Warning l + pure [] + +-- | Completer to be used when a simple set of values +-- can be completed for a field. +constantCompleter :: [T.Text] -> Completer +constantCompleter completions _ cData = do + let prefInfo = cabalPrefixInfo cData + scored = Fuzzy.simpleFilter Fuzzy.defChunkSize Fuzzy.defMaxResults (completionPrefix prefInfo) completions + range = completionRange prefInfo + pure $ map (mkSimpleCompletionItem range . Fuzzy.original) scored + +-- | Completer to be used for import fields. +-- +-- TODO: Does not exclude imports, defined after the current cursor position +-- which are not allowed according to the cabal specification +importCompleter :: Completer +importCompleter l cData = do + cabalCommonsM <- getCabalCommonSections cData + case cabalCommonsM of + Just cabalCommons -> do + let commonNames = mapMaybe (\case + Syntax.Section (Syntax.Name _ "common") commonNames _ -> getOptionalSectionName commonNames + _ -> Nothing) + cabalCommons + constantCompleter commonNames l cData + Nothing -> noopCompleter l cData + +-- | Completer to be used for the field @name:@ value. +-- +-- This is almost always the name of the cabal file. However, +-- it is not forbidden by the specification to have a different name, +-- it is just forbidden on hackage. +nameCompleter :: Completer +nameCompleter _ cData = do + let scored = Fuzzy.simpleFilter Fuzzy.defChunkSize Fuzzy.defMaxResults (completionPrefix prefInfo) [completionFileName prefInfo] + prefInfo = cabalPrefixInfo cData + range = completionRange prefInfo + pure $ map (mkSimpleCompletionItem range . Fuzzy.original) scored + +-- | Completer to be used when a set of values with priority weights +-- attached to some values are to be completed for a field. 
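+-- An example is the @license:@ field, which is completed from 'licenseNames'
+-- weighted by 'weightedLicenseNames'.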
+-- +-- The higher the weight, the higher the priority to show +-- the value in the completion suggestion. +-- +-- If the value does not occur in the weighted map its weight is defaulted to zero. +weightedConstantCompleter :: [T.Text] -> Map T.Text Double -> Completer +weightedConstantCompleter completions weights _ cData = do + let scored = + if perfectScore > 0 + then + fmap Fuzzy.original $ + Fuzzy.simpleFilter' Fuzzy.defChunkSize Fuzzy.defMaxResults prefix completions customMatch + else topTenByWeight + range = completionRange prefInfo + pure $ map (mkSimpleCompletionItem range) scored + where + prefInfo = cabalPrefixInfo cData + prefix = completionPrefix prefInfo + -- The perfect score is the score of the word matched with itself + -- this should never return Nothing since we match the word with itself + perfectScore = fromMaybe (error "match is broken") $ Fuzzy.match prefix prefix + -- \| Since the best score is cut off at the perfect score, we use a custom match + -- which allows for the score to be larger than the perfect score. + -- + -- This is necessary since the weight is multiplied with the originally matched + -- score and thus the calculated score may be larger than the perfect score. + customMatch :: (T.Text -> T.Text -> Maybe Int) + customMatch toSearch searchSpace = do + matched <- Fuzzy.match toSearch searchSpace + let weight = fromMaybe 0 $ Map.lookup searchSpace weights + let score = + min + perfectScore + (round (fromIntegral matched * (1 + weight))) + pure score + -- \| Sorts the list in descending order based on the map of weights and then + -- returns the top ten items in the list + topTenByWeight :: [T.Text] + topTenByWeight = take 10 $ map fst $ List.sortOn (Down . snd) $ Map.assocs weights + +-- | Creates a CompletionItem with the given text as the label +-- where the completion item kind is keyword. +mkDefaultCompletionItem :: T.Text -> LSP.CompletionItem +mkDefaultCompletionItem label = + LSP.CompletionItem + { Compls._label = label, + Compls._labelDetails = Nothing, + Compls._kind = Just LSP.CompletionItemKind_Keyword, + Compls._tags = Nothing, + Compls._detail = Nothing, + Compls._documentation = Nothing, + Compls._deprecated = Nothing, + Compls._preselect = Nothing, + Compls._sortText = Nothing, + Compls._filterText = Nothing, + Compls._insertText = Nothing, + Compls._insertTextFormat = Nothing, + Compls._insertTextMode = Nothing, + Compls._textEdit = Nothing, + Compls._textEditText = Nothing, + Compls._additionalTextEdits = Nothing, + Compls._commitCharacters = Nothing, + Compls._command = Nothing, + Compls._data_ = Nothing + } + +-- | Returns a CompletionItem with the given starting position +-- and text to be inserted, where the displayed text is the same as the +-- inserted text. +mkSimpleCompletionItem :: LSP.Range -> T.Text -> LSP.CompletionItem +mkSimpleCompletionItem range txt = + mkDefaultCompletionItem txt + & JL.textEdit ?~ LSP.InL (LSP.TextEdit range txt) + +-- | Returns a completionItem with the given starting position, +-- text to be inserted and text to be displayed in the completion suggestion. 
+mkCompletionItem :: LSP.Range -> T.Text -> T.Text -> LSP.CompletionItem +mkCompletionItem range insertTxt displayTxt = + mkDefaultCompletionItem displayTxt + & JL.textEdit ?~ LSP.InL (LSP.TextEdit range insertTxt) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Snippet.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Snippet.hs new file mode 100644 index 0000000000..800a39bfbc --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Snippet.hs @@ -0,0 +1,108 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Cabal.Completion.Completer.Snippet where + +import Control.Lens ((?~)) +import Control.Monad.Extra (mapMaybeM) +import Data.Function ((&)) +import Data.Map (Map) +import qualified Data.Map as Map +import qualified Data.Text as T +import Ide.Logger (Priority (..), + logWith) +import Ide.Plugin.Cabal.Completion.Completer.Simple +import Ide.Plugin.Cabal.Completion.Completer.Types +import Ide.Plugin.Cabal.Completion.Types +import qualified Language.LSP.Protocol.Lens as JL +import qualified Language.LSP.Protocol.Types as LSP +import qualified Text.Fuzzy.Parallel as Fuzzy + +-- | Maps snippet triggerwords with their completers +snippetCompleter :: Completer +snippetCompleter recorder cData = do + let scored = Fuzzy.simpleFilter Fuzzy.defChunkSize Fuzzy.defMaxResults (completionPrefix prefInfo) $ Map.keys snippets + mapMaybeM + ( \compl -> do + let matched = Fuzzy.original compl + let completion' = Map.lookup matched snippets + case completion' of + Nothing -> do + logWith recorder Warning $ LogMapLookUpOfKnownKeyFailed matched + pure Nothing + Just completion -> + pure $ Just $ mkSnippetCompletion completion matched + ) + scored + where + snippets = snippetMap prefInfo + prefInfo = cabalPrefixInfo cData + mkSnippetCompletion :: T.Text -> T.Text -> LSP.CompletionItem + mkSnippetCompletion insertText toDisplay = + mkDefaultCompletionItem toDisplay + & JL.kind ?~ LSP.CompletionItemKind_Snippet + & JL.insertText ?~ insertText + & JL.insertTextFormat ?~ LSP.InsertTextFormat_Snippet + +type TriggerWord = T.Text + +snippetMap :: CabalPrefixInfo -> Map TriggerWord T.Text +snippetMap prefInfo = + fmap T.unlines $ + Map.fromList + [ ( "library-snippet", + [ "library", + " hs-source-dirs: $1", + " exposed-modules: $2", + " build-depends: base", + " default-language: Haskell2010" + ] + ), + ( "recommended-fields", + [ "cabal-version: $1", + "name: " <> completionFileName prefInfo, + "version: 0.1.0.0", + "maintainer: $4", + "category: $5", + "synopsis: $6", + "license: $7", + "build-type: Simple" + ] + ), + ( "executable-snippet", + [ "executable $1", + " main-is: ${2:Main.hs}", + " build-depends: base" + ] + ), + ( "benchmark-snippet", + [ "benchmark $1", + " type: exitcode-stdio-1.0", + " main-is: ${3:Main.hs}", + " build-depends: base" + ] + ), + ( "testsuite-snippet", + [ "test-suite $1", + " type: exitcode-stdio-1.0", + " main-is: ${3:Main.hs}", + " build-depends: base" + ] + ), + ( "common-warnings", + [ "common warnings", + " ghc-options: -Wall" + ] + ), + ( "source-repo-github-snippet", + [ "source-repository head", + " type: git", + " location: git://github.com/$2" + ] + ), + ( "source-repo-git-snippet", + [ "source-repository head", + " type: git", + " location: $1" + ] + ) + ] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs new file mode 100644 index 0000000000..968b68919b --- 
/dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completer/Types.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Completion.Completer.Types where + +import Development.IDE as D +import qualified Distribution.Fields as Syntax +import Distribution.PackageDescription (GenericPackageDescription) +import qualified Distribution.Parsec.Position as Syntax +import Ide.Plugin.Cabal.Completion.Types +import Language.LSP.Protocol.Types (CompletionItem) + +-- | Takes information needed to build possible completion items +-- and returns the list of possible completion items +type Completer = Recorder (WithPriority Log) -> CompleterData -> IO [CompletionItem] + +-- | Contains information to be used by completers. +data CompleterData = CompleterData + { -- | Access to the latest available generic package description for the handled cabal file, + -- relevant for some completion actions which require the file's meta information + -- such as the module completers which require access to source directories + getLatestGPD :: IO (Maybe GenericPackageDescription), + -- | Access to the entries of the handled cabal file as parsed by ParseCabalFields + getCabalCommonSections :: IO (Maybe [Syntax.Field Syntax.Position]), + -- | Prefix info to be used for constructing completion items + cabalPrefixInfo :: CabalPrefixInfo, + -- | The name of the stanza in which the completer is applied + stanzaName :: Maybe StanzaName + } diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs new file mode 100644 index 0000000000..83e809fb0f --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Completions.hs @@ -0,0 +1,181 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Cabal.Completion.Completions (contextToCompleter, getContext, getCabalPrefixInfo) where + +import Control.Lens ((^.)) +import Control.Monad.IO.Class (MonadIO) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as Map +import qualified Data.Text as T +import Development.IDE as D +import qualified Development.IDE.Plugin.Completions.Types as Ghcide +import qualified Distribution.Fields as Syntax +import qualified Distribution.Parsec.Position as Syntax +import Ide.Plugin.Cabal.Completion.CabalFields +import Ide.Plugin.Cabal.Completion.Completer.Simple +import Ide.Plugin.Cabal.Completion.Completer.Snippet +import Ide.Plugin.Cabal.Completion.Completer.Types (Completer) +import Ide.Plugin.Cabal.Completion.Data +import Ide.Plugin.Cabal.Completion.Types +import qualified Language.LSP.Protocol.Lens as JL +import qualified System.FilePath as FP +import System.FilePath (takeBaseName) + +-- ---------------------------------------------------------------- +-- Public API for Completions +-- ---------------------------------------------------------------- + +-- | Takes information about the completion context within the file +-- and finds the correct completer to be applied. 
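+--
+-- For instance, at the top level the stanza types and top level keywords are
+-- offered, while inside a stanza's keyword context the possible values of
+-- that keyword are completed.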
+contextToCompleter :: Context -> Completer +-- if we are in the top level of the cabal file and not in a keyword context, +-- we can write any top level keywords or a stanza declaration +contextToCompleter (TopLevel, None) = + snippetCompleter + <> ( constantCompleter $ + Map.keys (cabalVersionKeyword <> cabalKeywords) ++ Map.keys stanzaKeywordMap + ) +-- if we are in a keyword context in the top level, +-- we look up that keyword in the top level context and can complete its possible values +contextToCompleter (TopLevel, KeyWord kw) = + case Map.lookup kw (cabalVersionKeyword <> cabalKeywords) of + Nothing -> errorNoopCompleter (LogUnknownKeyWordInContextError kw) + Just l -> l +-- if we are in a stanza and not in a keyword context, +-- we can write any of the stanza's keywords or a stanza declaration +contextToCompleter (Stanza s _, None) = + case Map.lookup s stanzaKeywordMap of + Nothing -> errorNoopCompleter (LogUnknownStanzaNameInContextError s) + Just l -> constantCompleter $ Map.keys l +-- if we are in a stanza's keyword's context we can complete possible values of that keyword +contextToCompleter (Stanza s _, KeyWord kw) = + case Map.lookup s stanzaKeywordMap of + Nothing -> errorNoopCompleter (LogUnknownStanzaNameInContextError s) + Just m -> case Map.lookup kw m of + Nothing -> errorNoopCompleter (LogUnknownKeyWordInContextError kw) + Just l -> l + +-- | Takes prefix info about the previously written text +-- and a rope (representing a file), returns the corresponding context. +-- +-- Can return Nothing if an error occurs. +-- +-- TODO: first line can only have cabal-version: keyword +getContext :: (MonadIO m) => Recorder (WithPriority Log) -> CabalPrefixInfo -> [Syntax.Field Syntax.Position] -> m Context +getContext recorder prefInfo fields = do + let ctx = findCursorContext cursor (NE.singleton (0, TopLevel)) (completionPrefix prefInfo) fields + logWith recorder Debug $ LogCompletionContext ctx + pure ctx + where + cursor = lspPositionToCabalPosition (completionCursorPosition prefInfo) + +-- | Takes information about the current file's file path, +-- and the cursor position in the file; and builds a CabalPrefixInfo +-- with the prefix up to that cursor position. +-- Checks whether a suffix needs to be completed +-- and calculates the range in the document +-- where the completion action should be applied. +getCabalPrefixInfo :: FilePath -> Ghcide.PosPrefixInfo -> CabalPrefixInfo +getCabalPrefixInfo fp prefixInfo = + CabalPrefixInfo + { completionPrefix = completionPrefix', + isStringNotation = mkIsStringNotation separator afterCursorText, + completionCursorPosition = Ghcide.cursorPos prefixInfo, + completionRange = Range completionStart completionEnd, + completionWorkingDir = FP.takeDirectory fp, + completionFileName = T.pack $ takeBaseName fp + } + where + completionEnd = Ghcide.cursorPos prefixInfo + completionStart = + Position + (_line completionEnd) + (_character completionEnd - (fromIntegral $ T.length completionPrefix')) + (beforeCursorText, afterCursorText) = T.splitAt cursorColumn $ Ghcide.fullLine prefixInfo + completionPrefix' = T.takeWhileEnd (not . (`elem` stopConditionChars)) beforeCursorText + separator = + -- if there is an opening apostrophe before the cursor in the line somewhere, + -- everything after that apostrophe is the completion prefix + if odd $ T.count "\"" beforeCursorText + then '\"' + else ' ' + cursorColumn = fromIntegral $ Ghcide.cursorPos prefixInfo ^. 
JL.character + stopConditionChars = separator : [',', ':'] + + -- \| Takes the character occurring exactly before, + -- and the text occurring after the item to be completed and + -- returns whether the item is already surrounded by apostrophes. + -- + -- Example: (@|@ indicates the cursor position) + -- + -- @"./src|@ would call @'\"'@ @""@ and result in Just LeftSide + -- + -- @"./src|"@ would call @'\"'@ @'\"'@ and result in Just Surrounded + -- + mkIsStringNotation :: Char -> T.Text -> Maybe Apostrophe + mkIsStringNotation '\"' restLine + | Just ('\"', _) <- T.uncons restLine = Just Surrounded + | otherwise = Just LeftSide + mkIsStringNotation _ _ = Nothing + +-- ---------------------------------------------------------------- +-- Implementation Details +-- ---------------------------------------------------------------- + +findCursorContext :: + Syntax.Position -> + -- ^ The cursor position we look for in the fields + NonEmpty (Int, StanzaContext) -> + -- ^ A stack of current stanza contexts and their starting line numbers + T.Text -> + -- ^ The cursor's prefix text + [Syntax.Field Syntax.Position] -> + -- ^ The fields to traverse + Context +findCursorContext cursor parentHistory prefixText fields = + case findFieldSection cursor fields of + Nothing -> (snd $ NE.head parentHistory, None) + -- We found the most likely section. Now, are we starting a new section or are we completing an existing one? + Just field@(Syntax.Field _ _) -> classifyFieldContext parentHistory cursor field + Just section@(Syntax.Section _ args sectionFields) + | inSameLineAsSectionName section -> (stanzaCtx, None) -- TODO: test whether keyword in same line is parsed correctly + | getFieldName section `elem` conditionalKeywords -> findCursorContext cursor parentHistory prefixText sectionFields -- Ignore if conditionals, they are not real sections + | otherwise -> + findCursorContext cursor + (NE.cons (Syntax.positionCol (getAnnotation section) + 1, Stanza (getFieldName section) (getOptionalSectionName args)) parentHistory) + prefixText sectionFields + where + inSameLineAsSectionName section = Syntax.positionRow (getAnnotation section) == Syntax.positionRow cursor + stanzaCtx = snd $ NE.head parentHistory + conditionalKeywords = ["if", "elif", "else"] + +-- | Finds the cursor's context, where the cursor is already found to be in a specific field +-- +-- Due to the way the field context is recognised for incomplete cabal files, +-- an incomplete keyword is also recognised as a field, therefore we need to determine +-- the specific context as we could still be in a stanza context in this case. 
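+--
+-- For example, an incomplete keyword such as @ghc-opt@ inside a stanza is
+-- parsed as a field, yet the completion should still offer the stanza's
+-- keywords, which is why the cursor position relative to the field name is
+-- inspected below.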
+classifyFieldContext :: NonEmpty (Int, StanzaContext) -> Syntax.Position -> Syntax.Field Syntax.Position -> Context +classifyFieldContext ctx cursor field + -- the cursor is not indented enough to be within the field + -- but still indented enough to be within the stanza + | cursorColumn <= fieldColumn && minIndent <= cursorColumn = (stanzaCtx, None) + -- the cursor is not in the current stanza's context as it is not indented enough + | cursorColumn < minIndent = findStanzaForColumn cursorColumn ctx + | cursorIsInFieldName = (stanzaCtx, None) + | cursorIsBeforeFieldName = (stanzaCtx, None) + | otherwise = (stanzaCtx, KeyWord (getFieldName field <> ":")) + where + (minIndent, stanzaCtx) = NE.head ctx + + cursorIsInFieldName = inSameLineAsFieldName && + fieldColumn <= cursorColumn && + cursorColumn <= fieldColumn + T.length (getFieldName field) + + cursorIsBeforeFieldName = inSameLineAsFieldName && + cursorColumn < fieldColumn + + inSameLineAsFieldName = Syntax.positionRow (getAnnotation field) == Syntax.positionRow cursor + + cursorColumn = Syntax.positionCol cursor + fieldColumn = Syntax.positionCol (getAnnotation field) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs new file mode 100644 index 0000000000..03e517eae2 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Data.hs @@ -0,0 +1,297 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Cabal.Completion.Data where + +import Data.Map (Map) +import qualified Data.Map as Map +import qualified Data.Text as T +import Development.IDE.GHC.Compat.Core (flagsForCompletion) +import Distribution.CabalSpecVersion (CabalSpecVersion (CabalSpecV2_2), + showCabalSpecVersion) +import Ide.Plugin.Cabal.Completion.Completer.FilePath +import Ide.Plugin.Cabal.Completion.Completer.Module +import Ide.Plugin.Cabal.Completion.Completer.Paths +import Ide.Plugin.Cabal.Completion.Completer.Simple +import Ide.Plugin.Cabal.Completion.Completer.Types (Completer) +import Ide.Plugin.Cabal.Completion.Types +import Ide.Plugin.Cabal.LicenseSuggest (licenseNames) + +-- | Ad-hoc data type for modelling the available top-level stanzas. +-- Not intended right now for anything else but to avoid string +-- comparisons in 'stanzaKeywordMap' and 'libExecTestBenchCommons'. +data TopLevelStanza + = Library + | Executable + | TestSuite + | Benchmark + | ForeignLib + | Common + +-- ---------------------------------------------------------------- +-- Completion Data +-- ---------------------------------------------------------------- + +supportedCabalVersions :: [CabalSpecVersion] +supportedCabalVersions = [CabalSpecV2_2 .. maxBound] + +-- | Keyword for cabal version; required to be the top line in a cabal file +cabalVersionKeyword :: Map KeyWordName Completer +cabalVersionKeyword = + Map.singleton "cabal-version:" $ + constantCompleter $ + -- We only suggest cabal versions newer than 2.2 + -- since we don't recommend using older ones. + map (T.pack . showCabalSpecVersion) supportedCabalVersions + +-- | Top level keywords of a cabal file. 
+--
+-- TODO: we could add descriptions of field values and
+-- then show them when inside the field's context
+cabalKeywords :: Map KeyWordName Completer
+cabalKeywords =
+  Map.fromList
+    [ ("name:", nameCompleter),
+      ("version:", noopCompleter),
+      ("build-type:", constantCompleter ["Simple", "Custom", "Configure", "Make"]),
+      ("license:", weightedConstantCompleter licenseNames weightedLicenseNames),
+      ("license-file:", filePathCompleter),
+      ("license-files:", filePathCompleter),
+      ("copyright:", noopCompleter),
+      ("author:", noopCompleter),
+      ("maintainer:", noopCompleter), -- email address, use git config?
+      ("stability:", noopCompleter),
+      ("homepage:", noopCompleter),
+      ("bug-reports:", noopCompleter),
+      ("package-url:", noopCompleter),
+      ("synopsis:", noopCompleter),
+      ("description:", noopCompleter),
+      ("category:", noopCompleter),
+      ("tested-with:", constantCompleter ["GHC"]),
+      ("data-files:", filePathCompleter),
+      ("data-dir:", directoryCompleter),
+      ("extra-source-files:", filePathCompleter),
+      ("extra-doc-files:", filePathCompleter),
+      ("extra-tmp-files:", filePathCompleter)
+    ]
+
+-- | Map, containing all stanzas in a cabal file as keys,
+-- and lists of their possible nested keywords as values.
+stanzaKeywordMap :: Map StanzaType (Map KeyWordName Completer)
+stanzaKeywordMap =
+  Map.fromList
+    [ ("library", libraryFields <> libExecTestBenchCommons Library),
+      ("executable", executableFields <> libExecTestBenchCommons Executable),
+      ("test-suite", testSuiteFields <> libExecTestBenchCommons TestSuite),
+      ("benchmark", benchmarkFields <> libExecTestBenchCommons Benchmark),
+      ("foreign-library", foreignLibraryFields <> libExecTestBenchCommons ForeignLib),
+      ("common", libExecTestBenchCommons Common),
+      ("flag", flagFields),
+      ("source-repository", sourceRepositoryFields)
+    ]
+
+libraryFields :: Map KeyWordName Completer
+libraryFields =
+  Map.fromList
+    [ ("exposed-modules:", modulesCompleter sourceDirsExtractionLibrary),
+      ("virtual-modules:", noopCompleter),
+      ("exposed:", constantCompleter ["True", "False"]),
+      ("visibility:", constantCompleter ["private", "public"]),
+      ("reexported-modules:", noopCompleter),
+      ("signatures:", noopCompleter),
+      ("other-modules:", modulesCompleter sourceDirsExtractionLibrary)
+    ]
+
+executableFields :: Map KeyWordName Completer
+executableFields =
+  Map.fromList
+    [ ("main-is:", mainIsCompleter sourceDirsExtractionExecutable),
+      ("scope:", constantCompleter ["public", "private"]),
+      ("other-modules:", modulesCompleter sourceDirsExtractionExecutable)
+    ]
+
+testSuiteFields :: Map KeyWordName Completer
+testSuiteFields =
+  Map.fromList
+    [ ("type:", constantCompleter ["exitcode-stdio-1.0", "detailed-0.9"]),
+      ("main-is:", mainIsCompleter sourceDirsExtractionTestSuite),
+      ("other-modules:", modulesCompleter sourceDirsExtractionTestSuite)
+    ]
+
+benchmarkFields :: Map KeyWordName Completer
+benchmarkFields =
+  Map.fromList
+    [ ("type:", noopCompleter),
+      ("main-is:", mainIsCompleter sourceDirsExtractionBenchmark),
+      ("other-modules:", modulesCompleter sourceDirsExtractionBenchmark)
+    ]
+
+foreignLibraryFields :: Map KeyWordName Completer
+foreignLibraryFields =
+  Map.fromList
+    [ ("type:", constantCompleter ["native-static", "native-shared"]),
+      ("options:", constantCompleter ["standalone"]),
+      ("mod-def-file:", filePathCompleter),
+      ("lib-version-info:", noopCompleter),
+      ("lib-version-linux:", noopCompleter)
+    ]
+
+sourceRepositoryFields :: Map KeyWordName Completer
+sourceRepositoryFields =
Map.fromList + [ ( "type:", + constantCompleter + [ "darcs", + "git", + "svn", + "cvs", + "mercurial", + "hg", + "bazaar", + "bzr", + "arch", + "monotone" + ] + ), + ("location:", noopCompleter), + ("module:", noopCompleter), + ("branch:", noopCompleter), + ("tag:", noopCompleter), + ("subdir:", directoryCompleter) + ] + +flagFields :: Map KeyWordName Completer +flagFields = + Map.fromList + [ ("description:", noopCompleter), + ("default:", constantCompleter ["True", "False"]), + ("manual:", constantCompleter ["False", "True"]), + ("lib-def-file:", noopCompleter), + ("lib-version-info:", noopCompleter), + ("lib-version-linux:", noopCompleter) + ] + +libExecTestBenchCommons :: TopLevelStanza -> Map KeyWordName Completer +libExecTestBenchCommons st = + Map.fromList + [ ("import:", importCompleter), + ("build-depends:", noopCompleter), + ("hs-source-dirs:", directoryCompleter), + ("default-extensions:", noopCompleter), + ("other-extensions:", noopCompleter), + ("default-language:", constantCompleter ["GHC2021", "Haskell2010", "Haskell98"]), + ("other-languages:", noopCompleter), + ("build-tool-depends:", noopCompleter), + ("buildable:", constantCompleter ["True", "False"]), + ("ghc-options:", constantCompleter ghcOptions), + ("ghc-prof-options:", constantCompleter ghcOptions), + ("ghc-shared-options:", constantCompleter ghcOptions), + ("ghcjs-options:", constantCompleter ghcOptions), + ("ghcjs-prof-options:", constantCompleter ghcOptions), + ("ghcjs-shared-options:", constantCompleter ghcOptions), + ("includes:", filePathCompleter), + ("install-includes:", filePathCompleter), + ("include-dirs:", directoryCompleter), + ("autogen-includes:", filePathCompleter), + ("autogen-modules:", moduleCompleterByTopLevelStanza), + ("c-sources:", filePathCompleter), + ("cxx-sources:", filePathCompleter), + ("asm-sources:", filePathCompleter), + ("cmm-sources:", filePathCompleter), + ("js-sources:", filePathCompleter), + ("extra-libraries:", noopCompleter), + ("extra-ghci-libraries:", noopCompleter), + ("extra-bundled-libraries:", noopCompleter), + ("extra-lib-dirs:", directoryCompleter), + ("cc-options:", noopCompleter), + ("cpp-options:", noopCompleter), + ("cxx-options:", noopCompleter), + ("cmm-options:", noopCompleter), + ("asm-options:", noopCompleter), + ("ld-options:", noopCompleter), + ("pkgconfig-depends:", noopCompleter), + ("frameworks:", noopCompleter), + ("extra-framework-dirs:", directoryCompleter), + ("mixins:", noopCompleter) + ] + where + -- + moduleCompleterByTopLevelStanza = case st of + Library -> modulesCompleter sourceDirsExtractionLibrary + Executable -> modulesCompleter sourceDirsExtractionExecutable + TestSuite -> modulesCompleter sourceDirsExtractionTestSuite + Benchmark -> modulesCompleter sourceDirsExtractionBenchmark + ForeignLib -> modulesCompleter sourceDirsExtractionForeignLib + Common -> + -- TODO: We can't provide a module completer because we provide + -- module completions based on the "hs-source-dirs" after parsing the file, + -- i.e. based on the 'PackageDescription'. + -- "common" stanzas are erased in the 'PackageDescription' representation, + -- thus we can't provide accurate module completers right now, as we don't + -- know what the 'hs-source-dirs' in the "common" stanza are. + -- + -- A potential fix would be to introduce an intermediate representation that + -- parses the '.cabal' file s.t. that we have access to the 'hs-source-dirs', + -- but not have erased the "common" stanza. 
+ noopCompleter + +-- | Contains a map of the most commonly used licenses, weighted by their popularity. +-- +-- The data was extracted by Kleidukos from the alternative hackage frontend flora.pm. +weightedLicenseNames :: Map T.Text Double +weightedLicenseNames = + fmap statisticsToWeight $ + Map.fromList + [ ("BSD-3-Clause", 9955), + ("MIT", 3336), + ("GPL-3.0-only", 679), + ("LicenseRef-OtherLicense", 521), + ("Apache-2.0", 514), + ("LicenseRef-GPL", 443), + ("LicenseRef-PublicDomain", 318), + ("MPL-2.0", 288), + ("BSD-2-Clause", 174), + ("GPL-2.0-only", 160), + ("LicenseRef-LGPL", 146), + ("LGPL-2.1-only", 112), + ("LGPL-3.0-only", 100), + ("AGPL-3.0-only", 96), + ("ISC", 89), + ("LicenseRef-Apache", 45), + ("GPL-3.0-or-later", 43), + ("BSD-2-Clause-Patent", 33), + ("GPL-2.0-or-later", 21), + ("CC0-1.0", 16), + ("AGPL-3.0-or-later", 15), + ("LGPL-2.1-or-later", 12), + ("(BSD-2-Clause OR Apache-2.0)", 10), + ("(Apache-2.0 OR MPL-2.0)", 8), + ("LicenseRef-AGPL", 6), + ("(BSD-3-Clause OR Apache-2.0)", 4), + ("0BSD", 3), + ("BSD-4-Clause", 3), + ("LGPL-3.0-or-later", 3), + ("LicenseRef-LGPL-2", 2), + ("GPL-2.0-or-later AND BSD-3-Clause", 2), + ("NONE", 2), + ("Zlib", 2), + ("(Apache-2.0 OR BSD-3-Clause)", 2), + ("BSD-3-Clause AND GPL-2.0-or-later", 2), + ("BSD-3-Clause AND GPL-3.0-or-later", 2) + ] + where + -- Add weights to each usage value from above, the weights are chosen + -- arbitrarily in order for completions to prioritize which licenses to + -- suggest in a sensible way + statisticsToWeight :: Int -> Double + statisticsToWeight stat + | stat < 10 = 0.1 + | stat < 20 = 0.3 + | stat < 50 = 0.4 + | stat < 100 = 0.5 + | stat < 500 = 0.6 + | stat < 650 = 0.7 + | otherwise = 0.9 + +ghcOptions :: [T.Text] +ghcOptions = map T.pack $ flagsForCompletion False diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs new file mode 100644 index 0000000000..59796afe2b --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Completion/Types.hs @@ -0,0 +1,188 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Completion.Types where + +import Control.DeepSeq (NFData) +import Control.Lens ((^.)) +import Data.Hashable +import qualified Data.Text as T +import Development.IDE as D +import qualified Distribution.Fields as Syntax +import qualified Distribution.PackageDescription as PD +import qualified Distribution.Parsec.Position as Syntax +import GHC.Generics +import qualified Language.LSP.Protocol.Lens as JL + +data Log + = LogFileSplitError Position + | -- | This should never occur since we extract the word to lookup from the same map we look it up in. + LogUnknownKeyWordInContextError KeyWordName + | -- | This should never occur since we extract the word to lookup from the same map we look it up in. 
+ LogUnknownStanzaNameInContextError StanzaName + | LogFilePathCompleterIOError FilePath IOError + | LogUseWithStaleFastNoResult + | LogMapLookUpOfKnownKeyFailed T.Text + | LogCompletionContext Context + deriving (Show) + +instance Pretty Log where + pretty = \case + LogFileSplitError pos -> "An error occurred when trying to separate the lines of the cabal file at position:" <+> pretty pos + LogUnknownKeyWordInContextError kw -> + "Lookup of key word failed for:" <+> viaShow kw + LogUnknownStanzaNameInContextError sn -> + "Lookup of stanza name failed for:" <+> viaShow sn + LogFilePathCompleterIOError fp ioErr -> + "When trying to complete the file path:" <+> pretty fp <+> "the following unexpected IO error occurred" <+> viaShow ioErr + LogUseWithStaleFastNoResult -> "Package description couldn't be read" + LogMapLookUpOfKnownKeyFailed key -> "Lookup of key in map failed even though it should exist" <+> pretty key + LogCompletionContext ctx -> "Completion context is:" <+> pretty ctx + +type instance RuleResult ParseCabalFile = PD.GenericPackageDescription + +data ParseCabalFile = ParseCabalFile + deriving (Eq, Show, Generic) + +instance Hashable ParseCabalFile + +instance NFData ParseCabalFile + +type instance RuleResult ParseCabalFields = [Syntax.Field Syntax.Position] + +data ParseCabalFields = ParseCabalFields + deriving (Eq, Show, Generic) + +instance Hashable ParseCabalFields + +instance NFData ParseCabalFields + +type instance RuleResult ParseCabalCommonSections = [Syntax.Field Syntax.Position] + +data ParseCabalCommonSections = ParseCabalCommonSections + deriving (Eq, Show, Generic) + +instance Hashable ParseCabalCommonSections + +instance NFData ParseCabalCommonSections + +-- | The context a cursor can be in within a cabal file. +-- +-- We can be in stanzas or the top level, +-- and additionally we can be in a context where we have already +-- written a keyword but no value for it yet +type Context = (StanzaContext, FieldContext) + +-- | Context inside a cabal file. +-- Used to decide which keywords to suggest. +data StanzaContext + = -- | Top level context in a cabal file such as 'author' + TopLevel + | -- | Nested context in a cabal file, such as 'library'. + -- + -- Stanzas have their own fields which differ from top-level fields. + -- Each stanza must be named, such as 'executable exe', + -- except for the main library. + Stanza !StanzaType !(Maybe StanzaName) + deriving (Eq, Show, Read) + +instance Pretty StanzaContext where + pretty TopLevel = "TopLevel" + pretty (Stanza t ms) = "Stanza" <+> pretty t <+> (maybe mempty pretty ms) + +-- | Keyword context in a cabal file. +-- +-- Used to decide whether to suggest values or keywords. +data FieldContext + = -- | Key word context, where a keyword + -- occurs right before the current word + -- to be completed + KeyWord !KeyWordName + | -- | Keyword context where no keyword occurs + -- right before the current word to be completed + None + deriving (Eq, Show, Read) + +instance Pretty FieldContext where + pretty (KeyWord kw) = "KeyWord" <+> pretty kw + pretty None = "No Keyword" + +type KeyWordName = T.Text + +type StanzaName = T.Text + +type StanzaType = T.Text + +-- | Information regarding the current completion status +-- +-- Example: @"dir1/fi@ having been written to the file +-- would correspond to: +-- +-- @ +-- completionPrefix = "dir1/fi" +-- isStringNotation = LeftSide +-- ... +-- @ +-- +-- We define this type instead of simply using +-- VFS.PosPrefixInfo since e.g. 
for filepaths we +-- need more than just the word before the +-- cursor (as can be seen above), +-- since we want to capture the whole filepath +-- before the cursor. +-- +-- We also use this type to wrap all information +-- necessary to complete filepaths and other values +-- in a cabal file. +data CabalPrefixInfo = CabalPrefixInfo + { -- | text prefix to complete + completionPrefix :: T.Text, + -- | Did the completion happen in the context of a string notation, + -- i.e. are there apostrophes around the item to be completed + isStringNotation :: Maybe Apostrophe, + -- | the current position of the cursor in the file + completionCursorPosition :: Position, + -- | range where completion is to be inserted + completionRange :: Range, + -- | directory of the handled cabal file + completionWorkingDir :: FilePath, + -- | filename of the handled cabal file + completionFileName :: T.Text + } + deriving (Eq, Show) + +-- | Where are the apostrophes around the item to be completed? +-- +-- 'Surrounded' means the item to complete already has the necessary apostrophes, +-- while 'LeftSide' means, a closing apostrophe has to be added after the completion item. +data Apostrophe = Surrounded | LeftSide + deriving (Eq, Ord, Show) + +-- | Wraps a completion in apostrophes where appropriate. +-- +-- If a completion starts with an apostrophe we want to end it with an apostrophe. +-- If a completed filepath contains a space, it can only be written in the cabal +-- file if it is wrapped in apostrophes, thus we wrap it. +applyStringNotation :: Maybe Apostrophe -> T.Text -> T.Text +applyStringNotation (Just Surrounded) compl = compl +applyStringNotation (Just LeftSide) compl = compl <> "\"" +applyStringNotation Nothing compl + | Just _ <- T.find (== ' ') compl = "\"" <> compl <> "\"" + | otherwise = compl + +-- | Convert an LSP 'Position' to a 'Syntax.Position'. +-- +-- Cabal Positions start their indexing at 1 while LSP starts at 0. +-- This helper makes sure, the translation is done properly. +lspPositionToCabalPosition :: Position -> Syntax.Position +lspPositionToCabalPosition pos = Syntax.Position + (fromIntegral (pos ^. JL.line) + 1) + (fromIntegral (pos ^. JL.character) + 1) + +-- | Convert an 'Syntax.Position' to a LSP 'Position'. +-- +-- Cabal Positions start their indexing at 1 while LSP starts at 0. +-- This helper makes sure, the translation is done properly. 
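+--
+-- A doctest-style sketch of the expected translation, mirroring the
+-- example given for 'positionFromCabalPosition' in the Diagnostics module:
+--
+-- >>> cabalPositionToLSPPosition $ Syntax.Position 1 1
+-- Position 0 0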
+cabalPositionToLSPPosition :: Syntax.Position -> Position +cabalPositionToLSPPosition (Syntax.Position start end) = Position (toEnum start -1) (toEnum end -1) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Definition.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Definition.hs new file mode 100644 index 0000000000..5f85151199 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Definition.hs @@ -0,0 +1,206 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Definition where + +import Control.Lens ((^.)) +import Control.Monad.Extra +import Control.Monad.IO.Class +import Data.List (find) +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import Development.IDE as D +import Development.IDE.Core.PluginUtils +import qualified Distribution.Fields as Syntax +import Distribution.PackageDescription (Benchmark (..), + BuildInfo (..), + Executable (..), + ForeignLib (..), + GenericPackageDescription, + Library (..), + LibraryName (LMainLibName, LSubLibName), + PackageDescription (..), + TestSuite (..), + library, + unUnqualComponentName) +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Parsec.Position as Syntax +import Distribution.Utils.Generic (safeHead) +import Distribution.Utils.Path (getSymbolicPath) +import Ide.Plugin.Cabal.Completion.CabalFields as CabalFields +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as JL +import qualified Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types +import System.Directory (doesFileExist) +import System.FilePath (joinPath, + takeDirectory, + (<.>), ()) + +-- | Handler for going to definitions. +-- +-- Provides a handler for going to the definition in a cabal file, +-- gathering all possible definitions by calling subfunctions. + +-- TODO: Resolve more cases for go-to definition. +gotoDefinition :: PluginMethodHandler IdeState LSP.Method_TextDocumentDefinition +gotoDefinition ide _ msgParam = do + nfp <- getNormalizedFilePathE uri + cabalFields <- runActionE "cabal-plugin.commonSections" ide $ useE ParseCabalFields nfp + -- Trim the AST tree, so multiple passes in subfunctions won't hurt the performance. + let fieldsOfInterest = maybe cabalFields (:[] ) $ CabalFields.findFieldSection cursor cabalFields + + commonSections <- runActionE "cabal-plugin.commonSections" ide $ useE ParseCabalCommonSections nfp + let mCommonSectionsDef = gotoCommonSectionDefinition uri commonSections cursor fieldsOfInterest + + mModuleDef <- do + mGPD <- liftIO $ runAction "cabal.GPD" ide $ useWithStale ParseCabalFile nfp + case mGPD of + Nothing -> pure Nothing + Just (gpd, _) -> liftIO $ gotoModulesDefinition nfp gpd cursor fieldsOfInterest + + let defs = Maybe.catMaybes [ mCommonSectionsDef + , mModuleDef + ] + -- Take first found definition. + -- We assume, that there can't be multiple definitions, + -- or the most specific definitions come first. + case safeHead defs of + Nothing -> pure $ InR $ InR Null + Just def -> pure $ InL def + where + cursor = Types.lspPositionToCabalPosition (msgParam ^. 
JL.position) + uri = msgParam ^. JL.textDocument . JL.uri + +-- | Definitions for Sections. +-- +-- Provides a Definition if cursor is pointed at an identifier, +-- otherwise gives Nothing. +-- The definition is found by traversing the sections and comparing their name to +-- the clicked identifier. +gotoCommonSectionDefinition + :: Uri -- ^ Cabal file URI + -> [Syntax.Field Syntax.Position] -- ^ Found common sections + -> Syntax.Position -- ^ Cursor position + -> [Syntax.Field Syntax.Position] -- ^ Trimmed cabal AST on a cursor + -> Maybe Definition +gotoCommonSectionDefinition uri commonSections cursor fieldsOfInterest = do + cursorText <- CabalFields.findTextWord cursor fieldsOfInterest + commonSection <- find (isSectionArgName cursorText) commonSections + Just $ Definition $ InL $ Location uri $ CabalFields.getFieldLSPRange commonSection + where + isSectionArgName name (Syntax.Section _ sectionArgName _) = name == CabalFields.onelineSectionArgs sectionArgName + isSectionArgName _ _ = False + +-- | Definitions for Modules. +-- +-- Provides a Definition if cursor is pointed at a +-- exposed-module or other-module field, otherwise gives Nothing +-- +-- Definition is found by looking for a module name, +-- the cursor is pointing to and looking for it in @BuildInfo@s. +-- Note that since a trimmed ast is provided, a @Definition@ to +-- a module with the same name as the target one, +-- but in another build target can't be given. +-- +-- See resolving @Config@ module in tests. +gotoModulesDefinition + :: NormalizedFilePath -- ^ Normalized FilePath to the cabal file + -> GenericPackageDescription + -> Syntax.Position -- ^ Cursor position + -> [Syntax.Field Syntax.Position] -- ^ Trimmed cabal AST on a cursor + -> IO (Maybe Definition) +gotoModulesDefinition nfp gpd cursor fieldsOfInterest = do + let mCursorText = CabalFields.findTextWord cursor fieldsOfInterest + moduleNames = CabalFields.getModulesNames fieldsOfInterest + mModuleName = find (isModuleName mCursorText) moduleNames + + case mModuleName of + Nothing -> pure Nothing + Just (mBuildTargetNames, moduleName) -> do + let buildInfos = foldMap (lookupBuildTargetPackageDescription + (flattenPackageDescription gpd)) + mBuildTargetNames + sourceDirs = map getSymbolicPath $ concatMap hsSourceDirs buildInfos + potentialPaths = map (\dir -> takeDirectory (fromNormalizedFilePath nfp) dir toHaskellFile moduleName) sourceDirs + allPaths <- liftIO $ filterM doesFileExist potentialPaths + -- Don't provide the range, since there is little benefit for it + let locations = map (\pth -> Location (filePathToUri pth) (mkRange 0 0 0 0)) allPaths + case safeHead locations of -- We assume there could be only one source location + Nothing -> pure Nothing + Just location -> pure $ Just $ Definition $ InL location + where + isModuleName (Just name) (_, moduleName) = name == moduleName + isModuleName _ _ = False + +-- | Gives all `buildInfo`s given a target name. +-- +-- `Maybe buildTargetName` is provided, and if it's +-- Nothing we assume, that it's a main library. +-- Otherwise looks for the provided name. 
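+--
+-- A minimal usage sketch (@pd@ and the target name @"my-exe"@ are hypothetical):
+--
+-- > lookupBuildTargetPackageDescription pd Nothing          -- 'BuildInfo' of the main library, if present
+-- > lookupBuildTargetPackageDescription pd (Just "my-exe")  -- 'BuildInfo's of components named "my-exe"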
+lookupBuildTargetPackageDescription :: PackageDescription -> Maybe T.Text -> [BuildInfo] +lookupBuildTargetPackageDescription (PackageDescription {..}) Nothing = + case library of + Nothing -> [] -- Target is a main library but no main library was found + Just (Library {libBuildInfo}) -> [libBuildInfo] +lookupBuildTargetPackageDescription (PackageDescription {..}) (Just buildTargetName) = + Maybe.catMaybes $ + map executableNameLookup executables <> + map subLibraryNameLookup subLibraries <> + map foreignLibsNameLookup foreignLibs <> + map testSuiteNameLookup testSuites <> + map benchmarkNameLookup benchmarks + where + executableNameLookup :: Executable -> Maybe BuildInfo + executableNameLookup (Executable {exeName, buildInfo}) = + if T.pack (unUnqualComponentName exeName) == buildTargetName + then Just buildInfo + else Nothing + subLibraryNameLookup :: Library -> Maybe BuildInfo + subLibraryNameLookup (Library {libName, libBuildInfo}) = + case libName of + (LSubLibName name) -> + if T.pack (unUnqualComponentName name) == buildTargetName + then Just libBuildInfo + else Nothing + LMainLibName -> Nothing + foreignLibsNameLookup :: ForeignLib -> Maybe BuildInfo + foreignLibsNameLookup (ForeignLib {foreignLibName, foreignLibBuildInfo}) = + if T.pack (unUnqualComponentName foreignLibName) == buildTargetName + then Just foreignLibBuildInfo + else Nothing + testSuiteNameLookup :: TestSuite -> Maybe BuildInfo + testSuiteNameLookup (TestSuite {testName, testBuildInfo}) = + if T.pack (unUnqualComponentName testName) == buildTargetName + then Just testBuildInfo + else Nothing + benchmarkNameLookup :: Benchmark -> Maybe BuildInfo + benchmarkNameLookup (Benchmark {benchmarkName, benchmarkBuildInfo}) = + if T.pack (unUnqualComponentName benchmarkName) == buildTargetName + then Just benchmarkBuildInfo + else Nothing + +-- | Converts a name of a module to a FilePath. +-- Is needed to guess the relative path to a file +-- using the name of the module. +-- We assume, that correct module naming is guaranteed. +-- +-- Warning: Generally not advised to use, if there are +-- better ways to get the path. +-- +-- Examples: (output is system dependent) +-- >>> toHaskellFile "My.Module.Lib" +-- "My/Module/Lib.hs" +-- >>> toHaskellFile "Main" +-- "Main.hs" +toHaskellFile :: T.Text -> FilePath +toHaskellFile moduleName = joinPath (map T.unpack $ T.splitOn "." moduleName) <.> ".hs" diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs new file mode 100644 index 0000000000..5429ac0bb9 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Diagnostics.hs @@ -0,0 +1,93 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.Cabal.Diagnostics +( errorDiagnostic +, warningDiagnostic +, positionFromCabalPosition +, fatalParseErrorDiagnostic + -- * Re-exports +, FileDiagnostic +, Diagnostic(..) 
+) +where + +import Control.Lens ((&), (.~)) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic) +import Development.IDE.Types.Diagnostics (fdLspDiagnosticL, + ideErrorWithSource) +import Distribution.Fields (showPError, showPWarning) +import qualified Distribution.Parsec as Syntax +import Ide.PluginUtils (extendNextLine) +import Language.LSP.Protocol.Lens (range) +import Language.LSP.Protocol.Types (Diagnostic (..), + DiagnosticSeverity (..), + NormalizedFilePath, + Position (Position), + Range (Range), + fromNormalizedFilePath) + +-- | Produce a diagnostic for a fatal Cabal parser error. +fatalParseErrorDiagnostic :: NormalizedFilePath -> T.Text -> FileDiagnostic +fatalParseErrorDiagnostic fp msg = + mkDiag fp "cabal" DiagnosticSeverity_Error (toBeginningOfNextLine Syntax.zeroPos) msg + +-- | Produce a diagnostic from a Cabal parser error +errorDiagnostic :: NormalizedFilePath -> Syntax.PError -> FileDiagnostic +errorDiagnostic fp err@(Syntax.PError pos _) = + mkDiag fp "cabal" DiagnosticSeverity_Error (toBeginningOfNextLine pos) msg + where + msg = T.pack $ showPError (fromNormalizedFilePath fp) err + +-- | Produce a diagnostic from a Cabal parser warning +warningDiagnostic :: NormalizedFilePath -> Syntax.PWarning -> FileDiagnostic +warningDiagnostic fp warning@(Syntax.PWarning _ pos _) = + mkDiag fp "cabal" DiagnosticSeverity_Warning (toBeginningOfNextLine pos) msg + where + msg = T.pack $ showPWarning (fromNormalizedFilePath fp) warning + +-- | The Cabal parser does not output a _range_ for a warning/error, +-- only a single source code 'Lib.Position'. +-- We define the range to be _from_ this position +-- _to_ the first column of the next line. +toBeginningOfNextLine :: Syntax.Position -> Range +toBeginningOfNextLine cabalPos = extendNextLine $ Range pos pos + where + pos = positionFromCabalPosition cabalPos + +-- | Convert a 'Lib.Position' from Cabal to a 'Range' that LSP understands. +-- +-- Prefer this function over hand-rolled unpacking/packing, since LSP is zero-based, +-- while Cabal is one-based. +-- +-- >>> positionFromCabalPosition $ Lib.Position 1 1 +-- Position 0 0 +positionFromCabalPosition :: Syntax.Position -> Position +positionFromCabalPosition (Syntax.Position line column) = Position (fromIntegral line') (fromIntegral col') + where + -- LSP is zero-based, Cabal is one-based + -- Cabal can return line 0 for errors in the first line + line' = if line <= 0 then 0 else line-1 + col' = if column <= 0 then 0 else column-1 + +-- | Create a 'FileDiagnostic' +mkDiag + :: NormalizedFilePath + -- ^ Cabal file path + -> T.Text + -- ^ Where does the diagnostic come from? + -> DiagnosticSeverity + -- ^ Severity + -> Range + -- ^ Which source code range should the editor highlight? + -> T.Text + -- ^ The message displayed by the editor + -> FileDiagnostic +mkDiag file diagSource sev loc msg = + ideErrorWithSource + (Just diagSource) + (Just sev) + file + msg + Nothing + & fdLspDiagnosticL . 
range .~ loc diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/FieldSuggest.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/FieldSuggest.hs new file mode 100644 index 0000000000..2e77ccb193 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/FieldSuggest.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Cabal.FieldSuggest + ( fieldErrorName, + fieldErrorAction, + -- * Re-exports + T.Text, + Diagnostic (..), + ) +where + +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Language.LSP.Protocol.Types (CodeAction (..), + CodeActionKind (..), + Diagnostic (..), Position (..), + Range (..), TextEdit (..), Uri, + WorkspaceEdit (..)) +import Text.Regex.TDFA + +-- | Generate all code actions for given file, erroneous/unknown field and suggestions +fieldErrorAction + :: Uri + -- ^ File for which the diagnostic was generated + -> T.Text + -- ^ Original (unknown) field + -> [T.Text] + -- ^ Suggestions for the given file + -> Range + -- ^ Location of diagnostic + -> [CodeAction] +fieldErrorAction uri original suggestions range = + fmap mkCodeAction suggestions + where + mkCodeAction suggestion = + let + -- Range returned by cabal here represents fragment from start of offending identifier + -- to end of line, we modify this range to be to the end of the identifier + adjustRange (Range rangeFrom@(Position lineNr col) _) = + Range rangeFrom (Position lineNr (col + fromIntegral (T.length original))) + title = "Replace with " <> suggestion' + tedit = [TextEdit (adjustRange range ) suggestion'] + edit = WorkspaceEdit (Just $ Map.singleton uri tedit) Nothing Nothing + in CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing (Just edit) Nothing Nothing + where + -- dropping colon from the end of suggestion + suggestion' = T.dropEnd 1 suggestion + +-- | Given a diagnostic returned by 'Ide.Plugin.Cabal.Diag.errorDiagnostic', +-- if it represents an "Unknown field"-error with incorrect identifier +-- then return the incorrect identifier together with original diagnostics. 
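+--
+-- For instance, a diagnostic whose message reads (field name purely illustrative)
+--
+-- > Unknown field: "autogen-module"
+--
+-- would yield @Just ("autogen-module", diag)@, which 'fieldErrorAction' can then
+-- turn into "Replace with ..." code actions.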
+fieldErrorName :: + Diagnostic -> + -- ^ Output of 'Ide.Plugin.Cabal.Diag.errorDiagnostic' + Maybe (T.Text, Diagnostic) + -- ^ Original (incorrect) field name with the suggested replacement +fieldErrorName diag = + mSuggestion (_message diag) >>= \case + [original] -> Just (original, diag) + _ -> Nothing + where + regex :: T.Text + regex = "Unknown field: \"(.*)\"" + mSuggestion msg = getMatch <$> (msg :: T.Text) =~~ regex + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [T.Text] + getMatch (_, _, _, results) = results diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs new file mode 100644 index 0000000000..28cf1e39a8 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs @@ -0,0 +1,56 @@ +module Ide.Plugin.Cabal.Files where + +import Control.Monad (filterM) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import qualified Data.Text as T +import Distribution.PackageDescription.Quirks (patchQuirks) +import Distribution.Simple.Utils (safeHead) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import System.Directory (doesFileExist, + listDirectory) +import System.FilePath + +{- | Given a path to a haskell file, returns the closest cabal file. + If a package.yaml is present in same directory as the .cabal file, returns nothing, + because adding a dependency to a generated cabal file will break propagation of changes + from package.yaml to cabal files in stack projects. + If cabal file wasn't found, returns Nothing. +-} +findResponsibleCabalFile :: FilePath -> IO (Maybe FilePath) +findResponsibleCabalFile haskellFilePath = do + let dirPath = dropFileName haskellFilePath + allDirPaths = reverse $ scanl1 () (splitPath dirPath) -- sorted from most to least specific + go allDirPaths + where + go [] = pure Nothing + go (path : ps) = do + objects <- listDirectory path + let objectsWithPaths = map (\obj -> path <> obj) objects + objectsCabalExtension = filter (\c -> takeExtension c == ".cabal") objectsWithPaths + cabalFiles <- filterM (\c -> doesFileExist c) objectsCabalExtension + case safeHead cabalFiles of + Nothing -> go ps + Just cabalFile -> guardAgainstHpack path cabalFile + where + guardAgainstHpack :: FilePath -> FilePath -> IO (Maybe FilePath) + guardAgainstHpack path cabalFile = do + exists <- doesFileExist $ path "package.yaml" + if exists then pure Nothing else pure $ Just cabalFile + +{- | Gives a cabal file's contents or throws error. + + Inspired by @readCabalFile@ in cabal-add, Distribution.Client.Main + + This is a fallback option! + Use only if the `GetFileContents` fails. +-} +readCabalFile :: (MonadIO m) => FilePath -> ExceptT PluginError m ByteString +readCabalFile fileName = do + cabalFileExists <- liftIO $ doesFileExist fileName + if cabalFileExists + then snd . 
patchQuirks <$> liftIO (B.readFile fileName) + else throwE $ PluginInternalError $ T.pack ("Failed to read cabal file at " <> fileName) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/LicenseSuggest.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/LicenseSuggest.hs new file mode 100644 index 0000000000..7da1277289 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/LicenseSuggest.hs @@ -0,0 +1,86 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.Cabal.LicenseSuggest +( licenseErrorSuggestion +, licenseErrorAction +, licenseNames + -- * Re-exports +, T.Text +, Diagnostic(..) +) +where + +import qualified Data.Map as Map +import qualified Data.Text as T +import Language.LSP.Protocol.Types (CodeAction (CodeAction), + CodeActionKind (CodeActionKind_QuickFix), + Diagnostic (..), + Position (Position), + Range (Range), + TextEdit (TextEdit), Uri, + WorkspaceEdit (WorkspaceEdit)) +import Text.Regex.TDFA + +import qualified Data.List as List +import Distribution.SPDX.LicenseId (licenseId) +import qualified Text.Fuzzy.Parallel as Fuzzy + +-- | Given a diagnostic returned by 'Ide.Plugin.Cabal.Diag.errorDiagnostic', +-- if it represents an "Unknown SPDX license identifier"-error along +-- with a suggestion, then return a 'CodeAction' for replacing the +-- the incorrect license identifier with the suggestion. +licenseErrorAction + :: Int -- ^ Maximum number of suggestions to return + -> Uri -- ^ File for which the diagnostic was generated + -> Diagnostic -- ^ Output of 'Ide.Plugin.Cabal.Diag.errorDiagnostic' + -> [CodeAction] +licenseErrorAction maxCompletions uri diag = + mkCodeAction <$> licenseErrorSuggestion maxCompletions (_message diag) + where + mkCodeAction (original, suggestion) = + let + -- The Cabal parser does not output the _range_ of the incorrect license identifier, + -- only a single source code position. Consequently, in 'Ide.Plugin.Cabal.Diag.errorDiagnostic' + -- we define the range to be from the returned position the first column of the next line. + -- Since the "replace" code action replaces this range, we need to modify the range to + -- start at the first character of the invalid license identifier. We achieve this by + -- subtracting the length of the identifier from the beginning of the range. + adjustRange (Range (Position line col) rangeTo) = + Range (Position line (col - fromIntegral (T.length original))) rangeTo + title = "Replace with " <> suggestion + -- We must also add a newline character to the replacement since the range returned by + -- 'Ide.Plugin.Cabal.Diag.errorDiagnostic' ends at the beginning of the following line. + tedit = [TextEdit (adjustRange $ _range diag) (suggestion <> "\n")] + edit = WorkspaceEdit (Just $ Map.singleton uri tedit) Nothing Nothing + in CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing (Just edit) Nothing Nothing + +-- | License name of every license supported by cabal +licenseNames :: [T.Text] +licenseNames = map (T.pack . licenseId) [minBound .. maxBound] + +-- | Given a diagnostic returned by 'Ide.Plugin.Cabal.Diag.errorDiagnostic', +-- provide possible corrections for SPDX license identifiers +-- based on the list specified in Cabal. +-- Results are sorted by best fit, and prefer solutions that have smaller +-- length distance to the original word. 
+-- +-- >>> licenseErrorSuggestion 2 (T.pack "Unknown SPDX license identifier: 'BSD3'") +-- [("BSD3","BSD-3-Clause"),("BSD3","BSD-3-Clause-LBNL")] +licenseErrorSuggestion :: + Int -- ^ Maximum number of suggestions to return + -> T.Text -- ^ Output of 'Ide.Plugin.Cabal.Diag.errorDiagnostic' + -> [(T.Text, T.Text)] + -- ^ (Original (incorrect) license identifier, suggested replacement) +licenseErrorSuggestion maxCompletions msg = + (getMatch <$> msg =~~ regex) >>= \case + [original] -> + let matches = map Fuzzy.original $ Fuzzy.simpleFilter Fuzzy.defChunkSize maxCompletions original licenseNames + in [(original,candidate) | candidate <- List.sortOn (lengthDistance original) matches] + _ -> [] + where + regex :: T.Text + regex = "Unknown SPDX license identifier: '(.*)'" + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [T.Text] + getMatch (_, _, _, results) = results + lengthDistance original x = abs $ T.length original - T.length x diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs new file mode 100644 index 0000000000..67cf97ccee --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs @@ -0,0 +1,122 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.OfInterest (ofInterestRules, getCabalFilesOfInterestUntracked, addFileOfInterest, deleteFileOfInterest, kick, Log) where + +import Control.Concurrent.Strict +import Control.DeepSeq +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import Data.Hashable +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Proxy +import qualified Data.Text () +import Development.IDE as D +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph (Key, alwaysRerun) +import Development.IDE.Types.Shake (toKey) +import GHC.Generics +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Orphans () + +data Log + = LogShake Shake.Log + | LogFOI (HashMap NormalizedFilePath FileOfInterestStatus) + deriving (Show) + +instance Pretty Log where + pretty = \case + LogShake log' -> pretty log' + LogFOI files -> + "Set files of interest to:" <+> viaShow files + +-- ---------------------------------------------------------------- +-- Cabal file of interest rules and global variable +-- ---------------------------------------------------------------- + +{- | Cabal files that are currently open in the lsp-client. +Specific actions happen when these files are saved, closed or modified, +such as generating diagnostics, re-parsing, etc... + +We need to store the open files to parse them again if we restart the shake session. +Restarting of the shake session happens whenever these files are modified. 
+-} +newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) + +instance Shake.IsIdeGlobal OfInterestCabalVar + +data IsCabalFileOfInterest = IsCabalFileOfInterest + deriving (Eq, Show, Generic) +instance Hashable IsCabalFileOfInterest +instance NFData IsCabalFileOfInterest + +type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult + +data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus + deriving (Eq, Show, Generic) +instance Hashable CabalFileOfInterestResult +instance NFData CabalFileOfInterestResult + +{- | The rule that initialises the files of interest state. + +Needs to be run on start-up. +-} +ofInterestRules :: Recorder (WithPriority Log) -> Rules () +ofInterestRules recorder = do + Shake.addIdeGlobal . OfInterestCabalVar =<< liftIO (newVar HashMap.empty) + Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do + alwaysRerun + filesOfInterest <- getCabalFilesOfInterestUntracked + let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest + fp = summarize foi + res = (Just fp, Just foi) + return res + where + summarize NotCabalFOI = BS.singleton 0 + summarize (IsCabalFOI OnDisk) = BS.singleton 1 + summarize (IsCabalFOI (Modified False)) = BS.singleton 2 + summarize (IsCabalFOI (Modified True)) = BS.singleton 3 + +getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) +getCabalFilesOfInterestUntracked = do + OfInterestCabalVar var <- Shake.getIdeGlobalAction + liftIO $ readVar var + +addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO [Key] +addFileOfInterest recorder state f v = do + OfInterestCabalVar var <- Shake.getIdeGlobalState state + (prev, files) <- modifyVar var $ \dict -> do + let (prev, new) = HashMap.alterF (,Just v) f dict + pure (new, (prev, new)) + if prev /= Just v + then do + log' Debug $ LogFOI files + return [toKey IsCabalFileOfInterest f] + else return [] + where + log' = logWith recorder + +deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO [Key] +deleteFileOfInterest recorder state f = do + OfInterestCabalVar var <- Shake.getIdeGlobalState state + files <- modifyVar' var $ HashMap.delete f + log' Debug $ LogFOI files + return [toKey IsFileOfInterest f] + where + log' = logWith recorder + +{- | This is the kick function for the cabal plugin. +We run this action, whenever we shake session us run/restarted, which triggers +actions to produce diagnostics for cabal files. + +It is paramount that this kick-function can be run quickly, since it is a blocking +function invocation. 
+-} +kick :: Action () +kick = do + files <- HashMap.keys <$> getCabalFilesOfInterestUntracked + Shake.runWithSignal (Proxy @"kick/start/cabal") (Proxy @"kick/done/cabal") files Types.ParseCabalFile diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs new file mode 100644 index 0000000000..8ecb361025 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs @@ -0,0 +1,39 @@ +{-# OPTIONS_GHC -Wno-orphans #-} +module Ide.Plugin.Cabal.Orphans where +import Control.DeepSeq +import Data.Aeson +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Types as Aeson +import qualified Data.Text as T +import Distribution.Fields.Field +import Distribution.PackageDescription (ComponentName) +import Distribution.Parsec +import Distribution.Pretty (prettyShow) + +-- ---------------------------------------------------------------- +-- Cabal-syntax orphan instances we need sometimes +-- ---------------------------------------------------------------- + +instance NFData (Field Position) where + rnf (Field name fieldLines) = rnf name `seq` rnf fieldLines + rnf (Section name sectionArgs fields) = rnf name `seq` rnf sectionArgs `seq` rnf fields + +instance NFData (Name Position) where + rnf (Name ann fName) = rnf ann `seq` rnf fName + +instance NFData (FieldLine Position) where + rnf (FieldLine ann bs) = rnf ann `seq` rnf bs + +instance NFData (SectionArg Position) where + rnf (SecArgName ann bs) = rnf ann `seq` rnf bs + rnf (SecArgStr ann bs) = rnf ann `seq` rnf bs + rnf (SecArgOther ann bs) = rnf ann `seq` rnf bs + +instance ToJSON ComponentName where + toJSON = Aeson.String . T.pack . prettyShow + +instance FromJSON ComponentName where + parseJSON = Aeson.withText "ComponentName" $ \t -> + case eitherParsec (T.unpack t) of + Left err -> Aeson.parseFail err + Right r -> pure r diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Outline.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Outline.hs new file mode 100644 index 0000000000..40f348f88c --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Outline.hs @@ -0,0 +1,119 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.Cabal.Outline where + +import Control.Monad.IO.Class +import Data.Maybe +import qualified Data.Text as T +import Data.Text.Encoding (decodeUtf8) +import Development.IDE.Core.Rules +import Development.IDE.Core.Shake (IdeState (shakeExtras), + runIdeAction, + useWithStaleFast) +import Development.IDE.Types.Location (toNormalizedFilePath') +import Distribution.Fields.Field (Field (Field, Section), + Name (Name)) +import Distribution.Parsec.Position (Position) +import Ide.Plugin.Cabal.Completion.CabalFields (onelineSectionArgs) +import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), + cabalPositionToLSPPosition) +import Ide.Plugin.Cabal.Orphans () +import Ide.Types (PluginMethodHandler) +import Language.LSP.Protocol.Message (Method (..)) +import Language.LSP.Protocol.Types (DocumentSymbol (..)) +import qualified Language.LSP.Protocol.Types as LSP + + +moduleOutline :: PluginMethodHandler IdeState Method_TextDocumentDocumentSymbol +moduleOutline ideState _ LSP.DocumentSymbolParams {_textDocument = LSP.TextDocumentIdentifier uri} = + case LSP.uriToFilePath uri of + Just (toNormalizedFilePath' -> fp) -> do + mFields <- liftIO $ runIdeAction "cabal-plugin.fields" 
(shakeExtras ideState) (useWithStaleFast ParseCabalFields fp) + case fmap fst mFields of + Just fieldPositions -> pure $ LSP.InR (LSP.InL allSymbols) + where + allSymbols = mapMaybe documentSymbolForField fieldPositions + Nothing -> pure $ LSP.InL [] + Nothing -> pure $ LSP.InL [] + +-- | Creates a @DocumentSymbol@ object for the +-- cabal AST, without displaying @fieldLines@ and +-- displaying @Section Name@ and @SectionArgs@ in one line. +-- +-- @fieldLines@ are leaves of a cabal AST, so they are omitted +-- in the outline. Sections have to be displayed in one line, because +-- the AST representation looks unnatural. See examples: +-- +-- * part of a cabal file: +-- +-- > if impl(ghc >= 9.8) +-- > ghc-options: -Wall +-- +-- * AST representation: +-- +-- > if +-- > impl +-- > ( +-- > ghc >= 9.8 +-- > ) +-- > +-- > ghc-options: +-- > -Wall +-- +-- * resulting @DocumentSymbol@: +-- +-- > if impl(ghc >= 9.8) +-- > ghc-options: +-- > +documentSymbolForField :: Field Position -> Maybe DocumentSymbol +documentSymbolForField (Field (Name pos fieldName) _) = + Just + (defDocumentSymbol range) + { _name = decodeUtf8 fieldName, + _kind = LSP.SymbolKind_Field, + _children = Nothing + } + where + range = cabalPositionToLSPRange pos `addNameLengthToLSPRange` decodeUtf8 fieldName +documentSymbolForField (Section (Name pos fieldName) sectionArgs fields) = + Just + (defDocumentSymbol range) + { _name = joinedName, + _kind = LSP.SymbolKind_Object, + _children = + Just + (mapMaybe documentSymbolForField fields) + } + where + joinedName = decodeUtf8 fieldName <> " " <> onelineSectionArgs sectionArgs + range = cabalPositionToLSPRange pos `addNameLengthToLSPRange` joinedName + +-- | Creates a single point LSP range +-- using cabal position +cabalPositionToLSPRange :: Position -> LSP.Range +cabalPositionToLSPRange pos = LSP.Range lspPos lspPos + where + lspPos = cabalPositionToLSPPosition pos + +addNameLengthToLSPRange :: LSP.Range -> T.Text -> LSP.Range +addNameLengthToLSPRange (LSP.Range pos1 (LSP.Position line char)) name = + LSP.Range + pos1 + (LSP.Position line (char + fromIntegral (T.length name))) + +defDocumentSymbol :: LSP.Range -> DocumentSymbol +defDocumentSymbol range = DocumentSymbol + { _detail = Nothing + , _deprecated = Nothing + , _name = "" + , _kind = LSP.SymbolKind_File + , _range = range + , _selectionRange = range + , _children = Nothing + , _tags = Nothing + } diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs new file mode 100644 index 0000000000..f2b3d74639 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs @@ -0,0 +1,40 @@ +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.Cabal.Parse +( parseCabalFileContents +, readCabalFields +) where + +import qualified Data.ByteString as BS +import Data.List.NonEmpty (NonEmpty (..)) +import Distribution.Fields (PError (..), + PWarning (..)) +import Distribution.Fields.ParseResult (runParseResult) +import Distribution.PackageDescription.Parsec (parseGenericPackageDescription) +import Distribution.Types.GenericPackageDescription (GenericPackageDescription (..)) +import Distribution.Types.Version (Version) +import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics + +import qualified Data.Text as T +import Development.IDE +import qualified Distribution.Fields.Parser as Syntax +import qualified Distribution.Parsec.Position as Syntax + + +parseCabalFileContents + :: BS.ByteString -- ^ UTF-8 encoded bytestring + -> ([PWarning], Either (Maybe 
Version, NonEmpty PError) GenericPackageDescription) +parseCabalFileContents bs = + runParseResult (parseGenericPackageDescription bs) + +readCabalFields :: + NormalizedFilePath -> + BS.ByteString -> + Either FileDiagnostic [Syntax.Field Syntax.Position] +readCabalFields file contents = do + case Syntax.readFields' contents of + Left parseError -> + Left $ Diagnostics.fatalParseErrorDiagnostic file + $ "Failed to parse cabal file: " <> T.pack (show parseError) + Right (fields, _warnings) -> do + -- we don't want to double report diagnostics, all diagnostics are produced by 'ParseCabalFile'. + Right fields diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs new file mode 100644 index 0000000000..de7bb9a5fd --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Rules (cabalRules, Log) where + +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import qualified Data.List as List +import qualified Data.List.NonEmpty as NE +import qualified Data.Maybe as Maybe +import qualified Data.Text () +import qualified Data.Text as T +import qualified Data.Text.Encoding as Encoding +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE as D +import qualified Development.IDE.Core.Shake as Shake +import qualified Distribution.CabalSpecVersion as Cabal +import qualified Distribution.Fields as Syntax +import Distribution.Parsec.Error +import qualified Ide.Plugin.Cabal.Completion.Data as Data +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics +import qualified Ide.Plugin.Cabal.OfInterest as OfInterest +import Ide.Plugin.Cabal.Orphans () +import qualified Ide.Plugin.Cabal.Parse as Parse +import Ide.Types +import Text.Regex.TDFA + +data Log + = LogModificationTime NormalizedFilePath FileVersion + | LogShake Shake.Log + | LogOfInterest OfInterest.Log + | LogDocSaved Uri + deriving (Show) + +instance Pretty Log where + pretty = \case + LogShake log' -> pretty log' + LogOfInterest log' -> pretty log' + LogModificationTime nfp modTime -> + "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime) + LogDocSaved uri -> + "Saved text document:" <+> pretty (getUri uri) + +cabalRules :: Recorder (WithPriority Log) -> PluginId -> Rules () +cabalRules recorder plId = do + -- Make sure we initialise the cabal files-of-interest. + OfInterest.ofInterestRules (cmapWithPrio LogOfInterest recorder) + -- Rule to produce diagnostics for cabal files. + define (cmapWithPrio LogShake recorder) $ \ParseCabalFields file -> do + config <- getPluginConfigAction plId + if not (plcGlobalOn config && plcDiagnosticsOn config) + then pure ([], Nothing) + else do + -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), + -- we rerun this rule because this rule *depends* on GetModificationTime. 
+ (t, mCabalSource) <- use_ GetFileContents file + log' Debug $ LogModificationTime file t + contents <- case mCabalSource of + Just sources -> + pure $ Encoding.encodeUtf8 $ Rope.toText sources + Nothing -> do + liftIO $ BS.readFile $ fromNormalizedFilePath file + + case Parse.readCabalFields file contents of + Left _ -> + pure ([], Nothing) + Right fields -> + pure ([], Just fields) + + define (cmapWithPrio LogShake recorder) $ \ParseCabalCommonSections file -> do + fields <- use_ ParseCabalFields file + let commonSections = + Maybe.mapMaybe + ( \case + commonSection@(Syntax.Section (Syntax.Name _ "common") _ _) -> Just commonSection + _ -> Nothing + ) + fields + pure ([], Just commonSections) + + define (cmapWithPrio LogShake recorder) $ \ParseCabalFile file -> do + config <- getPluginConfigAction plId + if not (plcGlobalOn config && plcDiagnosticsOn config) + then pure ([], Nothing) + else do + -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), + -- we rerun this rule because this rule *depends* on GetModificationTime. + (t, mCabalSource) <- use_ GetFileContents file + log' Debug $ LogModificationTime file t + contents <- case mCabalSource of + Just sources -> + pure $ Encoding.encodeUtf8 $ Rope.toText sources + Nothing -> do + liftIO $ BS.readFile $ fromNormalizedFilePath file + + -- Instead of fully reparsing the sources to get a 'GenericPackageDescription', + -- we would much rather re-use the already parsed results of 'ParseCabalFields'. + -- Unfortunately, Cabal-syntax doesn't expose the function 'parseGenericPackageDescription'' + -- which allows us to resume the parsing pipeline with '[Field Position]'. + let (pWarnings, pm) = Parse.parseCabalFileContents contents + let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings + case pm of + Left (_cabalVersion, pErrorNE) -> do + let regexUnknownCabalBefore310 :: T.Text + -- We don't support the cabal version, this should not be an error, as the + -- user did not do anything wrong. Instead we cast it to a warning + regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*" + regexUnknownCabalVersion :: T.Text + regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+" + unsupportedCabalHelpText = + unlines + [ "The used `cabal-version` is not fully supported by this `HLS` binary." + , "Either the `cabal-version` is unknown, or too new for this executable." + , "This means that some functionality might not work as expected." + , "If you face any issues, try downgrading to a supported `cabal-version` or upgrading `HLS` if possible." + , "" + , "Supported versions are: " + <> List.intercalate + ", " + (fmap Cabal.showCabalSpecVersion Data.supportedCabalVersions) + ] + errorDiags = + NE.toList $ + NE.map + ( \pe@(PError pos text) -> + if any + (text =~) + [ regexUnknownCabalBefore310 + , regexUnknownCabalVersion + ] + then + Diagnostics.warningDiagnostic + file + ( Syntax.PWarning Syntax.PWTOther pos $ + unlines + [ text + , unsupportedCabalHelpText + ] + ) + else Diagnostics.errorDiagnostic file pe + ) + pErrorNE + allDiags = errorDiags <> warningDiags + pure (allDiags, Nothing) + Right gpd -> do + pure (warningDiags, Just gpd) + + action $ do + -- Run the cabal kick. This code always runs when 'shakeRestart' is run. + -- Must be careful to not impede the performance too much. Crucial to + -- a snappy IDE experience. 
+ OfInterest.kick + where + log' = logWith recorder diff --git a/plugins/hls-cabal-plugin/test/CabalAdd.hs b/plugins/hls-cabal-plugin/test/CabalAdd.hs new file mode 100644 index 0000000000..8cbac90e43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/CabalAdd.hs @@ -0,0 +1,240 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +module CabalAdd ( + cabalAddDependencyTests, + cabalAddModuleTests, +) where + +import Control.Lens ((^.)) +import Control.Lens.Fold ((^?)) +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Internal.Search as T +import Distribution.ModuleName (fromString) +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Pretty as Pretty +import Distribution.Types.Component +import Distribution.Utils.Generic (safeHead) +import Ide.Plugin.Cabal.CabalAdd.CodeAction (hiddenPackageSuggestion) +import Ide.Plugin.Cabal.Parse (parseCabalFileContents) +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Types as J +import System.FilePath +import Test.Hls +import Utils + +cabalAddModuleTests :: TestTree +cabalAddModuleTests = + testGroup + "Add Module" + [ runHaskellTestCaseSession "Add to benchmark" ("cabal-add-module" "library") $ do + let compName = CBenchName "test1" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to executable" ("cabal-add-module" "library") $ do + let compName = CExeName "test" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to test-suite" ("cabal-add-module" "library") $ do + let compName = CTestName "test2" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to library" ("cabal-add-module" "library") $ do + let compName = CLibName $ LSubLibName "test3" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to main library" ("cabal-add-module" "library") $ do + let compName = CLibName LMainLibName + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + ] + where + generateAddDependencyTestSession :: FilePath -> FilePath -> ComponentName -> Session PackageDescription + generateAddDependencyTestSession cabalFile haskellFile compName = do + haskellDoc <- openDoc haskellFile "haskell" + cabalDoc <- openDoc cabalFile "cabal" + _ <- waitForDiagnosticsFrom haskellDoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions haskellDoc + let selectedCas = filter (\ca -> (T.pack $ "Add to " <> Pretty.prettyShow compName <> " ") `T.isPrefixOf` (ca ^. L.title)) cas + mapM_ executeCodeAction $ selectedCas + _ <- skipManyTill anyMessage $ getDocumentEdit cabalDoc -- Wait for the changes in cabal file + contents <- documentContents cabalDoc + case parseCabalFileContents $ T.encodeUtf8 contents of + (_, Right gpd) -> pure $ flattenPackageDescription gpd + _ -> liftIO $ assertFailure "could not parse cabal file to gpd" + + -- | Verify that the given module was added to the desired component. 
+ -- Note that we do not care whether it was added to exposed-modules or other-modules of that component. + checkModuleAddedTo :: PackageDescription -> String -> ComponentName -> Session () + checkModuleAddedTo pd modName compName = do + let comp = getComponent pd compName + compModules = case comp of + CLib lib -> explicitLibModules lib + CFLib fLib -> foreignLibModules fLib + CExe exe -> exeModules exe + CTest test -> testModules test + CBench bench -> benchmarkModules bench + testDescription = modName <> " was added to " <> showComponentName compName + liftIO $ assertBool testDescription $ fromString modName `elem` compModules + +cabalAddDependencyTests :: TestTree +cabalAddDependencyTests = + testGroup + "Add dependency" + [ runHaskellTestCaseSession "Add to executable" ("cabal-add-testdata" "cabal-add-exe") + (generateAddDependencyTestSession "cabal-add-exe.cabal" ("src" "Main.hs") "split" [253]) + , runHaskellTestCaseSession "Add to library" ("cabal-add-testdata" "cabal-add-lib") + (generateAddDependencyTestSession "cabal-add-lib.cabal" ("src" "MyLib.hs") "split" [348]) + , runHaskellTestCaseSession "Add to testsuite" ("cabal-add-testdata" "cabal-add-tests") + (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" "Main.hs") "split" [478]) + , runHaskellTestCaseSession "Add to testsuite with PackageImports" ("cabal-add-testdata" "cabal-add-tests") + (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" "MainPackageImports.hs") "split" [731]) + , runHaskellTestCaseSession "Add to benchmark" ("cabal-add-testdata" "cabal-add-bench") + (generateAddDependencyTestSession "cabal-add-bench.cabal" ("bench" "Main.hs") "split" [403]) + + , runHaskellTestCaseSession "Add to executable, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("src" "Main.hs") "split" [269]) + , runHaskellTestCaseSession "Add to library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" "MyLib.hs") "split" [413]) + , runHaskellTestCaseSession "Add to internal library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" "InternalLib.hs") "split" [413]) + , runHaskellTestCaseSession "Add to testsuite, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("test" "Main.hs") "split" [655]) + , runHaskellTestCaseSession "Add to benchmark, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("bench" "Main.hs") "split" [776]) + + + , runHaskellTestCaseSession "Guard against HPack" ("cabal-add-testdata" "cabal-add-packageYaml") + (generatePackageYAMLTestSession ("src" "Main.hs")) + + , testHiddenPackageSuggestions "Check CabalAdd's parser, no version" + [ "It is a member of the hidden package 'base'" + , "It is a member of the hidden package 'Blammo-wai'" + , "It is a member of the hidden package 'BlastHTTP'" + , "It is a member of the hidden package 'CC-delcont-ref-tf'" + , "It is a member of the hidden package '3d-graphics-examples'" + , "It is a member of the hidden package 'AAI'" + , "It is a member of the hidden package 'AWin32Console'" + ] + [ ("base", T.empty) + , ("Blammo-wai", T.empty) + , ("BlastHTTP", T.empty) + , ("CC-delcont-ref-tf", T.empty) + , ("3d-graphics-examples", T.empty) + , 
("AAI", T.empty) + , ("AWin32Console", T.empty) + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version" + [ "It is a member of the hidden package 'base-0.1.0.0'" + , "It is a member of the hidden package 'Blammo-wai-0.11.0'" + , "It is a member of the hidden package 'BlastHTTP-2.6.4.3'" + , "It is a member of the hidden package 'CC-delcont-ref-tf-0.0.0.2'" + , "It is a member of the hidden package '3d-graphics-examples-1.1.6'" + , "It is a member of the hidden package 'AAI-0.1'" + , "It is a member of the hidden package 'AWin32Console-1.19.1'" + ] + [ ("base","0.1.0.0") + , ("Blammo-wai", "0.11.0") + , ("BlastHTTP", "2.6.4.3") + , ("CC-delcont-ref-tf", "0.0.0.2") + , ("3d-graphics-examples", "1.1.6") + , ("AAI", "0.1") + , ("AWin32Console", "1.19.1") + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, no version, unicode comma" + [ "It is a member of the hidden package \8216base\8217" + , "It is a member of the hidden package \8216Blammo-wai\8217" + , "It is a member of the hidden package \8216BlastHTTP\8217" + , "It is a member of the hidden package \8216CC-delcont-ref-tf\8217" + , "It is a member of the hidden package \8216AAI\8217" + , "It is a member of the hidden package \8216AWin32Console\8217" + ] + [ ("base", T.empty) + , ("Blammo-wai", T.empty) + , ("BlastHTTP", T.empty) + , ("CC-delcont-ref-tf", T.empty) + , ("AAI", T.empty) + , ("AWin32Console", T.empty) + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, unicode comma" + [ "It is a member of the hidden package \8216base-0.1.0.0\8217" + , "It is a member of the hidden package \8216Blammo-wai-0.11.0\8217" + , "It is a member of the hidden package \8216BlastHTTP-2.6.4.3\8217" + , "It is a member of the hidden package \8216CC-delcont-ref-tf-0.0.0.2\8217" + , "It is a member of the hidden package \8216AAI-0.1\8217" + , "It is a member of the hidden package \8216AWin32Console-1.19.1\8217" + ] + [ ("base","0.1.0.0") + , ("Blammo-wai", "0.11.0") + , ("BlastHTTP", "2.6.4.3") + , ("CC-delcont-ref-tf", "0.0.0.2") + , ("AAI", "0.1") + , ("AWin32Console", "1.19.1") + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, unicode comma" + [ "It is a member of the hidden package \8216\&3d-graphics-examples\8217" + , "It is a member of the hidden package \8216\&3d-graphics-examples-1.1.6\8217" + ] + [ ("3d-graphics-examples", T.empty) + , ("3d-graphics-examples", "1.1.6") + ] + , testHiddenPackageSuggestions "Check CabalAdd's parser, with version, with PackageImports" + [ "(needs flag -package-id base-0.1.0.0)" + , "(needs flag -package-id Blammo-wai-0.11.0)" + , "(needs flag -package-id BlastHTTP-2.6.4.3)" + , "(needs flag -package-id CC-delcont-ref-tf-0.0.0.2)" + , "(needs flag -package-id 3d-graphics-examples-1.1.6)" + , "(needs flag -package-id AAI-0.1)" + , "(needs flag -package-id AWin32Console-1.19.1)" + ] + [ ("base","0.1.0.0") + , ("Blammo-wai", "0.11.0") + , ("BlastHTTP", "2.6.4.3") + , ("CC-delcont-ref-tf", "0.0.0.2") + , ("3d-graphics-examples", "1.1.6") + , ("AAI", "0.1") + , ("AWin32Console", "1.19.1") + ] + ] + where + generateAddDependencyTestSession :: FilePath -> FilePath -> T.Text -> [Int] -> Session () + generateAddDependencyTestSession cabalFile haskellFile dependency indicesRes = do + hsdoc <- openDoc haskellFile "haskell" + cabDoc <- openDoc cabalFile "cabal" + _ <- waitForDiagnosticsFrom hsdoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions hsdoc + let selectedCas = filter (\ca -> "Add dependency" `T.isPrefixOf` (ca ^. 
L.title)) cas + mapM_ executeCodeAction selectedCas + _ <- skipManyTill anyMessage $ getDocumentEdit cabDoc -- Wait for the changes in cabal file + contents <- documentContents cabDoc + liftIO $ assertEqual (T.unpack dependency <> " isn't found in the cabal file") indicesRes (T.indices dependency contents) + testHiddenPackageSuggestions :: String -> [T.Text] -> [(T.Text, T.Text)] -> TestTree + testHiddenPackageSuggestions testTitle messages suggestions = + let diags = map (\msg -> messageToDiagnostic msg) messages + suggestions' = map (safeHead . hiddenPackageSuggestion) diags + assertions = zipWith (@?=) suggestions' (map Just suggestions) + testNames = map (\(f, s) -> "Check if " ++ T.unpack f ++ (if s == "" then "" else "-") ++ T.unpack s ++ " was parsed correctly") suggestions + test = testGroup testTitle $ zipWith testCase testNames assertions + in test + messageToDiagnostic :: T.Text -> Diagnostic + messageToDiagnostic msg = Diagnostic { + J._range = mkRange 0 0 0 0 + , J._severity = Nothing + , J._code = Nothing + , J._source = Nothing + , J._message = msg + , J._relatedInformation = Nothing + , J._tags = Nothing + , J._codeDescription = Nothing + , J._data_ = Nothing + } + + generatePackageYAMLTestSession :: FilePath -> Session () + generatePackageYAMLTestSession haskellFile = do + hsdoc <- openDoc haskellFile "haskell" + _ <- waitForDiagnosticsFrom hsdoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions hsdoc + let selectedCas = filter (\ca -> "Add dependency" `T.isPrefixOf` (ca ^. L.title)) cas + liftIO $ assertEqual "PackageYAML" [] selectedCas diff --git a/plugins/hls-cabal-plugin/test/Completer.hs b/plugins/hls-cabal-plugin/test/Completer.hs new file mode 100644 index 0000000000..ab7165b1ac --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Completer.hs @@ -0,0 +1,430 @@ +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + + +module Completer where + +import Control.Lens ((^.), (^?)) +import Control.Lens.Prism +import Control.Monad (forM_) +import qualified Data.ByteString as ByteString +import qualified Data.ByteString.Char8 as BS8 +import Data.Maybe (mapMaybe) +import qualified Data.Text as T +import qualified Development.IDE.Plugin.Completions.Types as Ghcide +import qualified Distribution.Fields as Syntax +import Distribution.PackageDescription (GenericPackageDescription) +import Distribution.PackageDescription.Parsec (parseGenericPackageDescriptionMaybe) +import qualified Distribution.Parsec.Position as Syntax +import Ide.Plugin.Cabal.Completion.Completer.FilePath +import Ide.Plugin.Cabal.Completion.Completer.Module +import Ide.Plugin.Cabal.Completion.Completer.Paths +import Ide.Plugin.Cabal.Completion.Completer.Simple (importCompleter) +import Ide.Plugin.Cabal.Completion.Completer.Types (CompleterData (..)) +import Ide.Plugin.Cabal.Completion.Completions +import Ide.Plugin.Cabal.Completion.Types (CabalPrefixInfo (..), + StanzaName) +import qualified Language.LSP.Protocol.Lens as L +import System.FilePath +import Test.Hls +import Utils + +completerTests :: TestTree +completerTests = + testGroup + "Completer Tests" + [ basicCompleterTests, + fileCompleterTests, + filePathCompletionContextTests, + directoryCompleterTests, + completionHelperTests, + filePathExposedModulesTests, + exposedModuleCompleterTests, + importCompleterTests, + autogenFieldCompletionTests + ] + +basicCompleterTests :: TestTree +basicCompleterTests = + testGroup + "Basic Completer Tests" + [ runCabalTestCaseSession "In stanza context - stanza 
should not be suggested" "" $ do
+        doc <- openDoc "completer.cabal" "cabal"
+        compls <- getCompletions doc (Position 11 7)
+        let complTexts = getTextEditTexts compls
+        liftIO $ assertBool "does not suggest library" $ "library" `notElem` complTexts
+        liftIO $ assertBool "suggests library keyword" $ "extra-libraries:" `elem` complTexts
+    , runCabalTestCaseSession "In top level context - benchmark stanza should be suggested" "" $ do
+        doc <- openDoc "completer.cabal" "cabal"
+        compls <- getCompletions doc (Position 8 2)
+        let complTexts = getTextEditTexts compls
+        liftIO $ assertBool "suggests benchmark" $ "benchmark" `elem` complTexts
+    , runCabalTestCaseSession "In top level context - common stanza should be suggested" "" $ do
+        doc <- openDoc "completer.cabal" "cabal"
+        compls <- getCompletions doc (Position 13 2)
+        let complTexts = getTextEditTexts compls
+        liftIO $ assertBool "suggests common" $ "common" `elem` complTexts
+    , runCabalTestCaseSession "Main-is completions should be relative to hs-source-dirs of same stanza" "filepath-completions" $ do
+        doc <- openDoc "main-is.cabal" "cabal"
+        compls <- getCompletions doc (Position 10 12)
+        let complTexts = getTextEditTexts compls
+        liftIO $ assertBool "suggests f2" $ "f2.hs" `elem` complTexts
+        liftIO $ assertBool "does not suggest Content.hs" $ "Content.hs" `notElem` complTexts
+    ]
+  where
+    getTextEditTexts :: [CompletionItem] -> [T.Text]
+    getTextEditTexts compls = mapMaybe (^? L.textEdit . _Just . _L . L.newText) compls
+
+fileCompleterTests :: TestTree
+fileCompleterTests =
+  testGroup
+    "File Completer Tests"
+    [ testCase "Current Directory - no leading ./ by default" $ do
+        completions <- completeFilePath "" filePathComplTestDir
+        completions @?== [".hidden", "Content.hs", "dir1/", "dir2/", "textfile.txt", "main-is.cabal"],
+      testCase "Current Directory - alternative writing" $ do
+        completions <- completeFilePath "./" filePathComplTestDir
+        completions @?== ["./.hidden", "./Content.hs", "./dir1/", "./dir2/", "./textfile.txt", "./main-is.cabal"],
+      testCase "Current Directory - hidden file start" $ do
+        completions <- completeFilePath "." 
filePathComplTestDir + completions @?== ["Content.hs", ".hidden", "textfile.txt", "main-is.cabal"], + testCase "Current Directory - incomplete directory path written" $ do + completions <- completeFilePath "di" filePathComplTestDir + completions @?== ["dir1/", "dir2/"], + testCase "Current Directory - incomplete filepath written" $ do + completions <- completeFilePath "te" filePathComplTestDir + completions @?== ["Content.hs", "textfile.txt"], + testCase "Subdirectory" $ do + completions <- completeFilePath "dir1/" filePathComplTestDir + completions @?== ["dir1/f1.txt", "dir1/f2.hs"], + testCase "Subdirectory - incomplete filepath written" $ do + completions <- completeFilePath "dir2/dir3/MA" filePathComplTestDir + completions @?== ["dir2/dir3/MARKDOWN.md"], + testCase "Nonexistent directory" $ do + completions <- completeFilePath "dir2/dir4/" filePathComplTestDir + completions @?== [] + ] + where + completeFilePath :: T.Text -> TestName -> IO [T.Text] + completeFilePath written dirName = do + completer <- filePathCompleter mempty $ mkCompleterData $ simpleCabalPrefixInfoFromFp written dirName + pure $ fmap extract completer + +filePathCompletionContextTests :: TestTree +filePathCompletionContextTests = + testGroup + "File Path Completion Context Tests" + [ testCase "empty file - start" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo "" 0 0) + completionPrefix complContext @?= "", + testCase "only whitespaces" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo " " 0 3) + completionPrefix complContext @?= "", + testCase "simple filepath" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo " src/" 0 7) + completionPrefix complContext @?= "src/", + testCase "simple filepath - starting apostrophe" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo " \"src/" 0 8) + completionPrefix complContext @?= "src/", + testCase "simple filepath - starting apostrophe, already closed" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo " \"src/\"" 0 8) + completionPrefix complContext @?= "src/", + testCase "second filepath - starting apostrophe" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo "fp.txt \"src/" 0 12) + completionPrefix complContext @?= "src/", + testCase "middle filepath - starting apostrophe" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo "fp.txt \"src/ fp2.txt" 0 12) + completionPrefix complContext @?= "src/", + testCase "middle filepath - starting apostrophe, already closed" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo "fp.t xt \"src\" fp2.txt" 0 12) + completionPrefix complContext @?= "src", + testCase "middle filepath - starting apostrophe, already closed" $ do + let complContext = getCabalPrefixInfo "" (simplePosPrefixInfo "\"fp.txt\" \"src fp2.txt" 0 13) + completionPrefix complContext @?= "src", + testCase "Current Directory" $ do + compls <- + listFileCompletions + mempty + PathCompletionInfo + { isStringNotationPath = Nothing, + pathSegment = "", + queryDirectory = "", + workingDirectory = filePathComplTestDir + } + compls @?== [".hidden", "Content.hs", "dir1/", "dir2/", "textfile.txt", "main-is.cabal"], + testCase "In directory" $ do + compls <- + listFileCompletions + mempty + PathCompletionInfo + { isStringNotationPath = Nothing, + pathSegment = "", + queryDirectory = "dir1/", + workingDirectory = filePathComplTestDir + } + compls @?== ["f1.txt", "f2.hs"] + ] + where + simplePosPrefixInfo :: T.Text -> UInt -> UInt -> 
Ghcide.PosPrefixInfo + simplePosPrefixInfo lineString linePos charPos = + Ghcide.PosPrefixInfo + { Ghcide.fullLine = lineString, + Ghcide.prefixScope = "", + Ghcide.prefixText = "", + Ghcide.cursorPos = Position linePos charPos + } + +directoryCompleterTests :: TestTree +directoryCompleterTests = + testGroup + "Directory Completer Tests" + [ testCase "Current Directory - no leading ./ by default" $ do + completions <- completeDirectory "" filePathComplTestDir + completions @?== ["dir1/", "dir2/"], + testCase "Current Directory - alternative writing" $ do + completions <- completeDirectory "./" filePathComplTestDir + completions @?== ["./dir1/", "./dir2/"], + testCase "Current Directory - incomplete directory path written" $ do + completions <- completeDirectory "di" filePathComplTestDir + completions @?== ["dir1/", "dir2/"], + testCase "Current Directory - incomplete filepath written" $ do + completions <- completeDirectory "te" filePathComplTestDir + completions @?== [], + testCase "Subdirectory - no more directories found" $ do + completions <- completeDirectory "dir1/" filePathComplTestDir + completions @?== [], + testCase "Subdirectory - available subdirectory" $ do + completions <- completeDirectory "dir2/" filePathComplTestDir + completions @?== ["dir2/dir3/"], + testCase "Nonexistent directory" $ do + completions <- completeDirectory "dir2/dir4/" filePathComplTestDir + completions @?== [] + ] + where + completeDirectory :: T.Text -> TestName -> IO [T.Text] + completeDirectory written dirName = do + completer <- directoryCompleter mempty $ mkCompleterData $ simpleCabalPrefixInfoFromFp written dirName + pure $ fmap extract completer + +completionHelperTests :: TestTree +completionHelperTests = + testGroup + "Completion Helper Tests" + [ testCase "get FilePath - partly written file path" $ do + getFilePathCursorPrefix "src/a" 0 5 @?= "src/a", + testCase "get FilePath - ignores spaces" $ do + getFilePathCursorPrefix " src/a" 0 7 @?= "src/a", + testCase "get FilePath - ignores spaces and keyword" $ do + getFilePathCursorPrefix "license-file: src/a" 0 19 @?= "src/a", + testCase "get FilePath - with apostrophe, ignores spaces and keyword" $ do + getFilePathCursorPrefix "license-file: \"src/a" 0 20 @?= "src/a", + testCase "get FilePath - ignores list of filepaths beforehand, space separated" $ do + getFilePathCursorPrefix " ./text.txt file.h" 0 19 @?= "file.h", + testCase "get FilePath - ignores list of filepaths after, space separated" $ do + getFilePathCursorPrefix " ./text.t file.h" 0 10 @?= "./text.t", + testCase "get FilePath - ignores list of filepaths and rest of filepath after, space separated" $ do + getFilePathCursorPrefix " ./text.t file.h" 0 6 @?= "./te", + testCase "get FilePath - ignores list of filepaths beforehand, multiple space separated" $ do + getFilePathCursorPrefix " ./text.txt file.h" 0 21 @?= "file.h", + testCase "get FilePath - ignores list of filepaths beforehand, comma separated" $ do + getFilePathCursorPrefix " ./text.txt, file.h" 0 20 @?= "file.h", + testCase "get FilePath - ignores list of filepaths beforehand, comma separated, many whitespaces" $ do + getFilePathCursorPrefix " ./text.txt, file.h" 0 22 @?= "file.h", + testCase "get FilePath - ignores list of filepaths beforehand, comma separated, no whitespace" $ do + getFilePathCursorPrefix " ./text.txt,file.h" 0 19 @?= "file.h", + testCase "get FilePath - with apostrophes, ignores list of filepaths beforehand" $ do + getFilePathCursorPrefix " \"./text.txt\" \"file.h" 0 23 @?= "file.h", + testCase "get 
FilePath - ignores list of filepaths with apostrophe beforehand" $ do
+        getFilePathCursorPrefix " \"./text.txt\" file.h" 0 22 @?= "file.h"
+    ]
+  where
+    getFilePathCursorPrefix :: T.Text -> UInt -> UInt -> T.Text
+    getFilePathCursorPrefix lineString linePos charPos =
+      completionPrefix . getCabalPrefixInfo "" $
+        Ghcide.PosPrefixInfo
+          { Ghcide.fullLine = lineString,
+            Ghcide.prefixScope = "",
+            Ghcide.prefixText = "",
+            Ghcide.cursorPos = Position linePos charPos
+          }
+
+filePathExposedModulesTests :: TestTree
+filePathExposedModulesTests =
+  testGroup
+    "Filepaths for Exposed Modules Tests"
+    [ testCase "Root dir" $ do
+        exposed <- callFilePathsForExposedModules ["./"]
+        exposed @?== ["Dir1.", "File1"],
+      testCase "Nested path" $ do
+        exposed <- callFilePathsForExposedModules ["./Dir1/Dir2/"]
+        exposed @?== ["File2"],
+      testCase "Nested empty dir" $ do
+        exposed <- callFilePathsForExposedModules ["./Dir1/Dir2/Dir4"]
+        exposed @?== [],
+      testCase "Two dirs" $ do
+        exposed <- callFilePathsForExposedModules ["./Dir1/", "Dir1/Dir3/Dir4/"]
+        exposed @?== ["Dir2.", "Dir3.", "File3"]
+    ]
+  where
+    callFilePathsForExposedModules :: [FilePath] -> IO [T.Text]
+    callFilePathsForExposedModules srcDirs = do
+      let prefInfo = simpleCabalPrefixInfoFromFp "" exposedTestDir
+      filePathsForExposedModules mempty srcDirs prefInfo
+
+exposedModuleCompleterTests :: TestTree
+exposedModuleCompleterTests =
+  testGroup
+    "Exposed Modules Completer Tests"
+    [ testCase "Top level single source dir, library" $ do
+        completions <- callModulesCompleter Nothing sourceDirsExtractionLibrary ""
+        completions @?== ["Dir2.", "Dir3."],
+      testCase "Top level single source dir, benchmark, with prefix" $ do
+        completions <- callModulesCompleter (Just "benchie") sourceDirsExtractionBenchmark "Fi"
+        completions @?== ["File1"],
+      testCase "Top level single source dir, named executable" $ do
+        completions <- callModulesCompleter (Just "executie") sourceDirsExtractionExecutable ""
+        completions @?== ["File1", "Dir1.", "Dir2.", "Dir3."],
+      testCase "Top level single source dir, named executable" $ do
+        completions <- callModulesCompleter (Just "exe-not-so-cutie") sourceDirsExtractionExecutable ""
+        completions @?== ["File2", "Dir4."],
+      testCase "Top level single source dir, nonexistent name" $ do
+        completions <- callModulesCompleter (Just "exe-the-beste") sourceDirsExtractionExecutable ""
+        completions @?== [],
+      testCase "Top level single source dir, testsuite, with prefix" $ do
+        completions <- callModulesCompleter (Just "suitor") sourceDirsExtractionTestSuite "3"
+        completions @?== ["File3"],
+      testCase "Name nothing but not library" $ do
+        completions <- callModulesCompleter Nothing sourceDirsExtractionTestSuite "3"
+        completions @?== []
+    ]
+  where
+    callModulesCompleter :: Maybe StanzaName -> (Maybe StanzaName -> GenericPackageDescription -> [FilePath]) -> T.Text -> IO [T.Text]
+    callModulesCompleter sName func prefix = do
+      let cData = simpleCompleterData sName testDataDir prefix
+      completer <- modulesCompleter func mempty cData
+      pure $ fmap extract completer
+
+-- TODO: These tests are a bit barebones at the moment,
+-- since we do not take the cursor position into account at this point.
+importCompleterTests :: TestTree
+importCompleterTests =
+  testGroup
+    "Import Completer Tests"
+    [ testCase "All above common sections are suggested" $ do
+        completions <- callImportCompleter
+        ("defaults" `elem` completions) @? "defaults contained"
+        ("test-defaults" `elem` completions) @? "test-defaults contained"
+      -- TODO: Only common sections defined before the current stanza may be imported
+    , testCase "Common sections occurring below are not suggested" $ do
+        completions <- callImportCompleter
+        ("notForLib" `elem` completions) @? "notForLib contained, this needs to be fixed"
+    , testCase "All common sections are suggested when cursor is below them" $ do
+        completions <- callImportCompleter
+        completions @?== ["defaults", "notForLib" ,"test-defaults"]
+    ]
+  where
+    callImportCompleter :: IO [T.Text]
+    callImportCompleter = do
+      let cData' = simpleCompleterData Nothing testDataDir ""
+      let cabalCommonSections = [makeCommonSection 13 0 "defaults", makeCommonSection 18 0 "test-defaults", makeCommonSection 27 0 "notForLib"]
+      let cData = cData' {getCabalCommonSections = pure $ Just cabalCommonSections}
+      completer <- importCompleter mempty cData
+      pure $ fmap extract completer
+    makeCommonSection :: Int -> Int -> String -> Syntax.Field Syntax.Position
+    makeCommonSection row col name =
+      Syntax.Section
+        (Syntax.Name (Syntax.Position row col) "common")
+        [Syntax.SecArgName (Syntax.Position row (col + 7)) (BS8.pack name)]
+        []
+
+autogenFieldCompletionTests :: TestTree
+autogenFieldCompletionTests =
+  testGroup "Autogen Field Completer Tests"
+    [ testAutogenField "library" "completion/autogen-completion.cabal" (Position 6 9) ["autogen-modules:", "autogen-includes:"]
+    , testAutogenField "executable" "completion/autogen-completion.cabal" (Position 11 9) ["autogen-modules:", "autogen-includes:"]
+    , testAutogenField "test-suite" "completion/autogen-completion.cabal" (Position 16 9) ["autogen-modules:", "autogen-includes:"]
+    , testAutogenField "benchmark" "completion/autogen-completion.cabal" (Position 21 9) ["autogen-modules:", "autogen-includes:"]
+    , testAutogenField "common" "completion/autogen-completion.cabal" (Position 24 9) ["autogen-modules:", "autogen-includes:"]
+    ]
+
+  where
+    testAutogenField :: String -> FilePath -> Position -> [T.Text] -> TestTree
+    testAutogenField section file pos expected = runCabalTestCaseSession ("autogen-modules completion in " <> section) "" $ do
+      doc <- openDoc file "cabal"
+      items <- getCompletions doc pos
+      let labels = map (^. L.label) items
+      liftIO $ forM_ expected $ \expect ->
+        assertBool (T.unpack expect <> " not found in " <> section) $
+          any (expect `T.isInfixOf`) labels
+
+simpleCompleterData :: Maybe StanzaName -> FilePath -> T.Text -> CompleterData
+simpleCompleterData sName dir pref = do
+  CompleterData
+    { cabalPrefixInfo = simpleExposedCabalPrefixInfo pref dir,
+      getLatestGPD = do
+        cabalContents <- ByteString.readFile $ testDataDir </> "exposed.cabal"
+        pure $ parseGenericPackageDescriptionMaybe cabalContents,
+      getCabalCommonSections = undefined,
+      stanzaName = sName
+    }
+
+mkCompleterData :: CabalPrefixInfo -> CompleterData
+mkCompleterData prefInfo = CompleterData {getLatestGPD = undefined, getCabalCommonSections = undefined, cabalPrefixInfo = prefInfo, stanzaName = Nothing}
+
+exposedTestDir :: FilePath
+exposedTestDir = addTrailingPathSeparator $ testDataDir </> "src-modules"
+
+simpleExposedCabalPrefixInfo :: T.Text -> FilePath -> CabalPrefixInfo
+simpleExposedCabalPrefixInfo prefix fp =
+  CabalPrefixInfo
+    { completionPrefix = prefix,
+      isStringNotation = Nothing,
+      completionCursorPosition = Position 0 0,
+      completionRange = Range (Position 0 0) (Position 0 0),
+      completionWorkingDir = fp,
+      completionFileName = "exposed.cabal"
+    }
+
+extract :: CompletionItem -> T.Text
+extract item = case item ^. 
L.textEdit of + Just (InL v) -> v ^. L.newText + _ -> error "" + +importTestData :: T.Text +importTestData = [trimming| +cabal-version: 3.0 +name: hls-cabal-plugin +version: 0.1.0.0 +synopsis: +homepage: +license: MIT +license-file: LICENSE +author: Fendor +maintainer: fendor@posteo.de +category: Development +extra-source-files: CHANGELOG.md + +common defaults + default-language: GHC2021 + -- Should have been in GHC2021, an oversight + default-extensions: ExplicitNamespaces + +common test-defaults + ghc-options: -threaded -rtsopts -with-rtsopts=-N + +library + import: + ^ + exposed-modules: IDE.Plugin.Cabal + build-depends: base ^>=4.14.3.0 + hs-source-dirs: src + default-language: Haskell2010 + +common notForLib + default-language: GHC2021 + +test-suite tests + import: + ^ +|] diff --git a/plugins/hls-cabal-plugin/test/Context.hs b/plugins/hls-cabal-plugin/test/Context.hs new file mode 100644 index 0000000000..8e6176bc5b --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Context.hs @@ -0,0 +1,309 @@ +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + +module Context where + +import qualified Data.Text as T +import qualified Data.Text.Encoding as Text +import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (..)) +import Ide.Plugin.Cabal +import Ide.Plugin.Cabal.Completion.Completer.Paths +import Ide.Plugin.Cabal.Completion.Completions +import Ide.Plugin.Cabal.Completion.Types (Context, + FieldContext (KeyWord, None), + StanzaContext (Stanza, TopLevel)) +import qualified Ide.Plugin.Cabal.Parse as Parse +import Test.Hls +import Utils as T + +cabalPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log +cabalPlugin = mkPluginTestDescriptor descriptor "cabal context" + +contextTests :: TestTree +contextTests = + testGroup + "Context Tests" + [ pathCompletionInfoFromCompletionContextTests + , getContextTests + ] + +pathCompletionInfoFromCompletionContextTests :: TestTree +pathCompletionInfoFromCompletionContextTests = + testGroup + "Completion Info to Completion Context Tests" + [ testCase "Current Directory - no leading ./ by default" $ do + let complInfo = pathCompletionInfoFromCabalPrefixInfo "" $ simpleCabalPrefixInfoFromFp "" testDataDir + queryDirectory complInfo @?= "" + , testCase "Current Directory - partly written next" $ do + let complInfo = pathCompletionInfoFromCabalPrefixInfo "" $ simpleCabalPrefixInfoFromFp "di" testDataDir + queryDirectory complInfo @?= "" + pathSegment complInfo @?= "di" + , testCase "Current Directory - alternative writing" $ do + let complInfo = pathCompletionInfoFromCabalPrefixInfo "" $ simpleCabalPrefixInfoFromFp "./" testDataDir + queryDirectory complInfo @?= "./" + , testCase "Subdirectory" $ do + let complInfo = pathCompletionInfoFromCabalPrefixInfo "" $ simpleCabalPrefixInfoFromFp "dir1/" testDataDir + queryDirectory complInfo @?= "dir1/" + pathSegment complInfo @?= "" + , testCase "Subdirectory - partly written next" $ do + let complInfo = pathCompletionInfoFromCabalPrefixInfo "" $ simpleCabalPrefixInfoFromFp "dir1/d" testDataDir + queryDirectory complInfo @?= "dir1/" + pathSegment complInfo @?= "d" + , testCase "Subdirectory - partly written next" $ do + let complInfo = pathCompletionInfoFromCabalPrefixInfo "" $ simpleCabalPrefixInfoFromFp "dir1/dir2/d" testDataDir + queryDirectory complInfo @?= "dir1/dir2/" + pathSegment complInfo @?= "d" + ] + +getContextTests :: TestTree +getContextTests = + testGroup + "Context Tests Real" + [ testCase "Empty File - Start" $ 
do + -- for a completely empty file, the context needs to + -- be top level without a specified keyword + ctx <- callGetContext (Position 0 0) "" "" + ctx @?= (TopLevel, None) + , testCase "Cabal version keyword - no value, no space after :" $ do + -- on a file, where the keyword is already written + -- the context should still be toplevel but the keyword should be recognized + ctx <- callGetContext (Position 0 14) "" "cabal-version:\n" + ctx @?= (TopLevel, KeyWord "cabal-version:") + , testCase "Cabal version keyword - cursor in keyword" $ do + -- on a file, where the keyword is already written + -- but the cursor is in the middle of the keyword, + -- we are not in a keyword context + ctx <- callGetContext (Position 0 5) "cabal" "cabal-version:\n" + ctx @?= (TopLevel, None) + , testCase "Cabal version keyword - no value, many spaces" $ do + -- on a file, where the "cabal-version:" keyword is already written + -- the context should still be top level but the keyword should be recognized + ctx <- callGetContext (Position 0 45) "" ("cabal-version:" <> T.replicate 50 " " <> "\n") + ctx @?= (TopLevel, KeyWord "cabal-version:") + , testCase "Cabal version keyword - keyword partly written" $ do + -- in the first line of the file, if the keyword + -- has not been written completely, the keyword context + -- should still be None + ctx <- callGetContext (Position 0 5) "cabal" "cabal" + ctx @?= (TopLevel, None) + , testCase "Cabal version keyword - value partly written" $ do + -- in the first line of the file, if the keyword + -- has not been written completely, the keyword context + -- should still be None + ctx <- callGetContext (Position 0 17) "1." "cabal-version: 1." + ctx @?= (TopLevel, KeyWord "cabal-version:") + , testCase "Inside Stanza - no keyword" $ do + -- on a file, where the library stanza has been defined + -- but no keyword is defined afterwards, the stanza context should be recognized + ctx <- callGetContext (Position 3 2) "" libraryStanzaData + ctx @?= (Stanza "library" Nothing, None) + , testCase "Inside Stanza - keyword, no value" $ do + -- on a file, where the library stanza and a keyword + -- has been defined, the keyword and stanza should be recognized + ctx <- callGetContext (Position 4 21) "" libraryStanzaData + ctx @?= (Stanza "library" Nothing, KeyWord "build-depends:") + , testCase "Cabal version keyword - no value, next line" $ do + -- if the cabal version keyword has been written but without a value, + -- in the next line we still should be in top level context with no keyword + -- since the cabal version keyword and value pair need to be in the same line. + -- However, that's too much work to implement for virtually no benefit, so we + -- test here the status-quo is satisfied. 
+        ctx <- callGetContext (Position 1 2) "" "cabal-version:\n\n"
+        ctx @?= (TopLevel, KeyWord "cabal-version:")
+    , testCase "Non-cabal-version keyword - no value, next line indented position" $ do
+        -- if a keyword, other than the cabal version keyword, has been written
+        -- with no value, in the next line we should still be in the top level keyword context
+        -- of the keyword with no value, since its value may be written in the next line
+        ctx <- callGetContext (Position 2 4) "" topLevelData
+        ctx @?= (TopLevel, KeyWord "name:")
+    , testCase "Non-cabal-version keyword - no value, next line at start" $ do
+        -- if a keyword, other than the cabal version keyword, has been written
+        -- with no value, in the next line we should still be in the top level context
+        -- but not the keyword's, since it is not viable to write a value for a
+        -- keyword at the start of the next line
+        ctx <- callGetContext (Position 2 0) "" topLevelData
+        ctx @?= (TopLevel, None)
+    , testCase "Toplevel after stanza partially written" $ do
+        ctx <- callGetContext (Position 6 2) "ma" libraryStanzaData
+        ctx @?= (TopLevel, None)
+    , testCase "Non-cabal-version keyword - no value, multiple lines between" $ do
+        -- if a keyword, other than the cabal version keyword, has been written
+        -- with no value, even with multiple lines in between we can still write the
+        -- value corresponding to the keyword
+        ctx <- callGetContext (Position 5 4) "" topLevelData
+        ctx @?= (TopLevel, KeyWord "name:")
+    , testCase "Keyword inside stanza - cursor indented more than keyword in next line" $ do
+        -- if a keyword, other than the cabal version keyword, has been written
+        -- in a stanza context with no value, then the value may be written in the next line,
+        -- when the cursor is indented more than the keyword
+        ctx <- callGetContext (Position 5 8) "" libraryStanzaData
+        ctx @?= (Stanza "library" Nothing, KeyWord "build-depends:")
+    , testCase "Keyword inside stanza - cursor indented less than keyword in next line" $ do
+        -- if a keyword, other than the cabal version keyword, has been written
+        -- in a stanza context with no value, then the value may not be written in the next line,
+        -- when the cursor is indented less than the keyword
+        ctx <- callGetContext (Position 5 2) "" libraryStanzaData
+        ctx @?= (Stanza "library" Nothing, None)
+    , testCase "Keyword inside stanza - cursor at start of next line" $ do
+        -- in a stanza context with no value, the value may not be written in the next line
+        -- when the cursor is not indented, and we are in the top level context
+        ctx <- callGetContext (Position 5 0) "" libraryStanzaData
+        ctx @?= (TopLevel, None)
+    , testCase "Top level - cursor in later line with partially written value" $ do
+        ctx <- callGetContext (Position 5 13) "eee" topLevelData
+        ctx @?= (TopLevel, KeyWord "name:")
+    , testCase "If is ignored" $ do
+        ctx <- callGetContext (Position 5 18) "" conditionalData
+        ctx @?= (Stanza "library" Nothing, None)
+    , testCase "Elif is ignored" $ do
+        ctx <- callGetContext (Position 7 18) "" conditionalData
+        ctx @?= (Stanza "library" Nothing, None)
+    , testCase "Else is ignored" $ do
+        ctx <- callGetContext (Position 9 18) "" conditionalData
+        ctx @?= (Stanza "library" Nothing, KeyWord "buildable:")
+    , testCase "Named Stanza" $ do
+        ctx <- callGetContext (Position 2 18) "" executableStanzaData
+        ctx @?= (TopLevel, None)
+    , testCase "Multi line, finds context in same line" $ do
+        ctx <- callGetContext (Position 5 18) "" multiLineOptsData
+        ctx @?= (Stanza "library" Nothing, KeyWord "build-depends:")
+    , 
testCase "Multi line, in the middle of option" $ do + ctx <- callGetContext (Position 6 11) "" multiLineOptsData + ctx @?= (Stanza "library" Nothing, KeyWord "build-depends:") + , testCase "Multi line, finds context in between lines" $ do + ctx <- callGetContext (Position 7 8) "" multiLineOptsData + ctx @?= (Stanza "library" Nothing, KeyWord "build-depends:") + , testCase "Multi line, finds context in between lines, start if line" $ do + ctx <- callGetContext (Position 7 0) "" multiLineOptsData + ctx @?= (TopLevel, None) + , testCase "Multi line, end of option" $ do + ctx <- callGetContext (Position 8 14) "" multiLineOptsData + ctx @?= (Stanza "library" Nothing, KeyWord "build-depends:") + , parameterisedCursorTest "Contexts in large testfile" multiPositionTestData + [ (TopLevel, None) + , (TopLevel, KeyWord "cabal-version:") + , (TopLevel, None) + , (TopLevel, KeyWord "description:") + , (TopLevel, KeyWord "extra-source-files:") + , (TopLevel, None) + -- this might not be what we want, maybe add another Context + , (TopLevel, None) + -- this might not be what we want, maybe add another Context + , (TopLevel, None) + , (Stanza "source-repository" (Just "head"), None) + , (Stanza "source-repository" (Just "head"), KeyWord "type:") + , (Stanza "source-repository" (Just "head"), KeyWord "type:") + , (Stanza "source-repository" (Just "head"), KeyWord "type:") + , (Stanza "source-repository" (Just "head"), None) + , (Stanza "common" (Just "cabalfmt"), None) + , (Stanza "common" (Just "cabalfmt"), None) + , (Stanza "common" (Just "cabalfmt"), KeyWord "build-depends:") + ] + $ \fileContent posPrefInfo -> + callGetContext (cursorPos posPrefInfo) (prefixText posPrefInfo) fileContent + ] + where + callGetContext :: Position -> T.Text -> T.Text -> IO Context + callGetContext pos pref ls = do + case Parse.readCabalFields "not-real" (Text.encodeUtf8 ls) of + Left err -> fail $ show err + Right fields -> do + getContext mempty (simpleCabalPrefixInfoFromPos pos pref) fields + +-- ------------------------------------------------------------------------ +-- Test Data +-- ------------------------------------------------------------------------ + +libraryStanzaData :: T.Text +libraryStanzaData = [trimming| +cabal-version: 3.0 +name: simple-cabal +library + default-language: Haskell98 + build-depends: + +ma +|] + +executableStanzaData :: T.Text +executableStanzaData = [trimming| +cabal-version: 3.0 +name: simple-cabal +executable exeName + default-language: Haskell2010 + hs-source-dirs: test/preprocessor +|] + +topLevelData :: T.Text +topLevelData = [trimming| +cabal-version: 3.0 +name: + + + + eee +|] + +conditionalData :: T.Text +conditionalData = [trimming| +cabal-version: 3.0 +name: simple-cabal +library + if os(windows) + buildable: + elif os(linux) + buildable: + else + buildable: +|] +multiLineOptsData :: T.Text +multiLineOptsData = [trimming| +cabal-version: 3.0 +name: + + +library + build-depends: + base, + + text , +|] + +multiPositionTestData :: T.Text +multiPositionTestData = [trimming| +cabal-version: 3.4 + ^ ^ +category: Development +^ +name: haskell-language-server +description: + Please see the README on GitHub at + ^ +extra-source-files: + README.md + ChangeLog.md + test/testdata/**/*.project + test/testdata/**/*.cabal + test/testdata/**/*.yaml + test/testdata/**/*.hs + test/testdata/**/*.json + ^ + -- These globs should only match test/testdata + plugins/**/*.project + +source-repository head + ^ ^ ^ + type: git + ^ ^ ^ ^ + location: 
https://github.com/haskell/haskell-language-server + + ^ +common cabalfmt + + ^ + build-depends: haskell-language-server:hls-cabal-fmt-plugin + ^ ^ + cpp-options: -Dhls_cabalfmt +|] diff --git a/plugins/hls-cabal-plugin/test/Definition.hs b/plugins/hls-cabal-plugin/test/Definition.hs new file mode 100644 index 0000000000..33163c03eb --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Definition.hs @@ -0,0 +1,126 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Definition ( + gotoDefinitionTests, +) where + +import Control.Lens ((^.)) +import Data.List.Extra (isSuffixOf) +import qualified Data.Text as T +import Ide.Plugin.Cabal.Definition (toHaskellFile) +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Types as LSP +import System.FilePath +import Test.Hls +import Utils + + +gotoDefinitionTests :: TestTree +gotoDefinitionTests = testGroup "Goto Definition" + [ gotoCommonSectionDefinitionTests + , gotoModuleDefinitionTests + ] + +gotoModuleDefinitionTests :: TestTree +gotoModuleDefinitionTests = testGroup "Goto Module Definition" + [ testGoToDefinitionLink "simple cabal test" "simple-cabal" "simple-cabal.cabal" + (Position 8 23) (toTestHaskellPath "" "A") + + , testGoToDefinitionLink "library start of exposed-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 6 22) (toTestHaskellPath "src" "Library.Lib") + , testGoToDefinitionLink "library middle of exposed-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 6 29) (toTestHaskellPath "src" "Library.Lib") + , testGoToDefinitionLink "library end of exposed-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 6 33) (toTestHaskellPath "src" "Library.Lib") + , testGoToDefinitionLink "library start of other-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 9 22) (toTestHaskellPath "src" "Library.Other.OtherLib") + , testGoToDefinitionLink "library end of other-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 9 44) (toTestHaskellPath "src" "Library.Other.OtherLib") + + , testGoToDefinitionLink "executable other-modules" ("goto-definition" "modules") "module-examples.cabal" + (Position 22 10) (toTestHaskellPath ("src" "exe") "Config") + + , testGoToDefinitionLink "test-suite other-modules Config" ("goto-definition" "modules") "module-examples.cabal" + (Position 31 10) (toTestHaskellPath ("src" "test") "Config") + , testGoToDefinitionLink "test-suite other-modules Library" ("goto-definition" "modules") "module-examples.cabal" + (Position 34 10) (toTestHaskellPath ("src" "test") "Library") + + , testGoToDefinitionLink "benchmark other-modules Config" ("goto-definition" "modules") "module-examples.cabal" + (Position 45 30) (toTestHaskellPath ("src" "bench") "Config") + + , testGoToDefinitionLinkNoLocation "not existent module" ("goto-definition" "modules") "module-examples.cabal" (Position 48 25) + , testGoToDefinitionLinkNoLocation "behind module" ("goto-definition" "modules") "module-examples.cabal" (Position 9 20) + , testGoToDefinitionLinkNoLocation "after module" ("goto-definition" "modules") "module-examples.cabal" (Position 9 50) + ] + where + toTestHaskellPath :: FilePath -> T.Text -> FilePath + toTestHaskellPath dir moduleName = dir toHaskellFile moduleName + + getUriFromDefinition :: Show b => (Definition |? 
b) -> Uri + getUriFromDefinition (InL (Definition (InL loc))) = loc^.L.uri + getUriFromDefinition unk = error $ "Unexpected pattern '" ++ show unk ++ "' , expected '(InL (Definition (InL loc))'" + + testGoToDefinitionLink :: TestName -> FilePath -> FilePath -> Position -> FilePath -> TestTree + testGoToDefinitionLink testName testDir cabalFile cursorPos expectedFilePath = + runCabalTestCaseSession testName testDir $ do + doc <- openDoc cabalFile "cabal" + definitions <- getDefinitions doc cursorPos + let uri = getUriFromDefinition definitions + mFilePath = (testDir ) <$> uriToFilePath uri + case mFilePath of + Nothing -> error $ "Not possible to convert Uri " <> show uri <> " to FilePath" + Just filePath -> do + let filePathWithDir = testDir expectedFilePath + isCorrectPath = filePathWithDir `isSuffixOf` filePath + liftIO $ isCorrectPath @? ("Absolute path expected to end on " <> filePathWithDir <> + " but " <> filePath <> " was given.") + + testGoToDefinitionLinkNoLocation :: TestName -> FilePath -> FilePath -> Position -> TestTree + testGoToDefinitionLinkNoLocation testName testDir cabalFile cursorPos = + runCabalTestCaseSession testName testDir $ do + doc <- openDoc cabalFile "cabal" + empty <- getDefinitions doc cursorPos + liftIO $ empty @?= (InR $ InR LSP.Null) + +gotoCommonSectionDefinitionTests :: TestTree +gotoCommonSectionDefinitionTests = testGroup "Goto Common Section Definition" + [ positiveTest "middle of identifier" (Position 27 16) (mkRange 6 0 7 22) + , positiveTest "left of identifier" (Position 30 12) (mkRange 10 0 17 40) + , positiveTest "right of identifier" (Position 33 22) (mkRange 20 0 23 34) + , positiveTest "left of '-' in identifier" (Position 36 20) (mkRange 6 0 7 22) + , positiveTest "right of '-' in identifier" (Position 39 19) (mkRange 10 0 17 40) + , positiveTest "identifier in identifier list" (Position 42 16) (mkRange 20 0 23 34) + , positiveTest "left of ',' right of identifier" (Position 45 33) (mkRange 10 0 17 40) + , positiveTest "right of ',' left of identifier" (Position 48 34) (mkRange 6 0 7 22) + + , negativeTest "right of ',' left of space" (Position 51 23) + , negativeTest "right of ':' left of space" (Position 54 11) + , negativeTest "not a definition" (Position 57 8) + , negativeTest "empty space" (Position 59 7) + ] + where + getRangeFromDefinition :: Show b => (Definition |? b) -> Range + getRangeFromDefinition (InL (Definition (InL loc))) = loc^.L.range + getRangeFromDefinition unk = error $ "Unexpected pattern '" ++ show unk ++ "' , expected '(InL (Definition (InL loc))'" + + -- A positive test checks if the provided range is equal + -- to the expected range from the definition in the test file. + -- The test emulates a goto-definition request of an actual definition. + positiveTest :: TestName -> Position -> Range -> TestTree + positiveTest testName cursorPos expectedRange = + runCabalTestCaseSession testName ("goto-definition" "common-section") $ do + doc <- openDoc "simple-with-common.cabal" "cabal" + definitions <- getDefinitions doc cursorPos + let range = getRangeFromDefinition definitions + liftIO $ range @?= expectedRange + + -- A negative test checks if the request failed and + -- the provided result is empty, i.e. `InR $ InR Null`. + -- The test emulates a goto-definition request of anything but an + -- actual definition. 
+ negativeTest :: TestName -> Position -> TestTree + negativeTest testName cursorPos = + runCabalTestCaseSession testName ("goto-definition" "common-section") $ do + doc <- openDoc "simple-with-common.cabal" "cabal" + empty <- getDefinitions doc cursorPos + liftIO $ empty @?= (InR $ InR LSP.Null) diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs new file mode 100644 index 0000000000..43794e753d --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Main.hs @@ -0,0 +1,327 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} + +module Main ( + main, +) where + +import CabalAdd (cabalAddDependencyTests, + cabalAddModuleTests) +import Completer (completerTests) +import Context (contextTests) +import Control.Lens ((^.)) +import Control.Lens.Fold ((^?)) +import Control.Monad (guard) +import qualified Data.ByteString as BS +import Data.Either (isRight) +import Data.List.Extra (nubOrdOn) +import qualified Data.Maybe as Maybe +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.IO as Text +import Definition (gotoDefinitionTests) +import Development.IDE.Test +import Ide.Plugin.Cabal.LicenseSuggest (licenseErrorSuggestion) +import qualified Ide.Plugin.Cabal.Parse as Lib +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Message as L +import Outline (outlineTests) +import System.FilePath +import Test.Hls +import Test.Hls.FileSystem +import Utils + +main :: IO () +main = do + defaultTestRunner $ + testGroup + "Cabal Plugin Tests" + [ unitTests + , pluginTests + , completerTests + , contextTests + , outlineTests + , codeActionTests + , gotoDefinitionTests + , hoverTests + , reloadOnCabalChangeTests + ] + +-- ------------------------------------------------------------------------ +-- Unit Tests +-- ------------------------------------------------------------------------ + +unitTests :: TestTree +unitTests = + testGroup + "Unit Tests" + [ cabalParserUnitTests + , codeActionUnitTests + ] + +cabalParserUnitTests :: TestTree +cabalParserUnitTests = + testGroup + "Parsing Cabal" + [ testCase "Simple Parsing works" $ do + fileContents <- BS.readFile (testDataDir "simple.cabal") + let (warnings, pm) = Lib.parseCabalFileContents fileContents + liftIO $ do + null warnings @? "Found unexpected warnings" + isRight pm @? "Failed to parse GenericPackageDescription" + ] + +codeActionUnitTests :: TestTree +codeActionUnitTests = + testGroup + "Code Action Tests" + [ testCase "Unknown format" $ do + -- the message has the wrong format + licenseErrorSuggestion maxCompletions "Unknown license identifier: 'BSD3' Do you mean BSD-3-Clause?" 
@?= [] + , testCase "BSD-3-Clause" $ do + take 2 (licenseErrorSuggestion maxCompletions "Unknown SPDX license identifier: 'BSD3' Do you mean BSD-3-Clause?") + @?= +-- Cabal-syntax 3.12.0.0 added bunch of new licenses, so now more licenses match "BSD3" pattern +#if MIN_VERSION_Cabal_syntax(3,12,0) + [("BSD3", "BSD-4.3RENO"), ("BSD3", "BSD-3-Clause")] +#else + [("BSD3", "BSD-3-Clause"), ("BSD3", "BSD-3-Clause-LBNL")] +#endif + , testCase "MiT" $ do + -- contains no suggestion + take 2 (licenseErrorSuggestion maxCompletions "Unknown SPDX license identifier: 'MiT'") + @?= [("MiT", "MIT"), ("MiT", "MIT-0")] + ] + where + maxCompletions = 100 + + +-- ------------------------------------------------------------------------ +-- Integration Tests +-- ------------------------------------------------------------------------ + +pluginTests :: TestTree +pluginTests = + testGroup + "Plugin Tests" + [ testGroup + "Diagnostics" + [ runCabalTestCaseSession "Publishes Diagnostics on Error" "" $ do + _ <- openDoc "invalid.cabal" "cabal" + diags <- cabalCaptureKick + unknownLicenseDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] + liftIO $ do + length diags @?= 1 + unknownLicenseDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) + unknownLicenseDiag ^. L.severity @?= Just DiagnosticSeverity_Error + , runCabalTestCaseSession "Publishes Diagnostics on unsupported cabal version as Warning" "" $ do + _ <- openDoc "unsupportedVersion.cabal" "cabal" + diags <- cabalCaptureKick + unknownVersionDiag <- liftIO $ inspectDiagnosticAny diags ["Unsupported cabal-version 99999.0", "Unsupported cabal format version in cabal-version field: 99999.0"] + liftIO $ do + length diags @?= 1 + unknownVersionDiag ^. L.range @?= Range (Position 0 0) (Position 1 0) + unknownVersionDiag ^. L.severity @?= Just DiagnosticSeverity_Warning + , runCabalTestCaseSession "Clears diagnostics" "" $ do + doc <- openDoc "invalid.cabal" "cabal" + diags <- cabalCaptureKick + unknownLicenseDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] + liftIO $ do + length diags @?= 1 + unknownLicenseDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) + unknownLicenseDiag ^. L.severity @?= Just DiagnosticSeverity_Error + _ <- applyEdit doc $ TextEdit (Range (Position 3 20) (Position 4 0)) "BSD-3-Clause\n" + newDiags <- cabalCaptureKick + liftIO $ newDiags @?= [] + ] + ] +-- ---------------------------------------------------------------------------- +-- Code Action Tests +-- ---------------------------------------------------------------------------- + +codeActionTests :: TestTree +codeActionTests = testGroup "Code Actions" + [ runCabalTestCaseSession "BSD-3" "" $ do + doc <- openDoc "licenseCodeAction.cabal" "cabal" + diags <- waitForDiagnosticsFromSource doc "cabal" + reduceDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'BSD3'"] + liftIO $ do + length diags @?= 1 + reduceDiag ^. L.range @?= Range (Position 3 24) (Position 4 0) + reduceDiag ^. 
L.severity @?= Just DiagnosticSeverity_Error + [codeAction] <- getLicenseAction "BSD-3-Clause" <$> getCodeActions doc (Range (Position 3 24) (Position 4 0)) + executeCodeAction codeAction + contents <- documentContents doc + liftIO $ + contents + @?= T.unlines + [ "cabal-version: 3.0" + , "name: licenseCodeAction" + , "version: 0.1.0.0" + , "license: BSD-3-Clause" + , "" + , "library" + , " build-depends: base" + , " default-language: Haskell2010" + ] + , runCabalTestCaseSession "Apache-2.0" "" $ do + doc <- openDoc "licenseCodeAction2.cabal" "cabal" + diags <- waitForDiagnosticsFromSource doc "cabal" + -- test if it supports typos in license name, here 'apahe' + reduceDiag <- liftIO $ inspectDiagnostic diags ["Unknown SPDX license identifier: 'APAHE'"] + liftIO $ do + length diags @?= 1 + reduceDiag ^. L.range @?= Range (Position 3 25) (Position 4 0) + reduceDiag ^. L.severity @?= Just DiagnosticSeverity_Error + [codeAction] <- getLicenseAction "Apache-2.0" <$> getCodeActions doc (Range (Position 3 24) (Position 4 0)) + executeCodeAction codeAction + contents <- documentContents doc + liftIO $ + contents + @?= T.unlines + [ "cabal-version: 3.0" + , "name: licenseCodeAction2" + , "version: 0.1.0.0" + , "license: Apache-2.0" + , "" + , "library" + , " build-depends: base" + , " default-language: Haskell2010" + ] + , runCabalGoldenSession "Code Actions - Can fix field names" "code-actions" "FieldSuggestions" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions doc + -- Filter out the code actions we want to invoke. + -- We only want to invoke Code Actions with certain titles, and + -- we want to invoke them only once, not once for each cursor request. + -- 'getAllCodeActions' iterates over each cursor position and requests code actions. + let selectedCas = nubOrdOn (^. L.title) $ filter + (\ca -> (ca ^. L.title) `elem` + [ "Replace with license" + , "Replace with build-type" + , "Replace with extra-doc-files" + , "Replace with ghc-options" + , "Replace with location" + , "Replace with default-language" + , "Replace with import" + , "Replace with build-depends" + , "Replace with main-is" + , "Replace with hs-source-dirs" + ]) cas + mapM_ executeCodeAction selectedCas + pure () + , cabalAddDependencyTests + , cabalAddModuleTests + ] + where + getLicenseAction :: T.Text -> [Command |? 
CodeAction] -> [CodeAction] + getLicenseAction license codeActions = do + InR action@CodeAction{_title} <- codeActions + guard (_title == "Replace with " <> license) + pure action + +-- ---------------------------------------------------------------------------- +-- Hover Tests +-- ---------------------------------------------------------------------------- + +hoverTests :: TestTree +hoverTests = testGroup "Hover" + [ hoverOnDependencyTests + ] + +hoverOnDependencyTests :: TestTree +hoverOnDependencyTests = testGroup "Hover Dependency" + [ hoverContainsTest "base with separated version" "hover-deps.cabal" (Position 6 25) "[Documentation](https://hackage.haskell.org/package/base)" + , hoverContainsTest "aeson with not separated version " "hover-deps.cabal" (Position 7 25) "[Documentation](https://hackage.haskell.org/package/aeson)" + , hoverContainsTest "lens no version" "hover-deps.cabal" (Position 7 42) "[Documentation](https://hackage.haskell.org/package/lens)" + + , hoverIsNullTest "name has no documentation" "hover-deps.cabal" (Position 1 25) + , hoverIsNullTest "exposed-modules has no documentation" "hover-deps.cabal" (Position 5 25) + , hoverIsNullTest "hs-source-dirs has no documentation" "hover-deps.cabal" (Position 8 25) + ] + where + hoverContainsTest :: TestName -> FilePath -> Position -> T.Text -> TestTree + hoverContainsTest testName cabalFile pos containedText = + runCabalTestCaseSession testName "hover" $ do + doc <- openDoc cabalFile "cabal" + h <- getHover doc pos + case h of + Nothing -> liftIO $ assertFailure "No hover" + Just (Hover contents _) -> case contents of + InL (MarkupContent _ txt) -> do + liftIO + $ assertBool ("Failed to find `" <> T.unpack containedText <> "` in hover message: " <> T.unpack txt) + $ containedText `T.isInfixOf` txt + _ -> liftIO $ assertFailure "Unexpected content type" + closeDoc doc + + hoverIsNullTest :: TestName -> FilePath -> Position -> TestTree + hoverIsNullTest testName cabalFile pos = + runCabalTestCaseSession testName "hover" $ do + doc <- openDoc cabalFile "cabal" + h <- getHover doc pos + liftIO $ assertBool ("Found hover `" <> show h <> "`") $ Maybe.isNothing h + closeDoc doc + +-- ---------------------------------------------------------------------------- +-- Reloading of Haskell files on .cabal changes +-- ---------------------------------------------------------------------------- + +simpleCabalVft :: [FileTree] +simpleCabalVft = + [ copy "hie.yaml" + , copy "simple-reload.cabal" + , copy "Main.hs" + ] + +simpleCabalFs :: VirtualFileTree +simpleCabalFs = mkVirtualFileTree + (testDataDir "simple-reload") + simpleCabalVft + +-- Slow tests +reloadOnCabalChangeTests :: TestTree +reloadOnCabalChangeTests = testGroup "Reload on .cabal changes" + [ runCabalTestCaseSessionVft "Change warnings when .cabal file changes" simpleCabalFs $ do + _ <- openDoc "Main.hs" "haskell" + expectDiagnostics [("Main.hs", [(DiagnosticSeverity_Warning, (8, 0), "Top-level binding with no type signature", Just "GHC-38417")])] + waitForAllProgressDone + cabalDoc <- openDoc "simple-reload.cabal" "cabal" + skipManyTill anyMessage cabalKickDone + saveDoc cabalDoc + [trimming| + cabal-version: 3.4 + name: simple-reload + version: 0.1.0.0 + -- copyright: + build-type: Simple + + common warnings + ghc-options: -Wall -Wno-missing-signatures + + executable simple-reload + import: warnings + main-is: Main.hs + build-depends: base + default-language: Haskell2010 + |] + + expectDiagnostics 
[("Main.hs", [(DiagnosticSeverity_Warning, (2, 0), "The import of \8216Data.List\8217 is redundant", Nothing)])] + ] + +-- | Persists the given contents to the 'TextDocumentIdentifier' on disk +-- and sends the @textDocument/didSave@ notification. +saveDoc :: TextDocumentIdentifier -> Text -> Session () +saveDoc docId t = do + -- I couldn't figure out how to get the virtual file contents, so we write it + -- to disk and send the 'SMethod_TextDocumentDidSave' notification + case uriToFilePath (docId ^. L.uri) of + Nothing -> pure () + Just fp -> do + liftIO $ Text.writeFile fp t + + let params = DidSaveTextDocumentParams docId Nothing + sendNotification L.SMethod_TextDocumentDidSave params diff --git a/plugins/hls-cabal-plugin/test/Outline.hs b/plugins/hls-cabal-plugin/test/Outline.hs new file mode 100644 index 0000000000..cb7279e387 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Outline.hs @@ -0,0 +1,103 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Outline ( + outlineTests, +) where + +import Language.LSP.Protocol.Types (DocumentSymbol (..), + Position (..), Range (..)) +import qualified Test.Hls as T +import Utils + +testSymbols :: (T.HasCallStack) => T.TestName -> FilePath -> [DocumentSymbol] -> T.TestTree +testSymbols testName path expectedSymbols = + runCabalTestCaseSession testName "outline-cabal" $ do + docId <- T.openDoc path "cabal" + symbols <- T.getDocumentSymbols docId + T.liftIO $ symbols T.@?= Right expectedSymbols + +outlineTests :: T.TestTree +outlineTests = + T.testGroup + "Cabal Outline Tests" + [ testSymbols + "cabal Field outline test" + "field.cabal" + [fieldDocumentSymbol] + , testSymbols + "cabal FieldLine outline test" + "fieldline.cabal" + [fieldLineDocumentSymbol] + , testSymbols + "cabal Section outline test" + "section.cabal" + [sectionDocumentSymbol] + , testSymbols + "cabal SectionArg outline test" + "sectionarg.cabal" + [sectionArgDocumentSymbol] + ] + where + fieldDocumentSymbol :: DocumentSymbol + fieldDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 0} + , _end = Position{_line = 0, _character = 8} }) + ) + { _name = "homepage" + , _kind = T.SymbolKind_Field + , _children = Nothing + } + fieldLineDocumentSymbol :: DocumentSymbol + fieldLineDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 0} + , _end = Position{_line = 0, _character = 13} }) + ) + { _name = "cabal-version" + , _kind = T.SymbolKind_Field + , _children = Nothing -- the values of fieldLine are removed from the outline + } + sectionDocumentSymbol :: DocumentSymbol + sectionDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 2} + , _end = Position{_line = 0, _character = 15} }) + ) + { _name = "build-depends" + , _kind = T.SymbolKind_Field + , _children = Nothing -- the values of fieldLine are removed from the outline + } + sectionArgDocumentSymbol :: DocumentSymbol + sectionArgDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 0, _character = 2} + , _end = Position{_line = 0, _character = 19} }) + ) + { _name = "if os ( windows )" + , _kind = T.SymbolKind_Object + , _children = Just $ [sectionArgChildrenDocumentSymbol] + } + sectionArgChildrenDocumentSymbol :: DocumentSymbol + sectionArgChildrenDocumentSymbol = + ( defDocumentSymbol + ( Range { _start = Position{_line = 1, _character = 4} + , _end = Position{_line = 1, _character = 17} }) + ) + { _name = "build-depends" + , _kind = T.SymbolKind_Field + , _children = Nothing + } + 
+defDocumentSymbol :: Range -> DocumentSymbol +defDocumentSymbol range = + DocumentSymbol + { _detail = Nothing + , _deprecated = Nothing + , _name = "" + , _kind = T.SymbolKind_File + , _range = range + , _selectionRange = range + , _children = Nothing + , _tags = Nothing + } diff --git a/plugins/hls-cabal-plugin/test/Utils.hs b/plugins/hls-cabal-plugin/test/Utils.hs new file mode 100644 index 0000000000..0264fec2c6 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/Utils.hs @@ -0,0 +1,93 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +module Utils where + +import Control.Monad (guard) +import Data.List (sort) +import Data.Proxy (Proxy (Proxy)) +import qualified Data.Text as T +import Ide.Plugin.Cabal (descriptor, haskellInteractionDescriptor) +import qualified Ide.Plugin.Cabal +import Ide.Plugin.Cabal.Completion.Types +import System.FilePath +import Test.Hls +import Test.Hls.FileSystem (VirtualFileTree) + + +cabalPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log +cabalPlugin = mkPluginTestDescriptor descriptor "cabal" + +cabalHaskellPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log +cabalHaskellPlugin = mkPluginTestDescriptor haskellInteractionDescriptor "cabal-haskell" + +simpleCabalPrefixInfoFromPos :: Position -> T.Text -> CabalPrefixInfo +simpleCabalPrefixInfoFromPos pos prefix = + CabalPrefixInfo + { completionPrefix = prefix + , completionCursorPosition = pos + , isStringNotation = Nothing + , completionRange = Range pos (Position 0 0) + , completionWorkingDir = "" + , completionFileName = "test" + } + +simpleCabalPrefixInfoFromFp :: T.Text -> FilePath -> CabalPrefixInfo +simpleCabalPrefixInfoFromFp prefix fp = + CabalPrefixInfo + { completionPrefix = prefix + , isStringNotation = Nothing + , completionCursorPosition = Position 0 0 + , completionRange = Range (Position 0 0) (Position 0 0) + , completionWorkingDir = fp + , completionFileName = "test" + } + +filePathComplTestDir :: FilePath +filePathComplTestDir = addTrailingPathSeparator $ testDataDir </> "filepath-completions" + +runCabalTestCaseSession :: TestName -> FilePath -> Session () -> TestTree +runCabalTestCaseSession title subdir = testCase title . runCabalSession subdir + +runHaskellTestCaseSession :: TestName -> FilePath -> Session () -> TestTree +runHaskellTestCaseSession title subdir = testCase title . runHaskellAndCabalSession subdir + +runCabalSession :: FilePath -> Session a -> IO a +runCabalSession subdir = + failIfSessionTimeout . runSessionWithServer def cabalPlugin (testDataDir </> subdir) + +runCabalTestCaseSessionVft :: TestName -> VirtualFileTree -> Session () -> TestTree +runCabalTestCaseSessionVft title vft = testCase title . runCabalSessionVft vft + +runCabalSessionVft :: VirtualFileTree -> Session a -> IO a +runCabalSessionVft vft = + failIfSessionTimeout . runSessionWithServerInTmpDir def cabalPlugin vft + +runHaskellAndCabalSession :: FilePath -> Session a -> IO a +runHaskellAndCabalSession subdir = + failIfSessionTimeout .
runSessionWithServer def (cabalPlugin <> cabalHaskellPlugin) (testDataDir </> subdir) + +runCabalGoldenSession :: TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +runCabalGoldenSession title subdir fp act = goldenWithCabalDoc def cabalPlugin title testDataDir (subdir </> fp) "golden" "cabal" act + +testDataDir :: FilePath +testDataDir = "plugins" </> "hls-cabal-plugin" </> "test" </> "testdata" + +-- | These functions are used to detect cabal kicks +-- and look at diagnostics for cabal files. +-- Kicks are run every time there is a shake session run/restart. +cabalKickDone :: Session () +cabalKickDone = kick (Proxy @"kick/done/cabal") >>= guard . not . null + +cabalKickStart :: Session () +cabalKickStart = kick (Proxy @"kick/start/cabal") >>= guard . not . null + +cabalCaptureKick :: Session [Diagnostic] +cabalCaptureKick = captureKickDiagnostics cabalKickStart cabalKickDone + +-- | list comparison where the order in the list is irrelevant +(@?==) :: (HasCallStack, Ord a, Show a) => [a] -> [a] -> Assertion +(@?==) l1 l2 = sort l1 @?= sort l2 + diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs new file mode 100644 index 0000000000..c2e4af9606 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = undefined diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project @@ -0,0 +1 @@ +packages: . diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal new file mode 100644 index 0000000000..bb6dc95f2f --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal @@ -0,0 +1,26 @@ +cabal-version: 3.0 +name: test +version: 0.1.0.0 +build-type: Simple + +library + hs-source-dirs: . + exposed-modules: + build-depends: base + default-language: Haskell2010 + +executable test + main-is: bla + build-depends: base + +benchmark test1 + type: exitcode-stdio-1.0 + main-is: bla + build-depends: base + +test-suite test2 + type: exitcode-stdio-1.0 + main-is: bla + build-depends: base + +library test3 diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/bench/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/bench/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/bench/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented."
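To make the intent of the kick helpers in `Utils.hs` above concrete, here is a minimal usage sketch (not part of the patch). The module name, the test name, and the expectation that a well-formed cabal file yields no diagnostics are illustrative assumptions; only `runCabalTestCaseSession`, `cabalCaptureKick`, and the `simple-cabal` test data directory come from the patch itself.

```haskell
{-# LANGUAGE OverloadedStrings #-}

-- Hypothetical example module; it only reuses helpers introduced in this patch.
module KickExample (kickDiagnosticsExample) where

import Test.Hls
import Utils

-- | Open a cabal file and collect every diagnostic published between the
-- @kick/start/cabal@ and @kick/done/cabal@ notifications of one reload cycle.
kickDiagnosticsExample :: TestTree
kickDiagnosticsExample =
  runCabalTestCaseSession "capture diagnostics of one cabal kick" "simple-cabal" $ do
    _ <- openDoc "simple-cabal.cabal" "cabal"
    diags <- cabalCaptureKick
    -- Illustrative assumption: a well-formed cabal file produces no diagnostics.
    liftIO $ assertBool "expected no diagnostics" (null diags)
```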
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/cabal-add-bench.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/cabal-add-bench.cabal new file mode 100644 index 0000000000..b58a6d3302 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-bench/cabal-add-bench.cabal @@ -0,0 +1,17 @@ +cabal-version: 2.4 +name: cabal-add-bench +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +benchmark benchmark + type: exitcode-stdio-1.0 + ghc-options: -threaded + main-is: Main.hs + hs-source-dirs: bench + build-depends: base diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/cabal-add-exe.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/cabal-add-exe.cabal new file mode 100644 index 0000000000..a3499bbf97 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/cabal-add-exe.cabal @@ -0,0 +1,15 @@ +cabal-version: 2.4 +name: cabal-add-exe +version: 0.1.0.0 +build-type: Simple + +executable cabal-add-exe + main-is: Main.hs + hs-source-dirs: src + ghc-options: -Wall + build-depends: base + default-language: Haskell2010 + +library + build-depends: base >= 4 && < 5 + ghc-options: -Wall diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/src/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/src/Main.hs new file mode 100644 index 0000000000..0bf3e99dae --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-exe/src/Main.hs @@ -0,0 +1,5 @@ +module Main where + +import Data.List.Split + +main = putStrLn "Hello, Haskell!" \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/cabal-add-lib.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/cabal-add-lib.cabal new file mode 100644 index 0000000000..b00b45bb6b --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/cabal-add-lib.cabal @@ -0,0 +1,17 @@ +cabal-version: 2.4 +name: cabal-add-lib +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +library + import: warnings + exposed-modules: MyLib + build-depends: base + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/src/MyLib.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/src/MyLib.hs new file mode 100644 index 0000000000..bbb506d001 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-lib/src/MyLib.hs @@ -0,0 +1,6 @@ +module MyLib (someFunc) where + +import Data.List.Split + +someFunc :: IO () +someFunc = putStrLn "someFunc" diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/bench/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/bench/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/bench/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." 
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/cabal-add-multitarget.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/cabal-add-multitarget.cabal new file mode 100644 index 0000000000..677986768e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/cabal-add-multitarget.cabal @@ -0,0 +1,33 @@ +cabal-version: 2.4 +name: cabal-add-multitarget +version: 0.1.0.0 +build-type: Simple + +executable cabal-add-exe + main-is: Main.hs + hs-source-dirs: src + ghc-options: -Wall + build-depends: base + default-language: Haskell2010 + +library + exposed-modules: MyLib + other-modules: InternalLib + build-depends: base >= 4 && < 5 + hs-source-dirs: lib + ghc-options: -Wall + +test-suite cabal-add-tests-test + main-is: Main.hs + hs-source-dirs: test + type: exitcode-stdio-1.0 + build-depends: base + default-language: Haskell2010 + +benchmark benchmark + main-is: Main.hs + build-depends: base + hs-source-dirs: bench + type: exitcode-stdio-1.0 + ghc-options: -threaded + diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/InternalLib.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/InternalLib.hs new file mode 100644 index 0000000000..5a3dd79258 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/InternalLib.hs @@ -0,0 +1,6 @@ +module InternalLib (internalFunc) where + +import Data.List.Split + +internalFunc :: IO () +internalFunc = putStrLn "internalFunc" diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/MyLib.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/MyLib.hs new file mode 100644 index 0000000000..bbb506d001 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/lib/MyLib.hs @@ -0,0 +1,6 @@ +module MyLib (someFunc) where + +import Data.List.Split + +someFunc :: IO () +someFunc = putStrLn "someFunc" diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/src/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/src/Main.hs new file mode 100644 index 0000000000..0bf3e99dae --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/src/Main.hs @@ -0,0 +1,5 @@ +module Main where + +import Data.List.Split + +main = putStrLn "Hello, Haskell!" \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/test/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/test/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-multitarget/test/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." 
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/cabal-add-packageYaml.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/cabal-add-packageYaml.cabal new file mode 100644 index 0000000000..3ac549aa60 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/cabal-add-packageYaml.cabal @@ -0,0 +1,17 @@ +cabal-version: 2.4 +name: cabal-add-packageYaml +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +benchmark benchmark-packageYaml + type: exitcode-stdio-1.0 + ghc-options: -threaded + main-is: Main.hs + hs-source-dirs: bench + build-depends: base diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/package.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/package.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/src/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/src/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-packageYaml/src/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal new file mode 100644 index 0000000000..9adc498231 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/cabal-add-tests.cabal @@ -0,0 +1,26 @@ +cabal-version: 2.4 +name: cabal-add-tests +version: 0.1.0.0 +license: NONE +author: George Gerasev +maintainer: george30032002@gmail.com +build-type: Simple + +common warnings + ghc-options: -Wall + +test-suite cabal-add-tests-test + import: warnings + default-language: Haskell2010 + type: exitcode-stdio-1.0 + hs-source-dirs: test + main-is: Main.hs + build-depends: base + +test-suite cabal-add-tests-test-package-imports + import: warnings + default-language: Haskell2010 + type: exitcode-stdio-1.0 + hs-source-dirs: test + main-is: MainPackageImports.hs + build-depends: base diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/Main.hs new file mode 100644 index 0000000000..e5c42398f2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/Main.hs @@ -0,0 +1,6 @@ +module Main (main) where + +import Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs new file mode 100644 index 0000000000..753dd165dd --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal-add-tests/test/MainPackageImports.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE PackageImports #-} + +module Main (main) where + +import "split" Data.List.Split + +main :: IO () +main = putStrLn "Test suite not yet implemented." 
diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal.project b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal.project new file mode 100644 index 0000000000..21eb1f63eb --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/cabal.project @@ -0,0 +1,6 @@ +packages: cabal-add-exe + cabal-add-lib + cabal-add-tests + cabal-add-bench + cabal-add-multitarget + cabal-add-packageYaml diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/hie.yaml new file mode 100644 index 0000000000..f0c7014d7f --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-testdata/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.cabal b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.cabal new file mode 100644 index 0000000000..e32f77b614 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.cabal @@ -0,0 +1,36 @@ +cabal-version: 3.0 +name: FieldSuggestions +version: 0.1.0 +licens: BSD-3-Clause + +buil-type: Simple + +extra-doc-fils: + ChangeLog + +-- Default warnings in HLS +common warnings + ghc-option: -Wall + -Wredundant-constraints + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + +source-repository head + type: git + loc: fake + +library + default-lang: Haskell2010 + -- Import isn't supported right now. + impor: warnings + build-dep: base + +executable my-exe + mains: Main.hs + +test-suite Test + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-drs: + diff --git a/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.golden.cabal b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.golden.cabal new file mode 100644 index 0000000000..99bf84dfd7 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/code-actions/FieldSuggestions.golden.cabal @@ -0,0 +1,36 @@ +cabal-version: 3.0 +name: FieldSuggestions +version: 0.1.0 +license: BSD-3-Clause + +build-type: Simple + +extra-doc-files: + ChangeLog + +-- Default warnings in HLS +common warnings + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + -Wno-name-shadowing + -Wno-unticked-promoted-constructors + +source-repository head + type: git + location: fake + +library + default-language: Haskell2010 + -- Import isn't supported right now. 
+ import: warnings + build-depends: base + +executable my-exe + main-is: Main.hs + +test-suite Test + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-dirs: + diff --git a/plugins/hls-cabal-plugin/test/testdata/completer.cabal b/plugins/hls-cabal-plugin/test/testdata/completer.cabal new file mode 100644 index 0000000000..141bdd7d2d --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/completer.cabal @@ -0,0 +1,14 @@ +cabal-version: 3.4 +name: test-hls +version: 0.1.0.0 +maintainer: milky +synopsis: example cabal file :) +license: Apache-2.0 +build-type: Simple + +be + +library + lib + +co \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal b/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal new file mode 100644 index 0000000000..dd5c86d339 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/completion/autogen-completion.cabal @@ -0,0 +1,25 @@ +cabal-version: 3.0 +name: autogen-completion +version: 0.1.0.0 + +library + hs-source-dirs: src + autogen- + +executable autoexe + main-is: Main.hs + hs-source-dirs: src + autogen- + +test-suite autotest + type: exitcode-stdio-1.0 + hs-source-dirs: src + autogen- + +benchmark autobench + type: exitcode-stdio-1.0 + hs-source-dirs: src + autogen- + +common defaults + autogen- diff --git a/plugins/hls-cabal-plugin/test/testdata/exposed.cabal b/plugins/hls-cabal-plugin/test/testdata/exposed.cabal new file mode 100644 index 0000000000..7237979fc2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/exposed.cabal @@ -0,0 +1,40 @@ +cabal-version: 3.4 +name: test-hls +version: 0.1.0.0 +maintainer: milky +category: Dev +synopsis: dsasd +license: MIT +license-file: ./LICENSE.md + +library + hs-source-dirs: ./src-modules/Dir1/ + exposed-modules: + build-depends: base + default-language: Haskell2010 + +benchmark benchie + type: exitcode-stdio-1.0 + main-is: Main.hs + build-depends: base + hs-source-dirs: ./src-modules/ + exposed-modules: + +executable executie + main-is: Main.hs + build-depends: base + hs-source-dirs: ./src-modules/ ./src-modules/Dir1/ + exposed-modules: + +executable exe-not-so-cutie + main-is: Main.hs + build-depends: base + hs-source-dirs: ./src-modules/Dir1/Dir2/ ./src-modules/Dir1/Dir3 + exposed-modules: + +test-suite suitor + type: exitcode-stdio-1.0 + main-is: Main.hs + build-depends: base + hs-source-dirs: "./src-modules/Dir1/Dir3/Dir4" + exposed-modules: diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/.hidden b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/.hidden new file mode 100644 index 0000000000..82df2e0fff --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/.hidden @@ -0,0 +1 @@ +test hidden file diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/Content.hs b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/Content.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir1/f1.txt b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir1/f1.txt new file mode 100644 index 0000000000..016496005a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir1/f1.txt @@ -0,0 +1 @@ +test text file diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir1/f2.hs b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir1/f2.hs new file mode 100644 index 0000000000..6c5963631f --- /dev/null +++ 
b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir1/f2.hs @@ -0,0 +1 @@ +-- test haskell file diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir2/dir3/MARKDOWN.md b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir2/dir3/MARKDOWN.md new file mode 100644 index 0000000000..95c3d0e549 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/dir2/dir3/MARKDOWN.md @@ -0,0 +1 @@ +test markdown file diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/main-is.cabal b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/main-is.cabal new file mode 100644 index 0000000000..777f6ef769 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/main-is.cabal @@ -0,0 +1,11 @@ +cabal-version: 3.4 +name: test-hls +version: 0.1.0.0 +maintainer: milky +synopsis: example cabal file :) +license: Apache-2.0 +build-type: Simple + +executable exe + hs-source-dirs: ./dir1/ + main-is: diff --git a/plugins/hls-cabal-plugin/test/testdata/filepath-completions/textfile.txt b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/textfile.txt new file mode 100644 index 0000000000..016496005a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/filepath-completions/textfile.txt @@ -0,0 +1 @@ +test text file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/common-section/simple-with-common.cabal b/plugins/hls-cabal-plugin/test/testdata/goto-definition/common-section/simple-with-common.cabal new file mode 100644 index 0000000000..c71e369b30 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/common-section/simple-with-common.cabal @@ -0,0 +1,62 @@ +cabal-version: 3.0 +name: simple-cabal +version: 0.1.0.0 +license: MIT + +-- Range : (6, 0) - (7, 22) +common warnings-0 + ghc-options: -Wall + +-- Range : (10, 0) - (17, 40) +common warnings-1 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + + -Wno-name-shadowing + + -Wno-unticked-promoted-constructors + +-- Range : (20, 0) - (23, 34) +common warnings-2 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + +library + + import: warnings-0 +-- ^ Position: (27, 16), middle of identifier + + import: warnings-1 +-- ^ Position: (30, 12), left of identifier + + import: warnings-2 +-- ^ Position: (33, 22), right of identifier + + import: warnings-0 +-- ^ Position: (36, 20), left of '-' in identifier + + import: warnings-1 +-- ^ Position: (39, 19), right of "-" in identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (42, 16), identifier in identifier list + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (45, 33), left of ',' right of identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (48, 34), right of ',' left of identifier + + import: warnings-2, warnings-1,warnings-0 +-- ^ Position: (51, 37), right of ',' left of space + + import: warnings-0 +-- ^ Position: (54, 11), right of ':' left of space + + import: warnings-0 +-- ^ Position: (57, 8), not a definition + + -- EOL +-- ^ Position: (59, 7), empty space \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/module-examples.cabal b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/module-examples.cabal new file mode 100644 index 0000000000..24c2bb854e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/module-examples.cabal @@ -0,0 +1,51 @@ +cabal-version: 3.0 +name: 
module-examples +version: 0.1.0.0 + + +library + exposed-modules: Library.Lib +-- ^ Position: (6, 22) +-- ^ Position: (6, 33) + other-modules: Library.Other.OtherLib +-- ^ Position: (9, 22) +-- ^ Position: (9, 44) + + build-depends: base + hs-source-dirs: src + default-language: Haskell2010 + +executable exec + hs-source-dirs: src/exe + main-is: Main.hs + build-depends: base + other-modules: + Config +-- ^ Position: (22, 8) +-- ^ Position: (22, 14) + +test-suite module-examples-test + type: exitcode-stdio-1.0 + hs-source-dirs: src/test + main-is: Main.hs + other-modules: + Config +-- ^ Position: (31, 8) +-- ^ Position: (31, 14) + Library +-- ^ Position: (34, 8) +-- ^ Position: (34, 15) + build-depends: base + +benchmark benchmark + type: exitcode-stdio-1.0 + main-is: Main.hs + hs-source-dirs: src/bench + build-depends: base + other-modules: + Config +-- ^ Position: (45, 28) +-- ^ Position: (45, 34) + NotExistent +-- ^ Position: (48, 19) +-- ^ Position: (48, 30) diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Lib.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Lib.hs new file mode 100644 index 0000000000..e2cde3780b --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Lib.hs @@ -0,0 +1 @@ +module Library.Lib where diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Other/OtherLib.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Other/OtherLib.hs new file mode 100644 index 0000000000..625be777dc --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/Library/Other/OtherLib.hs @@ -0,0 +1 @@ +module Library.Other.OtherLib where diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Config.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Config.hs new file mode 100644 index 0000000000..6ea268c214 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Config.hs @@ -0,0 +1 @@ +module Config where diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Main.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Main.hs new file mode 100644 index 0000000000..6640b61707 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/bench/Main.hs @@ -0,0 +1,3 @@ +module Main where + +main = undefined \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Config.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Config.hs new file mode 100644 index 0000000000..3a2489708e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Config.hs @@ -0,0 +1 @@ +module Confing where \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Main.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Main.hs new file mode 100644 index 0000000000..6640b61707 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/exe/Main.hs @@ -0,0 +1,3 @@ +module Main where + +main = undefined \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Config.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Config.hs new file mode 100644 index 
0000000000..39e39fc16a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Config.hs @@ -0,0 +1 @@ +module Config where \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Library.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Library.hs new file mode 100644 index 0000000000..7899749de8 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Library.hs @@ -0,0 +1 @@ +module Library where \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Main.hs b/plugins/hls-cabal-plugin/test/testdata/goto-definition/modules/src/test/Main.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/goto-definition/simple-with-common.cabal b/plugins/hls-cabal-plugin/test/testdata/goto-definition/simple-with-common.cabal new file mode 100644 index 0000000000..c71e369b30 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/goto-definition/simple-with-common.cabal @@ -0,0 +1,62 @@ +cabal-version: 3.0 +name: simple-cabal +version: 0.1.0.0 +license: MIT + +-- Range : (6, 0) - (7, 22) +common warnings-0 + ghc-options: -Wall + +-- Range : (10, 0) - (17, 40) +common warnings-1 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + + -Wno-name-shadowing + + -Wno-unticked-promoted-constructors + +-- Range : (20, 0) - (23, 34) +common warnings-2 + ghc-options: -Wall + -Wredundant-constraints + -Wunused-packages + +library + + import: warnings-0 +-- ^ Position: (27, 16), middle of identifier + + import: warnings-1 +-- ^ Position: (30, 12), left of identifier + + import: warnings-2 +-- ^ Position: (33, 22), right of identifier + + import: warnings-0 +-- ^ Position: (36, 20), left of '-' in identifier + + import: warnings-1 +-- ^ Position: (39, 19), right of "-" in identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (42, 16), identifier in identifier list + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (45, 33), left of ',' right of identifier + + import: warnings-2,warnings-1,warnings-0 +-- ^ Position: (48, 34), right of ',' left of identifier + + import: warnings-2, warnings-1,warnings-0 +-- ^ Position: (51, 37), right of ',' left of space + + import: warnings-0 +-- ^ Position: (54, 11), right of ':' left of space + + import: warnings-0 +-- ^ Position: (57, 8), not a definition + + -- EOL +-- ^ Position: (59, 7), empty space \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..95d800026a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/hover/hover-deps.cabal b/plugins/hls-cabal-plugin/test/testdata/hover/hover-deps.cabal new file mode 100644 index 0000000000..ddc4a6107a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/hover/hover-deps.cabal @@ -0,0 +1,10 @@ +cabal-version: 3.0 +name: hover-deps +version: 0.1.0.0 + +library + exposed-modules: Module + build-depends: base ^>=4.14.3.0 + , aeson==1.0.0.0 , lens + hs-source-dirs: src + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/invalid.cabal b/plugins/hls-cabal-plugin/test/testdata/invalid.cabal new file mode 100644 index 
0000000000..26f9b8f2d6 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/invalid.cabal @@ -0,0 +1,8 @@ +cabal-version: 3.0 +name: invalid +version: 0.1.0.0 +license: BSD3 + +library + build-depends: base + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/licenseCodeAction.cabal b/plugins/hls-cabal-plugin/test/testdata/licenseCodeAction.cabal new file mode 100644 index 0000000000..d1bbf8b5c2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/licenseCodeAction.cabal @@ -0,0 +1,8 @@ +cabal-version: 3.0 +name: licenseCodeAction +version: 0.1.0.0 +license: BSD3 + +library + build-depends: base + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/licenseCodeAction2.cabal b/plugins/hls-cabal-plugin/test/testdata/licenseCodeAction2.cabal new file mode 100644 index 0000000000..6f8a886ba1 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/licenseCodeAction2.cabal @@ -0,0 +1,8 @@ +cabal-version: 3.0 +name: licenseCodeAction2 +version: 0.1.0.0 +license: APAHE + +library + build-depends: base + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/field.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/field.cabal new file mode 100644 index 0000000000..c3e3d80df2 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/field.cabal @@ -0,0 +1 @@ +homepage: \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/fieldline.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/fieldline.cabal new file mode 100644 index 0000000000..998369e5f1 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/fieldline.cabal @@ -0,0 +1 @@ +cabal-version: 3.0 diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/section.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/section.cabal new file mode 100644 index 0000000000..8a140c7517 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/section.cabal @@ -0,0 +1,2 @@ + build-depends: + base >=4.16 && <5 \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/outline-cabal/sectionarg.cabal b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/sectionarg.cabal new file mode 100644 index 0000000000..060d067377 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/outline-cabal/sectionarg.cabal @@ -0,0 +1,2 @@ + if os(windows) + build-depends: Win32 \ No newline at end of file diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-cabal/A.hs b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/A.hs new file mode 100644 index 0000000000..c72a91d81a --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/A.hs @@ -0,0 +1,4 @@ +module A where + +-- definitions don't matter here. +foo = 1 diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-cabal/cabal.project b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/cabal.project @@ -0,0 +1 @@ +packages: . 
diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-cabal/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-cabal/simple-cabal.cabal b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/simple-cabal.cabal new file mode 100644 index 0000000000..48ac100d3d --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-cabal/simple-cabal.cabal @@ -0,0 +1,10 @@ +cabal-version: 3.0 +name: simple-cabal +version: 0.1.0.0 +license: MIT + +library + build-depends: base + hs-source-dirs: . + exposed-modules: A + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs b/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs new file mode 100644 index 0000000000..5f0cdfad80 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs @@ -0,0 +1,9 @@ +module Main where + +import Data.List -- Intentionally unused import, used in the testcase + +main :: IO () +main = foo + +-- Missing signature +foo = putStrLn "Hello, World" diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project b/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project @@ -0,0 +1 @@ +packages: . diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal b/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal new file mode 100644 index 0000000000..359940aebc --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal @@ -0,0 +1,14 @@ +cabal-version: 3.4 +name: simple-reload +version: 0.1.0.0 +-- copyright: +build-type: Simple + +common warnings + ghc-options: -Wall -Wno-unused-imports + +executable simple-reload + import: warnings + main-is: Main.hs + build-depends: base + default-language: Haskell2010 diff --git a/plugins/hls-cabal-plugin/test/testdata/simple.cabal b/plugins/hls-cabal-plugin/test/testdata/simple.cabal new file mode 100644 index 0000000000..1adb3b2795 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/simple.cabal @@ -0,0 +1,24 @@ +cabal-version: 3.0 +name: hls-cabal-plugin +version: 0.1.0.0 +synopsis: +homepage: +license: MIT +license-file: LICENSE +author: Fendor +maintainer: fendor@posteo.de +category: Development +extra-source-files: CHANGELOG.md + +library + exposed-modules: IDE.Plugin.Cabal + build-depends: base ^>=4.14.3.0 + hs-source-dirs: src + default-language: Haskell2010 + +test-suite hls-cabal-plugin-test + default-language: Haskell2010 + type: exitcode-stdio-1.0 + hs-source-dirs: test + main-is: Main.hs + build-depends: base ^>=4.14.3.0 diff --git a/plugins/hls-cabal-plugin/test/testdata/src-modules/Dir1/Dir2/File2.hs b/plugins/hls-cabal-plugin/test/testdata/src-modules/Dir1/Dir2/File2.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/src-modules/Dir1/Dir3/Dir4/File3.hs 
b/plugins/hls-cabal-plugin/test/testdata/src-modules/Dir1/Dir3/Dir4/File3.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/src-modules/File1.hs b/plugins/hls-cabal-plugin/test/testdata/src-modules/File1.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/src-modules/test.cabal b/plugins/hls-cabal-plugin/test/testdata/src-modules/test.cabal new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-cabal-plugin/test/testdata/unsupportedVersion.cabal b/plugins/hls-cabal-plugin/test/testdata/unsupportedVersion.cabal new file mode 100644 index 0000000000..328d373cd8 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/unsupportedVersion.cabal @@ -0,0 +1,3 @@ +cabal-version: 99999.0 +name: invalid +version: 0.1.0.0 \ No newline at end of file diff --git a/plugins/hls-call-hierarchy-plugin/README.md b/plugins/hls-call-hierarchy-plugin/README.md new file mode 100644 index 0000000000..ae2d3fdf95 --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/README.md @@ -0,0 +1,47 @@ +# Call hierarchy plugin for the [Haskell Language Server](https://github.com/haskell/haskell-language-server#readme) + +The call hierarchy plugin lets you explore where functions are called and how they relate to other functions. + +This plugin is useful when debugging and refactoring code because it allows you to see how different parts of the code are related. It also helps you quickly grasp the overall architecture of unfamiliar code. + +## Demo + +![Call Hierarchy in Emacs](call-hierarchy-in-emacs.gif) + +![Call Hierarchy in VSCode](call-hierarchy-in-vscode.gif) + +## Prerequisites +None. The feature works out of the box without any additional setup. + +## Configuration +Enabled by default. You can disable it in your editor settings whenever you like. + +```json +{ + "haskell.plugin.callHierarchy.globalOn": true +} +``` + +## Change log +### 1.1.0.0 +- Support ghc-9.4. +- Refactor code base and enforce four-space indentation. +### 1.0.3.0 +Remove forced `HieDb` update logic in queries. +### 1.0.1.0 +- Support call from a type signature. +- Support call from a function pattern. +- Incoming calls now go to the typeclass instance instead of its definition. +### 1.0.0.1 +- Support call hierarchy on type signatures. +### 1.0.0.0 +- Released! + +## Known issues +- Outgoing calls have difficulty going to typeclass instances because `HieDb` lacks adequate info.
+ +## Acknowledgments +Supported by + +* [Google Summer of Code](https://summerofcode.withgoogle.com/) +* Warm and timely help from mentors [@jneira](https://github.com/jneira) and [@pepeiborra](https://github.com/pepeiborra) diff --git a/plugins/hls-call-hierarchy-plugin/call-hierarchy-in-emacs.gif b/plugins/hls-call-hierarchy-plugin/call-hierarchy-in-emacs.gif new file mode 100644 index 0000000000..545baf1555 Binary files /dev/null and b/plugins/hls-call-hierarchy-plugin/call-hierarchy-in-emacs.gif differ diff --git a/plugins/hls-call-hierarchy-plugin/call-hierarchy-in-vscode.gif b/plugins/hls-call-hierarchy-plugin/call-hierarchy-in-vscode.gif new file mode 100644 index 0000000000..2f4ddc64bd Binary files /dev/null and b/plugins/hls-call-hierarchy-plugin/call-hierarchy-in-vscode.gif differ diff --git a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy.hs b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy.hs new file mode 100644 index 0000000000..165a51013a --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.CallHierarchy (descriptor) where + +import Development.IDE +import qualified Ide.Plugin.CallHierarchy.Internal as X +import Ide.Types +import Language.LSP.Protocol.Message + +descriptor :: PluginId -> PluginDescriptor IdeState +descriptor plId = (defaultPluginDescriptor plId "Provides call-hierarchy support in Haskell") + { Ide.Types.pluginHandlers = + mkPluginHandler SMethod_TextDocumentPrepareCallHierarchy X.prepareCallHierarchy + <> mkPluginHandler SMethod_CallHierarchyIncomingCalls X.incomingCalls + <> mkPluginHandler SMethod_CallHierarchyOutgoingCalls X.outgoingCalls + } diff --git a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs new file mode 100644 index 0000000000..b897fa5abb --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs @@ -0,0 +1,281 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.CallHierarchy.Internal ( + prepareCallHierarchy +, incomingCalls +, outgoingCalls +) where + +import Control.Lens (Lens', (^.)) +import Control.Monad.IO.Class +import Data.Aeson as A +import Data.Functor ((<&>)) +import Data.List (groupBy, sortBy) +import qualified Data.Map as M +import Data.Maybe +import Data.Ord (comparing) +import qualified Data.Set as S +import qualified Data.Text as T +import Data.Tuple.Extra +import Development.IDE +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat as Compat +import Development.IDE.Spans.AtPoint +import GHC.Iface.Ext.Types (ContextInfo (..), + DeclType (..), HieAST (..), + HieASTs (..), Identifier, + IdentifierDetails (..), + RecFieldContext (..), Span) +import GHC.Iface.Ext.Utils (getNameBinding) +import HieDb (Symbol (Symbol)) +import qualified Ide.Plugin.CallHierarchy.Query as Q +import Ide.Plugin.CallHierarchy.Types +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Prelude hiding (mod, span) +import Text.Read (readMaybe) + +-- | Render prepare call hierarchy request. 
+prepareCallHierarchy :: PluginMethodHandler IdeState Method_TextDocumentPrepareCallHierarchy +prepareCallHierarchy state _ param = do + nfp <- getNormalizedFilePathE (param ^. (L.textDocument . L.uri)) + items <- liftIO + $ runAction "CallHierarchy.prepareHierarchy" state + $ prepareCallHierarchyItem nfp (param ^. L.position) + pure $ InL items + +prepareCallHierarchyItem :: NormalizedFilePath -> Position -> Action [CallHierarchyItem] +prepareCallHierarchyItem nfp pos = use GetHieAst nfp <&> \case + Nothing -> mempty + Just (HAR _ hf _ _ _) -> prepareByAst hf pos nfp + +prepareByAst :: HieASTs a -> Position -> NormalizedFilePath -> [CallHierarchyItem] +prepareByAst hf pos nfp = + case listToMaybe $ pointCommand hf pos extract of + Nothing -> mempty + Just infos -> mapMaybe (construct nfp hf) infos + +extract :: HieAST a -> [(Identifier, [ContextInfo], Span)] +extract ast = let span = nodeSpan ast + infos = M.toList $ M.map (S.toList . identInfo) (Compat.getNodeIds ast) + in [(ident, contexts, span) | (ident, contexts) <- infos] + +recFieldInfo, declInfo, valBindInfo, classTyDeclInfo, + useInfo, patternBindInfo, tyDeclInfo, matchBindInfo :: [ContextInfo] -> Maybe ContextInfo +recFieldInfo ctxs = listToMaybe [ctx | ctx@RecField{} <- ctxs] +declInfo ctxs = listToMaybe [ctx | ctx@Decl{} <- ctxs] +valBindInfo ctxs = listToMaybe [ctx | ctx@ValBind{} <- ctxs] +classTyDeclInfo ctxs = listToMaybe [ctx | ctx@ClassTyDecl{} <- ctxs] +useInfo ctxs = listToMaybe [Use | Use <- ctxs] +patternBindInfo ctxs = listToMaybe [ctx | ctx@PatternBind{} <- ctxs] +tyDeclInfo ctxs = listToMaybe [TyDecl | TyDecl <- ctxs] +matchBindInfo ctxs = listToMaybe [MatchBind | MatchBind <- ctxs] + +construct :: NormalizedFilePath -> HieASTs a -> (Identifier, [ContextInfo], Span) -> Maybe CallHierarchyItem +construct nfp hf (ident, contexts, ssp) + | isInternalIdentifier ident = Nothing + + | Just (RecField RecFieldDecl _) <- recFieldInfo contexts + -- ignored type span + = Just $ mkCallHierarchyItem' ident SymbolKind_Field ssp ssp + + | isJust (matchBindInfo contexts) && isNothing (valBindInfo contexts) + = Just $ mkCallHierarchyItem' ident SymbolKind_Function ssp ssp + + | Just ctx <- valBindInfo contexts + = Just $ case ctx of + ValBind _ _ span -> mkCallHierarchyItem' ident SymbolKind_Function (renderSpan span) ssp + _ -> mkCallHierarchyItem' ident skUnknown ssp ssp + + | Just ctx <- declInfo contexts + = Just $ case ctx of + Decl ClassDec span -> mkCallHierarchyItem' ident SymbolKind_Interface (renderSpan span) ssp + Decl ConDec span -> mkCallHierarchyItem' ident SymbolKind_Constructor (renderSpan span) ssp + Decl DataDec span -> mkCallHierarchyItem' ident SymbolKind_Struct (renderSpan span) ssp + Decl FamDec span -> mkCallHierarchyItem' ident SymbolKind_Function (renderSpan span) ssp + Decl InstDec span -> mkCallHierarchyItem' ident SymbolKind_Interface (renderSpan span) ssp + Decl SynDec span -> mkCallHierarchyItem' ident SymbolKind_TypeParameter (renderSpan span) ssp + _ -> mkCallHierarchyItem' ident skUnknown ssp ssp + + | Just (ClassTyDecl span) <- classTyDeclInfo contexts + = Just $ mkCallHierarchyItem' ident SymbolKind_Method (renderSpan span) ssp + + | Just (PatternBind _ _ span) <- patternBindInfo contexts + = Just $ mkCallHierarchyItem' ident SymbolKind_Function (renderSpan span) ssp + + | Just _ <- useInfo contexts = Just $ mkCallHierarchyItem' ident SymbolKind_Interface ssp ssp + + | Just _ <- tyDeclInfo contexts = renderTyDecl + + | otherwise = Nothing + where + renderSpan (Just span) = span + renderSpan _ = 
ssp + + -- https://github.com/haskell/lsp/blob/e11b7c09658610f6d815d04db08a64e7cf6b4467/lsp-types/src/Language/LSP/Types/DocumentSymbol.hs#L97 + -- There is no longer an unknown symbol, thus using SymbolKind_Function + -- as this is the call-hierarchy plugin + skUnknown = SymbolKind_Function + + mkCallHierarchyItem' = mkCallHierarchyItem nfp + + isInternalIdentifier = \case + Left _ -> False + Right name -> isInternalName name + + renderTyDecl = case ident of + Left _ -> Nothing + Right name -> case getNameBinding name (getAsts hf) of + Nothing -> Nothing + Just sp -> listToMaybe $ prepareByAst hf (realSrcSpanToRange sp ^. L.start) nfp + +mkCallHierarchyItem :: NormalizedFilePath -> Identifier -> SymbolKind -> Span -> Span -> CallHierarchyItem +mkCallHierarchyItem nfp ident kind span selSpan = + CallHierarchyItem + (T.pack $ optimizeDisplay $ identifierName ident) + kind + Nothing + (Just $ T.pack $ identifierToDetail ident) + (fromNormalizedUri $ normalizedFilePathToUri nfp) + (realSrcSpanToRange span) + (realSrcSpanToRange selSpan) + (toJSON . show <$> mkSymbol ident) + where + identifierToDetail :: Identifier -> String + identifierToDetail = \case + Left modName -> moduleNameString modName + Right name -> (moduleNameString . moduleName . nameModule) name + + identifierName :: Identifier -> String + identifierName = \case + Left modName -> moduleNameString modName + Right name -> occNameString $ nameOccName name + + optimizeDisplay :: String -> String + optimizeDisplay name -- Optimize display for DuplicateRecordFields + | "$sel:" == take 5 name = drop 5 name + | otherwise = name + +mkSymbol :: Identifier -> Maybe Symbol +mkSymbol = \case + Left _ -> Nothing + Right name -> Just $ Symbol (occName name) (nameModule name) + +---------------------------------------------------------------------- +-------------- Incoming calls and outgoing calls --------------------- +---------------------------------------------------------------------- + +-- | Render incoming calls request. +incomingCalls :: PluginMethodHandler IdeState Method_CallHierarchyIncomingCalls +incomingCalls state _pluginId param = do + calls <- liftIO + $ runAction "CallHierarchy.incomingCalls" state + $ queryCalls + (param ^. L.item) + Q.incomingCalls + mkCallHierarchyIncomingCall + (mergeCalls CallHierarchyIncomingCall L.from) + pure $ InL calls + where + mkCallHierarchyIncomingCall :: Vertex -> Action (Maybe CallHierarchyIncomingCall) + mkCallHierarchyIncomingCall = mkCallHierarchyCall CallHierarchyIncomingCall + +-- | Render outgoing calls request. +outgoingCalls :: PluginMethodHandler IdeState Method_CallHierarchyOutgoingCalls +outgoingCalls state _pluginId param = do + calls <- liftIO + $ runAction "CallHierarchy.outgoingCalls" state + $ queryCalls + (param ^. L.item) + Q.outgoingCalls + mkCallHierarchyOutgoingCall + (mergeCalls CallHierarchyOutgoingCall L.to) + pure $ InL calls + where + mkCallHierarchyOutgoingCall :: Vertex -> Action (Maybe CallHierarchyOutgoingCall) + mkCallHierarchyOutgoingCall = mkCallHierarchyCall CallHierarchyOutgoingCall + +-- | Merge calls from the same place +mergeCalls :: + L.HasFromRanges s [Range] + => (CallHierarchyItem -> [Range] -> s) + -> Lens' s CallHierarchyItem + -> [s] + -> [s] +mergeCalls constructor target = + concatMap merge + . groupBy (\a b -> a ^. target == b ^. target) + . sortBy (comparing (^. target)) + where + merge [] = [] + merge calls@(call:_) = + let ranges = concatMap (^. L.fromRanges) calls + in [constructor (call ^. 
target) ranges] + +mkCallHierarchyCall :: (CallHierarchyItem -> [Range] -> a) -> Vertex -> Action (Maybe a) +mkCallHierarchyCall mk v@Vertex{..} = do + let pos = Position (fromIntegral $ sl - 1) (fromIntegral $ sc - 1) + nfp = toNormalizedFilePath' hieSrc + range = mkRange + (fromIntegral $ casl - 1) + (fromIntegral $ casc - 1) + (fromIntegral $ cael - 1) + (fromIntegral $ caec - 1) + + prepareCallHierarchyItem nfp pos >>= + \case + [item] -> pure $ Just $ mk item [range] + _ -> do + ShakeExtras{withHieDb} <- getShakeExtras + sps <- liftIO (withHieDb (`Q.getSymbolPosition` v)) + case sps of + (x:_) -> do + items <- prepareCallHierarchyItem + nfp + (Position (fromIntegral $ psl x - 1) (fromIntegral $ psc x - 1)) + case items of + [item] -> pure $ Just $ mk item [range] + _ -> pure Nothing + [] -> pure Nothing + +-- | Unified queries include incoming calls and outgoing calls. +queryCalls :: + CallHierarchyItem + -> (HieDb -> Symbol -> IO [Vertex]) + -> (Vertex -> Action (Maybe a)) + -> ([a] -> [a]) + -> Action [a] +queryCalls item queryFunc makeFunc merge + | Just nfp <- uriToNormalizedFilePath $ toNormalizedUri uri = do + ShakeExtras{withHieDb} <- getShakeExtras + maySymbol <- getSymbol nfp + case maySymbol of + Nothing -> pure mempty + Just symbol -> do + vs <- liftIO $ withHieDb (`queryFunc` symbol) + items <- catMaybes <$> mapM makeFunc vs + pure $ merge items + | otherwise = pure mempty + where + uri = item ^. L.uri + pos = item ^. (L.selectionRange . L.start) + + getSymbol nfp = case item ^. L.data_ of + Just xdata -> case fromJSON xdata of + A.Success (symbolStr :: String) -> maybe (getSymbolFromAst nfp pos) (pure . pure) $ readMaybe symbolStr + A.Error _ -> getSymbolFromAst nfp pos + Nothing -> getSymbolFromAst nfp pos -- Fallback if xdata lost, some editor(VSCode) will drop it + + getSymbolFromAst :: NormalizedFilePath -> Position -> Action (Maybe Symbol) + getSymbolFromAst nfp pos_ = use GetHieAst nfp <&> \case + Nothing -> Nothing + Just (HAR _ hf _ _ _) -> do + case listToMaybe $ pointCommand hf pos_ extract of + Just infos -> mkSymbol . fst3 =<< listToMaybe infos + Nothing -> Nothing diff --git a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Query.hs b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Query.hs new file mode 100644 index 0000000000..2303aa94b9 --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Query.hs @@ -0,0 +1,80 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.CallHierarchy.Query ( + incomingCalls +, outgoingCalls +, getSymbolPosition +) where + +import qualified Data.Text as T +import Database.SQLite.Simple +import Development.IDE.GHC.Compat +import HieDb (HieDb (getConn), Symbol (..)) +import Ide.Plugin.CallHierarchy.Types +import Prelude hiding (mod) + +incomingCalls :: HieDb -> Symbol -> IO [Vertex] +incomingCalls (getConn -> conn) symbol = do + let (o, m, u) = parseSymbol symbol + query conn + (Query $ T.pack $ concat + [ "SELECT mods.mod, decls.occ, mods.hs_src, decls.sl, decls.sc, " + , "decls.el, decls.ec, refs.sl, refs.sc, refs.el, refs.ec " + , "FROM refs " + , "JOIN decls ON decls.hieFile = refs.hieFile " + , "JOIN mods ON mods.hieFile = decls.hieFile " + , "where " + , "(refs.occ = ? AND refs.mod = ? AND refs.unit = ?) " + , "AND " + , "(decls.occ != ? OR mods.mod != ? OR mods.unit != ?) 
" + , "AND " + , "((refs.sl = decls.sl AND refs.sc > decls.sc) OR (refs.sl > decls.sl)) " + , "AND " + ,"((refs.el = decls.el AND refs.ec <= decls.ec) OR (refs.el < decls.el))" + ] + ) (o, m, u, o, m, u) + +outgoingCalls :: HieDb -> Symbol -> IO [Vertex] +outgoingCalls (getConn -> conn) symbol = do + let (o, m, u) = parseSymbol symbol + query conn + (Query $ T.pack $ concat + [ "SELECT rm.mod, defs.occ, rm.hs_src, defs.sl, defs.sc, defs.el, defs.ec, " + , "refs.sl, refs.sc, refs.el, refs.ec " + , "from refs " + , "JOIN defs ON defs.occ = refs.occ " + , "JOIN decls rd ON rd.hieFile = defs.hieFile AND rd.occ = defs.occ " + , "JOIN mods rm ON rm.mod = refs.mod AND rm.unit = refs.unit AND rm.hieFile = defs.hieFile " + , "JOIN decls ON decls.hieFile = refs.hieFile " + , "JOIN mods ON mods.hieFile = decls.hieFile " + , "where " + , "(decls.occ = ? AND mods.mod = ? AND mods.unit = ?) " + , "AND " + , "(defs.occ != ? OR rm.mod != ? OR rm.unit != ?) " + , "AND " + , "((refs.sl = decls.sl AND refs.sc > decls.sc) OR (refs.sl > decls.sl)) " + , "AND " + , "((refs.el = decls.el AND refs.ec <= decls.ec) OR (refs.el < decls.el))" + ] + ) (o, m, u, o, m, u) + +getSymbolPosition :: HieDb -> Vertex -> IO [SymbolPosition] +getSymbolPosition (getConn -> conn) Vertex{..} = do + query conn + (Query $ T.pack $ concat + [ "SELECT refs.sl, refs.sc from refs where " + , "(occ = ?) " + , "AND " + , "((refs.sl = ? AND refs.sc > ?) OR (refs.sl > ?)) " + , "AND " + , "((refs.el = ? AND refs.ec <= ?) OR (refs.el < ?))" + ] + ) (occ, sl, sc, sl, el, ec, el) + +parseSymbol :: Symbol -> (OccName, ModuleName, Unit) +parseSymbol Symbol{..} = + let o = symName + m = moduleName symModule + u = moduleUnit symModule + in (o, m, u) diff --git a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Types.hs b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Types.hs new file mode 100644 index 0000000000..a31f85fd45 --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Types.hs @@ -0,0 +1,47 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DuplicateRecordFields #-} + +module Ide.Plugin.CallHierarchy.Types where + +import Data.Aeson +import Database.SQLite.Simple +import Database.SQLite.Simple.ToField +import GHC.Generics + +data Vertex = Vertex { + mod :: String +, occ :: String +, hieSrc :: FilePath +, sl :: Int -- ^ start line +, sc :: Int -- ^ start character +, el :: Int -- ^ end line +, ec :: Int -- ^ end character +, casl :: Int -- ^ sl for call appear +, casc :: Int -- ^ sc for call appear +, cael :: Int -- ^ el for call appear +, caec :: Int -- ^ ec for call appear +} deriving (Eq, Show, Generic, FromJSON, ToJSON) + +instance ToRow Vertex where + toRow (Vertex a b c d e f g h i j k) = + [ toField a, toField b, toField c, toField d + , toField e, toField f, toField g, toField h + , toField i, toField j, toField k + ] + +instance FromRow Vertex where + fromRow = Vertex <$> field <*> field <*> field + <*> field <*> field <*> field + <*> field <*> field <*> field + <*> field <*> field + +data SymbolPosition = SymbolPosition { + psl :: Int +, psc :: Int +} deriving (Eq, Show, Generic, FromJSON, ToJSON) + +instance ToRow SymbolPosition where + toRow (SymbolPosition a b) = toRow (a, b) + +instance FromRow SymbolPosition where + fromRow = SymbolPosition <$> field <*> field diff --git a/plugins/hls-call-hierarchy-plugin/test/Main.hs b/plugins/hls-call-hierarchy-plugin/test/Main.hs new file mode 100644 index 0000000000..31dad633e6 --- /dev/null +++ 
b/plugins/hls-call-hierarchy-plugin/test/Main.hs @@ -0,0 +1,551 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main (main) where + +import Control.Lens (set, (^.)) +import Control.Monad.Extra +import qualified Data.Aeson as Aeson +import Data.Functor ((<&>)) +import Data.List (sort, tails) +import qualified Data.Map as M +import qualified Data.Text as T +import Development.IDE.Test +import Ide.Plugin.CallHierarchy +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Test as Test +import System.FilePath +import Test.Hls + +plugin :: PluginTestDescriptor () +plugin = mkPluginTestDescriptor' descriptor "call-hierarchy" + +main :: IO () +main = defaultTestRunner $ + testGroup "Call Hierarchy" + [ prepareCallHierarchyTests + , incomingCallsTests + , outgoingCallsTests + ] + +prepareCallHierarchyTests :: TestTree +prepareCallHierarchyTests = + testGroup + "Prepare Call Hierarchy" + [ testCase "variable" $ do + let contents = T.unlines ["a=3"] + range = mkRange 0 0 0 3 + selRange = mkRange 0 0 0 1 + expected = mkCallHierarchyItemV "a" SymbolKind_Function range selRange + oneCaseWithCreate contents 0 0 expected + , testCase "function" $ do + let contents = T.unlines ["a=(+)"] + range = mkRange 0 0 0 5 + selRange = mkRange 0 0 0 1 + expected = mkCallHierarchyItemV "a" SymbolKind_Function range selRange + oneCaseWithCreate contents 0 0 expected + , testCase "datatype" $ do + let contents = T.unlines ["data A=A"] + range = mkRange 0 0 0 8 + selRange = mkRange 0 5 0 6 + expected = mkCallHierarchyItemT "A" SymbolKind_Struct range selRange + oneCaseWithCreate contents 0 5 expected + , testCase "data constructor" $ do + let contents = T.unlines ["data A=A"] + range = mkRange 0 7 0 8 + selRange = mkRange 0 7 0 8 + expected = mkCallHierarchyItemC "A" SymbolKind_Constructor range selRange + oneCaseWithCreate contents 0 7 expected +-- , testCase "record" $ do +-- let contents = T.unlines ["data A=A{a::Int}"] +-- range = mkRange 0 9 0 10 +-- selRange = mkRange 0 9 0 10 +-- expected = mkCallHierarchyItemV "a" SymbolKind_Field range selRange +-- oneCaseWithCreate contents 0 9 expected + , testCase "type operator" $ do + let contents = T.unlines ["{-# LANGUAGE TypeOperators #-}", "type (><)=Maybe"] + range = mkRange 1 0 1 15 + selRange = mkRange 1 5 1 9 + expected = mkCallHierarchyItemT "><" SymbolKind_TypeParameter range selRange + oneCaseWithCreate contents 1 5 expected + , testCase "type class" $ do + let contents = T.unlines ["class A a where a :: a -> Int"] + range = mkRange 0 0 0 29 + selRange = mkRange 0 6 0 7 + expected = mkCallHierarchyItemT "A" SymbolKind_Interface range selRange + oneCaseWithCreate contents 0 6 expected + , testCase "type class method" $ do + let contents = T.unlines ["class A a where a :: a -> Int"] + range = mkRange 0 16 0 29 + selRange = mkRange 0 16 0 17 + expected = mkCallHierarchyItemV "a" SymbolKind_Method range selRange + oneCaseWithCreate contents 0 16 expected + , testCase "type class instance" $ do + let contents = T.unlines ["class A a where", "instance A () where"] + range = mkRange 1 9 1 10 + selRange = mkRange 1 9 1 10 + expected = mkCallHierarchyItemT "A" SymbolKind_Interface range selRange + oneCaseWithCreate contents 1 9 expected + , testGroup "type family" + [ testCase "1" $ do + let contents = T.unlines ["{-# LANGUAGE TypeFamilies #-}", "type family A"] + range = mkRange 1 0 1 13 + selRange = mkRange 1 12 1 13 + expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange + oneCaseWithCreate 
contents 1 12 expected + , testCase "2" $ do + let contents = T.unlines ["{-# LANGUAGE TypeFamilies #-}", "type family A a"] + range = mkRange 1 0 1 15 + selRange = mkRange 1 12 1 13 + expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange + oneCaseWithCreate contents 1 12 expected + ] + , testCase "type family instance" $ do + let contents = T.unlines + [ "{-# LANGUAGE TypeFamilies #-}" + , "type family A a" + , "type instance A () = ()" + ] + range = mkRange 2 14 2 23 + selRange = mkRange 2 14 2 15 + expected = mkCallHierarchyItemT "A" SymbolKind_Interface range selRange + oneCaseWithCreate contents 2 14 expected + , testGroup "data family" + [ testCase "1" $ do + let contents = T.unlines ["{-# LANGUAGE TypeFamilies #-}", "data family A"] + -- Since GHC 9.10 the range also includes the family name (and its parameters if any) + range = mkRange 1 0 1 (if ghcVersion >= GHC910 then 13 else 11) + selRange = mkRange 1 12 1 13 + expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange + oneCaseWithCreate contents 1 12 expected + , testCase "2" $ do + let contents = T.unlines [ "{-# LANGUAGE TypeFamilies #-}" , "data family A a"] + range = mkRange 1 0 1 (if ghcVersion >= GHC910 then 15 else 11) + selRange = mkRange 1 12 1 13 + expected = mkCallHierarchyItemT "A" SymbolKind_Function range selRange + oneCaseWithCreate contents 1 12 expected + ] + , testCase "data family instance" $ do + let contents = T.unlines + [ "{-# LANGUAGE TypeFamilies #-}" + , "data family A a" + , "data instance A () = A()" + ] + range = mkRange 2 14 2 24 + selRange = mkRange 2 14 2 15 + expected = mkCallHierarchyItemT "A" SymbolKind_Interface range selRange + oneCaseWithCreate contents 2 14 expected + , testCase "pattern" $ do + let contents = T.unlines ["Just x = Just 3"] + range = mkRange 0 0 0 15 + selRange = mkRange 0 5 0 6 + expected = mkCallHierarchyItemV "x" SymbolKind_Function range selRange + oneCaseWithCreate contents 0 5 expected + , testCase "pattern with type signature" $ do + let contents = T.unlines ["{-# LANGUAGE ScopedTypeVariables #-}", "a :: () = ()"] + range = mkRange 1 0 1 12 + selRange = mkRange 1 0 1 1 + expected = mkCallHierarchyItemV "a" SymbolKind_Function range selRange + oneCaseWithCreate contents 1 0 expected + , testCase "type synonym" $ do + let contents = T.unlines ["type A=Bool"] + range = mkRange 0 0 0 11 + selRange = mkRange 0 5 0 6 + expected = mkCallHierarchyItemT "A" SymbolKind_TypeParameter range selRange + oneCaseWithCreate contents 0 5 expected + , testCase "GADT" $ do + let contents = T.unlines + [ "{-# LANGUAGE GADTs #-}" + , "data A where A :: Int -> A" + ] + range = mkRange 1 13 1 26 + selRange = mkRange 1 13 1 14 + expected = mkCallHierarchyItemC "A" SymbolKind_Constructor range selRange + oneCaseWithCreate contents 1 13 expected + , testGroup "type signature" + [ testCase "next line" $ do + let contents = T.unlines ["a::Int", "a=3"] + range = mkRange 1 0 1 3 + selRange = mkRange 1 0 1 1 + expected = mkCallHierarchyItemV "a" SymbolKind_Function range selRange + oneCaseWithCreate contents 0 0 expected + , testCase "multi functions" $ do + let contents = T.unlines [ "a,b::Int", "a=3", "b=4"] + range = mkRange 2 0 2 3 + selRange = mkRange 2 0 2 1 + expected = mkCallHierarchyItemV "b" SymbolKind_Function range selRange + oneCaseWithCreate contents 0 2 expected + ] + , testCase "multi pattern" $ do + let contents = T.unlines + [ "f (Just _) = ()" + , "f Nothing = ()" + ] + range = mkRange 1 0 1 1 + selRange = mkRange 1 0 1 1 + expected = 
mkCallHierarchyItemV "f" SymbolKind_Function range selRange + oneCaseWithCreate contents 1 0 expected + ] + +incomingCallsTests :: TestTree +incomingCallsTests = + testGroup "Incoming Calls" + [ testGroup "single file" + [ testCase "xdata unavailable" $ + runSessionWithServer def plugin testDataDir $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines ["a=3", "b=a"] + waitForIndex (testDataDir "A.hs") + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc 1 0) + let expected = [CallHierarchyIncomingCall item [mkRange 1 2 1 3]] + item' <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc 0 0) + let itemNoData = set L.data_ Nothing item' + res <- Test.incomingCalls (mkIncomingCallsParam itemNoData) + liftIO $ sort expected @=? sort res + closeDoc doc + , testCase "xdata available" $ do + let contents = T.unlines ["a=3","b=a"] + positions = [(1, 0)] + ranges = [mkRange 1 2 1 3] + incomingCallTestCase contents 0 1 positions ranges + , testGroup "data" + [ testCase "data type" $ do + let contents = T.unlines ["data A=A"] + positions = [] + ranges = [] + incomingCallTestCase contents 0 5 positions ranges + , testCase "data constructor" $ do + let contents = T.unlines ["data A=A"] + positions = [(0, 5)] + ranges = [mkRange 0 7 0 8] + incomingCallTestCase contents 0 7 positions ranges + -- , testCase "record" $ do + -- let contents = T.unlines ["data A=A{a::Int}"] + -- positions = [(0, 5), (0, 7)] + -- ranges = [mkRange 0 9 0 10, mkRange 0 9 0 10] + -- incomingCallTestCase contents 0 9 positions ranges + ] + , testCase "function" $ do + let contents = T.unlines ["a=(+)"] + positions = [(0, 0)] + ranges = [mkRange 0 2 0 5] + incomingCallTestCase contents 0 3 positions ranges + , testCase "type operator" $ do + let contents = T.unlines + [ "{-# LANGUAGE TypeOperators #-}" + , "type (><)=Int"] + positions = [(1, 5)] + ranges = [mkRange 1 10 1 13] + incomingCallTestCase contents 1 10 positions ranges + , testGroup "type class" + [ testCase "type class method" $ do + let contents = T.unlines ["class A a where a :: a -> Int"] + positions = [(0, 6)] + ranges = [mkRange 0 16 0 17] + incomingCallTestCase contents 0 16 positions ranges + , testCase "type class instance" $ do + let contents = T.unlines + [ "class A a where a :: a -> Int" + , "instance A () where a = const 3"] + positions = [(0, 6)] + ranges = [mkRange 0 16 0 17] + incomingCallTestCase contents 1 20 positions ranges + , testCase "goto typeclass instance" $ do + let contents = T.unlines + [ "class F a where f :: a" + , "instance F Bool where f = x" + , "instance F Int where f = 3" + , "x = True" + ] + positions = [(1, 22)] + ranges = [mkRange 1 26 1 27] + incomingCallTestCase contents 3 0 positions ranges + ] + , testCase "type family instance" $ do + let contents = T.unlines + [ "{-# LANGUAGE TypeFamilies #-}" + , "type family A a" + , "type instance A Int = Char" + ] + positions = [(2, 14)] + ranges = [mkRange 2 22 2 26] + incomingCallTestCase contents 2 22 positions ranges + , testCase "GADT" $ do + let contents = T.unlines + [ "{-# LANGUAGE GADTs #-}" + , "data A where B :: Int -> A" + ] + positions = [(1, 5)] + ranges = [mkRange 1 13 1 14] + incomingCallTestCase contents 1 13 positions ranges + , testCase "multi pattern" $ do + let contents = T.unlines + [ "f 1 = 1" + , "f 2 = 2" + , "g = f" + ] + positions = [(2, 0)] + ranges = [mkRange 2 4 2 5] + incomingCallTestCase contents 1 0 positions ranges + ] + , testGroup "multi file" + [ testCase "1" $ do + let mp = M.fromList [ + 
("A.hs", [ ((5, 0), mkRange 5 7 5 11) + , ((6, 0), mkRange 6 7 6 11) + , ((8, 0), mkRange 9 25 9 29) + ] + )] + incomingCallMultiFileTestCase "A.hs" 4 0 mp + , testCase "2" $ do + let mp = M.fromList [ + ("A.hs", [ ((4, 0), mkRange 4 13 4 16) + , ((8, 0), mkRange 10 7 10 10) + ] + ) + , ("B.hs", [ ((4, 0), mkRange 4 8 4 11)]) + ] + incomingCallMultiFileTestCase "C.hs" 2 0 mp + ] + ] + +outgoingCallsTests :: TestTree +outgoingCallsTests = + testGroup "Outgoing Calls" + [ testGroup "single file" + [ testCase "xdata unavailable" $ withCanonicalTempDir $ \dir -> + runSessionWithServer def plugin dir $ do + doc <- createDoc "A.hs" "haskell" $ T.unlines ["a=3", "b=a"] + waitForIndex (dir "A.hs") + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc 0 1) + let expected = [CallHierarchyOutgoingCall item [mkRange 1 2 1 3]] + item' <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc 1 0) + let itemNoData = set L.data_ Nothing item' + res <- Test.outgoingCalls (mkOutgoingCallsParam itemNoData) + liftIO $ sort expected @=? sort res + closeDoc doc + , testCase "xdata available" $ do + let contents = T.unlines ["a=3", "b=a"] + positions = [(0, 0)] + ranges = [mkRange 1 2 1 3] + outgoingCallTestCase contents 1 0 positions ranges + , testGroup "data" + [ testCase "data type" $ do + let contents = T.unlines ["data A=A"] + positions = [(0, 7)] + ranges = [mkRange 0 7 0 8] + outgoingCallTestCase contents 0 5 positions ranges + , testCase "data constructor" $ do + let contents = T.unlines ["data A=A"] + positions = [] + ranges = [] + outgoingCallTestCase contents 0 7 positions ranges + -- , testCase "record" $ do + -- let contents = T.unlines ["data A=A{a::Int}"] + -- positions = [(0, 7), (0, 9)] + -- ranges = [mkRange 0 7 0 8, mkRange 0 9 0 10] + -- outgoingCallTestCase contents 0 5 positions ranges + ] + , testCase "function" $ do + let contents = T.unlines ["a=3", "b=4", "c=a+b"] + positions = [(0, 1), (1, 1)] + ranges = [mkRange 2 2 2 3, mkRange 2 4 2 5] + outgoingCallTestCase contents 2 0 positions ranges + , testCase "type synonym" $ do + let contents = T.unlines ["data A", "type B=A"] + positions = [(0, 5)] + ranges = [mkRange 1 7 1 8] + outgoingCallTestCase contents 1 5 positions ranges + , testCase "type class instance" $ do + let contents = T.unlines + [ "class A a where a :: a" + , "instance A () where a = ()" + ] + positions = [(0, 16)] + ranges = [mkRange 0 16 0 17] + outgoingCallTestCase contents 1 9 positions ranges + , testCase "data family instance" $ do + let contents = T.unlines + [ "{-# LANGUAGE TypeFamilies #-}" + , "data family A a" + , "data instance A () = B" + ] + positions = [(2, 21)] + ranges = [mkRange 2 21 2 22] + outgoingCallTestCase contents 1 12 positions ranges + , testCase "GADT" $ do + let contents = T.unlines ["{-# LANGUAGE GADTs #-}", "data A where B :: A"] + positions = [(1, 13)] + ranges = [mkRange 1 13 1 14] + outgoingCallTestCase contents 1 5 positions ranges + ] + , testGroup "multi file" + [ testCase "1" $ do + let mp = M.fromList [ + ("A.hs", [ ((4, 0), mkRange 5 7 5 11)]) + , ("B.hs", [ ((4, 0), mkRange 5 14 5 17)]) + , ("C.hs", [ ((3, 0), mkRange 5 20 5 23)]) + ] + outgoingCallMultiFileTestCase "A.hs" 5 0 mp + , testCase "2" $ do + let mp = M.fromList [ + ("A.hs", [ ((4, 0), mkRange 9 25 9 29) + , ((5, 0), mkRange 10 25 10 29) + ] + ) + , ("B.hs", [ ((2, 9), mkRange 9 2 9 3) + , ((2, 13), mkRange 10 2 10 3) + , ((4, 0), mkRange 9 7 9 10) + , ((5, 0), mkRange 9 13 9 16) + , ((6, 0), mkRange 9 19 9 
22) + ] + ) + , ("C.hs", [ ((2, 0), mkRange 10 7 10 10) + , ((3, 0), mkRange 10 13 10 16) + , ((4, 0), mkRange 10 19 10 22) + ] + ) + ] + outgoingCallMultiFileTestCase "A.hs" 8 0 mp + ] + ] + + +incomingCallTestCase :: T.Text -> Int -> Int -> [(Int, Int)] -> [Range] -> Assertion +incomingCallTestCase contents queryX queryY positions ranges = withCanonicalTempDir $ \dir -> + runSessionWithServer def plugin dir $ do + doc <- createDoc "A.hs" "haskell" contents + waitForIndex (dir "A.hs") + items <- concatMapM (\((x, y), range) -> + Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc x y) + <&> map (, range) + ) + (zip positions ranges) + let expected = map mkCallHierarchyIncomingCall items + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc queryX queryY) + res <- Test.incomingCalls (mkIncomingCallsParam item) + liftIO $ sort expected @=? sort res + closeDoc doc + +incomingCallMultiFileTestCase :: FilePath -> Int -> Int -> M.Map FilePath [((Int, Int), Range)] -> Assertion +incomingCallMultiFileTestCase filepath queryX queryY mp = + runSessionWithServer def plugin testDataDir $ do + doc <- openDoc filepath "haskell" + waitForIndex (testDataDir filepath) + items <- fmap concat $ sequence $ M.elems $ M.mapWithKey (\fp pr -> do + p <- openDoc fp "haskell" + waitForKickDone + concatMapM (\((x, y), range) -> + Test.prepareCallHierarchy (mkPrepareCallHierarchyParam p x y) + <&> map (, range) + ) pr) mp + let expected = map mkCallHierarchyIncomingCall items + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc queryX queryY) + res <- Test.incomingCalls (mkIncomingCallsParam item) + liftIO $ sort expected @=? sort res + closeDoc doc + +outgoingCallTestCase :: T.Text -> Int -> Int -> [(Int, Int)] -> [Range] -> Assertion +outgoingCallTestCase contents queryX queryY positions ranges = withCanonicalTempDir $ \dir -> + runSessionWithServer def plugin dir $ do + doc <- createDoc "A.hs" "haskell" contents + waitForIndex (dir "A.hs") + items <- concatMapM (\((x, y), range) -> + Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc x y) + <&> map (, range) + ) + (zip positions ranges) + let expected = map mkCallHierarchyOutgoingCall items + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc queryX queryY) + res <- Test.outgoingCalls (mkOutgoingCallsParam item) + liftIO $ sort expected @=? sort res + closeDoc doc + +outgoingCallMultiFileTestCase :: FilePath -> Int -> Int -> M.Map FilePath [((Int, Int), Range)] -> Assertion +outgoingCallMultiFileTestCase filepath queryX queryY mp = + runSessionWithServer def plugin testDataDir $ do + doc <- openDoc filepath "haskell" + waitForIndex (testDataDir filepath) + items <- fmap concat $ sequence $ M.elems $ M.mapWithKey (\fp pr -> do + p <- openDoc fp "haskell" + waitForKickDone + concatMapM (\((x, y), range) -> + Test.prepareCallHierarchy (mkPrepareCallHierarchyParam p x y) + <&> map (, range) + ) pr) mp + let expected = map mkCallHierarchyOutgoingCall items + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc queryX queryY) + res <- Test.outgoingCalls (mkOutgoingCallsParam item) + liftIO $ sort expected @=? 
sort res + closeDoc doc + +oneCaseWithCreate :: T.Text -> Int -> Int -> (Uri -> CallHierarchyItem -> Assertion) -> Assertion +oneCaseWithCreate contents queryX queryY expected = withCanonicalTempDir $ \dir -> + runSessionWithServer def plugin dir $ do + doc <- createDoc "A.hs" "haskell" contents + waitForIndex (dir "A.hs") + item <- expectOneElement =<< Test.prepareCallHierarchy (mkPrepareCallHierarchyParam doc queryX queryY) + liftIO $ expected (doc ^. L.uri) item + closeDoc doc + +expectOneElement :: [a] -> Session a +expectOneElement = \case + [x] -> pure x + xs -> liftIO . assertFailure $ "Expecting exactly one element, but got " ++ show (length xs) + +mkCallHierarchyItem' :: String -> T.Text -> SymbolKind -> Range -> Range -> Uri -> CallHierarchyItem -> Assertion +mkCallHierarchyItem' prefix name kind range selRange uri c@(CallHierarchyItem name' kind' tags' detail' uri' range' selRange' xdata') = do + assertHierarchyItem name name' + assertHierarchyItem kind kind' + assertHierarchyItem tags tags' + assertHierarchyItem detail detail' + assertHierarchyItem uri uri' + assertHierarchyItem range range' + assertHierarchyItem selRange selRange' + case xdata' of + Nothing -> assertFailure ("In " ++ show c ++ ", got Nothing for data but wanted " ++ show xdata) + Just v -> case Aeson.fromJSON v of + Aeson.Success v' -> assertBool ("In " ++ show c ++ " wanted data prefix: " ++ show xdata) (xdata `T.isPrefixOf` v') + Aeson.Error err -> assertFailure ("In " ++ show c ++ " wanted data prefix: " ++ show xdata ++ " but json parsing failed with " ++ show err) + where + tags = Nothing + detail = Just "Main" + assertHierarchyItem :: forall a. (Eq a, Show a) => a -> a -> Assertion + assertHierarchyItem = assertEqual ("In " ++ show c ++ ", got unexpected value for field") + xdata = T.pack prefix <> ":" <> name <> ":Main:main" + +mkCallHierarchyItemC, mkCallHierarchyItemT, mkCallHierarchyItemV :: + T.Text -> SymbolKind -> Range -> Range -> Uri -> CallHierarchyItem -> Assertion +mkCallHierarchyItemC = mkCallHierarchyItem' "c" +mkCallHierarchyItemT = mkCallHierarchyItem' "t" +mkCallHierarchyItemV = mkCallHierarchyItem' "v" + +mkCallHierarchyIncomingCall :: (CallHierarchyItem, Range) -> CallHierarchyIncomingCall +mkCallHierarchyIncomingCall (item, range) = CallHierarchyIncomingCall item [range] + +mkCallHierarchyOutgoingCall :: (CallHierarchyItem, Range) -> CallHierarchyOutgoingCall +mkCallHierarchyOutgoingCall (item, range) = CallHierarchyOutgoingCall item [range] + +testDataDir :: FilePath +testDataDir = "plugins" "hls-call-hierarchy-plugin" "test" "testdata" + +mkPrepareCallHierarchyParam :: TextDocumentIdentifier -> Int -> Int -> CallHierarchyPrepareParams +mkPrepareCallHierarchyParam doc x y = CallHierarchyPrepareParams doc (Position (fromIntegral x) (fromIntegral y)) Nothing + +mkIncomingCallsParam :: CallHierarchyItem -> CallHierarchyIncomingCallsParams +mkIncomingCallsParam = CallHierarchyIncomingCallsParams Nothing Nothing + +mkOutgoingCallsParam :: CallHierarchyItem -> CallHierarchyOutgoingCallsParams +mkOutgoingCallsParam = CallHierarchyOutgoingCallsParams Nothing Nothing + +-- Wait for a special test message emitted by ghcide when a file is indexed, +-- so that call hierarchy can safely query the database. 
+waitForIndex :: FilePath -> Session () +waitForIndex fp1 = skipManyTill anyMessage $ void $ referenceReady lenientEquals + where + -- fp1 may be relative; in that case we check that it is a suffix of the + -- filepath from the message + lenientEquals :: FilePath -> Bool + lenientEquals fp2 + | isRelative fp1 = any (equalFilePath fp1 . joinPath) $ tails $ splitDirectories fp2 + | otherwise = equalFilePath fp1 fp2 + diff --git a/plugins/hls-call-hierarchy-plugin/test/testdata/A.hs b/plugins/hls-call-hierarchy-plugin/test/testdata/A.hs new file mode 100644 index 0000000000..c31455d63b --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/test/testdata/A.hs @@ -0,0 +1,11 @@ +module A where +import B +import C + +foo1 = B.a + C.a +foo2 = foo1 + B.a + C.b +foo3 = foo1 + foo2 + C.c + +bar x = case x of + A -> B.a + B.b + B.c + foo1 + B -> C.a + C.b + C.c + foo2 diff --git a/plugins/hls-call-hierarchy-plugin/test/testdata/B.hs b/plugins/hls-call-hierarchy-plugin/test/testdata/B.hs new file mode 100644 index 0000000000..44a7fc9504 --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/test/testdata/B.hs @@ -0,0 +1,7 @@ +module B where +import qualified C +data T = A | B + +a = 3 + C.a +b = 4 +c = 5 diff --git a/plugins/hls-call-hierarchy-plugin/test/testdata/C.hs b/plugins/hls-call-hierarchy-plugin/test/testdata/C.hs new file mode 100644 index 0000000000..ab7d2158ae --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/test/testdata/C.hs @@ -0,0 +1,5 @@ +module C where + +a = 3 +b = 4 +c = 5 diff --git a/plugins/hls-call-hierarchy-plugin/test/testdata/hie.yaml b/plugins/hls-call-hierarchy-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..1909df7d79 --- /dev/null +++ b/plugins/hls-call-hierarchy-plugin/test/testdata/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["A", "B", "C"]}} diff --git a/plugins/hls-change-type-signature-plugin/README.md b/plugins/hls-change-type-signature-plugin/README.md new file mode 100644 index 0000000000..f0766e7f86 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/README.md @@ -0,0 +1,21 @@ +# Change Type Signature Plugin + +The change type signature plugin provides a code action to change a user's current type signature to its actual type signature. +The plugin does not work in all error scenarios. Currently, the plugin uses GHC diagnostic messages to recover the actual type of a function. +If the plugin receives enough information, it can correctly change the signature.
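+
+For example, given the following definition (taken from the plugin's test data, `TExpectedActual.hs`), GHC reports a mismatch between the declared type `Int -> Int` and the actual type `[Int] -> Int`:
+
+```haskell
+fullSig :: Int -> Int
+fullSig = go
+  where
+    go = head . reverse
+```
+
+From that diagnostic the plugin offers the code action "Change signature for ‘fullSig’ to: [Int] -> Int", which rewrites the signature line to `fullSig :: [Int] -> Int`.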
+ +## Demo + +![Change Type Signature One](change1.gif) + +![Change Type Signature Two](change2.gif) + + +## Changelog +### 1.0.0.0 +- First Release + +### 1.0.1.0 +- Fix 9.2 Test failures (`waitForProgressDone`) +- Add extra test scenarios for error message diffs in 9.2 +- Remove regex parsing for simple `Text` manipulation diff --git a/plugins/hls-change-type-signature-plugin/change1.gif b/plugins/hls-change-type-signature-plugin/change1.gif new file mode 100644 index 0000000000..de06051545 Binary files /dev/null and b/plugins/hls-change-type-signature-plugin/change1.gif differ diff --git a/plugins/hls-change-type-signature-plugin/change2.gif b/plugins/hls-change-type-signature-plugin/change2.gif new file mode 100644 index 0000000000..b7d007524d Binary files /dev/null and b/plugins/hls-change-type-signature-plugin/change2.gif differ diff --git a/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs b/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs new file mode 100644 index 0000000000..8b8b7e7d3a --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs @@ -0,0 +1,249 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE ViewPatterns #-} +-- | An HLS plugin to provide code actions to change type signatures +module Ide.Plugin.ChangeTypeSignature (descriptor + -- * For Unit Tests + , Log(..) + , errorMessageRegexes + ) where + +import Control.Lens +import Control.Monad (guard) +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..)) +import Control.Monad.Trans.Maybe (MaybeT (..), hoistMaybe) +import Data.Foldable (asum) +import qualified Data.Map as Map +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic, + IdeState (..), Pretty (..), + Priority (..), Recorder, + WithPriority, + fdLspDiagnosticL, + fdStructuredMessageL, + logWith, realSrcSpanToRange) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.RuleTypes (GetParsedModule (GetParsedModule)) +import Development.IDE.GHC.Compat hiding (vcat) +import Development.IDE.GHC.Compat.Error (_MismatchMessage, + _TcRnMessageWithCtx, + _TcRnMessageWithInfo, + _TcRnSolverReport, + _TypeEqMismatchActual, + _TypeEqMismatchExpected, + msgEnvelopeErrorL, + reportContentL) +import Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Types.Diagnostics (_SomeStructuredMessage) +import Generics.SYB (extQ, something) +import GHC.Tc.Errors.Types (ErrInfo (..), + TcRnMessageDetailed (..)) +import qualified Ide.Logger as Logger +import Ide.Plugin.Error (PluginError, + getNormalizedFilePathE) +import Ide.Types (Config, HandlerM, + PluginDescriptor (..), + PluginId (PluginId), + PluginMethodHandler, + defaultPluginDescriptor, + mkPluginHandler) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Text.Regex.TDFA ((=~)) + +data Log + = LogErrInfoCtxt ErrInfo + | LogFindSigLocFailure DeclName + +instance Pretty Log where + pretty = \case + LogErrInfoCtxt (ErrInfo ctxt suppl) -> + Logger.vcat [fromSDoc ctxt, fromSDoc suppl] + LogFindSigLocFailure name -> + pretty ("Lookup signature location failure: " <> name) + where + fromSDoc = pretty . 
printOutputable + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides a code action to change the type signature of a binding if it is wrong") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction (codeActionHandler recorder plId) + } + +codeActionHandler + :: Recorder (WithPriority Log) + -> PluginId + -> PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionHandler recorder plId ideState _ CodeActionParams{_textDocument, _range} = do + let TextDocumentIdentifier uri = _textDocument + nfp <- getNormalizedFilePathE uri + decls <- getDecls plId ideState nfp + + activeDiagnosticsInRange (shakeExtras ideState) nfp _range >>= \case + Nothing -> pure (InL []) + Just fileDiags -> do + actions <- lift $ mapM (generateAction recorder plId uri decls) fileDiags + pure (InL (catMaybes actions)) + +getDecls :: MonadIO m => PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError m [LHsDecl GhcPs] +getDecls (PluginId changeTypeSignatureId) state = + runActionE (T.unpack changeTypeSignatureId <> ".GetParsedModule") state + . fmap (hsmodDecls . unLoc . pm_parsed_source) + . useE GetParsedModule + +-- | Text representing a Declaration's Name +type DeclName = Text +-- | The signature provided by GHC Error Message (Expected type) +type ExpectedSig = Text +-- | The signature provided by GHC Error Message (Actual type) +type ActualSig = Text + +-- | DataType that encodes the necessary information for changing a type signature +data ChangeSignature = ChangeSignature { + -- | The expected type based on Signature + expectedType :: ExpectedSig + -- | the Actual Type based on definition + , actualType :: ActualSig + -- | the declaration name to be updated + , declName :: DeclName + -- | the location of the declaration signature + , declSrcSpan :: RealSrcSpan + -- | the diagnostic to solve + , diagnostic :: FileDiagnostic + } + +-- | Create a CodeAction from a Diagnostic +generateAction + :: Recorder (WithPriority Log) + -> PluginId + -> Uri + -> [LHsDecl GhcPs] + -> FileDiagnostic + -> HandlerM Config (Maybe (Command |? CodeAction)) +generateAction recorder plId uri decls fileDiag = do + changeSig <- diagnosticToChangeSig recorder decls fileDiag + pure $ + changeSigToCodeAction plId uri <$> changeSig + +-- | Convert a diagnostic into a ChangeSignature and add the proper SrcSpan +diagnosticToChangeSig + :: Recorder (WithPriority Log) + -> [LHsDecl GhcPs] + -> FileDiagnostic + -> HandlerM Config (Maybe ChangeSignature) +diagnosticToChangeSig recorder decls diagnostic = runMaybeT $ do + -- Extract expected, actual, and extra error info + (expectedType, actualType, errInfo) <- hoistMaybe $ do + msg <- diagnostic ^. fdStructuredMessageL ^? _SomeStructuredMessage + tcRnMsg <- msg ^. msgEnvelopeErrorL ^? _TcRnMessageWithCtx + (_, TcRnMessageDetailed errInfo tcRnMsg') <- tcRnMsg ^? _TcRnMessageWithInfo + solverReport <- tcRnMsg' ^? _TcRnSolverReport . _1 . reportContentL + mismatch <- solverReport ^? _MismatchMessage + expectedType <- mismatch ^? _TypeEqMismatchExpected + actualType <- mismatch ^? _TypeEqMismatchActual + + pure (showType expectedType, showType actualType, errInfo) + + logWith recorder Debug (LogErrInfoCtxt errInfo) + + -- Extract the declName from the extra error text + declName <- hoistMaybe (matchingDiagnostic errInfo) + + -- Look up location of declName. 
If it fails, log it + declSrcSpan <- + case findSigLocOfStringDecl decls expectedType (T.unpack declName) of + Just x -> pure x + Nothing -> do + logWith recorder Debug (LogFindSigLocFailure declName) + hoistMaybe Nothing + + pure ChangeSignature{..} + where + showType :: Type -> Text + showType = T.pack . showSDocUnsafe . pprTidiedType + +-- | If a diagnostic has the proper message create a ChangeSignature from it +matchingDiagnostic :: ErrInfo -> Maybe DeclName +matchingDiagnostic ErrInfo{errInfoContext} = + asum $ map (unwrapMatch . (=~) errInfoTxt) errorMessageRegexes + where + unwrapMatch :: (Text, Text, Text, [Text]) -> Maybe DeclName + unwrapMatch (_, _, _, [name]) = Just name + unwrapMatch _ = Nothing + + errInfoTxt = printOutputable errInfoContext + +-- | List of regexes that match various Error Messages +errorMessageRegexes :: [Text] +errorMessageRegexes = [ -- be sure to add new Error Messages Regexes at the bottom to not fail any existing tests + "In an equation for ‘(.+)’:" + ] + +-- | Given a String with the name of a declaration, GHC's "Expected Type", find the declaration that matches +-- both the name given and the Expected Type, and return the type signature location +findSigLocOfStringDecl :: [LHsDecl GhcPs] -> ExpectedSig -> String -> Maybe RealSrcSpan +findSigLocOfStringDecl decls expectedType declName = something (const Nothing `extQ` findSig `extQ` findLocalSig) decls + where + -- search for Top Level Signatures + findSig :: LHsDecl GhcPs -> Maybe RealSrcSpan + findSig = \case + L (locA -> (RealSrcSpan rss _)) (SigD _ sig) -> case sig of + ts@(TypeSig _ idsSig _) -> isMatch ts idsSig >> pure rss + _ -> Nothing + _ -> Nothing + + -- search for Local Signatures + findLocalSig :: LSig GhcPs -> Maybe RealSrcSpan + findLocalSig = \case + (L (locA -> (RealSrcSpan rss _)) ts@(TypeSig _ idsSig _)) -> isMatch ts idsSig >> pure rss + _ -> Nothing + + -- Does the declName match? and does the expected signature match? + isMatch ts idsSig = do + ghcSig <- sigToText ts + guard (any compareId idsSig && expectedType == ghcSig) + + -- Given an IdP check to see if it matches the declName + compareId (L _ id') = declName == occNameString (occName id') + + +-- | Pretty Print the Type Signature (to validate GHC Error Message) +sigToText :: Sig GhcPs -> Maybe Text +sigToText = \case + ts@TypeSig {} -> Just $ stripSignature $ printOutputable ts + _ -> Nothing + +stripSignature :: Text -> Text +-- for whatever reason incoming signatures MAY have new lines after "::" or "=>" +stripSignature (T.filter (/= '\n') -> sig) = if T.isInfixOf " => " sig + -- remove constraints + then T.strip $ snd $ T.breakOnEnd " => " sig + else T.strip $ snd $ T.breakOnEnd " :: " sig + +changeSigToCodeAction :: PluginId -> Uri -> ChangeSignature -> Command |? CodeAction +changeSigToCodeAction (PluginId changeTypeSignatureId) uri ChangeSignature{..} = + InR CodeAction { _title = mkChangeSigTitle declName actualType + , _kind = Just (CodeActionKind_Custom ("quickfix." <> changeTypeSignatureId)) + , _diagnostics = Just [diagnostic ^. 
fdLspDiagnosticL ] + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Just $ mkChangeSigEdit uri declSrcSpan (mkNewSignature declName actualType) + , _command = Nothing + , _data_ = Nothing + } + +mkChangeSigTitle :: Text -> Text -> Text +mkChangeSigTitle declName actualType = "Change signature for ‘" <> declName <> "’ to: " <> actualType + +mkChangeSigEdit :: Uri -> RealSrcSpan -> Text -> WorkspaceEdit +mkChangeSigEdit uri ss replacement = + let txtEdit = TextEdit (realSrcSpanToRange ss) replacement + changes = Just $ Map.singleton uri [txtEdit] + in WorkspaceEdit changes Nothing Nothing + +mkNewSignature :: Text -> Text -> Text +mkNewSignature declName actualType = declName <> " :: " <> actualType diff --git a/plugins/hls-change-type-signature-plugin/test/Main.hs b/plugins/hls-change-type-signature-plugin/test/Main.hs new file mode 100644 index 0000000000..72a2ab780e --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/Main.hs @@ -0,0 +1,111 @@ +module Main where + +import Control.Monad (void) +import Data.Either (rights) +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.IO as TIO +import Ide.Plugin.ChangeTypeSignature (Log (..), errorMessageRegexes) +import qualified Ide.Plugin.ChangeTypeSignature as ChangeTypeSignature +import System.FilePath ((<.>), ()) +import Test.Hls (CodeAction (..), Command, + GhcVersion (..), + PluginTestDescriptor, + Position (Position), + Range (Range), Session, + TestName, TestTree, + TextDocumentIdentifier, + assertFailure, def, + defaultTestRunner, + executeCodeAction, + getCodeActions, + goldenWithHaskellDoc, + knownBrokenForGhcVersions, + liftIO, mkPluginTestDescriptor, + openDoc, runSessionWithServer, + testCase, testGroup, toEither, + type (|?), waitForBuildQueue, + waitForDiagnostics, (@?=)) +import Text.Regex.TDFA ((=~)) + +main :: IO () +main = defaultTestRunner test + +changeTypeSignaturePlugin :: PluginTestDescriptor Log +changeTypeSignaturePlugin = + mkPluginTestDescriptor + ChangeTypeSignature.descriptor + "changeTypeSignature" + +test :: TestTree +test = testGroup "changeTypeSignature" [ + testRegexes + , codeActionTest "TExpectedActual" 4 11 + , knownBrokenForGhcVersions [GHC96 .. GHC912] "Error Message in 9.6+ does not provide enough info" $ + codeActionTest "TRigidType" 4 14 + , codeActionTest "TRigidType2" 4 8 + , codeActionTest "TLocalBinding" 7 22 + , codeActionTest "TLocalBindingShadow1" 11 8 + , codeActionTest "TLocalBindingShadow2" 7 22 + , codeActionProperties "TErrorGivenPartialSignature" [(4, 13)] $ \actions -> liftIO $ length actions @?= 0 + ] + +testRegexes :: TestTree +testRegexes = testGroup "Regex Testing" [ + regexTest "TExpectedActual.txt" regex True + , regexTest "TLocalBinding.txt" regex True + , regexTest "TLocalBindingShadow1.txt" regex True + , regexTest "TLocalBindingShadow2.txt" regex True + -- Error message from GHC currently does not not provide enough info + , regexTest "TRigidType.txt" regex False + , regexTest "TRigidType2.txt" regex True + ] + where + regex = errorMessageRegexes !! 
0 + +testDataDir :: FilePath +testDataDir = "plugins" "hls-change-type-signature-plugin" "test" "testdata" + +goldenChangeSignature :: FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenChangeSignature fp = goldenWithHaskellDoc def changeTypeSignaturePlugin (fp <> " (golden)") testDataDir fp "expected" "hs" + +codeActionTest :: FilePath -> Int -> Int -> TestTree +codeActionTest fp line col = goldenChangeSignature fp $ \doc -> do + void waitForDiagnostics -- code actions are triggered from Diagnostics + void waitForBuildQueue -- apparently some tests need this to get the CodeAction to show up + actions <- getCodeActions doc (pointRange line col) + foundActions <- findChangeTypeActions actions + liftIO $ length foundActions @?= 1 + executeCodeAction (head foundActions) + +codeActionProperties :: TestName -> [(Int, Int)] -> ([CodeAction] -> Session ()) -> TestTree +codeActionProperties fp locs assertions = testCase fp $ do + runSessionWithServer def changeTypeSignaturePlugin testDataDir $ do + openDoc (fp <.> ".hs") "haskell" >>= codeActionsFromLocs >>= findChangeTypeActions >>= assertions + where + codeActionsFromLocs doc = concat <$> mapM (getCodeActions doc . uncurry pointRange) locs + +findChangeTypeActions :: [Command |? CodeAction] -> Session [CodeAction] +findChangeTypeActions = pure . filter isChangeTypeAction . rights . map toEither + where + isChangeTypeAction CodeAction{_kind} = case _kind of + Nothing -> False + Just kind -> case kind of + "quickfix.changeTypeSignature" -> True + _ -> False + + +regexTest :: FilePath -> Text -> Bool -> TestTree +regexTest fp regex shouldPass = testCase fp $ do + msg <- TIO.readFile (testDataDir fp) + case (msg =~ regex :: (Text, Text, Text, [Text]), shouldPass) of + ((_, _, _, [_]), True) -> pure () + ((_, _, _, [_]), False) -> assertFailure $ "Unexpected match: " <> fp <> " with " <> T.unpack regex + (_, True) -> assertFailure $ "Failed to match: " <> fp <> " with " <> T.unpack regex + (_, False) -> pure () + +pointRange :: Int -> Int -> Range +pointRange + (subtract 1 -> fromIntegral -> line) + (subtract 1 -> fromIntegral -> col) = + Range (Position line col) (Position line $ col + 1) diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TErrorGivenPartialSignature.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TErrorGivenPartialSignature.hs new file mode 100644 index 0000000000..da45222d93 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TErrorGivenPartialSignature.hs @@ -0,0 +1,4 @@ +module TErrorGivenPartialSignature where + +partial :: Int -> Int +partial x = init x diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.expected.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.expected.hs new file mode 100644 index 0000000000..1d331c00bd --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.expected.hs @@ -0,0 +1,6 @@ +module TExpectedActual where + +fullSig :: [Int] -> Int +fullSig = go + where + go = head . reverse diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.hs new file mode 100644 index 0000000000..2a7629c392 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.hs @@ -0,0 +1,6 @@ +module TExpectedActual where + +fullSig :: Int -> Int +fullSig = go + where + go = head . 
reverse diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt new file mode 100644 index 0000000000..6a8246a921 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt @@ -0,0 +1,8 @@ +In the expression: go +In an equation for ‘fullSig’: +fullSig + = go + where + go = head . reverse + + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.expected.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.expected.hs new file mode 100644 index 0000000000..dcff692d2c --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.expected.hs @@ -0,0 +1,8 @@ +module TLocalBinding where + +import Control.Monad (forM) + +local :: Int -> Int +local x = let test :: t0 a0 -> (a0 -> m0 b0) -> m0 (t0 b0) + test = forM + in x + 1 diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.hs new file mode 100644 index 0000000000..388cf26dd5 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.hs @@ -0,0 +1,8 @@ +module TLocalBinding where + +import Control.Monad (forM) + +local :: Int -> Int +local x = let test :: Int -> Int + test = forM + in x + 1 diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt new file mode 100644 index 0000000000..3f31dc48b9 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt @@ -0,0 +1,8 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM +In the expression: + let + test :: Int -> Int + test = forM + in x + 1 diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.expected.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.expected.hs new file mode 100644 index 0000000000..5e7a1ce2ea --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.expected.hs @@ -0,0 +1,11 @@ +module TLocalBindingShadow1 where + +import Control.Monad (forM) + +local :: Int -> Int +local x = let test :: Int -> Int + test = (+2) + in test x + +test :: [Double] -> (Double -> m0 b0) -> m0 [b0] +test = forM diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.hs new file mode 100644 index 0000000000..8d7511df41 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.hs @@ -0,0 +1,11 @@ +module TLocalBindingShadow1 where + +import Control.Monad (forM) + +local :: Int -> Int +local x = let test :: Int -> Int + test = (+2) + in test x + +test :: [Double] -> Double +test = forM diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt new file mode 100644 index 0000000000..ef782e8aec --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt @@ -0,0 +1,4 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM + diff --git 
a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.expected.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.expected.hs new file mode 100644 index 0000000000..8dcb28794c --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.expected.hs @@ -0,0 +1,11 @@ +module TLocalBindingShadow2 where + +import Control.Monad (forM) + +local :: Int -> Int +local x = let test :: t0 a0 -> (a0 -> m0 b0) -> m0 (t0 b0) + test = forM + in test x + +test :: String -> String +test = reverse diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.hs new file mode 100644 index 0000000000..6db8dbbfd3 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.hs @@ -0,0 +1,11 @@ +module TLocalBindingShadow2 where + +import Control.Monad (forM) + +local :: Int -> Int +local x = let test :: Int -> Int + test = forM + in test x + +test :: String -> String +test = reverse diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt new file mode 100644 index 0000000000..bea2526eb9 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt @@ -0,0 +1,9 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM +In the expression: + let + test :: Int -> Int + test = forM + in test x [GHC-83865] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.expected.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.expected.hs new file mode 100644 index 0000000000..0158112123 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.expected.hs @@ -0,0 +1,6 @@ +module TRigidType where + +test :: [[Int]] -> Int +test = go . head . reverse + where + go = head . reverse diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.hs new file mode 100644 index 0000000000..d5d7fa4b10 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.hs @@ -0,0 +1,6 @@ +module TRigidType where + +test :: a -> Int +test = go . head . reverse + where + go = head . reverse diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt new file mode 100644 index 0000000000..f9e78c97ae --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt @@ -0,0 +1,5 @@ +In the expression: go . head . 
reverse +Relevant bindings include + test :: a -> Int + (bound at /home/sgillespie/dev/haskell/haskell-language-server/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.hs:4:1) [GHC-25897] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.expected.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.expected.hs new file mode 100644 index 0000000000..bbfb96cb81 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.expected.hs @@ -0,0 +1,4 @@ +module TRigidType2 where + +test :: [Int] -> Int +test = head diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.hs b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.hs new file mode 100644 index 0000000000..9a6c25807c --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.hs @@ -0,0 +1,4 @@ +module TRigidType2 where + +test :: a -> Int +test = head diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt new file mode 100644 index 0000000000..343129a942 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt @@ -0,0 +1,6 @@ +In the expression: head +In an equation for ‘test’: test = head +Relevant bindings include + test :: a -> Int + (bound at /home/sgillespie/dev/haskell/haskell-language-server/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.hs:4:1) [GHC-25897] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error1.txt b/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error1.txt new file mode 100644 index 0000000000..3ade6af3da --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error1.txt @@ -0,0 +1,9 @@ + • Couldn't match type ‘Data.Set.Internal.Set Int’ with ‘Int’ + Expected: Int -> [Int] + Actual: Data.Set.Internal.Set Int -> [Int] + • In the second argument of ‘(.)’, namely ‘toList’ + In the expression: head . toList + In an equation for ‘test’: test = head . toList + | +83 | test = head . toList + | ^^^^^^ diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error2.txt new file mode 100644 index 0000000000..f76fb50189 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error2.txt @@ -0,0 +1,9 @@ + • Couldn't match type ‘b0 -> a0 -> b0’ with ‘Int’ + Expected: Int -> Int + Actual: (b0 -> a0 -> b0) -> b0 -> t0 a0 -> b0 + • Probable cause: ‘foldl’ is applied to too few arguments + In the expression: foldl + In an equation for ‘test’: test = foldl + | +83 | test = foldl + | diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error3.txt b/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error3.txt new file mode 100644 index 0000000000..5b5adc1e8b --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/ghc921-error3.txt @@ -0,0 +1,9 @@ + • Couldn't match type ‘[Int]’ with ‘Int’ + Expected: Int -> [Int] + Actual: [Int] -> [Int] + • In the second argument of ‘(.)’, namely ‘reverse’ + In the expression: head . reverse + In an equation for ‘test’: test = head . reverse + | +84 | test = head . 
reverse + | diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/hie.yaml b/plugins/hls-change-type-signature-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..8af53b6833 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/hie.yaml @@ -0,0 +1,12 @@ +cradle: + direct: + arguments: + - -i + - -i. + - TExpectedActual + - TRigidType + - TErrorGivenPartialSignature + - TLocalBinding + - TLocalBindingShadow1 + - TLocalBindingShadow2 + - -Wall diff --git a/plugins/hls-class-plugin/README.md b/plugins/hls-class-plugin/README.md new file mode 100644 index 0000000000..e037684db7 --- /dev/null +++ b/plugins/hls-class-plugin/README.md @@ -0,0 +1,13 @@ +# Class Plugin + +The class plugin provides handy operations for type classes, including: + +1. Code action to add minimal class definition methods. +2. Code action to add all missing class methods. +3. Type lenses to add missing type signatures for instance methods. + +## Demo + +![Code Actions](codeactions.gif) + +![Code Lens](codelens.gif) diff --git a/plugins/hls-class-plugin/codeactions.gif b/plugins/hls-class-plugin/codeactions.gif new file mode 100644 index 0000000000..ee0e345b41 Binary files /dev/null and b/plugins/hls-class-plugin/codeactions.gif differ diff --git a/plugins/hls-class-plugin/codelens.gif b/plugins/hls-class-plugin/codelens.gif new file mode 100644 index 0000000000..ead80cfd98 Binary files /dev/null and b/plugins/hls-class-plugin/codelens.gif differ diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class.hs new file mode 100644 index 0000000000..15a9fe0f02 --- /dev/null +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class.hs @@ -0,0 +1,25 @@ +module Ide.Plugin.Class (descriptor, Log(..)) where + +import Development.IDE (IdeState, Recorder, + WithPriority) +import Ide.Plugin.Class.CodeAction +import Ide.Plugin.Class.CodeLens +import Ide.Plugin.Class.Types +import Ide.Types +import Language.LSP.Protocol.Message +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId "Provides code actions and lenses for working with typeclasses") + { pluginCommands = commands plId + , pluginRules = getInstanceBindTypeSigsRule recorder >> getInstanceBindLensRule recorder + , pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction (codeAction recorder) + <> mkPluginHandler SMethod_TextDocumentCodeLens codeLens + <> mkResolveHandler SMethod_CodeLensResolve codeLensResolve + } + +commands :: PluginId -> [PluginCommand IdeState] +commands plId + = [ PluginCommand codeActionCommandId + "add placeholders for minimal methods" (addMethodPlaceholders plId) + , PluginCommand typeLensCommandId + "add type signatures for instance methods" (codeLensCommandHandler plId) + ] diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs new file mode 100644 index 0000000000..3f902ef80c --- /dev/null +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs @@ -0,0 +1,239 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.Class.CodeAction ( + addMethodPlaceholders, + codeAction, +) where + +import Control.Arrow ((>>>)) +import Control.Lens hiding (List, use) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.Extra +import Control.Monad.IO.Class 
(liftIO) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except (ExceptT) +import Control.Monad.Trans.Maybe +import Data.Aeson hiding (Null) +import Data.List +import Data.List.Extra (nubOrdOn) +import qualified Data.Map.Strict as Map +import Data.Maybe (isNothing, listToMaybe, + mapMaybe) +import qualified Data.Set as Set +import qualified Data.Text as T +import Development.IDE +import Development.IDE.Core.FileStore (getVersionedTextDoc) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (fromCurrentRange) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Error (TcRnMessage (..), + _TcRnMessage, + msgEnvelopeErrorL, + stripTcRnMessageContext) +import Development.IDE.GHC.Compat.Util +import Development.IDE.Spans.AtPoint (pointCommand) +import GHC.Iface.Ext.Types (ContextInfo (..), + HieAST (..), Identifier, + IdentifierDetails (..)) +import Ide.Plugin.Class.ExactPrint +import Ide.Plugin.Class.Types +import Ide.Plugin.Class.Utils +import qualified Ide.Plugin.Config +import Ide.Plugin.Error +import Ide.PluginUtils +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +addMethodPlaceholders :: PluginId -> CommandFunction IdeState AddMinimalMethodsParams +addMethodPlaceholders _ state _ param@AddMinimalMethodsParams{..} = do + caps <- lift pluginGetClientCapabilities + nfp <- getNormalizedFilePathE (verTxtDocId ^. L.uri) + pm <- runActionE "classplugin.addMethodPlaceholders.GetParsedModule" state + $ useE GetParsedModule nfp + (hsc_dflags . hscEnv -> df) <- runActionE "classplugin.addMethodPlaceholders.GhcSessionDeps" state + $ useE GhcSessionDeps nfp + (old, new) <- handleMaybeM (PluginInternalError "Unable to makeEditText") + $ liftIO $ runMaybeT + $ makeEditText pm df param + pragmaInsertion <- insertPragmaIfNotPresent state nfp InstanceSigs + let edit = + if withSig + then mergeEdit (workspaceEdit caps old new) pragmaInsertion + else workspaceEdit caps old new + + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + + pure $ InR Null + where + toTextDocumentEdit edit = + TextDocumentEdit (verTxtDocId ^.re _versionedTextDocumentIdentifier) [InL edit] + + mergeEdit :: WorkspaceEdit -> [TextEdit] -> WorkspaceEdit + mergeEdit WorkspaceEdit{..} edits = WorkspaceEdit + { _documentChanges = + (\x -> x ++ map (InL . toTextDocumentEdit) edits) + <$> _documentChanges + , .. + } + + workspaceEdit caps old new + = diffText caps (verTxtDocId, old) new IncludeDeletions + +-- | +-- This implementation is ad-hoc in a sense that the diagnostic detection mechanism is +-- sensitive to the format of diagnostic messages from GHC. +codeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeAction recorder state plId (CodeActionParams _ _ docId caRange _) = do + verTxtDocId <- liftIO $ runAction "classplugin.codeAction.getVersionedTextDoc" state $ getVersionedTextDoc docId + nfp <- getNormalizedFilePathE (verTxtDocId ^. L.uri) + activeDiagnosticsInRange (shakeExtras state) nfp caRange + >>= \case + Nothing -> pure $ InL [] + Just fileDiags -> do + actions <- join <$> mapM (mkActions nfp verTxtDocId) (methodDiags fileDiags) + pure $ InL actions + where + methodDiags fileDiags = + mapMaybe (\d -> (d,) <$> isClassMethodWarning (d ^. 
fdStructuredMessageL)) fileDiags + + mkActions + :: NormalizedFilePath + -> VersionedTextDocumentIdentifier + -> (FileDiagnostic, ClassMinimalDef) + -> ExceptT PluginError (HandlerM Ide.Plugin.Config.Config) [Command |? CodeAction] + mkActions docPath verTxtDocId (diag, classMinDef) = do + (HAR {hieAst = ast}, pmap) <- runActionE "classplugin.findClassIdentifier.GetHieAst" state + $ useWithStaleE GetHieAst docPath + instancePosition <- handleMaybe (PluginInvalidUserState "fromCurrentRange") $ + fromCurrentRange pmap range ^? _Just . L.start + & fmap (L.character -~ 1) + ident <- findClassIdentifier ast instancePosition + cls <- findClassFromIdentifier docPath ident + InstanceBindTypeSigsResult sigs <- runActionE "classplugin.codeAction.GetInstanceBindTypeSigs" state + $ useE GetInstanceBindTypeSigs docPath + (tmrTypechecked -> gblEnv ) <- runActionE "classplugin.codeAction.TypeCheck" state $ useE TypeCheck docPath + (hscEnv -> hsc) <- runActionE "classplugin.codeAction.GhcSession" state $ useE GhcSession docPath + logWith recorder Debug (LogImplementedMethods (hsc_dflags hsc) cls classMinDef) + pure + $ concatMap mkAction + $ nubOrdOn snd + $ filter ((/=) mempty . snd) + $ mkMethodGroups hsc gblEnv range sigs classMinDef + where + range = diag ^. fdLspDiagnosticL . L.range + + mkMethodGroups :: HscEnv -> TcGblEnv -> Range -> [InstanceBindTypeSig] -> ClassMinimalDef -> [MethodGroup] + mkMethodGroups hsc gblEnv range sigs classMinDef = minimalDef <> [allClassMethods] + where + minimalDef = minDefToMethodGroups hsc gblEnv range sigs classMinDef + allClassMethods = ("all missing methods", makeMethodDefinitions hsc gblEnv range sigs) + + mkAction :: MethodGroup -> [Command |? CodeAction] + mkAction (name, methods) + = [ mkCodeAction title + $ mkLspCommand plId codeActionCommandId title + (Just $ mkCmdParams methods False) + , mkCodeAction titleWithSig + $ mkLspCommand plId codeActionCommandId titleWithSig + (Just $ mkCmdParams methods True) + ] + where + title = "Add placeholders for " <> name + titleWithSig = title <> " with signature(s)" + + mkCmdParams :: [(T.Text, T.Text)] -> Bool -> [Value] + mkCmdParams methodGroup withSig = + [toJSON (AddMinimalMethodsParams verTxtDocId range methodGroup withSig)] + + mkCodeAction title cmd + = InR + $ CodeAction + title + (Just CodeActionKind_QuickFix) + (Just []) + Nothing + Nothing + Nothing + (Just cmd) + Nothing + + findClassIdentifier hf instancePosition = + handleMaybe (PluginInternalError "No Identifier found") + $ listToMaybe + $ mapMaybe listToMaybe + $ pointCommand hf instancePosition + ( (Map.keys . Map.filterWithKey isClassNodeIdentifier . getNodeIds) + <=< nodeChildren + ) + + findClassFromIdentifier docPath (Right name) = do + (hscEnv -> hscenv, _) <- runActionE "classplugin.findClassFromIdentifier.GhcSessionDeps" state + $ useWithStaleE GhcSessionDeps docPath + (tmrTypechecked -> thisMod, _) <- runActionE "classplugin.findClassFromIdentifier.TypeCheck" state + $ useWithStaleE TypeCheck docPath + handleMaybeM (PluginInternalError "initTcWithGbl failed") + . liftIO + . fmap snd + . 
initTcWithGbl hscenv thisMod ghostSpan $ do + tcthing <- tcLookup name + case tcthing of + AGlobal (AConLike (RealDataCon con)) + | Just cls <- tyConClass_maybe (dataConOrigTyCon con) -> pure cls + _ -> fail "Ide.Plugin.Class.findClassFromIdentifier" + findClassFromIdentifier _ (Left _) = throwError (PluginInternalError "Ide.Plugin.Class.findClassIdentifier") + +-- see https://hackage.haskell.org/package/ghc-9.8.1/docs/src/GHC.Types.Name.Occurrence.html#mkClassDataConOcc +isClassNodeIdentifier :: Identifier -> IdentifierDetails a -> Bool +isClassNodeIdentifier (Right i) ident | 'C':':':_ <- unpackFS $ occNameFS $ occName i = (isNothing . identType) ident && Use `Set.member` identInfo ident +isClassNodeIdentifier _ _ = False + +isClassMethodWarning :: StructuredMessage -> Maybe ClassMinimalDef +isClassMethodWarning message = case message ^? _SomeStructuredMessage . msgEnvelopeErrorL . _TcRnMessage of + Nothing -> Nothing + Just tcRnMessage -> isUnsatisfiedMinimalDefWarning tcRnMessage + +isUnsatisfiedMinimalDefWarning :: TcRnMessage -> Maybe ClassMinimalDef +isUnsatisfiedMinimalDefWarning = stripTcRnMessageContext >>> \case + TcRnUnsatisfiedMinimalDef classMinDef -> Just classMinDef + _ -> Nothing + +type MethodSignature = T.Text +type MethodName = T.Text +type MethodDefinition = (MethodName, MethodSignature) +type MethodGroup = (T.Text, [MethodDefinition]) + +makeMethodDefinition :: HscEnv -> TcGblEnv -> InstanceBindTypeSig -> MethodDefinition +makeMethodDefinition hsc gblEnv sig = (name, signature) + where + name = T.drop (T.length bindingPrefix) (printOutputable (bindName sig)) + signature = prettyBindingNameString (printOutputable (bindName sig)) <> " :: " <> T.pack (showDoc hsc gblEnv (bindType sig)) + +makeMethodDefinitions :: HscEnv -> TcGblEnv -> Range -> [InstanceBindTypeSig] -> [MethodDefinition] +makeMethodDefinitions hsc gblEnv range sigs = + [ makeMethodDefinition hsc gblEnv sig + | sig <- sigs + , inRange range (getSrcSpan $ bindName sig) + ] + +signatureToName :: InstanceBindTypeSig -> T.Text +signatureToName sig = T.drop (T.length bindingPrefix) (printOutputable (bindName sig)) + +-- Return [groupName text, [(methodName text, signature text)]] +minDefToMethodGroups :: HscEnv -> TcGblEnv -> Range -> [InstanceBindTypeSig] -> BooleanFormula Name -> [MethodGroup] +minDefToMethodGroups hsc gblEnv range sigs minDef = makeMethodGroup <$> go minDef + where + makeMethodGroup methodDefinitions = + let name = mconcat $ intersperse "," $ (\x -> "'" <> x <> "'") . fst <$> methodDefinitions + in (name, methodDefinitions) + + go (Var mn) = pure $ makeMethodDefinitions hsc gblEnv range $ filter ((==) (printOutputable mn) . signatureToName) sigs + go (Or ms) = concatMap (go . unLoc) ms + go (And ms) = foldr (liftA2 (<>) . go . 
unLoc) [[]] ms + go (Parens m) = go (unLoc m) + diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeLens.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeLens.hs new file mode 100644 index 0000000000..9410469516 --- /dev/null +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeLens.hs @@ -0,0 +1,96 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.Class.CodeLens where + +import Control.Lens ((&), (?~), (^.)) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Data.Aeson hiding (Null) +import qualified Data.IntMap.Strict as IntMap +import Data.Maybe (mapMaybe, maybeToList) +import qualified Data.Text as T +import Development.IDE +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping +import Development.IDE.GHC.Compat +import Development.IDE.Spans.Pragmas (getFirstPragma, + insertNewPragma) +import Ide.Plugin.Class.Types +import Ide.Plugin.Class.Utils +import Ide.Plugin.Error +import Ide.PluginUtils +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +-- The code lens method is only responsible for providing the ranges of the code +-- lenses matched to a unique id +codeLens :: PluginMethodHandler IdeState Method_TextDocumentCodeLens +codeLens state _plId clp = do + nfp <- getNormalizedFilePathE $ clp ^. L.textDocument . L.uri + (InstanceBindLensResult (InstanceBindLens{lensRange}), pm) + <- runActionE "classplugin.GetInstanceBindLens" state + -- Using stale results means that we can almost always return a + -- value. In practice this means the lenses don't 'flicker' + $ useWithStaleE GetInstanceBindLens nfp + pure $ InL $ mapMaybe (toCodeLens pm) lensRange + where toCodeLens pm (range, int) = + let newRange = toCurrentRange pm range + in (\r -> CodeLens r Nothing (Just $ toJSON int)) <$> newRange + +-- The code lens resolve method matches a title to each unique id +codeLensResolve:: ResolveFunction IdeState Int Method_CodeLensResolve +codeLensResolve state plId cl uri uniqueID = do + nfp <- getNormalizedFilePathE uri + (InstanceBindLensResult (InstanceBindLens{lensDetails}), pm) + <- runActionE "classplugin.GetInstanceBindLens" state + $ useWithStaleE GetInstanceBindLens nfp + (tmrTypechecked -> gblEnv, _) <- runActionE "classplugin.codeAction.TypeCheck" state $ useWithStaleE TypeCheck nfp + (hscEnv -> hsc, _) <- runActionE "classplugin.codeAction.GhcSession" state $ useWithStaleE GhcSession nfp + (range, name, typ) <- handleMaybe PluginStaleResolve + $ IntMap.lookup uniqueID lensDetails + let title = prettyBindingNameString (printOutputable name) <> " :: " <> T.pack (showDoc hsc gblEnv typ) + edit <- handleMaybe (PluginInvalidUserState "toCurrentRange") $ makeEdit range title pm + let command = mkLspCommand plId typeLensCommandId title (Just [toJSON $ InstanceBindLensCommand uri edit]) + pure $ cl & L.command ?~ command + where + makeEdit :: Range -> T.Text -> PositionMapping -> Maybe TextEdit + makeEdit range bind mp = + let startPos = range ^. L.start + insertChar = startPos ^. L.character + insertRange = Range startPos startPos + in case toCurrentRange mp insertRange of + Just rg -> Just $ TextEdit rg (bind <> "\n" <> T.replicate (fromIntegral insertChar) " ") + Nothing -> Nothing + +-- Finally the command actually generates and applies the workspace edit for the +-- specified unique id. 
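+--
+-- As a rough, illustrative sketch (borrowing the CodeLensSimple test data further
+-- down in this patch; the real edit and ranges come from the resolved lens):
+-- running the command on a lens for `(==) = _` in `instance Eq A` turns
+--
+-- > instance Eq A where
+-- >   (==) = _
+--
+-- into
+--
+-- > instance Eq A where
+-- >   (==) :: A -> A -> Bool
+-- >   (==) = _
+--
+-- together with an InstanceSigs pragma edit when that extension is not already enabled.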
+codeLensCommandHandler :: PluginId -> CommandFunction IdeState InstanceBindLensCommand +codeLensCommandHandler plId state _ InstanceBindLensCommand{commandUri, commandEdit} = do + nfp <- getNormalizedFilePathE commandUri + (InstanceBindLensResult (InstanceBindLens{lensEnabledExtensions}), _) + <- runActionE "classplugin.GetInstanceBindLens" state + $ useWithStaleE GetInstanceBindLens nfp + -- We are only interested in the pragma information if the user does not + -- have the InstanceSigs extension enabled + mbPragma <- if InstanceSigs `elem` lensEnabledExtensions + then pure Nothing + else Just <$> getFirstPragma plId state nfp + let -- By mapping over our Maybe NextPragmaInfo value, we only compute this + -- edit if we actually need to. + pragmaInsertion = + maybeToList $ flip insertNewPragma InstanceSigs <$> mbPragma + wEdit = workspaceEdit pragmaInsertion + _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wEdit) (\_ -> pure ()) + pure $ InR Null + where + workspaceEdit pragmaInsertion= + WorkspaceEdit + (pure [(commandUri, commandEdit : pragmaInsertion)]) + Nothing + Nothing + + + + diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs new file mode 100644 index 0000000000..bb0994442a --- /dev/null +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/ExactPrint.hs @@ -0,0 +1,138 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Class.ExactPrint where + +import Control.Monad.Trans.Maybe +import Data.Either.Extra (eitherToMaybe) +import Data.Functor.Identity (Identity) +import qualified Data.Text as T +import Development.IDE.GHC.Compat +import GHC.Parser.Annotation +import Ide.Plugin.Class.Types +import Ide.Plugin.Class.Utils +import Language.Haskell.GHC.ExactPrint +import Language.Haskell.GHC.ExactPrint.Parsers +import Language.LSP.Protocol.Types (Range) + +#if MIN_VERSION_ghc(9,9,0) +import Control.Lens (_head, over) +#endif + +makeEditText :: Monad m => ParsedModule -> DynFlags -> AddMinimalMethodsParams -> MaybeT m (T.Text, T.Text) +makeEditText pm df AddMinimalMethodsParams{..} = do + mDecls <- MaybeT . pure $ traverse (makeMethodDecl df) methodGroup + let ps = +#if !MIN_VERSION_ghc(9,9,0) + makeDeltaAst $ +#endif + pm_parsed_source pm + + old = T.pack $ exactPrint ps +#if MIN_VERSION_ghc_exactprint(1,10,0) + ps' = addMethodDecls ps mDecls range withSig +#else + (ps', _, _) = runTransform (addMethodDecls ps mDecls range withSig) +#endif + new = T.pack $ exactPrint ps' + pure (old, new) + +makeMethodDecl :: DynFlags -> (T.Text, T.Text) -> Maybe (LHsDecl GhcPs, LHsDecl GhcPs) +makeMethodDecl df (mName, sig) = do + name <- eitherToMaybe $ parseDecl df (T.unpack mName) . T.unpack $ toMethodName mName <> " = _" + sig' <- eitherToMaybe $ parseDecl df (T.unpack sig) $ T.unpack sig + pure (name, sig') + +#if MIN_VERSION_ghc_exactprint(1,10,0) +addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> Located (HsModule GhcPs) +#else +addMethodDecls :: ParsedSource -> [(LHsDecl GhcPs, LHsDecl GhcPs)] -> Range -> Bool -> TransformT Identity (Located (HsModule GhcPs)) +#endif +addMethodDecls ps mDecls range withSig + | withSig = go (concatMap (\(decl, sig) -> [sig, decl]) mDecls) + | otherwise = go (map fst mDecls) + where + go inserting = do +#if MIN_VERSION_ghc_exactprint(1,10,0) + let allDecls = hsDecls ps +#else + allDecls <- hsDecls ps +#endif + case break (inRange range . 
getLoc) allDecls of + (before, L l inst : after) -> + let + instSpan = realSrcSpan $ getLoc l +#if MIN_VERSION_ghc(9,11,0) + instCol = srcSpanStartCol instSpan - 1 +#else + instCol = srcSpanStartCol instSpan +#endif +#if MIN_VERSION_ghc(9,9,0) + instRow = srcSpanEndLine instSpan + methodEpAnn = noAnnSrcSpanDP $ deltaPos 1 (instCol + defaultIndent) + -- Put each TyCl method/type signature on separate line, indented by 2 spaces relative to instance decl + newLine (L _ e) = L methodEpAnn e + + -- Set DeltaPos for following declarations so they don't move undesirably + resetFollowing = + over _head (\followingDecl -> + let followingDeclRow = srcSpanStartLine $ realSrcSpan $ getLoc followingDecl + delta = DifferentLine (followingDeclRow - instRow) instCol + in setEntryDP followingDecl delta) +#else + newLine (L l e) = + let dp = deltaPos 1 (instCol + defaultIndent - 1) + in L (noAnnSrcSpanDP (getLoc l) dp <> l) e + + resetFollowing = id +#endif + in replaceDecls ps (before ++ L l (addWhere inst):(map newLine inserting ++ resetFollowing after)) + (before, []) -> + replaceDecls ps before + + -- Add `where` keyword for `instance X where` if `where` is missing. + -- + -- The `where` in ghc-9.2 is now stored in the instance declaration + -- directly. More precisely, giving an `HsDecl GhcPs`, we have: + -- InstD --> ClsInstD --> ClsInstDecl --> XCClsInstDecl --> (EpAnn [AddEpAnn], AnnSortKey), + -- here `AnnEpAnn` keeps the track of Anns. + -- + -- See the link for the original definition: + -- https://hackage.haskell.org/package/ghc-9.2.1/docs/Language-Haskell-Syntax-Extension.html#t:XCClsInstDecl + addWhere :: HsDecl GhcPs -> HsDecl GhcPs + addWhere instd@(InstD xInstD (ClsInstD ext decl@ClsInstDecl{..})) = + case cid_ext of +#if MIN_VERSION_ghc(9,11,0) + (warnings, anns, key) + | EpTok _ <- acid_where anns -> instd + | otherwise -> + InstD xInstD (ClsInstD ext decl { + cid_ext = ( warnings + , anns { acid_where = EpTok d1 } + , key + ) + }) +#elif MIN_VERSION_ghc(9,9,0) + (warnings, anns, key) + | any (\(AddEpAnn kw _ )-> kw == AnnWhere) anns -> instd + | otherwise -> + InstD xInstD (ClsInstD ext decl { + cid_ext = ( warnings + , AddEpAnn AnnWhere d1 : anns + , key + ) + }) +#else + (EpAnn entry anns comments, key) -> + InstD xInstD (ClsInstD ext decl { + cid_ext = (EpAnn + entry + (AddEpAnn AnnWhere d1 : anns) + comments + , key + ) + }) + _ -> instd +#endif + addWhere decl = decl diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs new file mode 100644 index 0000000000..1669aba43d --- /dev/null +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Types.hs @@ -0,0 +1,234 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.Class.Types where + +import Control.DeepSeq (rwhnf) +import Control.Monad.Extra (mapMaybeM, whenMaybe) +import Control.Monad.IO.Class (liftIO) +import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) +import Data.Aeson +import qualified Data.IntMap as IntMap +import Data.List.Extra (firstJust) +import Data.Maybe (catMaybes, mapMaybe, + maybeToList) +import qualified Data.Text as T +import Data.Unique (hashUnique, newUnique) +import Development.IDE +import Development.IDE.Core.PluginUtils (useMT) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat hiding (newUnique, (<+>)) +import 
Development.IDE.GHC.Compat.Util (bagToList) +import Development.IDE.Graph.Classes +import GHC.Generics +import Ide.Plugin.Class.Utils +import Ide.Types +import Language.LSP.Protocol.Types (TextEdit, + VersionedTextDocumentIdentifier) + +typeLensCommandId :: CommandId +typeLensCommandId = "classplugin.typelens" + +codeActionCommandId :: CommandId +codeActionCommandId = "classplugin.codeaction" + +-- | Default indent size for inserting +defaultIndent :: Int +defaultIndent = 2 + +data AddMinimalMethodsParams = AddMinimalMethodsParams + { verTxtDocId :: VersionedTextDocumentIdentifier + , range :: Range + , methodGroup :: [(T.Text, T.Text)] + -- ^ (name text, signature text) + , withSig :: Bool + } + deriving (Show, Eq, Generic, ToJSON, FromJSON) + +-- |The InstanceBindTypeSigs Rule collects the instance bindings type +-- signatures (both name and type). It is used by both the code actions and the +-- code lenses +data GetInstanceBindTypeSigs = GetInstanceBindTypeSigs + deriving (Generic, Show, Eq, Ord, Hashable, NFData) + +data InstanceBindTypeSig = InstanceBindTypeSig + { bindName :: Name + , bindType :: Type + } + +newtype InstanceBindTypeSigsResult = + InstanceBindTypeSigsResult [InstanceBindTypeSig] + +instance Show InstanceBindTypeSigsResult where + show _ = "" + +instance NFData InstanceBindTypeSigsResult where + rnf = rwhnf + +type instance RuleResult GetInstanceBindTypeSigs = InstanceBindTypeSigsResult + +-- |The necessary data to execute our code lens +data InstanceBindLensCommand = InstanceBindLensCommand + { -- |The URI needed to run actions in the command + commandUri :: Uri + -- |The specific TextEdit we want to apply. This does not include the + -- pragma edit which is computed in the command + , commandEdit :: TextEdit } + deriving (Generic, FromJSON, ToJSON) + +-- | The InstanceBindLens rule is specifically for code lenses. It relies on +-- the InstanceBindTypeSigs rule, filters out irrelevant matches and signatures +-- that can't be matched to a source span. It provides all the signatures linked +-- to a unique ID to aid in resolving. It also provides a list of enabled +-- extensions. +data GetInstanceBindLens = GetInstanceBindLens + deriving (Generic, Show, Eq, Ord, Hashable, NFData) + +data InstanceBindLens = InstanceBindLens + { -- |What we need to provide the code lens. The range linked with + -- a unique ID that will allow us to resolve the rest of the data later + lensRange :: [(Range, Int)] + -- |Provides the necessary data to allow us to display the + -- title of the lens and compute a TextEdit for it. + , lensDetails :: IntMap.IntMap (Range, Name, Type) + -- |Provides currently enabled extensions, allowing us to conditionally + -- insert needed extensions. 
+ , lensEnabledExtensions :: [Extension] + } + +newtype InstanceBindLensResult = + InstanceBindLensResult InstanceBindLens + +instance Show InstanceBindLensResult where + show _ = "" + +instance NFData InstanceBindLensResult where + rnf = rwhnf + +type instance RuleResult GetInstanceBindLens = InstanceBindLensResult + +data Log + = LogImplementedMethods DynFlags Class ClassMinimalDef + | LogShake Shake.Log + +instance Pretty Log where + pretty = \case + LogImplementedMethods dflags cls methods -> + pretty ("The following methods are missing" :: String) + <+> pretty (show (getOccString cls) <> ":") -- 'show' is used here to add quotes around the class name + <+> pretty (showSDoc dflags $ ppr methods) + LogShake log -> pretty log + +data BindInfo = BindInfo + { bindSpan :: SrcSpan + -- ^ SrcSpan of the whole binding + , bindNameSpan :: SrcSpan + -- ^ SrcSpan of the binding name + } + +getInstanceBindLensRule :: Recorder (WithPriority Log) -> Rules () +getInstanceBindLensRule recorder = do + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \GetInstanceBindLens nfp -> runMaybeT $ do +#if MIN_VERSION_ghc(9,9,0) + tmr@(tmrRenamed -> (hs_tyclds -> tycls, _, _, _, _)) <- useMT TypeCheck nfp +#else + tmr@(tmrRenamed -> (hs_tyclds -> tycls, _, _, _)) <- useMT TypeCheck nfp +#endif + (InstanceBindTypeSigsResult allBinds) <- useMT GetInstanceBindTypeSigs nfp + + let -- declared instance methods without signatures + bindInfos = [ bind + | instds <- map group_instds tycls -- class instance decls + , instd <- instds + , inst <- maybeToList $ getClsInstD (unLoc instd) + , bind <- getBindSpanWithoutSig inst + ] + targetSigs = matchBind bindInfos allBinds + rangeIntNameType <- liftIO $ mapMaybeM getRangeWithSig targetSigs + let lensRange = (\(range, int, _, _) -> (range, int)) <$> rangeIntNameType + lensDetails = IntMap.fromList $ (\(range, int, name, typ) -> (int, (range, name, typ))) <$> rangeIntNameType + lensEnabledExtensions = getExtensions $ tmrParsed tmr + pure $ InstanceBindLensResult $ InstanceBindLens{..} + where + -- Match Binds with their signatures + -- We try to give every `InstanceBindTypeSig` a `SrcSpan`, + -- hence we can display signatures for `InstanceBindTypeSig` with span later. + matchBind :: [BindInfo] -> [InstanceBindTypeSig] -> [Maybe (InstanceBindTypeSig, SrcSpan)] + matchBind existedBinds allBindWithSigs = + [firstJust (go bindSig) existedBinds | bindSig <- allBindWithSigs] + where + go :: InstanceBindTypeSig -> BindInfo -> Maybe (InstanceBindTypeSig, SrcSpan) + go bindSig bind = do + range <- (srcSpanToRange . bindNameSpan) bind + if inRange range (getSrcSpan $ bindName bindSig) + then Just (bindSig, bindSpan bind) + else Nothing + + getClsInstD (ClsInstD _ d) = Just d + getClsInstD _ = Nothing + + getSigName (ClassOpSig _ _ sigNames _) = Just $ map unLoc sigNames + getSigName _ = Nothing + + getBindSpanWithoutSig :: ClsInstDecl GhcRn -> [BindInfo] + getBindSpanWithoutSig ClsInstDecl{..} = + let bindNames = mapMaybe go $ +#if !MIN_VERSION_ghc(9,11,0) + bagToList +#endif + cid_binds + go (L l bind) = case bind of + FunBind{..} + -- `Generated` tagged for Template Haskell, + -- here we filter out nonsense generated bindings + -- that are nonsense for displaying code lenses. 
+ -- + -- See https://github.com/haskell/haskell-language-server/issues/3319 + | not $ isGenerated (groupOrigin fun_matches) + -> Just $ L l fun_id + _ -> Nothing + -- Existed signatures' name + sigNames = concat $ mapMaybe (\(L _ r) -> getSigName r) cid_sigs + toBindInfo (L l (L l' _)) = BindInfo + (locA l) -- bindSpan + (locA l') -- bindNameSpan + in toBindInfo <$> filter (\(L _ name) -> unLoc name `notElem` sigNames) bindNames + + -- Get bind definition range with its rendered signature text + getRangeWithSig :: Maybe (InstanceBindTypeSig, SrcSpan) -> IO (Maybe (Range, Int, Name, Type)) + getRangeWithSig (Just (bind, span)) = runMaybeT $ do + range <- MaybeT . pure $ srcSpanToRange span + uniqueID <- liftIO $ hashUnique <$> newUnique + pure (range, uniqueID, bindName bind, bindType bind) + getRangeWithSig Nothing = pure Nothing + + +getInstanceBindTypeSigsRule :: Recorder (WithPriority Log) -> Rules () +getInstanceBindTypeSigsRule recorder = do + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \GetInstanceBindTypeSigs nfp -> runMaybeT $ do + (tmrTypechecked -> gblEnv ) <- useMT TypeCheck nfp + (hscEnv -> hsc) <- useMT GhcSession nfp + let binds = collectHsBindsBinders $ tcg_binds gblEnv + (_, maybe [] catMaybes -> instanceBinds) <- liftIO $ + initTcWithGbl hsc gblEnv ghostSpan +#if MIN_VERSION_ghc(9,7,0) + $ liftZonkM +#endif + $ traverse bindToSig binds + pure $ InstanceBindTypeSigsResult instanceBinds + where + bindToSig id = do + let name = idName id + whenMaybe (isBindingName name) $ do + env <- tcInitTidyEnv +#if MIN_VERSION_ghc(9,11,0) + let ty = +#else + let (_, ty) = +#endif + tidyOpenType env (idType id) + pure $ InstanceBindTypeSig name ty diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs new file mode 100644 index 0000000000..e73344c341 --- /dev/null +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/Utils.hs @@ -0,0 +1,73 @@ +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.Class.Utils where + +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.Char (isAlpha) +import Data.List (isPrefixOf) +import Data.String (IsString) +import qualified Data.Text as T +import Development.IDE +import Development.IDE.Core.PluginUtils +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Util +import Development.IDE.Spans.Pragmas (getNextPragmaInfo, + insertNewPragma) +import Ide.Plugin.Error +import Ide.PluginUtils +import Language.LSP.Protocol.Types + +-- | All instance bindings are started with `$c` +bindingPrefix :: IsString s => s +bindingPrefix = "$c" + +isBindingName :: Name -> Bool +isBindingName name = isPrefixOf bindingPrefix $ occNameString $ nameOccName name + +-- | Check if some `HasSrcSpan` value in the given range +inRange :: Range -> SrcSpan -> Bool +inRange range s = maybe False (subRange range) (srcSpanToRange s) + +ghostSpan :: RealSrcSpan +ghostSpan = realSrcLocSpan $ mkRealSrcLoc (fsLit "") 1 1 + +-- | "$cname" ==> "name" +prettyBindingNameString :: T.Text -> T.Text +prettyBindingNameString name + | T.isPrefixOf bindingPrefix name = + toMethodName $ T.drop (T.length bindingPrefix) name + | otherwise = name + +showDoc :: HscEnv -> TcGblEnv -> Type -> String +showDoc hsc gblEnv ty = showSDocForUser' hsc (mkPrintUnqualifiedDefault hsc (rdrEnv gblEnv)) (pprSigmaType ty) + where rdrEnv gblEnv = tcg_rdr_env gblEnv + +-- | Paren the name for pretty display if necessary +toMethodName :: T.Text -> T.Text 
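+-- A few illustrative cases (informal sketch, not doctests):
+--
+-- > toMethodName "fmap" == "fmap"
+-- > toMethodName "_f"   == "_f"
+-- > toMethodName "=="   == "(==)"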
+toMethodName n + | Just (h, _) <- T.uncons n + , not (isAlpha h || h == '_') + = "(" <> n <> ")" + | otherwise + = n + +-- | Here we use `useWithStale` to compute, Using stale results means that we can almost always return a value. +-- In practice this means the lenses don't 'flicker'. +-- This function is also used in code actions, but it doesn't matter because our actions only work +-- if the module parsed success. +insertPragmaIfNotPresent :: (MonadIO m) + => IdeState + -> NormalizedFilePath + -> Extension + -> ExceptT PluginError m [TextEdit] +insertPragmaIfNotPresent state nfp pragma = do + (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- runActionE "classplugin.insertPragmaIfNotPresent.GhcSession" state + $ useWithStaleE GhcSession nfp + fileContents <- liftIO $ runAction "classplugin.insertPragmaIfNotPresent.GetFileContents" state + $ getFileContents nfp + (pm, _) <- runActionE "classplugin.insertPragmaIfNotPresent.GetParsedModuleWithComments" state + $ useWithStaleE GetParsedModuleWithComments nfp + let exts = getExtensions pm + info = getNextPragmaInfo sessionDynFlags fileContents + pure [insertNewPragma info pragma | pragma `notElem` exts] diff --git a/plugins/hls-class-plugin/test/Main.hs b/plugins/hls-class-plugin/test/Main.hs new file mode 100644 index 0000000000..7f1feddc11 --- /dev/null +++ b/plugins/hls-class-plugin/test/Main.hs @@ -0,0 +1,188 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main + ( main + ) where + +import Control.Exception (catch) +import Control.Lens (Prism', prism', view, (^.), + (^..), (^?)) +import Control.Monad (void) +import Data.Foldable (find) +import Data.Maybe +import qualified Data.Text as T +import qualified Ide.Plugin.Class as Class +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +classPlugin :: PluginTestDescriptor Class.Log +classPlugin = mkPluginTestDescriptor Class.descriptor "class" + +tests :: TestTree +tests = testGroup + "class" + [ codeActionTests + , codeLensTests + ] + +codeActionTests :: TestTree +codeActionTests = testGroup + "code actions" + [ expectCodeActionsAvailable "Produces addMinimalMethodPlaceholders code actions for one instance" "T1" + [ "Add placeholders for '=='" + , "Add placeholders for '==' with signature(s)" + , "Add placeholders for '/='" + , "Add placeholders for '/=' with signature(s)" + , "Add placeholders for all missing methods" + , "Add placeholders for all missing methods with signature(s)" + ] + , goldenWithClass "Creates a placeholder for '=='" "T1" "eq" $ + getActionByTitle "Add placeholders for '=='" + , goldenWithClass "Creates a placeholder for '/='" "T1" "ne" $ + getActionByTitle "Add placeholders for '/='" + , goldenWithClass "Creates a placeholder for both '==' and '/='" "T1" "all" $ + getActionByTitle "Add placeholders for all missing methods" + , goldenWithClass "Creates a placeholder for 'fmap'" "T2" "fmap" $ + getActionByTitle "Add placeholders for 'fmap'" + , goldenWithClass "Creates a placeholder for multiple methods 1" "T3" "1" $ + getActionByTitle "Add placeholders for 'f','g'" + , goldenWithClass "Creates a placeholder for multiple methods 2" "T3" "2" $ + getActionByTitle "Add placeholders for 'g','h'" + , goldenWithClass "Creates a placeholder for a method starting with '_'" "T4" "" $ + getActionByTitle "Add placeholders for '_f'" + , goldenWithClass "Creates a placeholder for '==' with extra 
lines" "T5" "" $ + getActionByTitle "Add placeholders for '=='" + , goldenWithClass "Creates a placeholder for only the unimplemented methods of multiple methods" "T6" "1" $ + getActionByTitle "Add placeholders for 'g'" + , goldenWithClass "Creates a placeholder for other two methods" "T6" "2" $ + getActionByTitle "Add placeholders for 'g','h'" + , goldenWithClass "Creates a placeholder when all top-level decls are indented" "T7" "" $ + getActionByTitle "Add placeholders for 'g','h','i'" + , goldenWithClass "Don't insert pragma with GHC2021" "InsertWithGHC2021Enabled" "" $ + getActionByTitle "Add placeholders for '==' with signature(s)" + , goldenWithClass "Insert pragma if not exist" "InsertWithoutPragma" "" $ + getActionByTitle "Add placeholders for '==' with signature(s)" + , goldenWithClass "Don't insert pragma if exist" "InsertWithPragma" "" $ + getActionByTitle "Add placeholders for '==' with signature(s)" + , goldenWithClass "Only insert pragma once" "InsertPragmaOnce" "" $ + getActionByTitle "Add placeholders for 'pure','<*>' with signature(s)" + , expectCodeActionsAvailable "No code action available when minimal requirements meet" "MinimalDefinitionMeet" [] + , expectCodeActionsAvailable "Add placeholders for all missing methods is unavailable when all methods are required" "AllMethodsRequired" + [ "Add placeholders for 'f','g'" + , "Add placeholders for 'f','g' with signature(s)" + ] + , testCase "Update text document version" $ runSessionWithServer def classPlugin testDataDir $ do + doc <- createDoc "Version.hs" "haskell" "module Version where" + ver1 <- (^. L.version) <$> getVersionedDoc doc + liftIO $ ver1 @?= 0 + + -- Change the doc to ensure the version is not 0 + changeDoc doc + [ TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ + T.unlines ["module Version where", "data A a = A a", "instance Functor A where"] + ] + ver2 <- (^. L.version) <$> getVersionedDoc doc + _ <- waitForDiagnostics + liftIO $ ver2 @?= 1 + + -- Execute the action and see what the version is + action <- head . concatMap (^.. _CACodeAction) <$> getAllCodeActions doc + executeCodeAction action + _ <- waitForDiagnostics + ver3 <- (^. L.version) <$> getVersionedDoc doc + liftIO $ ver3 @?= 2 + pure mempty + ] + +codeLensTests :: TestTree +codeLensTests = testGroup + "code lens" + [ testCase "Has code lens" $ do + runSessionWithServer def classPlugin testDataDir $ do + doc <- openDoc "CodeLensSimple.hs" "haskell" + lens <- getAndResolveCodeLenses doc + let titles = map (^. L.title) $ mapMaybe (^. L.command) lens + liftIO $ titles @?= + [ "(==) :: B -> B -> Bool" + , "(==) :: A -> A -> Bool" + ] + , testCase "No lens for TH" $ do + runSessionWithServer def classPlugin testDataDir $ do + doc <- openDoc "TH.hs" "haskell" + lens <- getAndResolveCodeLenses doc + liftIO $ length lens @?= 0 + , testCase "Do not construct error action!, Ticket3942one" $ do + runSessionWithServer def classPlugin testDataDir $ do + doc <- openDoc "Ticket3942one.hs" "haskell" + _ <- waitForDiagnosticsFrom doc + lens <- getAllCodeActions doc + -- should switch to `liftIO $ length lens @?= 2, when Ticket3942 is entirely fixed` + -- current fix is just to make sure the code does not throw an exception that would mess up + -- the client UI. 
+ liftIO $ length lens > 0 @?= True + `catch` \(e :: SessionException) -> do + liftIO $ assertFailure $ "classPluginTestError: "++ show e + , goldenCodeLens "Apply code lens" "CodeLensSimple" 1 + , goldenCodeLens "Apply code lens for local class" "LocalClassDefine" 0 + , goldenCodeLens "Apply code lens on the same line" "Inline" 0 + , goldenCodeLens "Don't insert pragma while existing" "CodeLensWithPragma" 0 + , goldenCodeLens "Don't insert pragma while GHC2021 enabled" "CodeLensWithGHC2021" 0 + , goldenCodeLens "Qualified name" "Qualified" 0 + , goldenCodeLens "Type family" "TypeFamily" 0 + , testCase "keep stale lens" $ do + runSessionWithServer def classPlugin testDataDir $ do + doc <- openDoc "Stale.hs" "haskell" + oldLens <- getAndResolveCodeLenses doc + let edit = TextEdit (mkRange 4 11 4 12) "" -- Remove the `_` + _ <- applyEdit doc edit + newLens <- getAndResolveCodeLenses doc + liftIO $ (view L.command <$> newLens ) @?= (view L.command <$> oldLens) + ] + +_CACodeAction :: Prism' (Command |? CodeAction) CodeAction +_CACodeAction = prism' InR $ \case + InR action -> Just action + _ -> Nothing + +goldenCodeLens :: TestName -> FilePath -> Int -> TestTree +goldenCodeLens title path idx = + goldenWithHaskellDoc def classPlugin title testDataDir path "expected" "hs" $ \doc -> do + lens <- getAndResolveCodeLenses doc + executeCommand $ fromJust $ (lens !! idx) ^. L.command + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + +goldenWithClass ::TestName -> FilePath -> FilePath -> ([CodeAction] -> Session CodeAction) -> TestTree +goldenWithClass title path desc findAction = + goldenWithHaskellDoc def classPlugin title testDataDir path (desc <.> "expected") "hs" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + actions <- concatMap (^.. _CACodeAction) <$> getAllCodeActions doc + action <- findAction actions + executeCodeAction action + void $ skipManyTill anyMessage (getDocumentEdit doc) + +getActionByTitle :: T.Text -> [CodeAction] -> Session CodeAction +getActionByTitle title actions = case find (\a -> a ^. L.title == title) actions of + Just a -> pure a + Nothing -> liftIO $ assertFailure $ "Action " <> show title <> " not found in " <> show [a ^. L.title | a <- actions] + +expectCodeActionsAvailable :: TestName -> FilePath -> [T.Text] -> TestTree +expectCodeActionsAvailable title path actionTitles = + testCase title $ do + runSessionWithServer def classPlugin testDataDir $ do + doc <- openDoc (path <.> "hs") "haskell" + _ <- waitForDiagnosticsFrom doc + caResults <- getAllCodeActions doc + liftIO $ map (^? _CACodeAction . 
L.title) caResults + @?= expectedActions + where + expectedActions = Just <$> actionTitles + +testDataDir :: FilePath +testDataDir = "plugins" "hls-class-plugin" "test" "testdata" diff --git a/plugins/hls-class-plugin/test/testdata/AllMethodsRequired.hs b/plugins/hls-class-plugin/test/testdata/AllMethodsRequired.hs new file mode 100644 index 0000000000..3d88e04a7b --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/AllMethodsRequired.hs @@ -0,0 +1,9 @@ +module AllMethodsRequired where + +class Test a where + f :: a + g :: a + {-# MINIMAL f,g #-} + +instance Test [a] where + diff --git a/plugins/hls-class-plugin/test/testdata/CodeLensSimple.expected.hs b/plugins/hls-class-plugin/test/testdata/CodeLensSimple.expected.hs new file mode 100644 index 0000000000..d285455e1c --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/CodeLensSimple.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE InstanceSigs #-} +module CodeLensSimple where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ + +data B +instance Eq B where + (==)= _ diff --git a/plugins/hls-class-plugin/test/testdata/CodeLensSimple.hs b/plugins/hls-class-plugin/test/testdata/CodeLensSimple.hs new file mode 100644 index 0000000000..c8d049ea3d --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/CodeLensSimple.hs @@ -0,0 +1,9 @@ +module CodeLensSimple where + +data A +instance Eq A where + (==) = _ + +data B +instance Eq B where + (==)= _ diff --git a/plugins/hls-class-plugin/test/testdata/CodeLensWithGHC2021.expected.hs b/plugins/hls-class-plugin/test/testdata/CodeLensWithGHC2021.expected.hs new file mode 100644 index 0000000000..e0cfa1e434 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/CodeLensWithGHC2021.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE GHC2021 #-} +module CodeLensWithGHC2021 where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/CodeLensWithGHC2021.hs b/plugins/hls-class-plugin/test/testdata/CodeLensWithGHC2021.hs new file mode 100644 index 0000000000..41642161bd --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/CodeLensWithGHC2021.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE GHC2021 #-} +module CodeLensWithGHC2021 where + +data A +instance Eq A where + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/CodeLensWithPragma.expected.hs b/plugins/hls-class-plugin/test/testdata/CodeLensWithPragma.expected.hs new file mode 100644 index 0000000000..9b570629a5 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/CodeLensWithPragma.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE InstanceSigs #-} +module CodeLensWithPragma where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/CodeLensWithPragma.hs b/plugins/hls-class-plugin/test/testdata/CodeLensWithPragma.hs new file mode 100644 index 0000000000..72e28660d5 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/CodeLensWithPragma.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE InstanceSigs #-} +module CodeLensWithPragma where + +data A +instance Eq A where + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/Inline.expected.hs b/plugins/hls-class-plugin/test/testdata/Inline.expected.hs new file mode 100644 index 0000000000..e7cfd4772b --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/Inline.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE InstanceSigs #-} +module Inline where + +data A +instance Eq A where (==) :: A -> A -> Bool + (==) = _ diff --git 
a/plugins/hls-class-plugin/test/testdata/Inline.hs b/plugins/hls-class-plugin/test/testdata/Inline.hs new file mode 100644 index 0000000000..477935b57d --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/Inline.hs @@ -0,0 +1,4 @@ +module Inline where + +data A +instance Eq A where (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/InsertPragmaOnce.expected.hs b/plugins/hls-class-plugin/test/testdata/InsertPragmaOnce.expected.hs new file mode 100644 index 0000000000..fc0b0f9be0 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertPragmaOnce.expected.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE InstanceSigs #-} +module InsertPragmaOnce where + +data A aaa +instance Applicative A where + pure :: a -> A a + pure = _ + (<*>) :: A (a -> b) -> A a -> A b + (<*>) = _ diff --git a/plugins/hls-class-plugin/test/testdata/InsertPragmaOnce.hs b/plugins/hls-class-plugin/test/testdata/InsertPragmaOnce.hs new file mode 100644 index 0000000000..f7eedbbfbb --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertPragmaOnce.hs @@ -0,0 +1,4 @@ +module InsertPragmaOnce where + +data A aaa +instance Applicative A diff --git a/plugins/hls-class-plugin/test/testdata/InsertWithGHC2021Enabled.expected.hs b/plugins/hls-class-plugin/test/testdata/InsertWithGHC2021Enabled.expected.hs new file mode 100644 index 0000000000..d35355ae0b --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertWithGHC2021Enabled.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE GHC2021#-} +module InsertWithGHC2021Enabled where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/InsertWithGHC2021Enabled.hs b/plugins/hls-class-plugin/test/testdata/InsertWithGHC2021Enabled.hs new file mode 100644 index 0000000000..1f20867b7d --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertWithGHC2021Enabled.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE GHC2021#-} +module InsertWithGHC2021Enabled where + +data A +instance Eq A diff --git a/plugins/hls-class-plugin/test/testdata/InsertWithPragma.expected.hs b/plugins/hls-class-plugin/test/testdata/InsertWithPragma.expected.hs new file mode 100644 index 0000000000..e4a83500c2 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertWithPragma.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE InstanceSigs #-} +module InsertWithPragma where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/InsertWithPragma.hs b/plugins/hls-class-plugin/test/testdata/InsertWithPragma.hs new file mode 100644 index 0000000000..b4260bd636 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertWithPragma.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE InstanceSigs #-} +module InsertWithPragma where + +data A +instance Eq A diff --git a/plugins/hls-class-plugin/test/testdata/InsertWithoutPragma.expected.hs b/plugins/hls-class-plugin/test/testdata/InsertWithoutPragma.expected.hs new file mode 100644 index 0000000000..be9303c73b --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertWithoutPragma.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE InstanceSigs #-} +module InsertWithoutPragma where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/InsertWithoutPragma.hs b/plugins/hls-class-plugin/test/testdata/InsertWithoutPragma.hs new file mode 100644 index 0000000000..f093f49769 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/InsertWithoutPragma.hs @@ -0,0 +1,4 @@ +module InsertWithoutPragma where + 
+data A +instance Eq A diff --git a/plugins/hls-class-plugin/test/testdata/LocalClassDefine.expected.hs b/plugins/hls-class-plugin/test/testdata/LocalClassDefine.expected.hs new file mode 100644 index 0000000000..62c39b1883 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/LocalClassDefine.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE InstanceSigs #-} +module LocalClassDefine where + +data A +class F a where + f :: a -> Int + +instance F A where + f :: A -> Int + f = _ diff --git a/plugins/hls-class-plugin/test/testdata/LocalClassDefine.hs b/plugins/hls-class-plugin/test/testdata/LocalClassDefine.hs new file mode 100644 index 0000000000..684a36fe06 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/LocalClassDefine.hs @@ -0,0 +1,8 @@ +module LocalClassDefine where + +data A +class F a where + f :: a -> Int + +instance F A where + f = _ diff --git a/plugins/hls-class-plugin/test/testdata/MinimalDefinitionMeet.hs b/plugins/hls-class-plugin/test/testdata/MinimalDefinitionMeet.hs new file mode 100644 index 0000000000..39ce1d9c57 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/MinimalDefinitionMeet.hs @@ -0,0 +1,6 @@ +module MinimalDefinitionMeet where + +data X = X + +instance Eq X where + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/Qualified.expected.hs b/plugins/hls-class-plugin/test/testdata/Qualified.expected.hs new file mode 100644 index 0000000000..8099dbea04 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/Qualified.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE InstanceSigs #-} +module Qualified where +import qualified QualifiedA + +class F a where + f :: a + +instance F QualifiedA.A where + f :: QualifiedA.A + f = undefined diff --git a/plugins/hls-class-plugin/test/testdata/Qualified.hs b/plugins/hls-class-plugin/test/testdata/Qualified.hs new file mode 100644 index 0000000000..5788baf0a8 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/Qualified.hs @@ -0,0 +1,8 @@ +module Qualified where +import qualified QualifiedA + +class F a where + f :: a + +instance F QualifiedA.A where + f = undefined diff --git a/plugins/hls-class-plugin/test/testdata/QualifiedA.hs b/plugins/hls-class-plugin/test/testdata/QualifiedA.hs new file mode 100644 index 0000000000..ab67c5129b --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/QualifiedA.hs @@ -0,0 +1,3 @@ +module QualifiedA where + +data A diff --git a/plugins/hls-class-plugin/test/testdata/Stale.hs b/plugins/hls-class-plugin/test/testdata/Stale.hs new file mode 100644 index 0000000000..f70be2017f --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/Stale.hs @@ -0,0 +1,5 @@ +module Stale where + +data A a +instance Functor A where + fmap = _ diff --git a/plugins/hls-class-plugin/test/testdata/T1.all.expected.hs b/plugins/hls-class-plugin/test/testdata/T1.all.expected.hs new file mode 100644 index 0000000000..114ae94256 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T1.all.expected.hs @@ -0,0 +1,7 @@ +module T1 where + +data X = X + +instance Eq X where + (==) = _ + (/=) = _ diff --git a/plugins/hls-class-plugin/test/testdata/T1.eq.expected.hs b/plugins/hls-class-plugin/test/testdata/T1.eq.expected.hs new file mode 100644 index 0000000000..c3d7d09c03 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T1.eq.expected.hs @@ -0,0 +1,6 @@ +module T1 where + +data X = X + +instance Eq X where + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/T1.hs b/plugins/hls-class-plugin/test/testdata/T1.hs new file mode 100644 index 0000000000..9f611ecc05 --- 
/dev/null +++ b/plugins/hls-class-plugin/test/testdata/T1.hs @@ -0,0 +1,5 @@ +module T1 where + +data X = X + +instance Eq X where diff --git a/plugins/hls-class-plugin/test/testdata/T1.ne.expected.hs b/plugins/hls-class-plugin/test/testdata/T1.ne.expected.hs new file mode 100644 index 0000000000..dc7fe80899 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T1.ne.expected.hs @@ -0,0 +1,6 @@ +module T1 where + +data X = X + +instance Eq X where + (/=) = _ diff --git a/plugins/hls-class-plugin/test/testdata/T2.fmap.expected.hs b/plugins/hls-class-plugin/test/testdata/T2.fmap.expected.hs new file mode 100644 index 0000000000..8e3a4194b2 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T2.fmap.expected.hs @@ -0,0 +1,13 @@ +module T2 where + +data X a + = A a + | B + +instance + (Eq a) => Eq (X a) + where + +instance + Functor X where + fmap = _ diff --git a/plugins/hls-class-plugin/test/testdata/T2.hs b/plugins/hls-class-plugin/test/testdata/T2.hs new file mode 100644 index 0000000000..c929d7df69 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T2.hs @@ -0,0 +1,12 @@ +module T2 where + +data X a + = A a + | B + +instance + (Eq a) => Eq (X a) + where + +instance + Functor X diff --git a/plugins/hls-class-plugin/test/testdata/T3.1.expected.hs b/plugins/hls-class-plugin/test/testdata/T3.1.expected.hs new file mode 100644 index 0000000000..829ce7506c --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T3.1.expected.hs @@ -0,0 +1,13 @@ +module T3 where + +class Test a where + f :: a + f = h + g :: a + h :: a + h = f + {-# MINIMAL f, g | g, h #-} + +instance Test [a] where + f = _ + g = _ diff --git a/plugins/hls-class-plugin/test/testdata/T3.2.expected.hs b/plugins/hls-class-plugin/test/testdata/T3.2.expected.hs new file mode 100644 index 0000000000..5872122fc0 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T3.2.expected.hs @@ -0,0 +1,13 @@ +module T3 where + +class Test a where + f :: a + f = h + g :: a + h :: a + h = f + {-# MINIMAL f, g | g, h #-} + +instance Test [a] where + g = _ + h = _ diff --git a/plugins/hls-class-plugin/test/testdata/T3.hs b/plugins/hls-class-plugin/test/testdata/T3.hs new file mode 100644 index 0000000000..72290c9cd9 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T3.hs @@ -0,0 +1,11 @@ +module T3 where + +class Test a where + f :: a + f = h + g :: a + h :: a + h = f + {-# MINIMAL f, g | g, h #-} + +instance Test [a] where diff --git a/plugins/hls-class-plugin/test/testdata/T4.expected.hs b/plugins/hls-class-plugin/test/testdata/T4.expected.hs new file mode 100644 index 0000000000..8f3c2545d8 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T4.expected.hs @@ -0,0 +1,8 @@ +module T4 where + +class Test a where + _f :: a + {-# MINIMAL _f #-} + +instance Test Int where + _f = _ diff --git a/plugins/hls-class-plugin/test/testdata/T4.hs b/plugins/hls-class-plugin/test/testdata/T4.hs new file mode 100644 index 0000000000..f5aeb3ca47 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T4.hs @@ -0,0 +1,7 @@ +module T4 where + +class Test a where + _f :: a + {-# MINIMAL _f #-} + +instance Test Int where diff --git a/plugins/hls-class-plugin/test/testdata/T5.expected.hs b/plugins/hls-class-plugin/test/testdata/T5.expected.hs new file mode 100644 index 0000000000..fcc51c0787 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T5.expected.hs @@ -0,0 +1,8 @@ +module T5 where + +data X = X + +instance Eq X where + (==) = _ + +x = () diff --git a/plugins/hls-class-plugin/test/testdata/T5.hs 
b/plugins/hls-class-plugin/test/testdata/T5.hs new file mode 100644 index 0000000000..d33dd8b17c --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T5.hs @@ -0,0 +1,7 @@ +module T5 where + +data X = X + +instance Eq X where + +x = () diff --git a/plugins/hls-class-plugin/test/testdata/T6.1.expected.hs b/plugins/hls-class-plugin/test/testdata/T6.1.expected.hs new file mode 100644 index 0000000000..a1e64f591b --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T6.1.expected.hs @@ -0,0 +1,22 @@ +module T6 where + +data X = X | Y + +class Test a where + f :: a -> a + f = h + + g :: a + + h :: a -> a + h = f + + i :: a + + {-# MINIMAL f, g, i | g, h #-} + +instance Test X where + f X = X + f Y = Y + i = undefined + g = _ diff --git a/plugins/hls-class-plugin/test/testdata/T6.2.expected.hs b/plugins/hls-class-plugin/test/testdata/T6.2.expected.hs new file mode 100644 index 0000000000..2b7b5454b9 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T6.2.expected.hs @@ -0,0 +1,23 @@ +module T6 where + +data X = X | Y + +class Test a where + f :: a -> a + f = h + + g :: a + + h :: a -> a + h = f + + i :: a + + {-# MINIMAL f, g, i | g, h #-} + +instance Test X where + f X = X + f Y = Y + i = undefined + g = _ + h = _ diff --git a/plugins/hls-class-plugin/test/testdata/T6.expected.hs b/plugins/hls-class-plugin/test/testdata/T6.expected.hs new file mode 100644 index 0000000000..80b8678e24 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T6.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE GHC2021#-} +module T6 where + +data A +instance Eq A where + (==) :: A -> A -> Bool + (==) = _ diff --git a/plugins/hls-class-plugin/test/testdata/T6.hs b/plugins/hls-class-plugin/test/testdata/T6.hs new file mode 100644 index 0000000000..61d2c6dc62 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T6.hs @@ -0,0 +1,21 @@ +module T6 where + +data X = X | Y + +class Test a where + f :: a -> a + f = h + + g :: a + + h :: a -> a + h = f + + i :: a + + {-# MINIMAL f, g, i | g, h #-} + +instance Test X where + f X = X + f Y = Y + i = undefined diff --git a/plugins/hls-class-plugin/test/testdata/T7.expected.hs b/plugins/hls-class-plugin/test/testdata/T7.expected.hs new file mode 100644 index 0000000000..5bf716c900 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T7.expected.hs @@ -0,0 +1,20 @@ +module T7 where + + data X = X + + class Test a where + f :: a -> a + g :: a + h :: a -> a + i :: a + + instance Test X where + f X = X + g = _ + h = _ + i = _ + + + + + whiteSpaceBeforeAndIndentationOfThisShouldBePreserved = () diff --git a/plugins/hls-class-plugin/test/testdata/T7.hs b/plugins/hls-class-plugin/test/testdata/T7.hs new file mode 100644 index 0000000000..2f9a1b67f6 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/T7.hs @@ -0,0 +1,17 @@ +module T7 where + + data X = X + + class Test a where + f :: a -> a + g :: a + h :: a -> a + i :: a + + instance Test X where + f X = X + + + + + whiteSpaceBeforeAndIndentationOfThisShouldBePreserved = () diff --git a/plugins/hls-class-plugin/test/testdata/TH.hs b/plugins/hls-class-plugin/test/testdata/TH.hs new file mode 100644 index 0000000000..c6728db1ce --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/TH.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE TemplateHaskell #-} + +module TH where + +import THDef + +gen ''Bool True +gen ''Char 'a' diff --git a/plugins/hls-class-plugin/test/testdata/THDef.hs b/plugins/hls-class-plugin/test/testdata/THDef.hs new file mode 100644 index 0000000000..9a4cfcc37f --- /dev/null +++ 
b/plugins/hls-class-plugin/test/testdata/THDef.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE TemplateHaskell #-} + +module THDef where + +import Language.Haskell.TH +import Language.Haskell.TH.Syntax + +class F a where + f :: a + +gen :: Lift t => Name -> t -> Q [Dec] +gen ty v = [d| instance F $(conT ty) where f = v |] diff --git a/plugins/hls-class-plugin/test/testdata/Ticket3942one.hs b/plugins/hls-class-plugin/test/testdata/Ticket3942one.hs new file mode 100644 index 0000000000..d620fc2ebb --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/Ticket3942one.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE DeriveAnyClass #-} + +module Ticket3942one where + +class C a where + foo :: a -> Int + +newtype Foo = MkFoo Int deriving (C) +instance Show Foo where + + +main :: IO () +main = return () diff --git a/plugins/hls-class-plugin/test/testdata/TypeFamily.expected.hs b/plugins/hls-class-plugin/test/testdata/TypeFamily.expected.hs new file mode 100644 index 0000000000..67fb3bcf68 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/TypeFamily.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE InstanceSigs #-} +module TypeFamily where + +class F a where + type Elem a + f :: Elem a -> a + +instance Eq a => F [a] where + f :: Eq a => Elem [a] -> [a] + f = _ diff --git a/plugins/hls-class-plugin/test/testdata/TypeFamily.hs b/plugins/hls-class-plugin/test/testdata/TypeFamily.hs new file mode 100644 index 0000000000..9b15794a73 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/TypeFamily.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE TypeFamilies #-} +module TypeFamily where + +class F a where + type Elem a + f :: Elem a -> a + +instance Eq a => F [a] where + f = _ diff --git a/plugins/hls-class-plugin/test/testdata/hie.yaml b/plugins/hls-class-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..8a26fe70c7 --- /dev/null +++ b/plugins/hls-class-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [-XHaskell2010, QualifiedA, THDef] diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange.hs new file mode 100644 index 0000000000..52bcc2226b --- /dev/null +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange.hs @@ -0,0 +1,185 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +module Ide.Plugin.CodeRange ( + descriptor + , Log + + -- * Internal + , findPosition + , findFoldingRanges + , createFoldingRange + ) where + +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Except (ExceptT, mapExceptT) +import Control.Monad.Trans.Maybe (MaybeT (MaybeT), + maybeToExceptT) +import Data.List.Extra (drop1) +import Data.Maybe (fromMaybe) +import Data.Vector (Vector) +import qualified Data.Vector as V +import Development.IDE (Action, + IdeState (shakeExtras), + Range (Range), Recorder, + WithPriority, + cmapWithPrio) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (PositionMapping, + toCurrentRange) +import Ide.Logger (Pretty (..)) +import Ide.Plugin.CodeRange.Rules (CodeRange (..), + GetCodeRange (..), + codeRangeRule, crkToFrk) +import qualified Ide.Plugin.CodeRange.Rules as Rules (Log) +import Ide.Plugin.Error +import Ide.PluginUtils (positionInRange) +import Ide.Types (PluginDescriptor (pluginHandlers, pluginRules), + PluginId, + PluginMethodHandler, + defaultPluginDescriptor, + mkPluginHandler) +import Language.LSP.Protocol.Message (Method (Method_TextDocumentFoldingRange, 
Method_TextDocumentSelectionRange), + SMethod (SMethod_TextDocumentFoldingRange, SMethod_TextDocumentSelectionRange)) +import Language.LSP.Protocol.Types (FoldingRange (..), + FoldingRangeParams (..), + NormalizedFilePath, Null, + Position (..), + Range (_start), + SelectionRange (..), + SelectionRangeParams (..), + TextDocumentIdentifier (TextDocumentIdentifier), + Uri, type (|?) (InL)) +import Prelude hiding (log, span) + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId "Provides selection and folding ranges for Haskell") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentSelectionRange (selectionRangeHandler recorder) + <> mkPluginHandler SMethod_TextDocumentFoldingRange (foldingRangeHandler recorder) + , pluginRules = codeRangeRule (cmapWithPrio LogRules recorder) + } + +newtype Log = LogRules Rules.Log + +instance Pretty Log where + pretty (LogRules codeRangeLog) = pretty codeRangeLog + + +foldingRangeHandler :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentFoldingRange +foldingRangeHandler _ ide _ FoldingRangeParams{..} = + do + filePath <- getNormalizedFilePathE uri + foldingRanges <- runActionE "FoldingRange" ide $ getFoldingRanges filePath + pure . InL $ foldingRanges + where + uri :: Uri + TextDocumentIdentifier uri = _textDocument + +getFoldingRanges :: NormalizedFilePath -> ExceptT PluginError Action [FoldingRange] +getFoldingRanges file = do + codeRange <- useE GetCodeRange file + pure $ findFoldingRanges codeRange + +selectionRangeHandler :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentSelectionRange +selectionRangeHandler _ ide _ SelectionRangeParams{..} = do + do + filePath <- getNormalizedFilePathE uri + mapExceptT liftIO $ getSelectionRanges ide filePath positions + where + uri :: Uri + TextDocumentIdentifier uri = _textDocument + + positions :: [Position] + positions = _positions + + +getSelectionRanges :: IdeState -> NormalizedFilePath -> [Position] -> ExceptT PluginError IO ([SelectionRange] |? Null) +getSelectionRanges ide file positions = do + (codeRange, positionMapping) <- runIdeActionE "SelectionRange" (shakeExtras ide) $ useWithStaleFastE GetCodeRange file + -- 'positionMapping' should be applied to the input before using them + positions' <- + traverse (fromCurrentPositionE positionMapping) positions + + let selectionRanges = flip fmap positions' $ \pos -> + -- We need a default selection range if the lookup fails, + -- so that other positions can still have valid results. + let defaultSelectionRange = SelectionRange (Range pos pos) Nothing + in fromMaybe defaultSelectionRange . findPosition pos $ codeRange + + -- 'positionMapping' should be applied to the output ranges before returning them + maybeToExceptT (PluginInvalidUserState "toCurrentSelectionRange") . MaybeT . pure $ + InL <$> traverse (toCurrentSelectionRange positionMapping) selectionRanges + +-- | Find 'Position' in 'CodeRange'. This can fail, if the given position is not covered by the 'CodeRange'. +findPosition :: Position -> CodeRange -> Maybe SelectionRange +findPosition pos root = go Nothing root + where + -- Helper function for recursion. The range list is built top-down + go :: Maybe SelectionRange -> CodeRange -> Maybe SelectionRange + go acc node = + if positionInRange pos range + then maybe acc' (go acc') (binarySearchPos children) + -- If all children doesn't contain pos, acc' will be returned. 
+              -- acc' will be Nothing only if we are at the root level.
+            else Nothing
+          where
+            range = _codeRange_range node
+            children = _codeRange_children node
+            acc' = Just $ maybe (SelectionRange range Nothing) (SelectionRange range . Just) acc
+
+    binarySearchPos :: Vector CodeRange -> Maybe CodeRange
+    binarySearchPos v
+        | V.null v = Nothing
+        | V.length v == 1,
+          Just r <- V.headM v = if positionInRange pos (_codeRange_range r) then Just r else Nothing
+        | otherwise = do
+            let (left, right) = V.splitAt (V.length v `div` 2) v
+            startOfRight <- _start . _codeRange_range <$> V.headM right
+            if pos < startOfRight then binarySearchPos left else binarySearchPos right
+
+-- | Traverses the code range and its children to produce folding ranges.
+--
+-- It starts with the root node, converts it into a folding range, and then moves on to the children.
+-- Each child is converted into a folding range in turn, and then its own children are processed.
+--
+-- Two cases that are assumed to be taken care of on the client side:
+--
+-- 1. When a folding range starts and ends on the same line, it is up to the client whether to
+--    fold a single line or not.
+--
+-- 2. As we convert nodes of the AST into folding ranges, multiple nodes may start on a single line.
+--    A single line of code doesn't correspond to a single AST node, so several folding ranges can share a
+--    start line, i.e. they start from the same line.
+--    E.g. a multi-line function that also contains a multi-line if statement starting on the same line should be
+--    folded according to the function.
+--
+-- We think the client can handle this; if not, we could change this to remove such nodes in the future.
+--
+-- Discussion reference: https://github.com/haskell/haskell-language-server/pull/3058#discussion_r973737211
+findFoldingRanges :: CodeRange -> [FoldingRange]
+findFoldingRanges codeRange =
+    -- removing the first node because it folds the entire file
+    drop1 $ findFoldingRangesRec codeRange
+
+findFoldingRangesRec :: CodeRange -> [FoldingRange]
+findFoldingRangesRec r@(CodeRange _ children _) =
+    let frChildren :: [FoldingRange] = concat $ V.toList $ fmap findFoldingRangesRec children
+    in case createFoldingRange r of
+        Just x  -> x:frChildren
+        Nothing -> frChildren
+
+-- | Converts a code range to a folding range
+createFoldingRange :: CodeRange -> Maybe FoldingRange
+createFoldingRange (CodeRange (Range (Position lineStart charStart) (Position lineEnd charEnd)) _ ck) = do
+    -- Convert the CodeRangeKind to a FoldingRangeKind
+    let frk = crkToFrk ck
+
+    Just (FoldingRange lineStart (Just charStart) lineEnd (Just charEnd) (Just frk) Nothing)
+
+-- | Like 'toCurrentPosition', but works on 'SelectionRange'
+toCurrentSelectionRange :: PositionMapping -> SelectionRange -> Maybe SelectionRange
+toCurrentSelectionRange positionMapping SelectionRange{..} = do
+    newRange <- toCurrentRange positionMapping _range
+    pure $ SelectionRange {
+        _range = newRange,
+        _parent = _parent >>= toCurrentSelectionRange positionMapping
+    }
diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs
new file mode 100644
index 0000000000..915a98d607
--- /dev/null
+++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs
@@ -0,0 +1,190 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Ide.Plugin.CodeRange.ASTPreProcess
+    ( preProcessAST
+    , PreProcessEnv(..)
+ , isCustomNode + , CustomNodeType(..) + ) where + +import Control.Monad.Reader (Reader, asks) +import Data.Foldable +import Data.Functor.Identity (Identity (Identity, runIdentity)) +import Data.List (groupBy) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NonEmpty +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map +import Data.Maybe (fromMaybe, mapMaybe) +import Data.Semigroup (First (First, getFirst)) +import Data.Semigroup.Foldable (foldlM1) +import qualified Data.Set as Set +import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (ContextInfo (..), HieAST (..), + Identifier, IdentifierDetails (..), + NodeInfo (nodeIdentifiers), Span) +import GHC.Iface.Ext.Utils (RefMap, flattenAst) +import Prelude hiding (span) + +{-| +Extra arguments for 'preProcessAST'. It's expected to be used in a 'Reader' context +-} +newtype PreProcessEnv a = PreProcessEnv + { preProcessEnvRefMap :: RefMap a + } + +{-| +Before converting the HieAST to selection range, we need to run some passes on it. Each pass potentially modifies +the AST to handle some special cases. + +'preProcessAST' combines the passes. Refer to 'mergeImports' or 'mergeSignatureWithDefinition' as +a concrete example example. + +Adding another manipulation to the AST is simple, just implement a function of type +`HieAST a -> Reader (PreProcessEnv a) (HieAST a)`, and append it to 'preProcessAST' with `>>=`. + +If it goes more complex, it may be more appropriate to split different manipulations to different modules. +-} +preProcessAST :: HieAST a -> Reader (PreProcessEnv a) (HieAST a) +preProcessAST node = mergeImports node >>= mergeSignatureWithDefinition + +{-| +Create a custom node in 'HieAST'. By "custom", we mean this node doesn't actually exist in the original 'HieAST' +provided by GHC, but created to suite the needs of hls-code-range-plugin. +-} +createCustomNode :: CustomNodeType -> NonEmpty (HieAST a) -> HieAST a +createCustomNode customNodeType children = mkAstNode customNodeInfo span' (NonEmpty.toList children) + where + span' :: RealSrcSpan + span' = runIdentity . foldlM1 (\x y -> Identity (combineRealSrcSpans x y)) . fmap nodeSpan $ children + + customNodeInfo = simpleNodeInfoCompat "HlsCustom" (customNodeTypeToFastString customNodeType) + +isCustomNode :: HieAST a -> Maybe CustomNodeType +isCustomNode node = do + nodeInfo <- generatedNodeInfo node + getFirst <$> foldMap go (nodeAnnotations nodeInfo) + where + go :: (FastStringCompat, FastStringCompat) -> Maybe (First CustomNodeType) + go (k, v) + | k == "HlsCustom", Just v' <- revCustomNodeTypeMapping Map.!? v = Just (First v') + | otherwise = Nothing + +data CustomNodeType = + -- | a group of imports + CustomNodeImportsGroup + -- | adjacent type signature and value definition are paired under a custom parent node + | CustomNodeAdjacentSignatureDefinition + deriving (Show, Eq, Ord) + +customNodeTypeMapping :: Map CustomNodeType FastStringCompat +customNodeTypeMapping = Map.fromList + [ (CustomNodeImportsGroup, "Imports") + , (CustomNodeAdjacentSignatureDefinition, "AdjacentSignatureDefinition") + ] + +revCustomNodeTypeMapping :: Map FastStringCompat CustomNodeType +revCustomNodeTypeMapping = Map.fromList . fmap (\(k, v) -> (v, k)) . Map.toList $ customNodeTypeMapping + +customNodeTypeToFastString :: CustomNodeType -> FastStringCompat +customNodeTypeToFastString k = fromMaybe "" (customNodeTypeMapping Map.!? 
k) + +{-| +Combines adjacent import declarations under a new parent node, so that the user will have an extra step selecting +the whole import area while expanding/shrinking the selection range. +-} +mergeImports :: forall a. HieAST a -> Reader (PreProcessEnv a) (HieAST a) +mergeImports node = pure $ node { nodeChildren = children } + where + children :: [HieAST a] + children = mapMaybe merge + . groupBy (\x y -> nodeIsImport x && nodeIsImport y) + . nodeChildren $ node + + merge :: [HieAST a] -> Maybe (HieAST a) + merge [] = Nothing + merge [x] = Just x + merge (x:xs) = Just $ createCustomNode CustomNodeImportsGroup (x NonEmpty.:| xs) + +nodeIsImport :: HieAST a -> Bool +nodeIsImport = isAnnotationInAstNode ("ImportDecl", "ImportDecl") + +{-| +Combine type signature with variable definition under a new parent node, if the signature is placed right before the +definition. This allows the user to have a step selecting both type signature and its accompanying definition. +-} +mergeSignatureWithDefinition :: HieAST a -> Reader (PreProcessEnv a) (HieAST a) +mergeSignatureWithDefinition node = do + refMap <- asks preProcessEnvRefMap + -- Do this recursively for children, so that non top level functions can be handled. + children' <- traverse mergeSignatureWithDefinition (nodeChildren node) + pure $ node { nodeChildren = reverse $ foldl' (go refMap) [] children' } + where + -- For every two adjacent nodes, we try to combine them into one. + go :: RefMap a -> [HieAST a] -> HieAST a -> [HieAST a] + go _ [] node' = [node'] + go refMap (prev:others) node' = + case mergeAdjacentSigDef refMap (prev, node') of + Nothing -> node':prev:others + Just comb -> comb:others + +-- | Merge adjacent type signature and variable/function definition, if the type signature belongs to that variable or +-- function. +-- +-- The implementation potentially has some corner cases not handled properly. +mergeAdjacentSigDef :: RefMap a -> (HieAST a, HieAST a) -> Maybe (HieAST a) +mergeAdjacentSigDef refMap (n1, n2) = do + -- Let's check the node's annotation. There should be a function binding following its type signature. + checkAnnotation + -- Find the identifier of the type signature. + typeSigId <- identifierForTypeSig n1 + -- Does that identifier appear in the second AST node as a definition? If so, we combines the two nodes. + refs <- Map.lookup typeSigId refMap + if any (isIdentADef (nodeSpan n2)) refs + then pure . createCustomNode CustomNodeAdjacentSignatureDefinition $ n1 NonEmpty.:| [n2] + else Nothing + where + checkAnnotation :: Maybe () + checkAnnotation = + if ("TypeSig", "Sig") `isAnnotationInAstNode` n1 && + (("FunBind", "HsBindLR") `isAnnotationInAstNode` n2 || ("VarBind", "HsBindLR") `isAnnotationInAstNode` n2) + then Just () + else Nothing + +{-| +Given the AST node of a type signature, tries to find the identifier of it. +-} +identifierForTypeSig :: forall a. HieAST a -> Maybe Identifier +identifierForTypeSig node = + {- + It seems that the identifier lives in one of the children, so we search for the first 'TyDecl' node in + its children recursively. + -} + case mapMaybe extractIdentifier nodes of + [] -> Nothing + (ident:_) -> Just ident + where + nodes = flattenAst node + + extractIdentifier :: HieAST a -> Maybe Identifier + extractIdentifier node' = sourceNodeInfo node' >>= + (fmap fst . find (\(_, detail) -> TyDecl `Set.member` identInfo detail) + . Map.toList . 
nodeIdentifiers) + +-- | Determines if the given occurrence of an identifier is a function/variable definition in the outer span +isIdentADef :: Span -> (Span, IdentifierDetails a) -> Bool +isIdentADef outerSpan (span, detail) = + realSrcSpanStart span >= realSrcSpanStart outerSpan && realSrcSpanEnd span <= realSrcSpanEnd outerSpan + && isDef + where + isDef :: Bool + isDef = any isContextInfoDef $ identInfo detail + + -- Determines if the 'ContextInfo' represents a variable/function definition + isContextInfoDef :: ContextInfo -> Bool + isContextInfoDef ValBind{} = True + isContextInfoDef MatchBind = True + isContextInfoDef _ = False + +isAnnotationInAstNode :: (FastStringCompat, FastStringCompat) -> HieAST a -> Bool +isAnnotationInAstNode p = maybe False (isAnnotationInNodeInfo p) . sourceNodeInfo diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs new file mode 100644 index 0000000000..2391a35e1a --- /dev/null +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs @@ -0,0 +1,194 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.CodeRange.Rules + ( CodeRange (..) + , codeRange_range + , codeRange_children + , codeRange_kind + , CodeRangeKind(..) + , GetCodeRange(..) + , codeRangeRule + , Log(..) + + -- * Internal + , removeInterleaving + , simplify + , crkToFrk + ) where + +import Control.DeepSeq (NFData) +import qualified Control.Lens as Lens +import Control.Monad (foldM) +import Control.Monad.Except (ExceptT (..), runExceptT) +import Control.Monad.Reader (runReader) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Maybe (MaybeT (MaybeT), + maybeToExceptT) +import Control.Monad.Trans.Writer.CPS +import Data.Coerce (coerce) +import Data.Foldable (traverse_) +import Data.Function (on, (&)) +import Data.Hashable +import Data.List (sort) +import qualified Data.Map.Strict as Map +import Data.Vector (Vector) +import qualified Data.Vector as V +import Development.IDE +import Development.IDE.Core.Rules (toIdeResult) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat.Util +import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (HieAST (..), HieASTs (..)) +import GHC.Iface.Ext.Utils (RefMap) +import Ide.Plugin.CodeRange.ASTPreProcess (CustomNodeType (..), + PreProcessEnv (..), + isCustomNode, + preProcessAST) +import Language.LSP.Protocol.Lens (HasEnd (end), + HasStart (start)) +import Language.LSP.Protocol.Types (FoldingRangeKind (FoldingRangeKind_Comment, FoldingRangeKind_Imports, FoldingRangeKind_Region)) +import Prelude hiding (log) + +data Log = LogShake Shake.Log + | LogNoAST + | LogFoundInterleaving CodeRange CodeRange + deriving Show + +instance Pretty Log where + pretty log = case log of + LogShake shakeLog -> pretty shakeLog + LogNoAST -> "no HieAst exist for file" + LogFoundInterleaving r1 r2 -> + let prettyRange = pretty . show . _codeRange_range + in "CodeRange interleave: " <> prettyRange r1 <> " & " <> prettyRange r2 + +-- | A tree representing code ranges in a file. This can be useful for features like selection range and folding range +data CodeRange = CodeRange { + -- | Range for current level + _codeRange_range :: !Range, + -- | A vector of children, sorted by their ranges in ascending order. + -- Children are guaranteed not to interleave, but some gaps may exist among them. 
+    _codeRange_children :: !(Vector CodeRange),
+    -- | The kind of the current code range
+    _codeRange_kind :: !CodeRangeKind
+  }
+    deriving (Show, Generic, NFData)
+
+-- | 'CodeRangeKind' represents the kind of a code range
+data CodeRangeKind =
+    -- | ordinary code
+    CodeKindRegion
+    -- | the group of imports
+    | CodeKindImports
+    -- | a comment
+    | CodeKindComment
+    deriving (Show, Eq, Generic, NFData)
+
+Lens.makeLenses ''CodeRange
+
+instance Eq CodeRange where
+    (==) = (==) `on` _codeRange_range
+
+instance Ord CodeRange where
+    compare :: CodeRange -> CodeRange -> Ordering
+    compare = compare `on` _codeRange_range
+
+-- | Construct a 'CodeRange'. A valid CodeRange will be returned in any case. If anything goes wrong,
+-- a list of warnings will be returned as 'Log'.
+buildCodeRange :: HieAST a -> RefMap a -> Writer [Log] CodeRange
+buildCodeRange ast refMap = do
+    -- We work on 'HieAST', then convert it to 'CodeRange', so that applications such as selection range and folding
+    -- range don't need to care about 'HieAST'
+    -- TODO @sloorush actually use 'Annotated ParsedSource' to handle structures not in 'HieAST' properly (for example comments)
+    let ast' = runReader (preProcessAST ast) (PreProcessEnv refMap)
+    codeRange <- astToCodeRange ast'
+    pure $ simplify codeRange
+
+astToCodeRange :: HieAST a -> Writer [Log] CodeRange
+astToCodeRange (Node _ sp []) = pure $ CodeRange (realSrcSpanToRange sp) mempty CodeKindRegion
+astToCodeRange node@(Node _ sp children) = do
+    children' <- removeInterleaving . sort =<< traverse astToCodeRange children
+    let codeKind = if Just CustomNodeImportsGroup == isCustomNode node then CodeKindImports else CodeKindRegion
+    pure $ CodeRange (realSrcSpanToRange sp) (V.fromList children') codeKind
+
+-- | Remove interleaving of the list of 'CodeRange's.
+removeInterleaving :: [CodeRange] -> Writer [Log] [CodeRange]
+removeInterleaving = fmap reverse . foldM go []
+  where
+    -- we want to traverse from left to right (to make the logs easier to read)
+    go :: [CodeRange] -> CodeRange -> Writer [Log] [CodeRange]
+    go [] x = pure [x]
+    go (x1:acc) x2 = do
+        -- Given that the CodeRange is already sorted on its Range, and the Ord instance of Range
+        -- compares its start position first, the start positions must already be in ascending order.
+        -- Then, if the end position of a node is larger than its next neighbour's start position, an interleaving
+        -- must exist.
+        -- (Note: LSP Range's end position is exclusive)
+        x1' <- if x1 Lens.^. codeRange_range . end > x2 Lens.^. codeRange_range . start
+            then do
+                -- set x1.end to x2.start
+                let x1' :: CodeRange = x1 & codeRange_range . end Lens..~ (x2 Lens.^. codeRange_range . start)
+                tell [LogFoundInterleaving x1 x2]
+                pure x1'
+            else pure x1
+        pure $ x2:x1':acc
+
+-- | Remove redundant nodes in the 'CodeRange' tree
+simplify :: CodeRange -> CodeRange
+simplify r =
+    case onlyChild of
+        -- If a node has the exact same range as its parent, and it has no sibling, then it can be removed.
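+        -- For example (cf. the "dedup 3 nested layers" case in RulesTest): a chain of
+        -- nodes that all cover the same range collapses into a single node that keeps
+        -- the children of the innermost node.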
+ Just onlyChild' -> + if _codeRange_range onlyChild' == curRange + then simplify (r { _codeRange_children = _codeRange_children onlyChild' }) + else withChildrenSimplified + Nothing -> withChildrenSimplified + where + curRange = _codeRange_range r + + onlyChild :: Maybe CodeRange = + let children = _codeRange_children r + in if V.length children == 1 then V.headM children else Nothing + + withChildrenSimplified = r { _codeRange_children = simplify <$> _codeRange_children r } + +data GetCodeRange = GetCodeRange + deriving (Eq, Show, Generic) + +instance Hashable GetCodeRange +instance NFData GetCodeRange + +type instance RuleResult GetCodeRange = CodeRange + +codeRangeRule :: Recorder (WithPriority Log) -> Rules () +codeRangeRule recorder = + define (cmapWithPrio LogShake recorder) $ \GetCodeRange file -> handleError recorder $ do + -- We need both 'HieAST' (for basic AST) and api annotations (for comments and some keywords). + -- See https://gitlab.haskell.org/ghc/ghc/-/wikis/api-annotations + HAR{hieAst, refMap} <- lift $ use_ GetHieAst file + ast <- maybeToExceptT LogNoAST . MaybeT . pure $ + getAsts hieAst Map.!? (coerce . mkFastString . fromNormalizedFilePath) file + let (codeRange, warnings) = runWriter (buildCodeRange ast refMap) + traverse_ (logWith recorder Warning) warnings + + pure codeRange + +-- | Handle error in 'Action'. Returns an 'IdeResult' with no value and no diagnostics on error. (but writes log) +handleError :: Recorder (WithPriority msg) -> ExceptT msg Action a -> Action (IdeResult a) +handleError recorder action' = do + valueEither <- runExceptT action' + case valueEither of + Left msg -> do + logWith recorder Warning msg + pure $ toIdeResult (Left []) + Right value -> pure $ toIdeResult (Right value) + +-- | Maps type CodeRangeKind to FoldingRangeKind +crkToFrk :: CodeRangeKind -> FoldingRangeKind +crkToFrk crk = case crk of + CodeKindComment -> FoldingRangeKind_Comment + CodeKindImports -> FoldingRangeKind_Imports + CodeKindRegion -> FoldingRangeKind_Region diff --git a/plugins/hls-code-range-plugin/test/Ide/Plugin/CodeRange/RulesTest.hs b/plugins/hls-code-range-plugin/test/Ide/Plugin/CodeRange/RulesTest.hs new file mode 100644 index 0000000000..4dee5e039c --- /dev/null +++ b/plugins/hls-code-range-plugin/test/Ide/Plugin/CodeRange/RulesTest.hs @@ -0,0 +1,80 @@ +{-# LANGUAGE OverloadedLists #-} + +module Ide.Plugin.CodeRange.RulesTest (testTree) where + +import Control.Monad.Trans.Writer.CPS +import Data.Bifunctor (Bifunctor (second)) +import qualified Data.Vector as V +import Ide.Plugin.CodeRange.Rules +import Test.Hls + +testTree :: TestTree +testTree = + testGroup "CodeRange.Rules" [ + testGroup "removeInterleaving" $ + let check :: [CodeRange] -> ([CodeRange], [Log]) -> Assertion + check input want = + second (fmap LogEq) (runWriter (removeInterleaving input)) @?= second (fmap LogEq) want + mkNode :: UInt -> UInt -> CodeRange + mkNode startCol endCol = + CodeRange (Range (Position 1 startCol) (Position 1 endCol)) [] CodeKindRegion + in [ + testCase "empty list" $ check [] ([], []), + testCase "one" $ check [mkNode 1 5] ([mkNode 1 5], []), + testCase "two, without intersection" $ check [mkNode 1 5, mkNode 5 6] ([mkNode 1 5, mkNode 5 6], []), + testCase "two, with intersection" $ let (x, y) = (mkNode 1 5, mkNode 2 4) + in check [x, y] ([mkNode 1 2, mkNode 2 4], [LogFoundInterleaving x y]), + testCase "three, with intersection" $ let (x, y, z) = (mkNode 1 10, mkNode 2 6, mkNode 4 12) + in check [x, y, z] ([mkNode 1 2, mkNode 2 4, mkNode 4 
12], + [LogFoundInterleaving x y, LogFoundInterleaving y z]) + ], + testGroup "simplify" $ + let mkNode :: UInt -> UInt -> V.Vector CodeRange -> CodeRange + mkNode startCol endCol children = + CodeRange (Range (Position 1 startCol) (Position 1 endCol)) children CodeKindRegion + in [ + testCase "one level should not change" $ + let codeRange = mkNode 1 5 [] + in codeRange @=? simplify codeRange, + testCase "dedup 3 nested layers" $ + let input = + mkNode 1 10 [ + mkNode 1 5 [], + mkNode 5 10 [ + mkNode 5 10 [ + mkNode 5 10 [ + mkNode 6 10 [] + ] + ] + ] + ] + want = + mkNode 1 10 [ + mkNode 1 5 [], + mkNode 5 10 [ + mkNode 6 10 [] + ] + ] + in want @=? simplify input, + testCase "should not dedup node that has multiple children" $ + let input = + mkNode 1 10 [ + mkNode 1 10 [], + mkNode 2 10 [] + ] + in simplify input @?= input, + testCase "dedup simple two layers" $ + let input = mkNode 1 10 [ mkNode 1 10 []] + in simplify input @?= mkNode 1 10 [] + ] + ] + +newtype LogEq = LogEq Log + deriving Show + +instance Eq LogEq where + LogEq (LogShake _) == LogEq (LogShake _) = True + LogEq LogNoAST == LogEq LogNoAST = True + LogEq (LogFoundInterleaving left right) == LogEq (LogFoundInterleaving left' right') = + left == left' && right == right' + LogEq _ == LogEq _ = False diff --git a/plugins/hls-code-range-plugin/test/Ide/Plugin/CodeRangeTest.hs b/plugins/hls-code-range-plugin/test/Ide/Plugin/CodeRangeTest.hs new file mode 100644 index 0000000000..4db8e41d7b --- /dev/null +++ b/plugins/hls-code-range-plugin/test/Ide/Plugin/CodeRangeTest.hs @@ -0,0 +1,117 @@ +{-# LANGUAGE OverloadedLists #-} + +module Ide.Plugin.CodeRangeTest (testTree) where + +import qualified Data.Vector as V +import Ide.Plugin.CodeRange +import Ide.Plugin.CodeRange.Rules +import Test.Hls + +testTree :: TestTree +testTree = + testGroup "CodeRange" [ + testGroup "findPosition" $ + let check :: Position -> CodeRange -> Maybe SelectionRange -> Assertion + check position codeRange = (findPosition position codeRange @?=) + + mkCodeRange :: Position -> Position -> V.Vector CodeRange -> CodeRange + mkCodeRange start end children = CodeRange (Range start end) children CodeKindRegion + in [ + testCase "not in range" $ check + (Position 10 1) + (mkCodeRange (Position 1 1) (Position 5 10) []) + Nothing, + testCase "in top level range" $ check + (Position 3 8) + (mkCodeRange (Position 1 1) (Position 5 10) []) + (Just $ SelectionRange (Range (Position 1 1) (Position 5 10)) Nothing), + testCase "in the gap between children, in parent" $ check + (Position 3 6) + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 1) (Position 3 6) [], + mkCodeRange (Position 3 7) (Position 5 10) [] + ]) + (Just $ SelectionRange (Range (Position 1 1) (Position 5 10)) Nothing), + testCase "before all children, in parent" $ check + (Position 1 1) + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 2) (Position 3 6) [], + mkCodeRange (Position 3 7) (Position 5 10) [] + ]) + (Just $ SelectionRange (Range (Position 1 1) (Position 5 10)) Nothing), + testCase "in children, in parent" $ check + (Position 2 1) + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 2) (Position 3 6) [], + mkCodeRange (Position 3 7) (Position 5 10) [] + ]) + (Just $ SelectionRange (Range (Position 1 2) (Position 3 6)) $ Just + ( SelectionRange (Range (Position 1 1) (Position 5 10)) Nothing + ) + ) + ], + + -- TODO: Some more tests can be added on strange cases like + -- 1. 
lots of blank lines in between type signature and the body + -- 2. lots of blank lines in the function itself + -- etc. + testGroup "findFoldingRanges" $ + let check :: CodeRange -> [FoldingRange] -> Assertion + check codeRange = (findFoldingRanges codeRange @?=) + + mkCodeRange :: Position -> Position -> V.Vector CodeRange -> CodeRangeKind -> CodeRange + mkCodeRange start end children crk = CodeRange (Range start end) children crk + in [ + -- General test + testCase "Test General Code Block" $ check + (mkCodeRange (Position 1 1) (Position 5 10) [] CodeKindRegion) + [], + + -- Tests for code kind + testCase "Test Code Kind Region" $ check + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 2) (Position 3 6) [] CodeKindRegion + ] CodeKindRegion) + [FoldingRange 1 (Just 2) 3 (Just 6) (Just FoldingRangeKind_Region) Nothing], + testCase "Test Code Kind Comment" $ check + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 2) (Position 3 6) [] CodeKindComment + ] CodeKindRegion) + [FoldingRange 1 (Just 2) 3 (Just 6) (Just FoldingRangeKind_Comment) Nothing], + testCase "Test Code Kind Import" $ check + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 2) (Position 3 6) [] CodeKindImports + ] CodeKindRegion) + [FoldingRange 1 (Just 2) 3 (Just 6) (Just FoldingRangeKind_Imports) Nothing], + + -- Test for Code Portions with children + testCase "Test Children" $ check + (mkCodeRange (Position 1 1) (Position 5 10) [ + mkCodeRange (Position 1 2) (Position 3 6) [ + mkCodeRange (Position 1 3) (Position 1 5) [] CodeKindRegion + ] CodeKindRegion, + mkCodeRange (Position 3 7) (Position 5 10) [] CodeKindRegion + ] CodeKindRegion) + [ FoldingRange 1 (Just 2) 3 (Just 6) (Just FoldingRangeKind_Region) Nothing, + FoldingRange 1 (Just 3) 1 (Just 5) (Just FoldingRangeKind_Region) Nothing, + FoldingRange 3 (Just 7) 5 (Just 10) (Just FoldingRangeKind_Region) Nothing + ] + ], + + testGroup "createFoldingRange" $ + let check :: CodeRange -> Maybe FoldingRange -> Assertion + check codeRange = (createFoldingRange codeRange @?=) + + mkCodeRange :: Position -> Position -> V.Vector CodeRange -> CodeRangeKind -> CodeRange + mkCodeRange start end children crk = CodeRange (Range start end) children crk + in [ + -- General tests + testCase "Test General Code Block" $ check + (mkCodeRange (Position 1 1) (Position 5 10) [] CodeKindRegion) + (Just (FoldingRange 1 (Just 1) 5 (Just 10) (Just FoldingRangeKind_Region) Nothing)), + -- If a range has the same start and end line it need not be folded so Nothing is expected + testCase "Test Same Start Line" $ check + (mkCodeRange (Position 1 1) (Position 1 10) [] CodeKindRegion) + (Just (FoldingRange 1 (Just 1) 1 (Just 10) (Just FoldingRangeKind_Region) Nothing)) + ] + ] diff --git a/plugins/hls-code-range-plugin/test/Main.hs b/plugins/hls-code-range-plugin/test/Main.hs new file mode 100644 index 0000000000..da32deed51 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/Main.hs @@ -0,0 +1,96 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Main (main) where + +import Control.Lens hiding (List, (<.>)) +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy.Char8 as LBSChar8 +import Data.String (fromString) +import Ide.Plugin.CodeRange (Log, descriptor) +import qualified Ide.Plugin.CodeRange.RulesTest +import qualified Ide.Plugin.CodeRangeTest +import Language.LSP.Protocol.Lens (result) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import System.FilePath ((<.>), ()) 
+import Test.Hls + +plugin :: PluginTestDescriptor Log +plugin = mkPluginTestDescriptor descriptor "codeRange" + +main :: IO () +main = do + defaultTestRunner $ + testGroup "Code Range" [ + testGroup "Integration Tests" [ + selectionRangeGoldenTest "Import" [(4, 36), (1, 8)], + selectionRangeGoldenTest "Function" [(5, 19), (5, 12), (4, 4), (3, 5)], + selectionRangeGoldenTest "Empty" [(1, 5)], + foldingRangeGoldenTest "Function" + ], + testGroup "Unit Tests" [ + Ide.Plugin.CodeRangeTest.testTree, + Ide.Plugin.CodeRange.RulesTest.testTree + ] + ] + +selectionRangeGoldenTest :: TestName -> [(UInt, UInt)] -> TestTree +selectionRangeGoldenTest testName positions = goldenGitDiff testName (testDataDir testName <.> "golden" <.> "txt" <> ghcSuffix) $ do + res <- runSessionWithServer def plugin testDataDir $ do + doc <- openDoc (testName <.> "hs") "haskell" + resp <- request SMethod_TextDocumentSelectionRange $ SelectionRangeParams Nothing Nothing doc + $ fmap (uncurry Position . (\(x, y) -> (x-1, y-1))) positions + let res = resp ^. result + pure $ fmap (showSelectionRangesForTest . absorbNull) res + case res of + Left (TResponseError (InL LSPErrorCodes_RequestFailed) _ _) -> pure "" + Left err -> assertFailure (show err) + Right golden -> pure golden + where + testDataDir :: FilePath + testDataDir = "plugins" "hls-code-range-plugin" "test" "testdata" "selection-range" + + showSelectionRangesForTest :: [SelectionRange] -> ByteString + showSelectionRangesForTest selectionRanges = LBSChar8.intercalate "\n" $ fmap showSelectionRangeForTest selectionRanges + + showSelectionRangeForTest :: SelectionRange -> ByteString + showSelectionRangeForTest selectionRange = go True (Just selectionRange) + where + go :: Bool -> Maybe SelectionRange -> ByteString + go _ Nothing = "" + go isFirst (Just (SelectionRange (Range sp ep) parent)) = + (if isFirst then "" else " => ") <> showPosition sp <> " " <> showPosition ep <> go False parent + showPosition :: Position -> ByteString + showPosition (Position line col) = "(" <> showLBS (line + 1) <> "," <> showLBS (col + 1) <> ")" + showLBS = fromString . show + +foldingRangeGoldenTest :: TestName -> TestTree +foldingRangeGoldenTest testName = goldenGitDiff testName (testDataDir testName <.> "golden" <.> "txt" <> ghcSuffix) $ do + res <- runSessionWithServer def plugin testDataDir $ do + doc <- openDoc (testName <.> "hs") "haskell" + resp <- request SMethod_TextDocumentFoldingRange $ FoldingRangeParams Nothing Nothing doc + let res = resp ^. result + pure $ fmap (showFoldingRangesForTest . absorbNull) res + + case res of + Left err -> assertFailure (show err) + Right golden -> pure golden + + where + testDataDir :: FilePath + testDataDir = "plugins" "hls-code-range-plugin" "test" "testdata" "folding-range" + + showFoldingRangesForTest :: [FoldingRange] -> ByteString + showFoldingRangesForTest foldingRanges = (LBSChar8.intercalate "\n" $ fmap showFoldingRangeForTest foldingRanges) `LBSChar8.snoc` '\n' + + showFoldingRangeForTest :: FoldingRange -> ByteString + showFoldingRangeForTest (FoldingRange sl (Just sc) el (Just ec) (Just frk) _) = + "((" <> showLBS sl <> ", " <> showLBS sc <> ") : (" <> showLBS el <> ", " <> showLBS ec <> ")) : " <> showFRK frk + showFoldingRangeForTest fr = + "unexpected FoldingRange: " <> fromString (show fr) + + showLBS = fromString . show + showFRK = fromString . 
show + +ghcSuffix :: String +ghcSuffix = if ghcVersion >= GHC910 then ".ghc910" else "" diff --git a/plugins/hls-code-range-plugin/test/testdata/folding-range/Empty.golden.txt b/plugins/hls-code-range-plugin/test/testdata/folding-range/Empty.golden.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-code-range-plugin/test/testdata/folding-range/Empty.hs b/plugins/hls-code-range-plugin/test/testdata/folding-range/Empty.hs new file mode 100644 index 0000000000..444d0ce37b --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/folding-range/Empty.hs @@ -0,0 +1 @@ +module Empty where diff --git a/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.golden.txt b/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.golden.txt new file mode 100644 index 0000000000..98399f4847 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.golden.txt @@ -0,0 +1,41 @@ +((2, 16) : (2, 22)) : FoldingRangeKind_Region +((4, 0) : (7, 21)) : FoldingRangeKind_Region +((4, 0) : (4, 25)) : FoldingRangeKind_Region +((4, 0) : (4, 6)) : FoldingRangeKind_Region +((4, 10) : (4, 25)) : FoldingRangeKind_Region +((4, 10) : (4, 17)) : FoldingRangeKind_Region +((4, 21) : (4, 25)) : FoldingRangeKind_Region +((5, 0) : (7, 21)) : FoldingRangeKind_Region +((5, 0) : (5, 6)) : FoldingRangeKind_Region +((5, 7) : (5, 8)) : FoldingRangeKind_Region +((5, 9) : (7, 21)) : FoldingRangeKind_Region +((5, 11) : (7, 21)) : FoldingRangeKind_Region +((5, 14) : (5, 28)) : FoldingRangeKind_Region +((5, 14) : (5, 23)) : FoldingRangeKind_Region +((5, 14) : (5, 15)) : FoldingRangeKind_Region +((5, 16) : (5, 21)) : FoldingRangeKind_Region +((5, 22) : (5, 23)) : FoldingRangeKind_Region +((5, 24) : (5, 26)) : FoldingRangeKind_Region +((5, 27) : (5, 28)) : FoldingRangeKind_Region +((6, 16) : (6, 20)) : FoldingRangeKind_Region +((7, 16) : (7, 21)) : FoldingRangeKind_Region +((9, 0) : (12, 20)) : FoldingRangeKind_Region +((9, 0) : (9, 24)) : FoldingRangeKind_Region +((9, 0) : (9, 5)) : FoldingRangeKind_Region +((9, 9) : (9, 24)) : FoldingRangeKind_Region +((9, 9) : (9, 16)) : FoldingRangeKind_Region +((9, 20) : (9, 24)) : FoldingRangeKind_Region +((10, 0) : (12, 20)) : FoldingRangeKind_Region +((10, 0) : (10, 5)) : FoldingRangeKind_Region +((10, 6) : (10, 7)) : FoldingRangeKind_Region +((10, 8) : (12, 20)) : FoldingRangeKind_Region +((10, 10) : (12, 20)) : FoldingRangeKind_Region +((10, 13) : (10, 27)) : FoldingRangeKind_Region +((10, 13) : (10, 22)) : FoldingRangeKind_Region +((10, 13) : (10, 14)) : FoldingRangeKind_Region +((10, 15) : (10, 20)) : FoldingRangeKind_Region +((10, 21) : (10, 22)) : FoldingRangeKind_Region +((10, 23) : (10, 25)) : FoldingRangeKind_Region +((10, 26) : (10, 27)) : FoldingRangeKind_Region +((11, 16) : (11, 21)) : FoldingRangeKind_Region +((12, 16) : (12, 20)) : FoldingRangeKind_Region diff --git a/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.golden.txt.ghc910 b/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.golden.txt.ghc910 new file mode 100644 index 0000000000..937654b5b7 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.golden.txt.ghc910 @@ -0,0 +1,42 @@ +((2, 7) : (2, 15)) : FoldingRangeKind_Region +((2, 16) : (2, 22)) : FoldingRangeKind_Region +((4, 0) : (7, 21)) : FoldingRangeKind_Region +((4, 0) : (4, 25)) : FoldingRangeKind_Region +((4, 0) : (4, 6)) : FoldingRangeKind_Region +((4, 10) : (4, 25)) : FoldingRangeKind_Region +((4, 10) : (4, 17)) : 
FoldingRangeKind_Region +((4, 21) : (4, 25)) : FoldingRangeKind_Region +((5, 0) : (7, 21)) : FoldingRangeKind_Region +((5, 0) : (5, 6)) : FoldingRangeKind_Region +((5, 7) : (5, 8)) : FoldingRangeKind_Region +((5, 9) : (7, 21)) : FoldingRangeKind_Region +((5, 11) : (7, 21)) : FoldingRangeKind_Region +((5, 14) : (5, 28)) : FoldingRangeKind_Region +((5, 14) : (5, 23)) : FoldingRangeKind_Region +((5, 14) : (5, 15)) : FoldingRangeKind_Region +((5, 16) : (5, 21)) : FoldingRangeKind_Region +((5, 22) : (5, 23)) : FoldingRangeKind_Region +((5, 24) : (5, 26)) : FoldingRangeKind_Region +((5, 27) : (5, 28)) : FoldingRangeKind_Region +((6, 16) : (6, 20)) : FoldingRangeKind_Region +((7, 16) : (7, 21)) : FoldingRangeKind_Region +((9, 0) : (12, 20)) : FoldingRangeKind_Region +((9, 0) : (9, 24)) : FoldingRangeKind_Region +((9, 0) : (9, 5)) : FoldingRangeKind_Region +((9, 9) : (9, 24)) : FoldingRangeKind_Region +((9, 9) : (9, 16)) : FoldingRangeKind_Region +((9, 20) : (9, 24)) : FoldingRangeKind_Region +((10, 0) : (12, 20)) : FoldingRangeKind_Region +((10, 0) : (10, 5)) : FoldingRangeKind_Region +((10, 6) : (10, 7)) : FoldingRangeKind_Region +((10, 8) : (12, 20)) : FoldingRangeKind_Region +((10, 10) : (12, 20)) : FoldingRangeKind_Region +((10, 13) : (10, 27)) : FoldingRangeKind_Region +((10, 13) : (10, 22)) : FoldingRangeKind_Region +((10, 13) : (10, 14)) : FoldingRangeKind_Region +((10, 15) : (10, 20)) : FoldingRangeKind_Region +((10, 21) : (10, 22)) : FoldingRangeKind_Region +((10, 23) : (10, 25)) : FoldingRangeKind_Region +((10, 26) : (10, 27)) : FoldingRangeKind_Region +((11, 16) : (11, 21)) : FoldingRangeKind_Region +((12, 16) : (12, 20)) : FoldingRangeKind_Region diff --git a/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.hs b/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.hs new file mode 100644 index 0000000000..b73bece14f --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/folding-range/Function.hs @@ -0,0 +1,13 @@ +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +{-# HLINT ignore "Use module export list" #-} +module Function(isEven) where + +isEven :: Integer -> Bool +isEven n = if n `mod` 2 == 0 + then True + else False + +isOdd :: Integer -> Bool +isOdd n = if n `mod` 2 == 0 + then False + else True diff --git a/plugins/hls-code-range-plugin/test/testdata/folding-range/hie.yaml b/plugins/hls-code-range-plugin/test/testdata/folding-range/hie.yaml new file mode 100644 index 0000000000..1a62ad9a94 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/folding-range/hie.yaml @@ -0,0 +1,5 @@ +cradle: + direct: + arguments: + - "Function" + - "Empty" diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.golden.txt b/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.golden.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.golden.txt.ghc910 b/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.golden.txt.ghc910 new file mode 100644 index 0000000000..7689c89086 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.golden.txt.ghc910 @@ -0,0 +1 @@ +(1,5) (1,5) \ No newline at end of file diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.hs b/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.hs new file mode 100644 index 0000000000..444d0ce37b --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Empty.hs @@ -0,0 
+1 @@ +module Empty where diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.golden.txt b/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.golden.txt new file mode 100644 index 0000000000..48e84dc2df --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.golden.txt @@ -0,0 +1,4 @@ +(5,16) (5,20) => (5,16) (5,40) => (5,14) (11,20) => (5,1) (11,20) => (4,1) (11,20) => (3,1) (11,20) => (3,1) (14,15) +(5,12) (5,13) => (5,1) (11,20) => (4,1) (11,20) => (3,1) (11,20) => (3,1) (14,15) +(4,1) (4,9) => (4,1) (4,29) => (4,1) (11,20) => (3,1) (11,20) => (3,1) (14,15) +(3,1) (3,9) => (3,1) (3,61) => (3,1) (11,20) => (3,1) (14,15) \ No newline at end of file diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.golden.txt.ghc910 b/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.golden.txt.ghc910 new file mode 100644 index 0000000000..eb359fb12b --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.golden.txt.ghc910 @@ -0,0 +1,4 @@ +(5,16) (5,20) => (5,16) (5,40) => (5,14) (5,40) => (5,1) (11,20) => (4,1) (11,20) => (3,1) (11,20) => (1,8) (14,15) +(5,12) (5,13) => (5,1) (11,20) => (4,1) (11,20) => (3,1) (11,20) => (1,8) (14,15) +(4,1) (4,9) => (4,1) (4,29) => (4,1) (11,20) => (3,1) (11,20) => (1,8) (14,15) +(3,1) (3,9) => (3,1) (3,61) => (3,1) (11,20) => (1,8) (14,15) \ No newline at end of file diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.hs b/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.hs new file mode 100644 index 0000000000..4df95779a0 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Function.hs @@ -0,0 +1,14 @@ +module FuncMultiMatch where + +someFunc :: Integral a => a -> String -> Maybe (Int, String) +someFunc _ "magic" = Nothing +someFunc x y = Just (fromIntegral x, y) + where + go :: Int -> Int + go 0 = -1 + go x = x + 1 + + hi = "greeting" + +otherFunc :: String -> String +otherFunc = id diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.golden.txt b/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.golden.txt new file mode 100644 index 0000000000..43f39edf7d --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.golden.txt @@ -0,0 +1,2 @@ +(4,33) (4,38) => (4,32) (4,47) => (4,1) (4,47) => (3,1) (4,47) +(1,8) (1,8) \ No newline at end of file diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.golden.txt.ghc910 b/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.golden.txt.ghc910 new file mode 100644 index 0000000000..4011ddb913 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.golden.txt.ghc910 @@ -0,0 +1,2 @@ +(4,33) (4,38) => (4,32) (4,47) => (4,1) (4,47) => (3,1) (4,47) => (1,8) (4,47) +(1,8) (1,22) => (1,8) (4,47) \ No newline at end of file diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.hs b/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.hs new file mode 100644 index 0000000000..9159c29d49 --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/Import.hs @@ -0,0 +1,4 @@ +module MultiPositions where + +import Data.List (find) +import qualified Data.Foldable (foldl, foldl') diff --git a/plugins/hls-code-range-plugin/test/testdata/selection-range/hie.yaml 
b/plugins/hls-code-range-plugin/test/testdata/selection-range/hie.yaml new file mode 100644 index 0000000000..dd72f7881e --- /dev/null +++ b/plugins/hls-code-range-plugin/test/testdata/selection-range/hie.yaml @@ -0,0 +1,6 @@ +cradle: + direct: + arguments: + - "Import" + - "Function" + - "Empty" diff --git a/plugins/hls-eval-plugin/README.md b/plugins/hls-eval-plugin/README.md new file mode 100644 index 0000000000..d2b39498cb --- /dev/null +++ b/plugins/hls-eval-plugin/README.md @@ -0,0 +1,350 @@ +# Eval plugin for the [Haskell Language Server](https://github.com/haskell/haskell-language-server#readme) + +The Eval plugin evaluates code inserted in comments. + +This is mainly useful to test and document functions and to quickly evaluate small expressions. + +Every line of code to be evaluated is introduced by __>>>__ + +A quick calculation: + +``` +-- >>> 2**4.5/pi +-- 7.202530529256849 +``` + +A little test for the `double` function: + +``` +{- | +A doubling function. + +>>> double 11 +22 +-} +double = (2*) +``` + +# Demo + +![Eval](demo.gif) + +# Test Structure + +A test is composed by a sequence of contiguous lines, the result of their evaluation is inserted after the test body: + +``` +>>> "AB" ++ "CD" +>>> "CD" ++ "AB" +"ABCD" +"CDAB" +``` + +You execute a test by clicking on the _Evaluate_ code lens that appears above it (or _Refresh_, if the test has been run previously). A code action is also provided. + +All tests in the same comment block are executed together. + + +Tests can appear in all kind of comments: +* plain comments (both single and multi line) +``` +{- +>>> "ab" ++ "c" +"abc" +-} + +-- >>> "ab" ++ "c" +-- "abc" +``` +* Haddock commands (both single and multi line, forward and backward) +``` +{- +>>> "ab" ++ "c" +"abc" +-} + +-- >>> "ab" ++ "c" +-- "abc" + +double a = a + a +-- ^ A doubling function +-- >>> double 11 +-- 22 +``` + +Modules that use CPP and Literate Haskell (Bird-style only) modules are also supported (for GHC >= 8.8). + +# Test Components + +In general, a test is a sequence of: +* imports +* directives +* statements +* expressions +* properties + +in no particular order, with every line introduced by __>>>__ (or __prop>__ in the case of properties). + +### Imports + +``` +>>> import Data.List +>>> import GHC.TypeNats +``` + +From any package in scope but currently NOT from modules in the same source directory. + +### Language Extensions + +``` +>>> :set -XScopedTypeVariables -XStandaloneDeriving -XDataKinds -XTypeOperators -XExplicitNamespaces +``` + +### Statements and Declarations + +Function declarations (optionally introduced by __let__): + +``` +>>> let tuple x = (x,x) +>>> let one=1;two=2 +>>> triple x = (x,x,x) +``` + +Any other declaration: + +``` +>>> data TertiumDatur = Truly | Falsely | Other deriving Show +>>> class Display a where display :: a -> String +>>> instance Display TertiumDatur where display = show +``` + +Definitions are available to following tests in the __same__ comment: + +``` +{- +>>> two = 2 + +>>> two +2 +-} + +-- >>> two +-- Variable not in scope: two +``` + +If you want definitions to be available to all tests in the module, define a setup section: + +``` +-- $setup +-- >>> eleven = 11 + +{- +eleven is now available to any test: + +>>> eleven*2 +22 +-} +``` + + +### Type and Kind directives + +``` +>>> :type Truly +Truly :: TertiumDatur + +>>> :kind TertiumDatur +TertiumDatur :: * + +>>> :type 3 +3 :: forall p. Num p => p + +>>> :type +d 3 +3 :: Integer + +>>> type N = 1 +>>> type M = 40 +>>> :kind! 
N + M + 1
+N + M + 1 :: Nat
+= 42
+```
+
+### Expressions
+
+```
+>>> tuple 2
+>>> triple 3
+>>> display Other
+(2,2)
+(3,3,3)
+"Other"
+```
+
+IO expressions can also be evaluated, but their output to stdout/stderr is NOT captured:
+
+```
+>>> print "foo"
+()
+```
+
+### Properties
+
+```
+prop> \(l::[Int]) -> reverse (reverse l) == l
++++ OK, passed 100 tests.
+```
+
+# Haddock vs Plain Comments
+
+There is a conceptual difference between Haddock and plain comments:
+* Haddock comments constitute the external module's documentation; they state the contract between the implementor and the module users (API)
+* Plain comments are internal documentation meant to explain how the code works (implementation).
+
+This conceptual difference is reflected in the way test results are refreshed by the Eval plugin.
+
+Say that we have defined a `double` function as:
+
+```
+double = (*2)
+```
+
+And, in a Haddock comment, we run the test:
+
+```
+{- |
+>>> double 11
+22
+-}
+```
+
+We then change the definition to:
+
+```
+double = (*3)
+```
+
+When we refresh the test, its current result is compared with the previous one and the differences are displayed (as they change the API):
+
+```
+{- |
+>>> double 11
+WAS 22
+NOW 33
+-}
+```
+
+By contrast, if the test were in a plain comment, the result would simply be replaced:
+
+```
+{-
+>>> double 11
+33
+-}
+```
+
+If you find that this WAS/NOW behaviour does not fit your needs, you can turn it off by toggling the configuration option:
+```json
+"haskell.plugin.eval.config.diff": false
+```
+
+# Multiline Output
+
+By default, the output of every expression is returned as a single line.
+
+This is a problem if you want, for example, to pretty print a value (in this case using the [pretty-simple](https://hackage.haskell.org/package/pretty-simple) package):
+
+```
+>>> import Text.Pretty.Simple
+>>> pShowNoColor [1..3]
+"[ 1\n, 2\n, 3\n]"
+```
+
+We could try to print the pretty-printed output, but stdout is not captured, so we get just a ():
+
+```
+>>> print $ pShowNoColor [1..7]
+()
+```
+
+To display it properly, we can exploit the fact that the output of an error is displayed as multi-line text:
+
+```
+>>> import qualified Data.Text.Lazy as TL
+>>> import Text.Pretty.Simple
+>>> prettyPrint v = error (TL.unpack $ pShowNoColor v) :: IO String
+>>> prettyPrint [1..3]
+[ 1
+, 2
+, 3
+]
+```
+
+This assumes you did not turn on exception marking (see [Marking exceptions](#marking-exceptions) below).
+
+# Differences with doctest
+
+Though the Eval plugin's functionality is quite similar to that of [doctest](https://hackage.haskell.org/package/doctest), some of doctest's features are not supported.
+
+### Capturing Stdout
+
+Only the value of an IO expression is spliced in, not its output:
+
+```
+>>> print "foo"
+()
+```
+
+### Marking exceptions
+
+When an exception is thrown it is not prefixed:
+
+```
+>>> 1 `div` 0
+divide by zero
+```
+
+If you want the doctest/GHCi behaviour, you can toggle the configuration option:
+```json
+"haskell.plugin.eval.config.exception": true
+```
+```
+>>> 1 `div` 0
+*** Exception: divide by zero
+```
+
+### Pattern Matching
+
+The arbitrary content matcher __...__ is unsupported.
+
+### Missing lambda abstractions in property tests
+
+Variables are not automatically introduced:
+
+```
+prop> reverse (reverse l) == (l::[Int])
+Variable not in scope: l :: [Int]
+```
+
+This works:
+
+```
+prop> \(l::[Int]) -> reverse (reverse l) == l
++++ OK, passed 100 tests.
+``` + +### Multiline Expressions + +Multiline expressions are not supported, see https://github.com/haskell/haskell-language-server/issues/1817 + +# Acknowledgments + +Design/features derived from: + +* [GHCi](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/ghci.html) + +* [Haddock's](https://www.haskell.org/haddock/doc/html/ch03s08.html#idm140354810775744) Examples and Properties + +* [Doctest](https://hackage.haskell.org/package/doctest) + +* the REPLoid feature of [Dante](https://github.com/jyp/dante) + diff --git a/plugins/hls-eval-plugin/demo.gif b/plugins/hls-eval-plugin/demo.gif new file mode 100644 index 0000000000..ed2648ba0c Binary files /dev/null and b/plugins/hls-eval-plugin/demo.gif differ diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs new file mode 100644 index 0000000000..30d43de005 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval.hs @@ -0,0 +1,40 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wwarn #-} +{-# LANGUAGE LambdaCase #-} + +{- | +Eval Plugin entry point. +-} +module Ide.Plugin.Eval ( + descriptor, + Eval.Log(..) + ) where + +import Development.IDE (IdeState) +import Ide.Logger (Recorder, WithPriority) +import Ide.Plugin.Eval.Config +import qualified Ide.Plugin.Eval.Handlers as Handlers +import Ide.Plugin.Eval.Rules (rules) +import qualified Ide.Plugin.Eval.Types as Eval +import Ide.Types (ConfigDescriptor (..), + PluginDescriptor (..), PluginId, + defaultConfigDescriptor, + defaultPluginDescriptor, + mkCustomConfig, mkPluginHandler) +import Language.LSP.Protocol.Message + +-- |Plugin descriptor +descriptor :: Recorder (WithPriority Eval.Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides code action and lens to evaluate expressions in doctest comments") + { pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentCodeAction (Handlers.codeAction recorder) + , mkPluginHandler SMethod_TextDocumentCodeLens (Handlers.codeLens recorder) + ] + , pluginCommands = [Handlers.evalCommand recorder plId] + , pluginRules = rules recorder + , pluginConfigDescriptor = defaultConfigDescriptor + { configCustomConfig = mkCustomConfig properties + } + } diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs new file mode 100644 index 0000000000..e8b7428b10 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Code.hs @@ -0,0 +1,114 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -Wwarn #-} + +-- | Expression execution +module Ide.Plugin.Eval.Code (Statement, testRanges, resultRange, propSetup, testCheck, asStatements,myExecStmt) where + +import Control.Lens ((^.)) +import Control.Monad.IO.Class +import Data.Algorithm.Diff (Diff, PolyDiff (..), getDiff) +import qualified Data.List.NonEmpty as NE +import Data.String (IsString) +import qualified Data.Text as T +import Development.IDE.GHC.Compat +import GHC (ExecOptions, ExecResult (..), + execStmt) +import Ide.Plugin.Eval.Types (Language (Plain), Loc, + Located (..), + Section (sectionLanguage), + Test (..), Txt, locate, locate0) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types (Position (Position), + Range (Range)) +import System.IO.Extra 
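+-- Worked example for 'testRanges' (illustrative numbers, not taken from the test suite):
+-- for a test whose expression starts at line 10 and spans 2 lines, with 3 lines of
+-- expected result, the expression range is (10,0)-(12,0) and the result range is
+-- (12,0)-(15,0).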
(newTempFile, readFile') + +-- | Return the ranges of the expression and result parts of the given test +testRanges :: Test -> (Range, Range) +testRanges tst = + let startLine = testRange tst ^. L.start . L.line + (fromIntegral -> exprLines, fromIntegral -> resultLines) = testLengths tst + resLine = startLine + exprLines + in ( Range + (Position startLine 0) + --(Position (startLine + exprLines + resultLines) 0), + (Position resLine 0) + , Range (Position resLine 0) (Position (resLine + resultLines) 0) + ) + +{- |The document range where a test is defined + testRange :: Loc Test -> Range + testRange = fst . testRanges +-} + +-- |The document range where the result of the test is defined +resultRange :: Test -> Range +resultRange = snd . testRanges + +-- TODO: handle BLANKLINE +{- +>>> showDiffs $ getDiff ["abc","def","ghi","end"] ["abc","def","Z","ZZ","end"] +["abc","def","WAS ghi","NOW Z","NOW ZZ","end"] +-} +showDiffs :: (Semigroup a, IsString a) => [Diff a] -> [a] +showDiffs = map showDiff + +showDiff :: (Semigroup a, IsString a) => Diff a -> a +showDiff (First w) = "WAS " <> w +showDiff (Second w) = "NOW " <> w +showDiff (Both w _) = w + +testCheck :: Bool -> (Section, Test) -> [T.Text] -> [T.Text] +testCheck diff (section, test) out + | not diff || null (testOutput test) || sectionLanguage section == Plain = out + | otherwise = showDiffs $ getDiff (map T.pack $ testOutput test) out + +testLengths :: Test -> (Int, Int) +testLengths (Example e r _) = (NE.length e, length r) +testLengths (Property _ r _) = (1, length r) + +-- |A one-line Haskell statement +type Statement = Loc String + +asStatements :: Test -> [Statement] +asStatements lt = locate $ Located (fromIntegral $ testRange lt ^. L.start . L.line) (asStmts lt) + +asStmts :: Test -> [Txt] +asStmts (Example e _ _) = NE.toList e +asStmts (Property t _ _) = + ["prop11 = " ++ t, "(propEvaluation prop11 :: IO String)"] + + + +-- | A wrapper of 'InteractiveEval.execStmt', capturing the execution result +myExecStmt :: String -> ExecOptions -> Ghc (Either String (Maybe String)) +myExecStmt stmt opts = do + (temp, purge) <- liftIO newTempFile + evalPrint <- head <$> runDecls ("evalPrint x = P.writeFile " <> show temp <> " (P.show x)") + modifySession $ \hsc -> hsc {hsc_IC = setInteractivePrintName (hsc_IC hsc) evalPrint} + result <- execStmt stmt opts >>= \case + ExecComplete (Left err) _ -> pure $ Left $ show err + ExecComplete (Right _) _ -> liftIO $ Right . (\x -> if null x then Nothing else Just x) <$> readFile' temp + ExecBreak{} -> pure $ Right $ Just "breakpoints are not supported" + liftIO purge + pure result + +{- |GHC declarations required to execute test properties + +Example: + +prop> \(l::[Bool]) -> reverse (reverse l) == l ++++ OK, passed 100 tests. + +prop> \(l::[Bool]) -> reverse l == l +*** Failed! Falsified (after 6 tests and 2 shrinks): +[True,False] +-} +propSetup :: [Loc [Char]] +propSetup = + locate0 + [ ":set -XScopedTypeVariables -XExplicitForAll" + , "import qualified Test.QuickCheck as Q11" + , "propEvaluation p = Q11.quickCheckWithResult Q11.stdArgs p >>= error . 
Q11.output" -- uses `error` to get a multi-line display + ] diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Config.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Config.hs new file mode 100644 index 0000000000..4b789c37ee --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Config.hs @@ -0,0 +1,34 @@ +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.Eval.Config + ( properties + , getEvalConfig + , EvalConfig(..) + ) where + +import Development.IDE +import Ide.Plugin.Properties +import Ide.Types (PluginId) + +-- | The Eval plugin configuration. (see 'properties') +data EvalConfig = EvalConfig + { eval_cfg_diff :: Bool + , eval_cfg_exception :: Bool + } + deriving (Eq, Ord, Show) + +properties :: Properties + '[ 'PropertyKey "exception" 'TBoolean + , 'PropertyKey "diff" 'TBoolean + ] +properties = emptyProperties + & defineBooleanProperty #diff + "Enable the diff output (WAS/NOW) of eval lenses" True + & defineBooleanProperty #exception + "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi." False + +getEvalConfig :: PluginId -> Action EvalConfig +getEvalConfig plId = + EvalConfig + <$> usePropertyAction #diff plId properties + <*> usePropertyAction #exception plId properties diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs new file mode 100644 index 0000000000..f0b01fca92 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/GHC.hs @@ -0,0 +1,180 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +-- |GHC API utilities +module Ide.Plugin.Eval.GHC ( + addExtension, + addImport, + hasPackage, + addPackages, + modifyFlags, + showDynFlags, + setSessionAndInteractiveDynFlags, +) where + +import Data.List (isPrefixOf) +import Data.Maybe (mapMaybe) +import Data.String (fromString) +import qualified Data.Text as T +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.Util +import qualified Development.IDE.GHC.Compat.Util as EnumSet +import Development.IDE.GHC.Util (printOutputable) + +import GHC.LanguageExtensions.Type (Extension (..)) +import Ide.Plugin.Eval.Util (gStrictTry) + +import GHC (setTopSessionDynFlags, + setUnitDynFlags) +import GHC.Driver.Env +import GHC.Driver.Session (getDynFlags) + +{- $setup +>>> import GHC +>>> import GHC.Paths +>>> run act = runGhc (Just libdir) (getInteractiveDynFlags >>= act) +>>> libdir +"/Users/titto/.ghcup/ghc/8.8.4/lib/ghc-8.8.4" +-} + +{- | True if specified package is present in DynFlags + +-- >>> hasPackageTst pkg = run $ \df -> return (hasPackage df pkg) +>>> hasPackageTst pkg = run $ \_ -> addPackages [pkg] >>= return . either Left (\df -> Right (hasPackage df pkg)) + +>>> hasPackageTst "base" +Right True + +>>> hasPackageTst "ghc" +Right True + +>>> hasPackageTst "extra" +Left ": cannot satisfy -package extra\n (use -v for more information)" + +>>> hasPackageTst "QuickCheck" +Left ": cannot satisfy -package QuickCheck\n (use -v for more information)" +-} +hasPackage :: DynFlags -> String -> Bool +hasPackage df = hasPackage_ (packageFlags df) + +hasPackage_ :: [PackageFlag] -> [Char] -> Bool +hasPackage_ pkgFlags name = any (name `isPrefixOf`) (pkgNames_ pkgFlags) + +{- | +>>> run (return . pkgNames) +[] +-} +pkgNames :: DynFlags -> [String] +pkgNames = pkgNames_ . 
packageFlags + +pkgNames_ :: [PackageFlag] -> [String] +pkgNames_ = + mapMaybe + ( \case + ExposePackage _ (PackageArg n) _ -> Just n + ExposePackage _ (UnitIdArg uid) _ -> Just $ T.unpack $ printOutputable uid + _ -> Nothing + ) + +{- | Expose a list of packages. +>>> addPackagesTest pkgs = run (\_ -> (packageFlags <$>) <$> addPackages pkgs) + +>>> addPackagesTest [] +Right [] + +>>> addPackagesTest ["base","base","array"] +Right [-package base{package base True ([])},-package array{package array True ([])}] + +>>> addPackagesTest ["Cabal"] +Right [-package Cabal{package Cabal True ([])}] + +>>> addPackagesTest ["QuickCheck"] +Left ": cannot satisfy -package QuickCheck\n (use -v for more information)" + +>>> addPackagesTest ["base","notThere"] +Left ": cannot satisfy -package notThere\n (use -v for more information)" + +prop> \(x::Int) -> x + x == 2 * x ++++ OK, passed 100 tests. +-} +addPackages :: [String] -> Ghc (Either String DynFlags) +addPackages pkgNames = gStrictTry $ + modifyFlags $ \df -> + df{packageFlags = foldr (\pkgName pf -> if hasPackage_ pf pkgName then pf else expose pkgName : pf) (packageFlags df) pkgNames} + where + expose name = ExposePackage ("-package " ++ name) (PackageArg name) (ModRenaming True []) + +modifyFlags :: GhcMonad m => (DynFlags -> DynFlags) -> m DynFlags +modifyFlags f = do + df <- getSessionDynFlags + _ <- setSessionDynFlags (f df) + getSessionDynFlags + +-- modifyFlags f = do +-- modifyDynFlags f +-- getSessionDynFlags + +{- | Add import to evaluation context + +>>> run $ \_ -> addImport "import Data.Maybe" +Could not find module ‘Data.Maybe’ +Use -v (or `:set -v` in ghci) to see a list of the files searched for. + +>>> run $ \df -> addPackages ["base"] >> addImport "import Data.Maybe" +[import Data.Maybe] + +>>> run $ \df -> addPackages ["base"] >> addImport "import qualified Data.Maybe as M" +[import qualified Data.Maybe as M] +-} +addImport :: GhcMonad m => String -> m [InteractiveImport] +addImport i = do + ctx <- getContext + -- dbgO "CONTEXT" ctx + idecl <- parseImportDecl i + setContext $ IIDecl idecl : ctx + -- ctx' <- getContext + -- dbg "CONTEXT'" ctx' + getContext + +{- | Add extension to interactive evaluation session +>>> import GHC.LanguageExtensions.Type(Extension(..)) +>>> run $ \_ -> addExtension DeriveGeneric +() +-} +addExtension :: GhcMonad m => Extension -> m () +addExtension ext = + modifySession $ \hsc -> hsc{hsc_IC = setExtension (hsc_IC hsc) ext} + +setExtension :: InteractiveContext -> Extension -> InteractiveContext +setExtension ic ext = ic{ic_dflags = xopt_set (ic_dflags ic) ext} + +deriving instance Read Extension + +-- Partial display of DynFlags contents, for testing purposes +showDynFlags :: DynFlags -> String +showDynFlags df = + T.unpack . printOutputable . vcat . map (\(n, d) -> text (n ++ ": ") <+> d) $ + [ ("extensions", ppr . extensions $ df) + , ("extensionFlags", ppr . EnumSet.toList . extensionFlags $ df) + , ("importPaths", vList $ importPaths df) + , ("generalFlags", pprHsString . fromString . show . EnumSet.toList . generalFlags $ df) + , -- , ("includePaths", text . show $ includePaths df) + -- ("packageEnv", ppr $ packageEnv df) + ("pkgNames", vcat . map text $ pkgNames df) + , ("packageFlags", vcat . map ppr $ packageFlags df) + -- ,("pkgDatabase",(map) (ppr . installedPackageId) . pkgDatabase $ df) + -- ("pkgDatabase", text . show <$> pkgDatabase $ df) + ] + +vList :: [String] -> SDoc +vList = vcat . 
map text + +setSessionAndInteractiveDynFlags :: DynFlags -> Ghc () +setSessionAndInteractiveDynFlags df = do + _ <- setUnitDynFlags (homeUnitId_ df) df + modifySession (hscUpdateLoggerFlags . hscSetActiveUnitId (homeUnitId_ df)) + df' <- getDynFlags + setTopSessionDynFlags df' + sessDyns <- getSessionDynFlags + setInteractiveDynFlags sessDyns diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs new file mode 100644 index 0000000000..1f19b5b476 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Handlers.hs @@ -0,0 +1,692 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE ExtendedDefaultRules #-} +{-# LANGUAGE NoMonomorphismRestriction #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -Wno-type-defaults #-} + +{- | +A plugin inspired by the REPLoid feature of , 's Examples and Properties and . + +For a full example see the "Ide.Plugin.Eval.Tutorial" module. +-} +module Ide.Plugin.Eval.Handlers ( + codeAction, + codeLens, + evalCommand, +) where + +import Control.Applicative (Alternative ((<|>))) +import Control.Arrow (second) +import Control.Exception (bracket_) +import qualified Control.Exception as E +import Control.Lens (ix, (%~), (^.)) +import Control.Monad (guard, void, + when) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Except (ExceptT (..), + runExceptT) +import Data.Aeson (toJSON) +import Data.Char (isSpace) +import Data.Foldable (toList) +import Data.List (dropWhileEnd, + find, + intercalate, + intersperse) +import qualified Data.Map as Map +import Data.Maybe (catMaybes) +import Data.String (IsString) +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getUriContents, setSomethingModified) +import Development.IDE.Core.Rules (IdeState, + runAction) +import Development.IDE.Core.Shake (use_, uses_, VFSModified (VFSUnmodified), useWithSeparateFingerprintRule_) +import Development.IDE.GHC.Compat hiding (typeKind, + unitState) +import Development.IDE.GHC.Compat.Util (OverridingBool (..)) +import Development.IDE.GHC.Util (evalGhcEnv, + modifyDynFlags) +import Development.IDE.Import.DependencyInformation (transitiveDeps, + transitiveModuleDeps) +import Development.IDE.Types.Location (toNormalizedFilePath') +import GHC (ClsInst, + ExecOptions (execLineNumber, execSourceFile), + FamInst, + GhcMonad, + NamedThing (getName), + defaultFixity, + execOptions, + exprType, + getInfo, + getInteractiveDynFlags, + isImport, isStmt, + parseName, + pprFamInst, + pprInstance, + typeKind) + + +import Development.IDE.Core.RuleTypes (GetLinkable (GetLinkable), + GetModSummary (GetModSummary), + GetModuleGraphTransDepsFingerprints (GetModuleGraphTransDepsFingerprints), + GhcSessionDeps (GhcSessionDeps), + ModSummaryResult (msrModSummary), + LinkableResult (linkableHomeMod), + TypeCheck (..), + tmrTypechecked, GetModuleGraphTransDepsFingerprints(..), GetModuleGraph(..)) +import qualified Development.IDE.GHC.Compat.Core as Compat (InteractiveImport (IIModule)) +import qualified Development.IDE.GHC.Compat.Core as SrcLoc (unLoc) +import Development.IDE.Types.HscEnvEq (HscEnvEq (hscEnv)) +import qualified GHC.LanguageExtensions.Type as LangExt (Extension (..)) + +import Data.List.Extra (unsnoc) +import Development.IDE.Core.PluginUtils +import Development.IDE.Types.Shake (toKey) +import 
GHC.Types.SrcLoc (UnhelpfulSpanReason (UnhelpfulInteractive)) +import Ide.Logger (Priority (..), + Recorder, + WithPriority, + logWith) +import Ide.Plugin.Error (PluginError (PluginInternalError), + handleMaybeM) +import Ide.Plugin.Eval.Code (Statement, + asStatements, + myExecStmt, + propSetup, + resultRange, + testCheck, + testRanges) +import Ide.Plugin.Eval.Config (EvalConfig (..), + getEvalConfig) +import Ide.Plugin.Eval.GHC (addImport, + addPackages, + hasPackage, + setSessionAndInteractiveDynFlags) +import Ide.Plugin.Eval.Parse.Comments (commentsToSections) +import Ide.Plugin.Eval.Parse.Option (parseSetFlags) +import Ide.Plugin.Eval.Rules (queueForEvaluation, + unqueueForEvaluation) +import Ide.Plugin.Eval.Types +import Ide.Plugin.Eval.Util (gStrictTry, + isLiterate, + prettyWarnings, + response', timed) +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Language.LSP.Server +#if MIN_VERSION_ghc(9,11,0) +import GHC.Unit.Module.ModIface (IfaceTopEnv (..)) +#endif + +codeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeAction recorder st plId CodeActionParams{_textDocument,_range} = do + rangeCommands <- mkRangeCommands recorder st plId _textDocument + pure + $ InL + [ InL command + | (testRange, command) <- rangeCommands + , _range `isSubrangeOf` testRange + ] + +{- | Code Lens provider + NOTE: Invoked every time the document is modified, not just when the document is saved. +-} +codeLens :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState Method_TextDocumentCodeLens +codeLens recorder st plId CodeLensParams{_textDocument} = do + rangeCommands <- mkRangeCommands recorder st plId _textDocument + pure + $ InL + [ CodeLens range (Just command) Nothing + | (range, command) <- rangeCommands + ] + +mkRangeCommands :: Recorder (WithPriority Log) -> IdeState -> PluginId -> TextDocumentIdentifier -> ExceptT PluginError (HandlerM Config) [(Range, Command)] +mkRangeCommands recorder st plId textDocument = + let dbg = logWith recorder Debug + perf = timed (\lbl duration -> dbg $ LogExecutionTime lbl duration) + in perf "evalMkRangeCommands" $ + do + let TextDocumentIdentifier uri = textDocument + fp <- uriToFilePathE uri + let nfp = toNormalizedFilePath' fp + isLHS = isLiterate fp + dbg $ LogCodeLensFp fp + (comments, _) <- + runActionE "eval.GetParsedModuleWithComments" st $ useWithStaleE GetEvalComments nfp + dbg $ LogCodeLensComments comments + + -- Extract tests from source code + let Sections{..} = commentsToSections isLHS comments + tests = testsBySection nonSetupSections + cmd = mkLspCommand plId evalCommandName "Evaluate=..." (Just []) + let rangeCommands = + [ (testRange, cmd') + | (section, ident, test) <- tests + , let (testRange, resultRange) = testRanges test + args = EvalParams (setupSections ++ [section]) textDocument ident + cmd' = + (cmd :: Command) + { _arguments = Just [toJSON args] + , _title = + if trivial resultRange + then "Evaluate..." + else "Refresh..." 
+ } + ] + + perf "tests" $ + dbg $ LogTests + (length tests) + (length nonSetupSections) + (length setupSections) + (length rangeCommands) + + pure rangeCommands + where + trivial (Range p p') = p == p' + +evalCommandName :: CommandId +evalCommandName = "evalCommand" + +evalCommand :: Recorder (WithPriority Log) -> PluginId -> PluginCommand IdeState +evalCommand recorder plId = PluginCommand evalCommandName "evaluate" (runEvalCmd recorder plId) + +type EvalId = Int + +runEvalCmd :: Recorder (WithPriority Log) -> PluginId -> CommandFunction IdeState EvalParams +runEvalCmd recorder plId st mtoken EvalParams{..} = + let dbg = logWith recorder Debug + perf = timed (\lbl duration -> dbg $ LogExecutionTime lbl duration) + cmd :: ExceptT PluginError (HandlerM Config) WorkspaceEdit + cmd = do + let tests = map (\(a,_,b) -> (a,b)) $ testsBySection sections + + let TextDocumentIdentifier{_uri} = module_ + fp <- uriToFilePathE _uri + let nfp = toNormalizedFilePath' fp + mdlText <- moduleText st _uri + + -- enable codegen for the module which we need to evaluate. + final_hscEnv <- liftIO $ bracket_ + (setSomethingModified VFSUnmodified st "Eval" $ do + queueForEvaluation st nfp + return [toKey IsEvaluating nfp] + ) + (setSomethingModified VFSUnmodified st "Eval" $ do + unqueueForEvaluation st nfp + return [toKey IsEvaluating nfp] + ) + (initialiseSessionForEval (needsQuickCheck tests) st nfp) + + evalCfg <- liftIO $ runAction "eval: config" st $ getEvalConfig plId + + -- Perform the evaluation of the command + edits <- + perf "edits" $ + liftIO $ + evalGhcEnv final_hscEnv $ do + runTests recorder evalCfg fp tests + + let workspaceEditsMap = Map.singleton _uri (addFinalReturn mdlText edits) + let workspaceEdits = WorkspaceEdit (Just workspaceEditsMap) Nothing Nothing + + return workspaceEdits + in perf "evalCmd" $ ExceptT $ + pluginWithIndefiniteProgress "Evaluating" mtoken Cancellable $ \_updater -> + runExceptT $ response' cmd + +-- | Create an HscEnv which is suitable for performing interactive evaluation. +-- All necessary home modules will have linkables and the current module will +-- also be loaded into the environment. +-- +-- The interactive context and interactive dynamic flags are also set appropiately. +initialiseSessionForEval :: Bool -> IdeState -> NormalizedFilePath -> IO HscEnv +initialiseSessionForEval needs_quickcheck st nfp = do + (ms, env1) <- runAction "runEvalCmd" st $ do + + ms <- msrModSummary <$> use_ GetModSummary nfp + deps_hsc <- hscEnv <$> use_ GhcSessionDeps nfp + + linkables_needed <- transitiveDeps <$> useWithSeparateFingerprintRule_ GetModuleGraphTransDepsFingerprints GetModuleGraph nfp <*> pure nfp + linkables <- uses_ GetLinkable (nfp : maybe [] transitiveModuleDeps linkables_needed) + -- We unset the global rdr env in mi_globals when we generate interfaces + -- See Note [Clearing mi_globals after generating an iface] + -- However, the eval plugin (setContext specifically) requires the rdr_env + -- for the current module - so get it from the Typechecked Module and add + -- it back to the iface for the current module. + tm <- tmrTypechecked <$> use_ TypeCheck nfp + let rdr_env = tcg_rdr_env tm + let linkable_hsc = loadModulesHome (map (addRdrEnv . 
linkableHomeMod) linkables) deps_hsc + addRdrEnv hmi + | iface <- hm_iface hmi + , ms_mod ms == mi_module iface +#if MIN_VERSION_ghc(9,11,0) + = hmi { hm_iface = set_mi_top_env (Just $ IfaceTopEnv (forceGlobalRdrEnv (globalRdrEnvLocal rdr_env)) (mkIfaceImports $ tcg_import_decls tm)) iface} +#else + = hmi { hm_iface = iface { mi_globals = Just $! +#if MIN_VERSION_ghc(9,8,0) + forceGlobalRdrEnv +#endif + rdr_env + }} +#endif + | otherwise = hmi + + return (ms, linkable_hsc) + -- Bit awkward we need to use evalGhcEnv here but setContext requires to run + -- in the Ghc monad + env2 <- liftIO $ evalGhcEnv env1 $ do + setContext [Compat.IIModule (moduleName (ms_mod ms))] + let df = flip xopt_set LangExt.ExtendedDefaultRules + . flip xopt_unset LangExt.MonomorphismRestriction + . flip gopt_set Opt_ImplicitImportQualified + . flip gopt_unset Opt_DiagnosticsShowCaret + . setBackend ghciBackend + $ (ms_hspp_opts ms) { + useColor = Never + , canUseColor = False } + modifyDynFlags (const df) + when needs_quickcheck $ void $ addPackages ["QuickCheck"] + getSession + return env2 + +#if MIN_VERSION_ghc(9,11,0) +mkIfaceImports :: [ImportUserSpec] -> [IfaceImport] +mkIfaceImports = map go + where + go (ImpUserSpec decl ImpUserAll) = IfaceImport decl ImpIfaceAll + go (ImpUserSpec decl (ImpUserExplicit env)) = IfaceImport decl (ImpIfaceExplicit (forceGlobalRdrEnv env)) + go (ImpUserSpec decl (ImpUserEverythingBut ns)) = IfaceImport decl (ImpIfaceEverythingBut ns) +#endif + +addFinalReturn :: Text -> [TextEdit] -> [TextEdit] +addFinalReturn mdlText edits + | not (null edits) && not (T.null mdlText) && T.last mdlText /= '\n' = + finalReturn mdlText : edits + | otherwise = edits + +finalReturn :: Text -> TextEdit +finalReturn txt = + let ls = T.lines txt + l = fromIntegral $ length ls -1 + c = fromIntegral $ T.length $ maybe T.empty snd (unsnoc ls) + p = Position l c + in TextEdit (Range p p) "\n" + +moduleText :: IdeState -> Uri -> ExceptT PluginError (HandlerM config) Text +moduleText state uri = do + contents <- + handleMaybeM (PluginInternalError "mdlText") $ + liftIO $ + runAction "eval.getUriContents" state $ + getUriContents $ + toNormalizedUri uri + pure $ Rope.toText contents + +testsBySection :: [Section] -> [(Section, EvalId, Test)] +testsBySection sections = + [(section, ident, test) + | (ident, section) <- zip [0..] 
sections + , test <- sectionTests section + ] + +type TEnv = String +-- |GHC declarations required for expression evaluation +evalSetup :: Ghc () +evalSetup = do + preludeAsP <- parseImportDecl "import qualified Prelude as P" + context <- getContext + setContext (IIDecl preludeAsP : context) + +runTests :: Recorder (WithPriority Log) -> EvalConfig -> TEnv -> [(Section, Test)] -> Ghc [TextEdit] +runTests recorder EvalConfig{..} e tests = do + df <- getInteractiveDynFlags + evalSetup + when (hasQuickCheck df && needsQuickCheck tests) $ void $ evals recorder True e df propSetup + + mapM (processTest e df) tests + where + processTest :: TEnv -> DynFlags -> (Section, Test) -> Ghc TextEdit + processTest fp df (section, test) = do + let dbg = logWith recorder Debug + let pad = pad_ $ (if isLiterate fp then ("> " `T.append`) else id) $ padPrefix (sectionFormat section) + rs <- runTest e df test + dbg $ LogRunTestResults rs + + let checkedResult = testCheck eval_cfg_diff (section, test) rs + let resultLines = concatMap T.lines checkedResult + + let edit = asEdit (sectionFormat section) test (map pad resultLines) + dbg $ LogRunTestEdits edit + return edit + + -- runTest :: String -> DynFlags -> Loc Test -> Ghc [Text] + runTest _ df test + | not (hasQuickCheck df) && isProperty test = + return $ + singleLine + "Add QuickCheck to your cabal dependencies to run this test." + runTest e df test = evals recorder (eval_cfg_exception && not (isProperty test)) e df (asStatements test) + +asEdit :: Format -> Test -> [Text] -> TextEdit +asEdit (MultiLine commRange) test resultLines + -- A test in a block comment, ending with @-\}@ without newline in-between. + | testRange test ^. L.end . L.line == commRange ^. L.end . L.line + = + TextEdit + (Range + (testRange test ^. L.end) + (resultRange test ^. L.end) + ) + ("\n" <> T.unlines (resultLines <> ["-}"])) +asEdit _ test resultLines = + TextEdit (resultRange test) (T.unlines resultLines) + +{- | +The result of evaluating a test line can be: +* a value +* nothing +* a (possibly multiline) error message + +A value is returned for a correct expression. + +Either a pure value: +>>> 'h' :"askell" +"haskell" + +Or an 'IO a' (output on stdout/stderr is ignored): +>>> print "OK" >> return "ABC" +"ABC" + +Nothing is returned for a correct directive: + +>>>:set -XFlexibleInstances +>>> import Data.Maybe + +Nothing is returned for a correct declaration (let..,x=, data, class) + +>>> let x = 11 +>>> y = 22 +>>> data B = T | F +>>> class C a + +Nothing is returned for an empty line: + +>>> + +A, possibly multi line, error is returned for a wrong declaration, directive or value or an exception thrown by the evaluated code: + +>>>:set -XNonExistent +Some flags have not been recognized: -XNonExistent + +>>> cls C +Variable not in scope: cls :: t0 -> t +Data constructor not in scope: C + +>>> "A +lexical error in string/character literal at end of input + +Exceptions are shown as if printed, but it can be configured to include prefix like +in GHCi or doctest. This allows it to be used as a hack to simulate print until we +get proper IO support. 
See #1977 + +>>> 3 `div` 0 +divide by zero + +>>> error "Something went wrong\nbad times" :: E.SomeException +Something went wrong +bad times + +Or for a value that does not have a Show instance and can therefore not be displayed: +>>> data V = V +>>> V +No instance for (Show V) arising from a use of ‘evalPrint’ +-} +evals :: Recorder (WithPriority Log) -> Bool -> TEnv -> DynFlags -> [Statement] -> Ghc [Text] +evals recorder mark_exception fp df stmts = do + er <- gStrictTry $ mapM eval stmts + return $ case er of + Left err -> errorLines err + Right rs -> concat . catMaybes $ rs + where + dbg = logWith recorder Debug + eval :: Statement -> Ghc (Maybe [Text]) + eval (Located l stmt) + | -- GHCi flags + Just (words -> flags) <- parseSetFlags stmt = do + dbg $ LogEvalFlags flags + ndf <- getInteractiveDynFlags + dbg $ LogEvalPreSetDynFlags ndf + eans <- + liftIO $ try @GhcException $ + parseDynamicFlagsCmdLine ndf + (map (L $ UnhelpfulSpan unhelpfulReason) flags) + dbg $ LogEvalParsedFlags eans + case eans of + Left err -> pure $ Just $ errorLines $ show err + Right (df', ignoreds, warns) -> do + let warnings = do + guard $ not $ null warns + pure $ errorLines $ + prettyWarnings warns + igns = do + guard $ not $ null ignoreds + pure + ["Some flags have not been recognized: " + <> T.pack (intercalate ", " $ map SrcLoc.unLoc ignoreds) + ] + dbg $ LogEvalPostSetDynFlags df' + setSessionAndInteractiveDynFlags df' + pure $ warnings <> igns + | -- A type/kind command + Just (cmd, arg) <- parseGhciLikeCmd $ T.pack stmt = + evalGhciLikeCmd cmd arg + | -- A statement + isStmt pf stmt = + do + dbg $ LogEvalStmtStart stmt + res <- exec stmt l + let r = case res of + Left err -> Just . (if mark_exception then exceptionLines else errorLines) $ err + Right x -> singleLine <$> x + dbg $ LogEvalStmtResult r + return r + | -- An import + isImport pf stmt = + do + dbg $ LogEvalImport stmt + _ <- addImport stmt + return Nothing + | -- A declaration + otherwise = + do + dbg $ LogEvalDeclaration stmt + void $ runDecls stmt + return Nothing + pf = initParserOpts df + unhelpfulReason = UnhelpfulInteractive + exec stmt l = + let opts = execOptions{execSourceFile = fp, execLineNumber = l} + in myExecStmt stmt opts + +needsQuickCheck :: [(Section, Test)] -> Bool +needsQuickCheck = any (isProperty . snd) + +hasQuickCheck :: DynFlags -> Bool +hasQuickCheck df = hasPackage df "QuickCheck" + +singleLine :: String -> [Text] +singleLine s = [T.pack s] + +{- | + Convert error messages to a list of text lines + Remove unnecessary information. +-} +errorLines :: String -> [Text] +errorLines = + dropWhileEnd T.null + . takeWhile (not . (\x -> "CallStack" `T.isPrefixOf` x || "HasCallStack" `T.isPrefixOf` x)) + . T.lines + . T.pack + +{- | + Convert exception messages to a list of text lines + Remove unnecessary information and mark it as exception. + We use '*** Exception:' to make it identical to doctest + output, see #2353. +-} +exceptionLines :: String -> [Text] +exceptionLines = (ix 0 %~ ("*** Exception: " <>)) . errorLines + +{- | +>>> map (pad_ (T.pack "--")) (map T.pack ["2+2",""]) +["--2+2","--"] +-} +pad_ :: Text -> Text -> Text +pad_ prefix = (prefix `T.append`) . convertBlank + +convertBlank :: Text -> Text +convertBlank x + | T.null x = "" + | otherwise = x + +padPrefix :: IsString p => Format -> p +padPrefix SingleLine = "-- " +padPrefix _ = "" + +{- | Resulting @Text@ MUST NOT prefix each line with @--@ + Such comment-related post-process will be taken place + solely in 'evalGhciLikeCmd'. 
+-} +type GHCiLikeCmd = DynFlags -> Text -> Ghc (Maybe Text) + +-- Should we use some sort of trie here? +ghciLikeCommands :: [(Text, GHCiLikeCmd)] +ghciLikeCommands = + [ ("info", doInfoCmd False) + , ("info!", doInfoCmd True) + , ("kind", doKindCmd False) + , ("kind!", doKindCmd True) + , ("type", doTypeCmd) + ] + +evalGhciLikeCmd :: Text -> Text -> Ghc (Maybe [Text]) +evalGhciLikeCmd cmd arg = do + df <- getSessionDynFlags + case lookup cmd ghciLikeCommands + <|> snd + <$> find (T.isPrefixOf cmd . fst) ghciLikeCommands of + Just hndler -> + fmap + T.lines + <$> hndler df arg + _ -> E.throw $ GhciLikeCmdNotImplemented cmd arg + +doInfoCmd :: Bool -> DynFlags -> Text -> Ghc (Maybe Text) +doInfoCmd allInfo dflags s = do + sdocs <- mapM infoThing (T.words s) + pure $ Just $ T.pack $ showSDoc dflags (vcat sdocs) + where + infoThing :: GHC.GhcMonad m => Text -> m SDoc + infoThing (T.unpack -> str) = do + names <- GHC.parseName str + mb_stuffs <- mapM (GHC.getInfo allInfo) names + let filtered = filterOutChildren (\(t,_f,_ci,_fi,_sd) -> t) + (catMaybes $ toList mb_stuffs) + return $ vcat (intersperse (text "") $ map pprInfo filtered) + + filterOutChildren :: (a -> TyThing) -> [a] -> [a] + filterOutChildren get_thing xs + = filter (not . has_parent) xs + where + all_names = mkNameSet (map (getName . get_thing) xs) + has_parent x = case tyThingParent_maybe (get_thing x) of + Just p -> getName p `elemNameSet` all_names + Nothing -> False + + pprInfo :: (TyThing, Fixity, [GHC.ClsInst], [GHC.FamInst], SDoc) -> SDoc + pprInfo (thing, fixity, cls_insts, fam_insts, docs) + = docs + $$ pprTyThingInContextLoc thing + $$ showFixity thing fixity + $$ vcat (map GHC.pprInstance cls_insts) + $$ vcat (map GHC.pprFamInst fam_insts) + + pprTyThingInContextLoc :: TyThing -> SDoc + pprTyThingInContextLoc tyThing + = showWithLoc (pprDefinedAt (getName tyThing)) + (pprTyThingInContext showToHeader tyThing) + + showWithLoc :: SDoc -> SDoc -> SDoc + showWithLoc loc doc + = hang doc 2 (text "\t--" <+> loc) + + showFixity :: TyThing -> Fixity -> SDoc + showFixity thing fixity + | fixity /= GHC.defaultFixity || isSymOcc (getOccName thing) + = ppr fixity <+> pprInfixName (GHC.getName thing) + | otherwise = empty + +doKindCmd :: Bool -> DynFlags -> Text -> Ghc (Maybe Text) +doKindCmd False df arg = do + let input = T.strip arg + (_, kind) <- typeKind False $ T.unpack input + let kindText = text (T.unpack input) <+> "::" <+> pprSigmaType kind + pure $ Just $ T.pack (showSDoc df kindText) +doKindCmd True df arg = do + let input = T.strip arg + (ty, kind) <- typeKind True $ T.unpack input + let kindDoc = text (T.unpack input) <+> "::" <+> pprSigmaType kind + tyDoc = "=" <+> pprSigmaType ty + pure $ Just $ T.pack (showSDoc df $ kindDoc $$ tyDoc) + +doTypeCmd :: DynFlags -> Text -> Ghc (Maybe Text) +doTypeCmd dflags arg = do + let (emod, expr) = parseExprMode arg + ty <- GHC.exprType emod $ T.unpack expr + let rawType = T.strip $ T.pack $ showSDoc dflags $ pprSigmaType ty + broken = T.any (\c -> c == '\r' || c == '\n') rawType + pure $ + Just $ + if broken + then + T.pack $ + showSDoc dflags $ + text (T.unpack expr) + $$ nest 2 ("::" <+> pprSigmaType ty) + else expr <> " :: " <> rawType <> "\n" + +parseExprMode :: Text -> (TcRnExprMode, T.Text) +parseExprMode rawArg = case T.break isSpace rawArg of + ("+d", rest) -> (TM_Default, T.strip rest) + _ -> (TM_Inst, rawArg) + +data GhciLikeCmdException = GhciLikeCmdNotImplemented + { ghciCmdName :: Text + , ghciCmdArg :: Text + } + +instance Show GhciLikeCmdException where + 
showsPrec _ GhciLikeCmdNotImplemented{..} = + showString "unknown command '" + . showString (T.unpack ghciCmdName) + . showChar '\'' + +instance E.Exception GhciLikeCmdException + +{- +>>> parseGhciLikeCmd (T.pack ":kind! N + M + 1") +Just ("kind!","N + M + 1") +>>> parseGhciLikeCmd (T.pack ":kind a") +Just ("kind","a") +-} +parseGhciLikeCmd :: Text -> Maybe (Text, Text) +parseGhciLikeCmd input = do + (':', rest) <- T.uncons $ T.stripStart input + pure $ second T.strip $ T.break isSpace rest diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Parse/Comments.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Parse/Comments.hs new file mode 100644 index 0000000000..6f8b303302 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Parse/Comments.hs @@ -0,0 +1,572 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} + +module Ide.Plugin.Eval.Parse.Comments where + +import qualified Control.Applicative.Combinators.NonEmpty as NE +import Control.Arrow (first, (&&&), (>>>)) +import Control.Lens (lensField, lensRules, + view, (.~), (^.)) +import Control.Lens.Extras (is) +import Control.Lens.TH (makeLensesWith, + makePrisms, + mappingNamer) +import Control.Monad (guard, void, when) +import Control.Monad.Combinators () +import Control.Monad.Reader (ask) +import Control.Monad.Trans.Reader (Reader, runReader) +import qualified Data.Char as C +import qualified Data.DList as DL +import qualified Data.Foldable as F +import Data.Function ((&)) +import Data.Functor ((<&>)) +import Data.Functor.Identity +import Data.List.NonEmpty (NonEmpty ((:|))) +import qualified Data.List.NonEmpty as NE hiding (unzip) +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Data.Void (Void) +import GHC.Generics hiding (UInt, to) +import Ide.Plugin.Eval.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types + +import qualified Text.Megaparsec as P +import Text.Megaparsec +import Text.Megaparsec.Char (alphaNumChar, char, + eol, hspace, + letterChar) + +#if MIN_VERSION_base(4,19,0) +import qualified Data.Functor as NE (unzip) +#else +import qualified Data.List.NonEmpty as NE (unzip) +#endif + +{- +We build parsers combining the following three kinds of them: + + * Line parser - paring a single line into an input, + works both for line- and block-comments. + A line should be a proper content of lines contained in comment: + doesn't include starting @--@ and @{\-@ and no ending @-\}@ + + * Line comment group parser: parses a contiguous group of + tuples of position and line comment into sections of line comments. + Each input MUST start with @--@. + + * Block comment parser: Parsing entire block comment into sections. + Input must be surrounded by @{\-@ and @-\}@. +-} + +-- | Line parser +type LineParser a = forall m. ParsecT Void String m a + +-- | Line comment group parser +type LineGroupParser = Parsec Void [(Range, RawLineComment)] + +data BlockEnv = BlockEnv + { isLhs :: Bool + , blockRange :: Range + } + deriving (Show, Eq, Ord) + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . 
(++ "L"))) + ''BlockEnv + +-- | Block comment parser +type BlockCommentParser = ParsecT Void String (Reader BlockEnv) + +-- | Prop line, with "prop>" stripped off +newtype PropLine = PropLine {getPropLine :: String} + deriving (Show) + +-- | Example line, with @>>>@ stripped off +newtype ExampleLine = ExampleLine {getExampleLine :: String} + deriving (Show) + +data TestComment + = AProp + { testCommentRange :: Range + , lineProp :: PropLine + , propResults :: [String] + } + | AnExample + { testCommentRange :: Range + , lineExamples :: NonEmpty ExampleLine + , exampleResults :: [String] + } + deriving (Show) + +-- | Classification of comments +data CommentFlavour = Vanilla | HaddockNext | HaddockPrev | Named String + deriving (Read, Show, Eq, Ord) + +-- | Single line or block comments? +data CommentStyle = Line | Block Range + deriving (Show, Eq, Ord, Generic) + +makePrisms ''CommentStyle + +commentsToSections :: + -- | True if it is literate Haskell + Bool -> + Comments -> + Sections +commentsToSections isLHS Comments {..} = + let (lineSectionSeeds, lineSetupSeeds) = + foldMap + ( \lcs -> + let theRan = + Range + (view L.start $ fst $ NE.head lcs) + (view L.end $ fst $ NE.last lcs) + in case parseMaybe lineGroupP $ NE.toList lcs of + Nothing -> mempty + Just (mls, rs) -> + ( maybe mempty (Map.singleton theRan) mls + , -- orders setup sections in ascending order + if null rs + then mempty + else + Map.singleton theRan $ + DL.singleton (Line, rs) + ) + ) + $ groupLineComments $ + Map.filterWithKey + -- FIXME: + -- To comply with the initial behaviour of + -- Extended Eval Plugin; + -- but it also rejects modules with + -- non-zero base indentation level! + ( \pos _ -> + if isLHS + then pos ^. L.start . L.character == 2 + else pos ^. L.start . L.character == 0 + ) + lineComments + (blockSeed, blockSetupSeeds) = + foldMap + ( \(ran, lcs) -> + case parseBlockMaybe isLHS ran blockCommentBP $ + getRawBlockComment lcs of + Nothing -> mempty + Just (Named "setup", grp) -> + -- orders setup sections in ascending order + ( mempty + , Map.singleton ran $ + DL.singleton (Block ran, grp) + ) + Just grp -> + ( Map.singleton ran grp + , mempty + ) + ) + -- It seems Extended Eval Plugin doesn't constraint + -- starting indentation level for block comments. + -- Rather, it constrains the indentation level /inside/ + -- block comment body. + $ Map.toList blockComments + lineSections = + lineSectionSeeds <&> uncurry (testsToSection Line) + multilineSections = + Map.mapWithKey + (uncurry . testsToSection . Block) + blockSeed + setupSections = + -- Setups doesn't need Dummy position + map + ( \(style, tests) -> + testsToSection + style + (Named "setup") + tests + ) + $ DL.toList $ + F.fold $ + Map.unionWith (<>) lineSetupSeeds blockSetupSeeds + nonSetupSections = F.toList $ lineSections `Map.union` multilineSections + in Sections {..} + +parseBlockMaybe :: Bool -> Range -> BlockCommentParser a -> String -> Maybe a +parseBlockMaybe isLhs blockRange p i = + case runReader (runParserT p' "" i) BlockEnv {..} of + Left {} -> Nothing + Right a -> Just a + where + p' = do + updateParserState $ \st -> + st + { statePosState = + (statePosState st) + { pstateSourcePos = positionToSourcePos $ blockRange ^. 
L.start + } + } + p + +type CommentRange = Range + +type SectionRange = Range + +testsToSection :: + CommentStyle -> + CommentFlavour -> + [TestComment] -> + Section +testsToSection style flav tests = + let sectionName + | Named name <- flav = name + | otherwise = "" + sectionLanguage = case flav of + HaddockNext -> Haddock + HaddockPrev -> Haddock + _ -> Plain + sectionTests = map fromTestComment tests + sectionFormat = + case style of + Line -> SingleLine + Block ran -> MultiLine ran + in Section {..} + +fromTestComment :: TestComment -> Test +fromTestComment AProp {..} = + Property + { testline = getPropLine lineProp + , testOutput = propResults + , testRange = testCommentRange + } +fromTestComment AnExample {..} = + Example + { testLines = getExampleLine <$> lineExamples + , testOutput = exampleResults + , testRange = testCommentRange + } + +-- * Block comment parser + +{- $setup +>>> dummyPos = Position 0 0 +>>> parseE p = either (error . errorBundlePretty) id . parse p "" +-} + +-- >>> parseE (blockCommentBP True dummyPos) "{- |\n >>> 5+5\n 11\n -}" +-- (HaddockNext,[AnExample {testCommentRange = Position {_line = 1, _character = 0}, lineExamples = ExampleLine {getExampleLine = " 5+5"} :| [], exampleResults = [" 11"]}]) + +blockCommentBP :: + -- | True if Literate Haskell + BlockCommentParser (CommentFlavour, [TestComment]) +blockCommentBP = do + skipCount 2 anySingle -- "{-" + void $ optional $ char ' ' + flav <- commentFlavourP + hit <- skipNormalCommentBlock + if hit + then do + body <- + many $ + (blockExamples <|> blockProp) + <* skipNormalCommentBlock + void takeRest -- just consume the rest + pure (flav, body) + else pure (flav, []) + +skipNormalCommentBlock :: BlockCommentParser Bool +skipNormalCommentBlock = do + BlockEnv {..} <- ask + skipManyTill (normalLineP isLhs $ Block blockRange) $ + False <$ try (optional (chunk "-}") *> eof) + <|> True <$ lookAhead (try $ testSymbol isLhs $ Block blockRange) + +testSymbol :: Bool -> CommentStyle -> LineParser () +testSymbol isLHS style = + -- FIXME: To comply with existing Extended Eval Plugin Behaviour; + -- it must skip one space after a comment! + -- This prevents Eval Plugin from working on + -- modules with non-standard base indentation-level. + when (isLHS && is _Block style) (void $ count' 0 2 $ char ' ') + *> (exampleSymbol <|> propSymbol) + +eob :: LineParser () +eob = eof <|> try (optional (chunk "-}") *> eof) <|> void eol + +blockExamples + , blockProp :: + BlockCommentParser TestComment +blockExamples = do + BlockEnv {..} <- ask + (ran, examples) <- withRange $ NE.some $ exampleLineStrP isLhs $ Block blockRange + AnExample ran examples <$> resultBlockP +blockProp = do + BlockEnv {..} <- ask + (ran, Identity prop) <- withRange $ fmap Identity $ propLineStrP isLhs $ Block blockRange + AProp ran prop <$> resultBlockP + +withRange :: + (TraversableStream s, Ord v, Traversable t) => + ParsecT v s m (t (a, Position)) -> + ParsecT v s m (Range, t a) +withRange p = do + beg <- sourcePosToPosition <$> getSourcePos + as <- p + let fin + | null as = beg + | otherwise = snd $ last $ F.toList as + pure (Range beg fin, fst <$> as) + +resultBlockP :: BlockCommentParser [String] +resultBlockP = do + BlockEnv {..} <- ask + many $ + fmap fst $ nonEmptyNormalLineP isLhs $ + Block blockRange + +positionToSourcePos :: Position -> SourcePos +positionToSourcePos pos = + P.SourcePos + { sourceName = "" + , sourceLine = P.mkPos $ fromIntegral $ 1 + pos ^. L.line + , sourceColumn = P.mkPos $ fromIntegral $ 1 + pos ^. 
L.character + } + +sourcePosToPosition :: SourcePos -> Position +sourcePosToPosition SourcePos {..} = + Position (fromIntegral $ unPos sourceLine - 1) (fromIntegral $ unPos sourceColumn - 1) + +-- * Line Group Parser + +{- | +Result: a tuple of ordinary line tests and setting sections. + +TODO: Haddock comment can adjacent to vanilla comment: + + @ + -- Vanilla comment + -- Another vanilla + -- | This parses as Haddock comment as GHC + @ + +This behaviour is not yet handled correctly in Eval Plugin; +but for future extension for this, we use a tuple here instead of 'Either'. +-} +lineGroupP :: + LineGroupParser + (Maybe (CommentFlavour, [TestComment]), [TestComment]) +lineGroupP = do + (_, flav) <- lookAhead $ parseLine (commentFlavourP <* takeRest) + case flav of + Named "setup" -> (Nothing,) <$> lineCommentSectionsP + flav -> (,mempty) . Just . (flav,) <$> lineCommentSectionsP + +-- >>> parse (lineGroupP <*eof) "" $ (dummyPosition, ) . RawLineComment <$> ["-- a", "-- b"] +-- Variable not in scope: dummyPosition :: Position + +commentFlavourP :: LineParser CommentFlavour +commentFlavourP = + P.option + Vanilla + ( HaddockNext <$ char '|' + <|> HaddockPrev <$ char '^' + <|> Named <$ char '$' + <* optional hspace + <*> ((:) <$> letterChar <*> P.many alphaNumChar) + ) + <* optional (char ' ') + +lineCommentHeadP :: LineParser () +lineCommentHeadP = do + -- and no operator symbol character follows. + void $ chunk "--" + skipMany $ char '-' + void $ optional $ char ' ' + +lineCommentSectionsP :: + LineGroupParser [TestComment] +lineCommentSectionsP = do + skipMany normalLineCommentP + many $ + exampleLinesGP + <|> uncurry AProp <$> propLineGP <*> resultLinesP + <* skipMany normalLineCommentP + +lexemeLine :: LineGroupParser a -> LineGroupParser a +lexemeLine p = p <* skipMany normalLineCommentP + +resultLinesP :: LineGroupParser [String] +resultLinesP = many nonEmptyLGP + +normalLineCommentP :: LineGroupParser (Range, String) +normalLineCommentP = + parseLine (fst <$ commentFlavourP <*> normalLineP False Line) + +nonEmptyLGP :: LineGroupParser String +nonEmptyLGP = + try $ + fmap snd $ + parseLine $ + fst <$ commentFlavourP <*> nonEmptyNormalLineP False Line + +exampleLinesGP :: LineGroupParser TestComment +exampleLinesGP = + lexemeLine $ + uncurry AnExample . first convexHullRange . NE.unzip + <$> NE.some exampleLineGP + <*> resultLinesP + +convexHullRange :: NonEmpty Range -> Range +convexHullRange nes = + Range (NE.head nes ^. L.start) (NE.last nes ^. L.end) + +exampleLineGP :: LineGroupParser (Range, ExampleLine) +exampleLineGP = + -- In line-comments, indentation-level inside comment doesn't matter. + parseLine (fst <$ commentFlavourP <*> exampleLineStrP False Line) + +propLineGP :: LineGroupParser (Range, PropLine) +propLineGP = + -- In line-comments, indentation-level inside comment doesn't matter. + parseLine (fst <$ commentFlavourP <*> propLineStrP False Line) + +{- | +Turning a line parser into line group parser consuming a single line comment. +Parses a sinlge line comment, skipping prefix "--[-*]" with optional one horizontal space. +fails if the input does not start with "--". + +__N.B.__ We don't strip comment flavours. + +>>> pck = (:[]).(:[]) . 
RawLineComment + +>>> parseMaybe (parseLine $ takeRest) $ pck "-- >>> A" +Just [">>> A"] + +>>> parseMaybe (parseLine $ takeRest) $ pck "--- >>> A" +Just [" >>> A"] + +>>> parseMaybe (parseLine takeRest) $ pck "" +Nothing +-} +parseLine :: + (Ord (f RawLineComment), Traversable f) => + LineParser a -> + Parsec Void [f RawLineComment] (f a) +parseLine p = + P.token + (mapM $ parseMaybe (lineCommentHeadP *> p) . getRawLineComment) + mempty + +-- * Line Parsers + +-- | Non-empty normal line. +nonEmptyNormalLineP :: + -- | True if Literate Haskell + Bool -> + CommentStyle -> + LineParser (String, Position) +nonEmptyNormalLineP isLHS style = try $ do + (ln, pos) <- normalLineP isLHS style + guard $ + case style of + Block{} -> T.strip (T.pack ln) `notElem` ["{-", "-}", ""] + _ -> not $ all C.isSpace ln + pure (ln, pos) + +{- | Normal line is a line neither a example nor prop. + Empty line is normal. +-} +normalLineP :: + -- | True if Literate Haskell + Bool -> + CommentStyle -> + LineParser (String, Position) +normalLineP isLHS style = do + notFollowedBy + (try $ testSymbol isLHS style) + when (isLHS && is _Block style) $ + void $ count' 0 2 $ char ' ' + consume style + +consume :: CommentStyle -> LineParser (String, Position) +consume style = + case style of + Line -> (,) <$> takeRest <*> getPosition + Block {} -> manyTill_ anySingle (getPosition <* eob) + +getPosition :: (Ord v, TraversableStream s) => ParsecT v s m Position +getPosition = sourcePosToPosition <$> getSourcePos + +-- | Parses example test line. +exampleLineStrP :: + -- | True if Literate Haskell + Bool -> + CommentStyle -> + LineParser (ExampleLine, Position) +exampleLineStrP isLHS style = + try $ + -- FIXME: To comply with existing Extended Eval Plugin Behaviour; + -- it must skip one space after a comment! + -- This prevents Eval Plugin from working on + -- modules with non-standard base indentation-level. + when (isLHS && is _Block style) (void $ count' 0 2 $ char ' ') + *> exampleSymbol + *> (first ExampleLine <$> consume style) + +exampleSymbol :: LineParser () +exampleSymbol = + chunk ">>>" *> P.notFollowedBy (char '>') + +propSymbol :: LineParser () +propSymbol = chunk "prop>" *> P.notFollowedBy (char '>') + +-- | Parses prop test line. +propLineStrP :: + -- | True if Literate HAskell + Bool -> + CommentStyle -> + LineParser (PropLine, Position) +propLineStrP isLHS style = + -- FIXME: To comply with existing Extended Eval Plugin Behaviour; + -- it must skip one space after a comment! + -- This prevents Eval Plugin from working on + -- modules with non-standard base indentation-level. + when (isLHS && is _Block style) (void $ count' 0 2 $ char ' ') + *> chunk "prop>" + *> P.notFollowedBy (char '>') + *> (first PropLine <$> consume style) + +-- * Utilities + +{- | +Given a sequence of tokens increasing in their starting position, +groups them into sublists consisting of contiguous tokens; +Two adjacent tokens are considered to be contiguous if + + * line number increases by 1, and + * they have same starting column. 
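+
+In the example below, the tokens on lines 3, 4 and 5 all start at column 4 and
+have consecutive line numbers, so they form a single group; 'groupLineComments'
+applies this grouping to the start line and column of each line comment's
+'Range' to gather runs of adjacent comments before parsing.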
+ +>>> contiguousGroupOn id [(1,2),(2,2),(3,4),(4,4),(5,4),(7,0),(8,0)] +[(1,2) :| [(2,2)],(3,4) :| [(4,4),(5,4)],(7,0) :| [(8,0)]] +-} +contiguousGroupOn :: (a -> (UInt, UInt)) -> [a] -> [NonEmpty a] +contiguousGroupOn toLineCol = foldr step [] + where + step a [] = [pure a] + step a bss0@((b :| bs) : bss) + | let (aLine, aCol) = toLineCol a + , let (bLine, bCol) = toLineCol b + , aLine + 1 == bLine && aCol == bCol = + (a :| b : bs) : bss + | otherwise = pure a : bss0 + +{- | Given a map from positions, divides them into subgroup + with contiguous line and columns. +-} +groupLineComments :: + Map Range a -> [NonEmpty (Range, a)] +groupLineComments = + contiguousGroupOn (fst >>> view L.start >>> view L.line &&& view L.character) + . Map.toList diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Parse/Option.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Parse/Option.hs new file mode 100644 index 0000000000..ec2b6561a8 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Parse/Option.hs @@ -0,0 +1,40 @@ +{-# OPTIONS_GHC -Wwarn #-} + +-- | GHC language options parser +module Ide.Plugin.Eval.Parse.Option ( + langOptions, + parseSetFlags, +) where + +import Control.Arrow (left) +import Data.Void (Void) +import Text.Megaparsec +import Text.Megaparsec.Char + +{- | +>>> langOptions ":set -XBinaryLiterals -XOverloadedStrings " +Right ["BinaryLiterals","OverloadedStrings"] + +>>> langOptions ":set" +Right [] + +>>> langOptions "" +Left "No match" +-} +langOptions :: String -> Either String [String] +langOptions = + left errorBundlePretty + . parse (space *> languageOpts <* eof) "" + +parseSetFlags :: String -> Maybe String +parseSetFlags = parseMaybe + (hspace *> chunk ":set" + *> hspace1 *> takeRest + :: Parsec Void String String + ) + +-- >>> parseMaybe languageOpts ":set -XBinaryLiterals -XOverloadedStrings" +-- Just ["BinaryLiterals","OverloadedStrings"] +languageOpts :: Parsec Void String [String] +languageOpts = string ":set" *> space1 + *> many (string "-X" *> many letterChar <* space) diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs new file mode 100644 index 0000000000..d01ddbc55c --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Rules.hs @@ -0,0 +1,130 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE PatternSynonyms #-} + +module Ide.Plugin.Eval.Rules (GetEvalComments(..), rules,queueForEvaluation, unqueueForEvaluation, Log) where + +import Control.Lens (toListOf) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import qualified Data.ByteString as BS +import Data.Data.Lens (biplate) +import Data.HashSet (HashSet) +import qualified Data.HashSet as Set +import Data.IORef +import qualified Data.Map.Strict as Map +import Data.String (fromString) +import Development.IDE (GetParsedModuleWithComments (GetParsedModuleWithComments), + IdeState, + LinkableType (BCOLinkable), + NeedsCompilation (NeedsCompilation), + NormalizedFilePath, + RuleBody (RuleNoDiagnostics), + Rules, defineEarlyCutoff, + encodeLinkableType, + fromNormalizedFilePath, + realSrcSpanToRange, + useWithStale_, use_) +import Development.IDE.Core.PositionMapping (toCurrentRange) +import Development.IDE.Core.Rules (needsCompilationRule) +import Development.IDE.Core.Shake (IsIdeGlobal, + RuleBody (RuleWithCustomNewnessCheck), + addIdeGlobal, + getIdeGlobalAction, + getIdeGlobalState) +import Development.IDE.GHC.Compat +import qualified Development.IDE.GHC.Compat as SrcLoc +import qualified 
Development.IDE.GHC.Compat.Util as FastString +import Development.IDE.Graph (alwaysRerun) +import GHC.Parser.Annotation +import Ide.Logger (Recorder, WithPriority, + cmapWithPrio) +import Ide.Plugin.Eval.Types + + +rules :: Recorder (WithPriority Log) -> Rules () +rules recorder = do + evalParsedModuleRule recorder + redefinedNeedsCompilation recorder + isEvaluatingRule recorder + addIdeGlobal . EvaluatingVar =<< liftIO(newIORef mempty) + +newtype EvaluatingVar = EvaluatingVar (IORef (HashSet NormalizedFilePath)) +instance IsIdeGlobal EvaluatingVar + +queueForEvaluation :: IdeState -> NormalizedFilePath -> IO () +queueForEvaluation ide nfp = do + EvaluatingVar var <- getIdeGlobalState ide + atomicModifyIORef' var (\fs -> (Set.insert nfp fs, ())) + +unqueueForEvaluation :: IdeState -> NormalizedFilePath -> IO () +unqueueForEvaluation ide nfp = do + EvaluatingVar var <- getIdeGlobalState ide + -- remove the module from the Evaluating state, so that next time it won't evaluate to True + atomicModifyIORef' var $ \fs -> (Set.delete nfp fs, ()) + +apiAnnComments' :: ParsedModule -> [SrcLoc.RealLocated EpaCommentTok] +apiAnnComments' pm = do + L span (EpaComment c _) <- getEpaComments $ pm_parsed_source pm + pure (L ( +#if MIN_VERSION_ghc(9,11,0) + epaLocationRealSrcSpan +#else + anchor +#endif + span) c) + where + getEpaComments :: Development.IDE.GHC.Compat.Located (HsModule GhcPs) -> [LEpaComment] + getEpaComments = toListOf biplate + +pattern RealSrcSpanAlready :: SrcLoc.RealSrcSpan -> SrcLoc.RealSrcSpan +pattern RealSrcSpanAlready x = x + +evalParsedModuleRule :: Recorder (WithPriority Log) -> Rules () +evalParsedModuleRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \GetEvalComments nfp -> do + (pm, posMap) <- useWithStale_ GetParsedModuleWithComments nfp + let comments = foldMap (\case + L (RealSrcSpanAlready real) bdy + | FastString.unpackFS (srcSpanFile real) == + fromNormalizedFilePath nfp + , let ran0 = realSrcSpanToRange real + , Just curRan <- toCurrentRange posMap ran0 + -> + + -- since Haddock parsing is unset explicitly in 'getParsedModuleWithComments', + -- we can concentrate on these two + case bdy of + EpaLineComment cmt -> + mempty { lineComments = Map.singleton curRan (RawLineComment cmt) } + EpaBlockComment cmt -> + mempty { blockComments = Map.singleton curRan $ RawBlockComment cmt } + _ -> mempty + _ -> mempty + ) + $ apiAnnComments' pm + -- we only care about whether the comments are null + -- this is valid because the only dependent is NeedsCompilation + fingerPrint = fromString $ if nullComments comments then "" else "1" + return (Just fingerPrint, Just comments) + +isEvaluatingRule :: Recorder (WithPriority Log) -> Rules () +isEvaluatingRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsEvaluating f -> do + alwaysRerun + EvaluatingVar var <- getIdeGlobalAction + b <- liftIO $ (f `Set.member`) <$> readIORef var + return (Just (if b then BS.singleton 1 else BS.empty), Just b) + +-- Redefine the NeedsCompilation rule to set the linkable type to Just _ +-- whenever the module is being evaluated +-- This will ensure that the modules are loaded with linkables +-- and the interactive session won't try to compile them on the fly, +-- leading to much better performance of the evaluate code lens +redefinedNeedsCompilation :: Recorder (WithPriority Log) -> Rules () +redefinedNeedsCompilation recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleWithCustomNewnessCheck (<=) $ 
\NeedsCompilation f -> do + isEvaluating <- use_ IsEvaluating f + if isEvaluating then do + let linkableType = BCOLinkable + fp = encodeLinkableType $ Just linkableType + pure (Just fp, Just (Just linkableType)) + else + needsCompilationRule f + diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs new file mode 100644 index 0000000000..1753ab4e6c --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Types.hs @@ -0,0 +1,246 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -Wwarn #-} + +module Ide.Plugin.Eval.Types + ( Log(..), + locate, + locate0, + Test (..), + isProperty, + Format (..), + Language (..), + Section (..), + Sections (..), + hasTests, + hasPropertyTest, + splitSections, + Loc, + Located (..), + Comments (..), + RawBlockComment (..), + RawLineComment (..), + unLoc, + Txt, + EvalParams(..), + GetEvalComments(..), + IsEvaluating(..), + nullComments) +where + +import Control.Arrow ((>>>)) +import Control.DeepSeq (deepseq) +import Control.Lens +import Data.Aeson (FromJSON, ToJSON) +import Data.List (partition) +import Data.List.NonEmpty (NonEmpty) +import Data.Map.Strict (Map) +import Data.String (IsString (..)) +import qualified Data.Text as T +import Development.IDE (Range, RuleResult) +import qualified Development.IDE.Core.Shake as Shake +import qualified Development.IDE.GHC.Compat.Core as Core +import Development.IDE.Graph.Classes +import GHC.Generics (Generic) +import Ide.Logger +import Ide.Plugin.Eval.GHC (showDynFlags) +import Ide.Plugin.Eval.Util +import Language.LSP.Protocol.Types (TextDocumentIdentifier, + TextEdit) +import qualified System.Time.Extra as Extra +import qualified Text.Megaparsec as P + +data Log + = LogShake Shake.Log + | LogCodeLensFp FilePath + | LogCodeLensComments Comments + | LogExecutionTime T.Text Extra.Seconds + | LogTests !Int !Int !Int !Int + | LogRunTestResults [T.Text] + | LogRunTestEdits TextEdit + | LogEvalFlags [String] + | LogEvalPreSetDynFlags Core.DynFlags + | LogEvalParsedFlags + (Either + Core.GhcException + (Core.DynFlags, [Core.Located String], DynFlagsParsingWarnings)) + | LogEvalPostSetDynFlags Core.DynFlags + | LogEvalStmtStart String + | LogEvalStmtResult (Maybe [T.Text]) + | LogEvalImport String + | LogEvalDeclaration String + +instance Pretty Log where + pretty = \case + LogShake shakeLog -> pretty shakeLog + LogCodeLensFp fp -> "fp" <+> pretty fp + LogCodeLensComments comments -> "comments" <+> viaShow comments + LogExecutionTime lbl duration -> pretty lbl <> ":" <+> pretty (Extra.showDuration duration) + LogTests nTests nNonSetupSections nSetupSections nLenses -> "Tests" <+> fillSep + [ pretty nTests + , "tests in" + , pretty nNonSetupSections + , "sections" + , pretty nSetupSections + , "setups" + , pretty nLenses + , "lenses." 
+ ] + LogRunTestResults results -> "TEST RESULTS" <+> viaShow results + LogRunTestEdits edits -> "TEST EDIT" <+> viaShow edits + LogEvalFlags flags -> "{:SET" <+> pretty flags + LogEvalPreSetDynFlags dynFlags -> "pre set" <+> pretty (showDynFlags dynFlags) + LogEvalParsedFlags eans -> "parsed flags" <+> viaShow (eans + <&> (_1 %~ showDynFlags >>> _3 %~ prettyWarnings)) + LogEvalPostSetDynFlags dynFlags -> "post set" <+> pretty (showDynFlags dynFlags) + LogEvalStmtStart stmt -> "{STMT" <+> pretty stmt + LogEvalStmtResult result -> "STMT}" <+> pretty result + LogEvalImport stmt -> "{IMPORT" <+> pretty stmt + LogEvalDeclaration stmt -> "{DECL" <+> pretty stmt + +-- | A thing with a location attached. +data Located l a = Located {location :: l, located :: a} + deriving (Eq, Show, Ord, Functor, Generic, FromJSON, ToJSON) + +-- | Discard location information. +unLoc :: Located l a -> a +unLoc (Located _ a) = a + +instance (NFData l, NFData a) => NFData (Located l a) where + rnf (Located loc a) = loc `deepseq` a `deepseq` () + +type Loc = Located Line + +type Line = Int + +locate :: Loc [a] -> [Loc a] +locate (Located l tst) = zipWith Located [l ..] tst + +locate0 :: [a] -> [Loc a] +locate0 = locate . Located 0 + +type Txt = String + +data Sections = Sections + { nonSetupSections :: [Section] + , setupSections :: [Section] + } + deriving (Show, Eq, Generic) + +data Section = Section + { sectionName :: Txt + , sectionTests :: [Test] + , sectionLanguage :: Language + , sectionFormat :: Format + } + deriving (Eq, Show, Generic, FromJSON, ToJSON, NFData) + +hasTests :: Section -> Bool +hasTests = not . null . sectionTests + +hasPropertyTest :: Section -> Bool +hasPropertyTest = any isProperty . sectionTests + +-- |Split setup and normal sections +splitSections :: [Section] -> ([Section], [Section]) +splitSections = partition ((== "setup") . 
sectionName) + +data Test + = Example {testLines :: NonEmpty Txt, testOutput :: [Txt], testRange :: Range} + | Property {testline :: Txt, testOutput :: [Txt], testRange :: Range} + deriving (Eq, Show, Generic, FromJSON, ToJSON, NFData) + +data IsEvaluating = IsEvaluating + deriving (Eq, Show, Generic) +instance Hashable IsEvaluating +instance NFData IsEvaluating + +type instance RuleResult IsEvaluating = Bool + +data GetEvalComments = GetEvalComments + deriving (Eq, Show, Generic) +instance Hashable GetEvalComments +instance NFData GetEvalComments + +type instance RuleResult GetEvalComments = Comments +data Comments = Comments + { lineComments :: Map Range RawLineComment + , blockComments :: Map Range RawBlockComment + } + deriving (Show, Eq, Ord, Generic) + +nullComments :: Comments -> Bool +nullComments Comments{..} = null lineComments && null blockComments + +instance NFData Comments + +newtype RawBlockComment = RawBlockComment {getRawBlockComment :: String} + deriving (Show, Eq, Ord) + deriving newtype + ( IsString + , P.Stream + , P.TraversableStream + , P.VisualStream + , Semigroup + , Monoid + , NFData + ) + +newtype RawLineComment = RawLineComment {getRawLineComment :: String} + deriving (Show, Eq, Ord) + deriving newtype + ( IsString + , P.Stream + , P.TraversableStream + , P.VisualStream + , Semigroup + , Monoid + , NFData + ) + +instance Semigroup Comments where + Comments ls bs <> Comments ls' bs' = Comments (ls <> ls') (bs <> bs') + +instance Monoid Comments where + mempty = Comments mempty mempty + +isProperty :: Test -> Bool +isProperty Property {} = True +isProperty _ = False + +data Format + = SingleLine + | -- | @Range@ is that of surrounding entire block comment, not section. + -- Used for detecting no-newline test commands. + MultiLine Range + deriving (Eq, Show, Ord, Generic, FromJSON, ToJSON, NFData) + +data Language = Plain | Haddock deriving (Eq, Show, Generic, Ord, FromJSON, ToJSON, NFData) + +data ExpectedLine = ExpectedLine [LineChunk] | WildCardLine + deriving (Eq, Show, Generic, FromJSON, ToJSON, NFData) + +instance IsString ExpectedLine where + fromString = ExpectedLine . return . 
LineChunk + +data LineChunk = LineChunk String | WildCardChunk + deriving (Eq, Show, Generic, FromJSON, ToJSON, NFData) + +instance IsString LineChunk where + fromString = LineChunk + +type EvalId = Int + +-- | Specify the test section to execute +data EvalParams = EvalParams + { sections :: [Section] + , module_ :: !TextDocumentIdentifier + , evalId :: !EvalId -- ^ unique group id; for test uses + } + deriving (Eq, Show, Generic, FromJSON, ToJSON) diff --git a/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs new file mode 100644 index 0000000000..9498076511 --- /dev/null +++ b/plugins/hls-eval-plugin/src/Ide/Plugin/Eval/Util.hs @@ -0,0 +1,101 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +-- | Debug utilities +module Ide.Plugin.Eval.Util ( + timed, + isLiterate, + response', + gStrictTry, + DynFlagsParsingWarnings, + prettyWarnings, +) where + +import Control.Exception (SomeException, evaluate, + fromException) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..), + runExceptT) +import Data.Aeson (Value) +import Data.String (IsString (fromString)) +import Development.IDE.GHC.Compat.Outputable +import Development.IDE.GHC.Compat.Util (MonadCatch, bagToList, + catch) +import Ide.Plugin.Error +import Ide.Types (HandlerM, + pluginSendRequest) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import System.FilePath (takeExtension) +import qualified System.Time.Extra as Extra +import System.Time.Extra (duration) +import UnliftIO.Exception (catchAny) + +#if !MIN_VERSION_ghc(9,8,0) +import qualified Data.Text as T +import Development.IDE (printOutputable) +import qualified Development.IDE.GHC.Compat.Core as Core +#endif + +timed :: MonadIO m => (t -> Extra.Seconds -> m a) -> t -> m b -> m b +timed out name op = do + (secs, r) <- duration op + _ <- out name secs + return r + +isLiterate :: FilePath -> Bool +isLiterate x = takeExtension x `elem` [".lhs", ".lhs-boot"] + +response' :: ExceptT PluginError (HandlerM c) WorkspaceEdit -> ExceptT PluginError (HandlerM c) (Value |? Null) +response' act = do + res <- ExceptT (runExceptT act + `catchAny` \e -> do + res <- showErr e + pure . Left . PluginInternalError $ fromString res) + _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing res) (\_ -> pure ()) + pure $ InR Null + +gStrictTry :: (MonadIO m, MonadCatch m) => m b -> m (Either String b) +gStrictTry op = + catch + (op >>= fmap Right . gevaluate) + (fmap Left . showErr) + +gevaluate :: MonadIO m => a -> m a +gevaluate = liftIO . evaluate + +showErr :: Monad m => SomeException -> m String +showErr e = + case fromException e of + -- On GHC 9.4+, the show instance adds the error message span + -- We don't want this for the plugin + -- So render without the span. + Just (SourceError msgs) -> return $ renderWithContext defaultSDocContext + $ vcat + $ bagToList + $ fmap (vcat . unDecorated + . diagnosticMessage + (defaultDiagnosticOpts @GhcMessage) + . errMsgDiagnostic) + $ getMessages msgs + _ -> + return . show $ e + +#if MIN_VERSION_ghc(9,8,0) +type DynFlagsParsingWarnings = Messages DriverMessage + +prettyWarnings :: DynFlagsParsingWarnings -> String +prettyWarnings = printWithoutUniques . 
pprMessages (defaultDiagnosticOpts @DriverMessage) +#else +type DynFlagsParsingWarnings = [Core.Warn] + +prettyWarnings :: DynFlagsParsingWarnings -> String +prettyWarnings = unlines . map prettyWarn + +prettyWarn :: Core.Warn -> String +prettyWarn Core.Warn{..} = + T.unpack (printOutputable $ Core.getLoc warnMsg) <> ": warning:\n" + <> " " <> Core.unLoc warnMsg +#endif diff --git a/plugins/hls-eval-plugin/test/Main.hs b/plugins/hls-eval-plugin/test/Main.hs new file mode 100644 index 0000000000..03416c6902 --- /dev/null +++ b/plugins/hls-eval-plugin/test/Main.hs @@ -0,0 +1,353 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} +module Main + ( main + ) where + +import Control.Lens (_Just, folded, preview, view, (^.), + (^..), (^?)) +import Control.Monad (join) +import Data.Aeson (Value (Object), fromJSON, object, + (.=)) +import Data.Aeson.Types (Pair, Result (Success)) +import Data.List (isInfixOf) +import Data.List.Extra (nubOrdOn) +import qualified Data.Map as Map +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import Ide.Plugin.Config (Config) +import qualified Ide.Plugin.Config as Plugin +import qualified Ide.Plugin.Eval as Eval +import Ide.Plugin.Eval.Types (EvalParams (..), Section (..), + testOutput) +import Language.LSP.Protocol.Lens (command, range, title) +import System.FilePath ((<.>), (</>)) +import Test.Hls +import qualified Test.Hls.FileSystem as FS + +main :: IO () +main = defaultTestRunner tests + +evalPlugin :: PluginTestDescriptor Eval.Log +evalPlugin = mkPluginTestDescriptor Eval.descriptor "eval" + +tests :: TestTree +tests = + testGroup "eval" + [ testCase "Produces Evaluate code lenses" $ + runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProject "T1.hs") $ do + doc <- openDoc "T1.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ map (preview $ command . _Just . title) lenses @?= [Just "Evaluate..."] + , testCase "Produces Refresh code lenses" $ + runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProject "T2.hs") $ do + doc <- openDoc "T2.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ map (preview $ command . _Just . 
title) lenses @?= [Just "Refresh..."] + , testCase "Code lenses have ranges" $ + runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProject "T1.hs") $ do + doc <- openDoc "T1.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ map (view range) lenses @?= [Range (Position 4 0) (Position 5 0)] + , testCase "Multi-line expressions have a multi-line range" $ do + runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProject "T3.hs") $ do + doc <- openDoc "T3.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ map (view range) lenses @?= [Range (Position 3 0) (Position 5 0)] + , testCase "Executed expressions range covers only the expression" $ do + runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProject "T2.hs") $ do + doc <- openDoc "T2.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ map (view range) lenses @?= [Range (Position 4 0) (Position 5 0)] + + , goldenWithEvalForCodeAction "Evaluation of expressions via code action" "T1" "hs" + , goldenWithEvalForCodeAction "Reevaluation of expressions via code action" "T2" "hs" + + , goldenWithEval "Evaluation of expressions" "T1" "hs" + , goldenWithEval "Reevaluation of expressions" "T2" "hs" + , goldenWithEval "Evaluation of expressions w/ imports" "T3" "hs" + , goldenWithEval "Evaluation of expressions w/ lets" "T4" "hs" + , goldenWithEval "Refresh an evaluation" "T5" "hs" + , goldenWithEval "Refresh an evaluation w/ lets" "T6" "hs" + , goldenWithEval "Refresh a multiline evaluation" "T7" "hs" + , goldenWithEval "Evaluate a multi-line show result" "TMultiResult" "hs" -- Do not escape from comments! + , testCase "Semantic and Lexical errors are reported" $ do + evalInFile "T8.hs" "-- >>> noFunctionWithThisName" "-- Variable not in scope: noFunctionWithThisName" + evalInFile "T8.hs" "-- >>> res = \"a\" + \"bc\"" $ + if ghcVersion >= GHC96 then + "-- No instance for `Num String' arising from a use of `+'\n-- In the expression: \"a\" + \"bc\"\n-- In an equation for `res': res = \"a\" + \"bc\"" + else + "-- No instance for (Num String) arising from a use of `+'\n-- In the expression: \"a\" + \"bc\"\n-- In an equation for `res': res = \"a\" + \"bc\"" + + evalInFile "T8.hs" "-- >>> \"" (if ghcVersion >= GHC912 then "-- lexical error at end of input" else "-- lexical error in string/character literal at end of input") + evalInFile "T8.hs" "-- >>> 3 `div` 0" "-- divide by zero" -- The default for marking exceptions is False + , goldenWithEval "Applies file LANGUAGE extensions" "T9" "hs" + , goldenWithEval "Evaluate a type with :kind!" "T10" "hs" + , goldenWithEval "Reports an error for an incorrect type with :kind!" "T11" "hs" + , goldenWithEval "Shows a kind with :kind" "T12" "hs" + , goldenWithEval "Reports an error for an incorrect type with :kind" "T13" "hs" + , goldenWithEval' "Returns a fully-instantiated type for :type" "T14" "hs" (if ghcVersion >= GHC98 then "ghc98.expected" else "expected") -- See https://gitlab.haskell.org/ghc/ghc/-/issues/24069 + , goldenWithEval "Doesn't break in module containing main function" "T4139" "hs" + , goldenWithEval "Returns defaulted type for :type +d, admitting multiple whitespaces around arguments" "T16" "hs" + , goldenWithEval ":type reports an error when given with unknown +x option" "T17" "hs" + , goldenWithEval "Reports an error when given with unknown command" "T18" "hs" + , goldenWithEval "Returns defaulted type for :type +d reflecting the default declaration specified in the >>> prompt" "T19" "hs" + -- TODO: known issue - see a note in P.R. 
#361 + , goldenWithEval ":type +d reflects the `default' declaration of the module" "T20" "hs" + , testCase ":type handles a multilined result properly" $ + evalInFile "T21.hs" "-- >>> :type fun" $ T.unlines [ + "-- fun", + "-- :: forall {k1} (k2 :: Nat) (n :: Nat) (a :: k1).", + "-- (KnownNat k2, KnownNat n, Typeable a) =>", + "-- Proxy k2 -> Proxy n -> Proxy a -> ()" + ] + , goldenWithEval ":t behaves exactly the same as :type" "T22" "hs" + , testCase ":type does \"dovetails\" for short identifiers" $ + evalInFile "T23.hs" "-- >>> :type f" $ T.unlines [ + "-- f :: forall {k1} (k2 :: Nat) (n :: Nat) (a :: k1).", + "-- (KnownNat k2, KnownNat n, Typeable a) =>", + "-- Proxy k2 -> Proxy n -> Proxy a -> ()" + ] + , goldenWithEval ":kind! treats a multilined result properly" "T24" "hs" + , goldenWithEval ":kind treats a multilined result properly" "T25" "hs" + , goldenWithEvalAndFs "local imports" (FS.directProjectMulti ["T26.hs", "Util.hs"]) "T26" "hs" + , goldenWithEval "Preserves one empty comment line after prompt" "T27" "hs" + , goldenWithEval "Evaluate comment after multiline function definition" "T28" "hs" + , goldenWithEval "Multi line comments" "TMulti" "hs" + , goldenWithEval "Multi line comments, with the last test line ends without newline" "TEndingMulti" "hs" + , goldenWithEval "Evaluate expressions in Plain comments in both single line and multi line format" "TPlainComment" "hs" + , goldenWithEval "Evaluate expressions in Haddock comments in both single line and multi line format" "THaddock" "hs" + , goldenWithEval "Compare results (for Haddock tests only)" "TCompare" "hs" + , goldenWithEvalAndFs "Local Modules imports are accessible in a test" (FS.directProjectMulti ["TLocalImport.hs", "Util.hs"]) "TLocalImport" "hs" + , goldenWithEvalAndFs "Transitive local dependency" (FS.directProjectMulti ["TTransitive.hs", "TLocalImport.hs", "Util.hs"]) "TTransitive" "hs" + -- , goldenWithEval "Local Modules can be imported in a test" "TLocalImportInTest" "hs" + , goldenWithEval "Setting language option TupleSections" "TLanguageOptionsTupleSections" "hs" + , goldenWithEval' ":set accepts ghci flags" "TFlags" "hs" (if ghcVersion >= GHC98 then "ghc98.expected" else "expected") + , testCase ":set -fprint-explicit-foralls works" $ do + evalInFile "T8.hs" "-- >>> :t id" "-- id :: a -> a" + evalInFile "T8.hs" "-- >>> :set -fprint-explicit-foralls\n-- >>> :t id" "-- id :: forall a. a -> a" + , goldenWithEval "The default language extensions for the eval plugin are the same as those for ghci" "TSameDefaultLanguageExtensionsAsGhci" "hs" + , goldenWithEval "IO expressions are supported, stdout/stderr output is ignored" "TIO" "hs" + , goldenWithEvalAndFs "Property checking" cabalProjectFS "TProperty" "hs" + , knownBrokenInWindowsBeforeGHC912 "The output has path separators in it, which on Windows look different. 
Just skip it there" $ + goldenWithEvalAndFs' "Property checking with exception" cabalProjectFS "TPropertyError" "hs" $ + case ghcVersion of + GHC912 -> "ghc912.expected" + GHC910 -> "ghc910.expected" + GHC98 -> "ghc98.expected" + GHC96 -> "ghc96.expected" + , goldenWithEval "Prelude has no special treatment, it is imported as stated in the module" "TPrelude" "hs" + , goldenWithEval "Don't panic on {-# UNPACK #-} pragma" "TUNPACK" "hs" + , goldenWithEval "Can handle eval inside nested comment properly" "TNested" "hs" + , goldenWithEval "Test on last line insert results correctly" "TLastLine" "hs" + , testGroup "with preprocessors" + [ knownBrokenInEnv [HostOS Windows] + "CPP eval on Windows and/or GHC <= 8.6 fails for some reasons" $ + goldenWithEval "CPP support" "TCPP" "hs" + , goldenWithEval "Literate Haskell Bird Style" "TLHS" "lhs" + ] + , goldenWithEval "Works with NoImplicitPrelude" "TNoImplicitPrelude" "hs" + , goldenWithEval "Variable 'it' works" "TIt" "hs" + , testGroup "configuration" + [ goldenWithEval' "Give 'WAS' by default" "TDiff" "hs" "expected.default" + , goldenWithEvalConfig' "Give the result only if diff is off" "TDiff" "hs" "expected.no-diff" diffOffConfig + , goldenWithEvalConfig' "Evaluates to exception (not marked)" "TException" "hs" "expected.nomark" (exceptionConfig False) + , goldenWithEvalConfig' "Evaluates to exception (with mark)" "TException" "hs" "expected.marked" (exceptionConfig True) + ] + , testGroup ":info command" + [ testCase ":info reports type, constructors and instances" $ do + [output] <- map (unlines . codeLensTestOutput) <$> evalLenses "TInfo.hs" + "data Foo = Foo1 | Foo2" `isInfixOf` output @? "Output does not include Foo data declaration" + "Eq Foo" `isInfixOf` output @? "Output does not include instance Eq Foo" + "Ord Foo" `isInfixOf` output @? "Output does not include instance Ord Foo" + not ("Baz Foo" `isInfixOf` output) @? "Output includes instance Baz Foo" + , testCase ":info reports type, constructors and instances for multiple types" $ do + [output] <- map (unlines . codeLensTestOutput) <$> evalLenses "TInfoMany.hs" + "data Foo = Foo1 | Foo2" `isInfixOf` output @? "Output does not include Foo data declaration" + "Eq Foo" `isInfixOf` output @? "Output does not include instance Eq Foo" + "Ord Foo" `isInfixOf` output @? "Output does not include instance Ord Foo" + not ("Baz Foo" `isInfixOf` output) @? "Output includes instance Baz Foo" + "data Bar = Bar1 | Bar2 | Bar3" `isInfixOf` output @? "Output does not include Bar data declaration" + "Eq Bar" `isInfixOf` output @? "Output does not include instance Eq Bar" + "Ord Bar" `isInfixOf` output @? "Output does not include instance Ord Bar" + not ("Baz Bar" `isInfixOf` output) @? "Output includes instance Baz Bar" + , testCase ":info! reports type, constructors and unfiltered instances" $ do + [output] <- map (unlines . codeLensTestOutput) <$> evalLenses "TInfoBang.hs" + "data Foo = Foo1 | Foo2" `isInfixOf` output @? "Output does not include Foo data declaration" + "Eq Foo" `isInfixOf` output @? "Output does not include instance Eq Foo" + "Ord Foo" `isInfixOf` output @? "Output does not include instance Ord Foo" + "Baz Foo" `isInfixOf` output @? "Output does not include instance Baz Foo" + , testCase ":info! reports type, constructors and unfiltered instances for multiple types" $ do + [output] <- map (unlines . codeLensTestOutput) <$> evalLenses "TInfoBangMany.hs" + "data Foo = Foo1 | Foo2" `isInfixOf` output @? "Output does not include Foo data declaration" + "Eq Foo" `isInfixOf` output @? 
"Output does not include instance Eq Foo" + "Ord Foo" `isInfixOf` output @? "Output does not include instance Ord Foo" + "Baz Foo" `isInfixOf` output @? "Output does not include instance Baz Foo" + "data Bar = Bar1 | Bar2 | Bar3" `isInfixOf` output @? "Output does not include Bar data declaration" + "Eq Bar" `isInfixOf` output @? "Output does not include instance Eq Bar" + "Ord Bar" `isInfixOf` output @? "Output does not include instance Ord Bar" + "Baz Bar" `isInfixOf` output @? "Output does not include instance Baz Bar" + , testCase ":i behaves exactly the same as :info" $ do + [output] <- map (unlines . codeLensTestOutput) <$> evalLenses "TI_Info.hs" + "data Foo = Foo1 | Foo2" `isInfixOf` output @? "Output does not include Foo data declaration" + "Eq Foo" `isInfixOf` output @? "Output does not include instance Eq Foo" + "Ord Foo" `isInfixOf` output @? "Output does not include instance Ord Foo" + not ("Baz Foo" `isInfixOf` output) @? "Output includes instance Baz Foo" + ] + , testCase "Interfaces are reused after Eval" $ do + runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProjectMulti ["TLocalImport.hs", "Util.hs"]) $ do + doc <- openDoc "TLocalImport.hs" "haskell" + _ <- waitForTypecheck doc + lenses <- getCodeLenses doc + cmd <- liftIO $ case lenses^..folded.command._Just of + [cmd] -> (cmd^.title @?= "Evaluate...") >> pure cmd + cmds -> assertFailure $ "Expected a single command, got " <> show (length cmds) + + executeCmd cmd + + -- trigger a rebuild and check that dependency interfaces are not rebuilt + changeDoc doc [] + _ <- waitForTypecheck doc + Right keys <- getLastBuildKeys + let ifaceKeys = filter ("GetModIface" `T.isPrefixOf`) keys + liftIO $ ifaceKeys @?= [] + ] + where + knownBrokenInWindowsBeforeGHC912 msg = + foldl (.) id + [ knownBrokenInSpecificEnv [GhcVer ghcVer, HostOS Windows] msg + | ghcVer <- [GHC96 .. GHC910] + ] + +goldenWithEval :: TestName -> FilePath -> FilePath -> TestTree +goldenWithEval title path ext = + goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path "expected" ext executeLensesBackwards + +goldenWithEvalForCodeAction :: TestName -> FilePath -> FilePath -> TestTree +goldenWithEvalForCodeAction title path ext = + goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path "expected" ext executeCodeActionsBackwards + +goldenWithEvalAndFs :: TestName -> [FS.FileTree] -> FilePath -> FilePath -> TestTree +goldenWithEvalAndFs title tree path ext = + goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs tree) path "expected" ext executeLensesBackwards + +-- | Similar function as 'goldenWithEval' with an alternate reference file +-- naming. Useful when reference file may change because of GHC version. 
+goldenWithEval' :: TestName -> FilePath -> FilePath -> FilePath -> TestTree +goldenWithEval' title path ext expected = + goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs $ FS.directProject (path <.> ext)) path expected ext executeLensesBackwards + +goldenWithEvalAndFs' :: TestName -> [FS.FileTree] -> FilePath -> FilePath -> FilePath -> TestTree +goldenWithEvalAndFs' title tree path ext expected = + goldenWithHaskellDocInTmpDir def evalPlugin title (mkFs tree) path expected ext executeLensesBackwards + + +-- | Execute lenses backwards, to avoid affecting their position in the source file +executeLensesBackwards :: TextDocumentIdentifier -> Session () +executeLensesBackwards doc = do + codeLenses <- getCodeLenses doc + -- liftIO $ print codeLenses + executeCmdsBackwards [c | CodeLens{_command = Just c} <- codeLenses] + +executeCodeActionsBackwards :: TextDocumentIdentifier -> Session () +executeCodeActionsBackwards doc = do + codeLenses <- getCodeLenses doc + let ranges = [_range | CodeLens{_range} <- codeLenses] + -- getAllCodeActions cannot get our code actions because they have no diagnostics + codeActions <- join <$> traverse (getCodeActions doc) ranges + let cmds = Maybe.mapMaybe (^? _L) codeActions + executeCmdsBackwards cmds + +-- Execute commands backwards, nubbing elements to avoid +-- evaluating the same section with multiple tests +-- more than once +executeCmdsBackwards :: [Command] -> Session () +executeCmdsBackwards = mapM_ executeCmd . nubOrdOn actSectionId . reverse + +actSectionId :: Command -> Int +actSectionId Command{_arguments = Just [fromJSON -> Success EvalParams{..}]} = evalId +actSectionId _ = error "Invalid CodeLens" + +-- Execute command and wait for result +executeCmd :: Command -> Session () +executeCmd cmd = do + executeCommand cmd + _ <- skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + -- liftIO $ print _resp + pure () + +evalLenses :: FilePath -> IO [CodeLens] +evalLenses path = runSessionWithServerInTmpDir def evalPlugin (mkFs cabalProjectFS) $ do + doc <- openDoc path "haskell" + executeLensesBackwards doc + getCodeLenses doc + +codeLensTestOutput :: CodeLens -> [String] +codeLensTestOutput codeLens = do + CodeLens { _command = Just command } <- [codeLens] + Command { _arguments = Just args } <- [command] + Success EvalParams { sections = sections } <- fromJSON @EvalParams <$> args + Section { sectionTests = sectionTests } <- sections + testOutput =<< sectionTests + +testDataDir :: FilePath +testDataDir = "plugins" </> "hls-eval-plugin" </> "test" </> "testdata" + +changeConfig :: [Pair] -> Config +changeConfig conf = + def + { Plugin.plugins = Map.fromList [("eval", + def { Plugin.plcGlobalOn = True, Plugin.plcConfig = unObject $ object conf } + )] } + where + unObject (Object obj) = obj + unObject _ = undefined + +diffOffConfig :: Config +diffOffConfig = changeConfig ["diff" .= False] + +exceptionConfig :: Bool -> Config +exceptionConfig exCfg = changeConfig ["exception" .= exCfg] + +goldenWithEvalConfig' :: TestName -> FilePath -> FilePath -> FilePath -> Config -> TestTree +goldenWithEvalConfig' title path ext expected cfg = + goldenWithHaskellDocInTmpDir cfg evalPlugin title (mkFs $ FS.directProject $ path <.> ext) path expected ext $ \doc -> do + executeLensesBackwards doc + +evalInFile :: HasCallStack => FilePath -> T.Text -> T.Text -> IO () +evalInFile fp e expected = runSessionWithServerInTmpDir def evalPlugin (mkFs $ FS.directProject fp) $ do + doc <- openDoc fp "haskell" + origin <- documentContents doc + let withEval = origin <> e + 
changeDoc doc [TextDocumentContentChangeEvent . InR . TextDocumentContentChangeWholeDocument $ withEval] + executeLensesBackwards doc + result <- fmap T.strip . T.stripPrefix withEval <$> documentContents doc + liftIO $ result @?= Just (T.strip expected) + +-- ---------------------------------------------------------------------------- +-- File system definitions +-- Used for declaring a test file tree +-- ---------------------------------------------------------------------------- + +mkFs :: [FS.FileTree] -> FS.VirtualFileTree +mkFs = FS.mkVirtualFileTree testDataDir + +cabalProjectFS :: [FS.FileTree] +cabalProjectFS = FS.simpleCabalProject' + [ FS.copy "test.cabal" + , FS.file "cabal.project" + (FS.text "packages: ./info-util .\n" + ) + , FS.copy "TProperty.hs" + , FS.copy "TPropertyError.hs" + , FS.copy "TI_Info.hs" + , FS.copy "TInfo.hs" + , FS.copy "TInfoBang.hs" + , FS.copy "TInfoBangMany.hs" + , FS.copy "TInfoMany.hs" + , FS.directory "info-util" + [ FS.copy "info-util/info-util.cabal" + , FS.copy "info-util/InfoUtil.hs" + ] + ] diff --git a/plugins/hls-eval-plugin/test/cabal.project b/plugins/hls-eval-plugin/test/cabal.project new file mode 100644 index 0000000000..3fae89fe02 --- /dev/null +++ b/plugins/hls-eval-plugin/test/cabal.project @@ -0,0 +1,3 @@ +packages: + testdata/ + testdata/info-util/ diff --git a/test/testdata/eval/T1.hs.expected b/plugins/hls-eval-plugin/test/testdata/T1.expected.hs similarity index 100% rename from test/testdata/eval/T1.hs.expected rename to plugins/hls-eval-plugin/test/testdata/T1.expected.hs diff --git a/test/testdata/eval/T1.hs b/plugins/hls-eval-plugin/test/testdata/T1.hs similarity index 100% rename from test/testdata/eval/T1.hs rename to plugins/hls-eval-plugin/test/testdata/T1.hs diff --git a/plugins/hls-eval-plugin/test/testdata/T10.expected.hs b/plugins/hls-eval-plugin/test/testdata/T10.expected.hs new file mode 100644 index 0000000000..776c970591 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T10.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE DataKinds, TypeOperators #-} +module T10 where +import GHC.TypeNats ( type (+) ) + +type Dummy = 1 + 1 + +-- >>> type N = 1 +-- >>> type M = 40 +-- >>> :kind! N + M + 1 +-- N + M + 1 :: Natural +-- = 42 diff --git a/plugins/hls-eval-plugin/test/testdata/T10.hs b/plugins/hls-eval-plugin/test/testdata/T10.hs new file mode 100644 index 0000000000..e29c75876e --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T10.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds, TypeOperators #-} +module T10 where +import GHC.TypeNats ( type (+) ) + +type Dummy = 1 + 1 + +-- >>> type N = 1 +-- >>> type M = 40 +-- >>> :kind! N + M + 1 diff --git a/plugins/hls-eval-plugin/test/testdata/T11.expected.hs b/plugins/hls-eval-plugin/test/testdata/T11.expected.hs new file mode 100644 index 0000000000..63d0ed8a07 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T11.expected.hs @@ -0,0 +1,4 @@ +module T11 where + +-- >>> :kind! A +-- Not in scope: type constructor or class `A' diff --git a/plugins/hls-eval-plugin/test/testdata/T11.hs b/plugins/hls-eval-plugin/test/testdata/T11.hs new file mode 100644 index 0000000000..b4dbe83460 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T11.hs @@ -0,0 +1,3 @@ +module T11 where + +-- >>> :kind! 
A diff --git a/plugins/hls-eval-plugin/test/testdata/T12.expected.hs b/plugins/hls-eval-plugin/test/testdata/T12.expected.hs new file mode 100644 index 0000000000..4f0dd67b82 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T12.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE DataKinds, TypeOperators #-} +module T12 where +import GHC.TypeNats ( type (+) ) + +type Dummy = 1 + 1 + +-- >>> type N = 1 +-- >>> type M = 40 +-- >>> :kind N + M + 1 +-- N + M + 1 :: Natural diff --git a/plugins/hls-eval-plugin/test/testdata/T12.hs b/plugins/hls-eval-plugin/test/testdata/T12.hs new file mode 100644 index 0000000000..8a2d269165 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T12.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds, TypeOperators #-} +module T12 where +import GHC.TypeNats ( type (+) ) + +type Dummy = 1 + 1 + +-- >>> type N = 1 +-- >>> type M = 40 +-- >>> :kind N + M + 1 diff --git a/plugins/hls-eval-plugin/test/testdata/T13.expected.hs b/plugins/hls-eval-plugin/test/testdata/T13.expected.hs new file mode 100644 index 0000000000..60a75bdfdd --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T13.expected.hs @@ -0,0 +1,4 @@ +module T13 where + +-- >>> :kind A +-- Not in scope: type constructor or class `A' diff --git a/plugins/hls-eval-plugin/test/testdata/T13.hs b/plugins/hls-eval-plugin/test/testdata/T13.hs new file mode 100644 index 0000000000..b2f51a5ddc --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T13.hs @@ -0,0 +1,3 @@ +module T13 where + +-- >>> :kind A diff --git a/plugins/hls-eval-plugin/test/testdata/T14.expected.hs b/plugins/hls-eval-plugin/test/testdata/T14.expected.hs new file mode 100644 index 0000000000..544679bfff --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T14.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE TypeApplications #-} +module T14 where + +foo :: Show a => a -> String +foo = show + +-- >>> :type foo @Int +-- foo @Int :: Int -> String diff --git a/plugins/hls-eval-plugin/test/testdata/T14.ghc98.expected.hs b/plugins/hls-eval-plugin/test/testdata/T14.ghc98.expected.hs new file mode 100644 index 0000000000..61ee830fa1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T14.ghc98.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE TypeApplications #-} +module T14 where + +foo :: Show a => a -> String +foo = show + +-- >>> :type foo @Int +-- foo @Int :: Show Int => Int -> String diff --git a/plugins/hls-eval-plugin/test/testdata/T14.hs b/plugins/hls-eval-plugin/test/testdata/T14.hs new file mode 100644 index 0000000000..8f74911c22 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T14.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeApplications #-} +module T14 where + +foo :: Show a => a -> String +foo = show + +-- >>> :type foo @Int diff --git a/plugins/hls-eval-plugin/test/testdata/T16.expected.hs b/plugins/hls-eval-plugin/test/testdata/T16.expected.hs new file mode 100644 index 0000000000..3edb5b2cc1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T16.expected.hs @@ -0,0 +1,4 @@ +module T16 where + +-- >>> :type +d 40+ 2 +-- 40+ 2 :: Integer diff --git a/plugins/hls-eval-plugin/test/testdata/T16.hs b/plugins/hls-eval-plugin/test/testdata/T16.hs new file mode 100644 index 0000000000..69a43028d1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T16.hs @@ -0,0 +1,3 @@ +module T16 where + +-- >>> :type +d 40+ 2 diff --git a/plugins/hls-eval-plugin/test/testdata/T17.expected.hs b/plugins/hls-eval-plugin/test/testdata/T17.expected.hs new file mode 100644 index 0000000000..caf06a9fee --- /dev/null +++ 
b/plugins/hls-eval-plugin/test/testdata/T17.expected.hs @@ -0,0 +1,4 @@ +module T17 where + +-- >>> :type +no 42 +-- parse error on input `+' diff --git a/plugins/hls-eval-plugin/test/testdata/T17.hs b/plugins/hls-eval-plugin/test/testdata/T17.hs new file mode 100644 index 0000000000..0b6d1a9611 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T17.hs @@ -0,0 +1,3 @@ +module T17 where + +-- >>> :type +no 42 diff --git a/plugins/hls-eval-plugin/test/testdata/T18.expected.hs b/plugins/hls-eval-plugin/test/testdata/T18.expected.hs new file mode 100644 index 0000000000..39e72343f1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T18.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} +module T18 where + +-- >>> :noooop foo bar +-- unknown command 'noooop' diff --git a/plugins/hls-eval-plugin/test/testdata/T18.hs b/plugins/hls-eval-plugin/test/testdata/T18.hs new file mode 100644 index 0000000000..42bc0b3e2f --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T18.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE TypeApplications #-} +module T18 where + +-- >>> :noooop foo bar diff --git a/plugins/hls-eval-plugin/test/testdata/T19.expected.hs b/plugins/hls-eval-plugin/test/testdata/T19.expected.hs new file mode 100644 index 0000000000..5d5f4ed4c1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T19.expected.hs @@ -0,0 +1,7 @@ +module T19 where +import Data.Word (Word) +type W = Word + +-- >>> default (Word) +-- >>> :type +d 40+ 2 +-- 40+ 2 :: Word diff --git a/plugins/hls-eval-plugin/test/testdata/T19.hs b/plugins/hls-eval-plugin/test/testdata/T19.hs new file mode 100644 index 0000000000..e58af97967 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T19.hs @@ -0,0 +1,6 @@ +module T19 where +import Data.Word (Word) +type W = Word + +-- >>> default (Word) +-- >>> :type +d 40+ 2 diff --git a/test/testdata/eval/T2.hs.expected b/plugins/hls-eval-plugin/test/testdata/T2.expected.hs similarity index 100% rename from test/testdata/eval/T2.hs.expected rename to plugins/hls-eval-plugin/test/testdata/T2.expected.hs diff --git a/test/testdata/eval/T2.hs b/plugins/hls-eval-plugin/test/testdata/T2.hs similarity index 100% rename from test/testdata/eval/T2.hs rename to plugins/hls-eval-plugin/test/testdata/T2.hs diff --git a/plugins/hls-eval-plugin/test/testdata/T20.expected.hs b/plugins/hls-eval-plugin/test/testdata/T20.expected.hs new file mode 100644 index 0000000000..36c93b99c1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T20.expected.hs @@ -0,0 +1,7 @@ +module T20 where +import Data.Word (Word) + +default (Word) + +-- >>> :type +d 40+ 2 +-- 40+ 2 :: Integer diff --git a/plugins/hls-eval-plugin/test/testdata/T20.hs b/plugins/hls-eval-plugin/test/testdata/T20.hs new file mode 100644 index 0000000000..bd46606ae0 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T20.hs @@ -0,0 +1,6 @@ +module T20 where +import Data.Word (Word) + +default (Word) + +-- >>> :type +d 40+ 2 diff --git a/plugins/hls-eval-plugin/test/testdata/T21.hs b/plugins/hls-eval-plugin/test/testdata/T21.hs new file mode 100644 index 0000000000..64068f55ed --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T21.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE ScopedTypeVariables #-} +module T21 where +import Data.Proxy (Proxy(..)) +import GHC.TypeNats (KnownNat) +import Type.Reflection (Typeable) + +fun :: forall k n a. 
(KnownNat k, KnownNat n, Typeable a) + => Proxy k -> Proxy n -> Proxy a -> () +fun _ _ _ = () + diff --git a/plugins/hls-eval-plugin/test/testdata/T22.expected.hs b/plugins/hls-eval-plugin/test/testdata/T22.expected.hs new file mode 100644 index 0000000000..98792c637f --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T22.expected.hs @@ -0,0 +1,10 @@ +module T22 where +import Data.Proxy (Proxy (..)) +import GHC.TypeNats (KnownNat) +import Type.Reflection (Typeable) + +f :: Integer +f = 32 + +-- >>> :t f +-- f :: Integer diff --git a/plugins/hls-eval-plugin/test/testdata/T22.hs b/plugins/hls-eval-plugin/test/testdata/T22.hs new file mode 100644 index 0000000000..43bb32e839 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T22.hs @@ -0,0 +1,9 @@ +module T22 where +import Data.Proxy (Proxy (..)) +import GHC.TypeNats (KnownNat) +import Type.Reflection (Typeable) + +f :: Integer +f = 32 + +-- >>> :t f diff --git a/plugins/hls-eval-plugin/test/testdata/T23.hs b/plugins/hls-eval-plugin/test/testdata/T23.hs new file mode 100644 index 0000000000..0a9edaa5c9 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T23.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE ScopedTypeVariables #-} +module T23 where +import Data.Proxy (Proxy (..)) +import GHC.TypeNats (KnownNat) +import Type.Reflection (Typeable) + +f :: forall k n a. (KnownNat k, KnownNat n, Typeable a) + => Proxy k -> Proxy n -> Proxy a -> () +f _ _ _ = () + diff --git a/plugins/hls-eval-plugin/test/testdata/T24.expected.hs b/plugins/hls-eval-plugin/test/testdata/T24.expected.hs new file mode 100644 index 0000000000..f7909ddb04 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T24.expected.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE UndecidableInstances #-} +module T24 where +import GHC.TypeNats (type (-)) +data Proxy a = Stop | Next (Proxy a) + +type family LongP n a where + LongP 0 a = a + LongP n a = Next (LongP (n - 1) a) + +-- >>> :kind! ((LongP 10 Stop) :: Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy Double))))))))))))) +-- ((LongP 10 Stop) :: Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy Double))))))))))))) :: Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- Double)))))))))))) +-- = 'Next +-- ('Next +-- ('Next +-- ('Next ('Next ('Next ('Next ('Next ('Next ('Next 'Stop))))))))) diff --git a/plugins/hls-eval-plugin/test/testdata/T24.hs b/plugins/hls-eval-plugin/test/testdata/T24.hs new file mode 100644 index 0000000000..01f53ed17d --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T24.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE UndecidableInstances #-} +module T24 where +import GHC.TypeNats (type (-)) +data Proxy a = Stop | Next (Proxy a) + +type family LongP n a where + LongP 0 a = a + LongP n a = Next (LongP (n - 1) a) + +-- >>> :kind! 
((LongP 10 Stop) :: Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy Double))))))))))))) diff --git a/plugins/hls-eval-plugin/test/testdata/T25.expected.hs b/plugins/hls-eval-plugin/test/testdata/T25.expected.hs new file mode 100644 index 0000000000..1b85e9ae56 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T25.expected.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE UndecidableInstances #-} +module T25 where +import GHC.TypeNats (type (-)) +data Proxy a = Stop | Next (Proxy a) + +type family LongP n a where + LongP 0 a = a + LongP n a = Next (LongP (n - 1) a) + +-- >>> :kind (Stop :: Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy Double))))))))))))) +-- (Stop :: Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy Double))))))))))))) :: Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- (Proxy +-- Double)))))))))))) diff --git a/plugins/hls-eval-plugin/test/testdata/T25.hs b/plugins/hls-eval-plugin/test/testdata/T25.hs new file mode 100644 index 0000000000..e813d207db --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T25.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE UndecidableInstances #-} +module T25 where +import GHC.TypeNats (type (-)) +data Proxy a = Stop | Next (Proxy a) + +type family LongP n a where + LongP 0 a = a + LongP n a = Next (LongP (n - 1) a) + +-- >>> :kind (Stop :: Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy (Proxy Double))))))))))))) diff --git a/plugins/hls-eval-plugin/test/testdata/T26.expected.hs b/plugins/hls-eval-plugin/test/testdata/T26.expected.hs new file mode 100644 index 0000000000..c500f9b0e7 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T26.expected.hs @@ -0,0 +1,5 @@ +module T26 where +import Util () + +-- >>> "hello" +-- "hello" diff --git a/plugins/hls-eval-plugin/test/testdata/T26.hs b/plugins/hls-eval-plugin/test/testdata/T26.hs new file mode 100644 index 0000000000..059fc79ea9 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T26.hs @@ -0,0 +1,4 @@ +module T26 where +import Util () + +-- >>> "hello" diff --git a/plugins/hls-eval-plugin/test/testdata/T27.expected.hs b/plugins/hls-eval-plugin/test/testdata/T27.expected.hs new file mode 100644 index 0000000000..e931c55c29 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T27.expected.hs @@ -0,0 +1,5 @@ +module T27 where + +-- >>> () +-- () +-- diff --git a/plugins/hls-eval-plugin/test/testdata/T27.hs b/plugins/hls-eval-plugin/test/testdata/T27.hs new file mode 100644 index 0000000000..c1d68eba66 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T27.hs @@ -0,0 +1,4 @@ +module T27 where + +-- >>> () +-- diff --git a/plugins/hls-eval-plugin/test/testdata/T28.expected.hs b/plugins/hls-eval-plugin/test/testdata/T28.expected.hs new file mode 100644 index 0000000000..74ecea6e75 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T28.expected.hs @@ -0,0 +1,7 @@ +module T28 where + +f True = True +f False = False + +-- >>> 1+1 +-- 2 diff --git a/plugins/hls-eval-plugin/test/testdata/T28.hs b/plugins/hls-eval-plugin/test/testdata/T28.hs new file mode 100644 index 0000000000..e72910c4c2 --- /dev/null +++ 
b/plugins/hls-eval-plugin/test/testdata/T28.hs @@ -0,0 +1,6 @@ +module T28 where + +f True = True +f False = False + +-- >>> 1+1 diff --git a/test/testdata/eval/T3.hs.expected b/plugins/hls-eval-plugin/test/testdata/T3.expected.hs similarity index 100% rename from test/testdata/eval/T3.hs.expected rename to plugins/hls-eval-plugin/test/testdata/T3.expected.hs diff --git a/test/testdata/eval/T3.hs b/plugins/hls-eval-plugin/test/testdata/T3.hs similarity index 100% rename from test/testdata/eval/T3.hs rename to plugins/hls-eval-plugin/test/testdata/T3.hs diff --git a/plugins/hls-eval-plugin/test/testdata/T4.expected.hs b/plugins/hls-eval-plugin/test/testdata/T4.expected.hs new file mode 100644 index 0000000000..53bb845392 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T4.expected.hs @@ -0,0 +1,10 @@ +module T4 where + +import Data.List (unwords) + +-- >>> let evaluation = " evaluation" +-- >>> unwords example ++ evaluation +-- "This is an example of evaluation" +example :: [String] +example = ["This","is","an","example","of"] + diff --git a/plugins/hls-eval-plugin/test/testdata/T4.hs b/plugins/hls-eval-plugin/test/testdata/T4.hs new file mode 100644 index 0000000000..96a43e5f60 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T4.hs @@ -0,0 +1,9 @@ +module T4 where + +import Data.List (unwords) + +-- >>> let evaluation = " evaluation" +-- >>> unwords example ++ evaluation +example :: [String] +example = ["This","is","an","example","of"] + diff --git a/plugins/hls-eval-plugin/test/testdata/T4139.expected.hs b/plugins/hls-eval-plugin/test/testdata/T4139.expected.hs new file mode 100644 index 0000000000..ade8332a32 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T4139.expected.hs @@ -0,0 +1,7 @@ +module T4139 where + +-- >>> 'x' +-- 'x' + +main :: IO () +main = putStrLn "Hello World!" diff --git a/plugins/hls-eval-plugin/test/testdata/T4139.hs b/plugins/hls-eval-plugin/test/testdata/T4139.hs new file mode 100644 index 0000000000..855d6ef08b --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T4139.hs @@ -0,0 +1,6 @@ +module T4139 where + +-- >>> 'x' + +main :: IO () +main = putStrLn "Hello World!" 
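The fixture files above all follow the same convention: the input module ends at a "-- >>>" prompt with no result underneath, and its ".expected.hs" twin repeats the module with the evaluated value written directly below the prompt; this is exactly the pair that the goldenWithEval tests diff. A minimal sketch of such a pair, using a hypothetical module name TMinimal that is not part of this patch:

-- TMinimal.hs (hypothetical input fixture): the prompt carries no result yet.
module TMinimal where

-- >>> map (*2) [1, 2, 3]

-- TMinimal.expected.hs (hypothetical golden file): identical, except that the
-- Evaluate code lens has appended the printed value under the prompt:
--
--   module TMinimal where
--
--   -- >>> map (*2) [1, 2, 3]
--   -- [2,4,6]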
diff --git a/test/testdata/eval/T5.hs.expected b/plugins/hls-eval-plugin/test/testdata/T5.expected.hs similarity index 100% rename from test/testdata/eval/T5.hs.expected rename to plugins/hls-eval-plugin/test/testdata/T5.expected.hs diff --git a/test/testdata/eval/T5.hs b/plugins/hls-eval-plugin/test/testdata/T5.hs similarity index 100% rename from test/testdata/eval/T5.hs rename to plugins/hls-eval-plugin/test/testdata/T5.hs diff --git a/test/testdata/eval/T6.hs.expected b/plugins/hls-eval-plugin/test/testdata/T6.expected.hs similarity index 100% rename from test/testdata/eval/T6.hs.expected rename to plugins/hls-eval-plugin/test/testdata/T6.expected.hs diff --git a/test/testdata/eval/T6.hs b/plugins/hls-eval-plugin/test/testdata/T6.hs similarity index 100% rename from test/testdata/eval/T6.hs rename to plugins/hls-eval-plugin/test/testdata/T6.hs diff --git a/test/testdata/eval/T7.hs.expected b/plugins/hls-eval-plugin/test/testdata/T7.expected.hs similarity index 100% rename from test/testdata/eval/T7.hs.expected rename to plugins/hls-eval-plugin/test/testdata/T7.expected.hs diff --git a/test/testdata/eval/T7.hs b/plugins/hls-eval-plugin/test/testdata/T7.hs similarity index 100% rename from test/testdata/eval/T7.hs rename to plugins/hls-eval-plugin/test/testdata/T7.hs diff --git a/plugins/hls-eval-plugin/test/testdata/T8.hs b/plugins/hls-eval-plugin/test/testdata/T8.hs new file mode 100644 index 0000000000..44cd164a09 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T8.hs @@ -0,0 +1,2 @@ +-- An empty playground +module T8 where diff --git a/plugins/hls-eval-plugin/test/testdata/T9.expected.hs b/plugins/hls-eval-plugin/test/testdata/T9.expected.hs new file mode 100644 index 0000000000..bc09993826 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T9.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +module T9 where +import Data.Proxy (Proxy(..)) + +type P = Proxy + +-- >>> Proxy :: Proxy 3 +-- Proxy diff --git a/plugins/hls-eval-plugin/test/testdata/T9.hs b/plugins/hls-eval-plugin/test/testdata/T9.hs new file mode 100644 index 0000000000..9926ad836e --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/T9.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE DataKinds #-} +module T9 where +import Data.Proxy (Proxy(..)) + +type P = Proxy + +-- >>> Proxy :: Proxy 3 diff --git a/plugins/hls-eval-plugin/test/testdata/TCPP.expected.hs b/plugins/hls-eval-plugin/test/testdata/TCPP.expected.hs new file mode 100644 index 0000000000..1c35954861 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TCPP.expected.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module TCPP where + +-- >>> y +-- 11 +y :: Integer +y = 11 + +#define ALL + +#ifdef ALL +-- >>> 3+y +-- 14 +#else +-- >>> 5+y +#endif + +-- >>> 2+y +-- 13 diff --git a/plugins/hls-eval-plugin/test/testdata/TCPP.hs b/plugins/hls-eval-plugin/test/testdata/TCPP.hs new file mode 100644 index 0000000000..efed8edacf --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TCPP.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module TCPP where + +-- >>> y +y :: Integer +y = 11 + +#define ALL + +#ifdef ALL +-- >>> 3+y +#else +-- >>> 5+y +#endif + +-- >>> 2+y diff --git a/plugins/hls-eval-plugin/test/testdata/TCompare.expected.hs b/plugins/hls-eval-plugin/test/testdata/TCompare.expected.hs new file mode 100644 index 0000000000..9b2f5301cc --- /dev/null +++ 
b/plugins/hls-eval-plugin/test/testdata/TCompare.expected.hs @@ -0,0 +1,13 @@ +module TCompare where + +-- In plain comments, the previous result of an evaluation is simply replaced + +-- >>> 2+2 +-- 4 + +-- In Haddock comments, the previous result of an evaluation is compared with the new one +-- and difference are displayed + +-- | >>> 2+2 +-- WAS 5 +-- NOW 4 diff --git a/plugins/hls-eval-plugin/test/testdata/TCompare.hs b/plugins/hls-eval-plugin/test/testdata/TCompare.hs new file mode 100644 index 0000000000..2be9f94426 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TCompare.hs @@ -0,0 +1,12 @@ +module TCompare where + +-- In plain comments, the previous result of an evaluation is simply replaced + +-- >>> 2+2 +-- 5 + +-- In Haddock comments, the previous result of an evaluation is compared with the new one +-- and difference are displayed + +-- | >>> 2+2 +-- 5 diff --git a/plugins/hls-eval-plugin/test/testdata/TDiff.expected.default.hs b/plugins/hls-eval-plugin/test/testdata/TDiff.expected.default.hs new file mode 100644 index 0000000000..a2ed8fbd44 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TDiff.expected.default.hs @@ -0,0 +1,8 @@ +module TDiff where + +-- | +-- >>> myId 5 +-- WAS 4 +-- NOW 5 +myId :: a -> a +myId x = x diff --git a/plugins/hls-eval-plugin/test/testdata/TDiff.expected.no-diff.hs b/plugins/hls-eval-plugin/test/testdata/TDiff.expected.no-diff.hs new file mode 100644 index 0000000000..373a64a804 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TDiff.expected.no-diff.hs @@ -0,0 +1,7 @@ +module TDiff where + +-- | +-- >>> myId 5 +-- 5 +myId :: a -> a +myId x = x diff --git a/plugins/hls-eval-plugin/test/testdata/TDiff.hs b/plugins/hls-eval-plugin/test/testdata/TDiff.hs new file mode 100644 index 0000000000..bf5b0eb287 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TDiff.hs @@ -0,0 +1,7 @@ +module TDiff where + +-- | +-- >>> myId 5 +-- 4 +myId :: a -> a +myId x = x diff --git a/plugins/hls-eval-plugin/test/testdata/TEndingMulti.expected.hs b/plugins/hls-eval-plugin/test/testdata/TEndingMulti.expected.hs new file mode 100644 index 0000000000..2e73d6d7d1 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TEndingMulti.expected.hs @@ -0,0 +1,9 @@ +module TEndingMulti where + +-- Now trailing doctest is allowed: + +{- >>> 42 +>>> 54 +42 +54 +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TEndingMulti.hs b/plugins/hls-eval-plugin/test/testdata/TEndingMulti.hs new file mode 100644 index 0000000000..81d79f5464 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TEndingMulti.hs @@ -0,0 +1,6 @@ +module TEndingMulti where + +-- Now trailing doctest is allowed: + +{- >>> 42 +>>> 54-} diff --git a/plugins/hls-eval-plugin/test/testdata/TException.expected.marked.hs b/plugins/hls-eval-plugin/test/testdata/TException.expected.marked.hs new file mode 100644 index 0000000000..3e655416e6 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TException.expected.marked.hs @@ -0,0 +1,6 @@ +module TException where + +-- >>> exceptionalCode +-- *** Exception: I am exceptional! +exceptionalCode :: Int +exceptionalCode = error "I am exceptional!" diff --git a/plugins/hls-eval-plugin/test/testdata/TException.expected.nomark.hs b/plugins/hls-eval-plugin/test/testdata/TException.expected.nomark.hs new file mode 100644 index 0000000000..9ac7cd03a3 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TException.expected.nomark.hs @@ -0,0 +1,6 @@ +module TException where + +-- >>> exceptionalCode +-- I am exceptional! 
+exceptionalCode :: Int +exceptionalCode = error "I am exceptional!" diff --git a/plugins/hls-eval-plugin/test/testdata/TException.hs b/plugins/hls-eval-plugin/test/testdata/TException.hs new file mode 100644 index 0000000000..5e083ab1dd --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TException.hs @@ -0,0 +1,5 @@ +module TException where + +-- >>> exceptionalCode +exceptionalCode :: Int +exceptionalCode = error "I am exceptional!" diff --git a/plugins/hls-eval-plugin/test/testdata/TFlags.expected.hs b/plugins/hls-eval-plugin/test/testdata/TFlags.expected.hs new file mode 100644 index 0000000000..2c8e0ef92a --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TFlags.expected.hs @@ -0,0 +1,64 @@ +-- Support for language options + +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE Haskell2010 #-} + +module TFlags where + +-- Language options set in the module source (ScopedTypeVariables) +-- also apply to tests so this works fine +-- >>> f = (\(c::Char) -> [c]) + +{- Multiple options can be set with a single `:set` + +>>> :set -XMultiParamTypeClasses -XFlexibleInstances +>>> class Z a b c +-} + +{- + +Options apply only in the section where they are defined (unless they are in the setup section), so this will fail: + +>>> class L a b c +Too many parameters for class `L' +(Enable MultiParamTypeClasses to allow multi-parameter classes) +In the class declaration for `L' +-} + + +{- +Options apply to all tests in the same section after their declaration. + +Not set yet: + +>>> class D +No parameters for class `D' +(Enable MultiParamTypeClasses to allow no-parameter classes) +In the class declaration for `D' + +Now it works: + +>>>:set -XMultiParamTypeClasses +>>> class C + +It still works + +>>> class F +-} + +{- Now -package flag is handled correctly: + +>>> :set -package ghc-prim +>>> import GHC.Prim + +-} + + +{- Invalid option/flags are reported, but valid ones will be reflected + +>>> :set -XRank2Types -XAbsent -XDatatypeContexts -XWrong -fprint-nothing-at-all +: warning: + -XDatatypeContexts is deprecated: It was widely considered a misfeature, and has been removed from the Haskell language. +Some flags have not been recognized: -XAbsent, -XWrong, -fprint-nothing-at-all + +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TFlags.ghc98.expected.hs b/plugins/hls-eval-plugin/test/testdata/TFlags.ghc98.expected.hs new file mode 100644 index 0000000000..2e4de4c0b7 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TFlags.ghc98.expected.hs @@ -0,0 +1,62 @@ +-- Support for language options + +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE Haskell2010 #-} + +module TFlags where + +-- Language options set in the module source (ScopedTypeVariables) +-- also apply to tests so this works fine +-- >>> f = (\(c::Char) -> [c]) + +{- Multiple options can be set with a single `:set` + +>>> :set -XMultiParamTypeClasses -XFlexibleInstances +>>> class Z a b c +-} + +{- + +Options apply only in the section where they are defined (unless they are in the setup section), so this will fail: + +>>> class L a b c +Too many parameters for class `L' +In the class declaration for `L' +-} + + +{- +Options apply to all tests in the same section after their declaration. 
+ +Not set yet: + +>>> class D +No parameters for class `D' +In the class declaration for `D' + +Now it works: + +>>>:set -XMultiParamTypeClasses +>>> class C + +It still works + +>>> class F +-} + +{- Now -package flag is handled correctly: + +>>> :set -package ghc-prim +>>> import GHC.Prim + +-} + + +{- Invalid option/flags are reported, but valid ones will be reflected + +>>> :set -XRank2Types -XAbsent -XDatatypeContexts -XWrong -fprint-nothing-at-all +: warning: [GHC-53692] [-Wdeprecated-flags (in -Wdefault)] + -XDatatypeContexts is deprecated: It was widely considered a misfeature, and has been removed from the Haskell language. +Some flags have not been recognized: -XAbsent, -XWrong, -fprint-nothing-at-all + +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TFlags.hs b/plugins/hls-eval-plugin/test/testdata/TFlags.hs new file mode 100644 index 0000000000..075a04dc86 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TFlags.hs @@ -0,0 +1,55 @@ +-- Support for language options + +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE Haskell2010 #-} + +module TFlags where + +-- Language options set in the module source (ScopedTypeVariables) +-- also apply to tests so this works fine +-- >>> f = (\(c::Char) -> [c]) + +{- Multiple options can be set with a single `:set` + +>>> :set -XMultiParamTypeClasses -XFlexibleInstances +>>> class Z a b c +-} + +{- + +Options apply only in the section where they are defined (unless they are in the setup section), so this will fail: + +>>> class L a b c +-} + + +{- +Options apply to all tests in the same section after their declaration. + +Not set yet: + +>>> class D + +Now it works: + +>>>:set -XMultiParamTypeClasses +>>> class C + +It still works + +>>> class F +-} + +{- Now -package flag is handled correctly: + +>>> :set -package ghc-prim +>>> import GHC.Prim + +-} + + +{- Invalid option/flags are reported, but valid ones will be reflected + +>>> :set -XRank2Types -XAbsent -XDatatypeContexts -XWrong -fprint-nothing-at-all + +-} diff --git a/plugins/hls-eval-plugin/test/testdata/THaddock.expected.hs b/plugins/hls-eval-plugin/test/testdata/THaddock.expected.hs new file mode 100644 index 0000000000..222bedfc67 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/THaddock.expected.hs @@ -0,0 +1,42 @@ +{- | +Tests in plain comments in both single line or multi line format, both forward and backward. +Tests are ignored if: + * do not start on the first column (in Ordinary Haskell) + * do not start on the first or second column (in Literate Haskell) +-} +module THaddock () where + +{- ORMOLU_DISABLE -} + +-- | Single line comment +-- >>> "a"++"b" +-- "ab" + +{- | Multi line comment + +>>> "b"++"c" +"bc" +-} + +double :: Num a => a -> a +double a = a + a +-- ^ Single line backward comments +-- >>> double 11 +-- 22 + +twice :: [a] -> [a] +twice a = a ++ a +{- ^ Multi-line backward comments +>>> twice "ABC" +"ABCABC" +-} + +{- | >>> 2+five +7 + + ^-- This works, as it starts at the first column after the header. + + >>> IGNORED as it does not start on the first column +-} +five :: Integer +five = 5 diff --git a/plugins/hls-eval-plugin/test/testdata/THaddock.hs b/plugins/hls-eval-plugin/test/testdata/THaddock.hs new file mode 100644 index 0000000000..03a28975e0 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/THaddock.hs @@ -0,0 +1,37 @@ +{- | +Tests in plain comments in both single line or multi line format, both forward and backward. 
+Tests are ignored if: + * do not start on the first column (in Ordinary Haskell) + * do not start on the first or second column (in Literate Haskell) +-} +module THaddock () where + +{- ORMOLU_DISABLE -} + +-- | Single line comment +-- >>> "a"++"b" + +{- | Multi line comment + +>>> "b"++"c" +-} + +double :: Num a => a -> a +double a = a + a +-- ^ Single line backward comments +-- >>> double 11 + +twice :: [a] -> [a] +twice a = a ++ a +{- ^ Multi-line backward comments +>>> twice "ABC" +-} + +{- | >>> 2+five + + ^-- This works, as it starts at the first column after the header. + + >>> IGNORED as it does not start on the first column +-} +five :: Integer +five = 5 diff --git a/plugins/hls-eval-plugin/test/testdata/TIO.expected.hs b/plugins/hls-eval-plugin/test/testdata/TIO.expected.hs new file mode 100644 index 0000000000..016780bca7 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TIO.expected.hs @@ -0,0 +1,12 @@ +-- IO expressions are supported, stdout/stderr output is ignored +module TIO where + +import Control.Concurrent (threadDelay) + +{- +Does not capture stdout, returns value. +Has a delay in order to show progress reporting. + +>>> threadDelay 2000000 >> print "ABC" >> return "XYZ" +"XYZ" +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TIO.hs b/plugins/hls-eval-plugin/test/testdata/TIO.hs new file mode 100644 index 0000000000..016780bca7 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TIO.hs @@ -0,0 +1,12 @@ +-- IO expressions are supported, stdout/stderr output is ignored +module TIO where + +import Control.Concurrent (threadDelay) + +{- +Does not capture stdout, returns value. +Has a delay in order to show progress reporting. + +>>> threadDelay 2000000 >> print "ABC" >> return "XYZ" +"XYZ" +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TI_Info.hs b/plugins/hls-eval-plugin/test/testdata/TI_Info.hs new file mode 100644 index 0000000000..931ab2d7c4 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TI_Info.hs @@ -0,0 +1,5 @@ +module TI_Info (Eq, Ord, Foo) where + +import InfoUtil (Eq, Ord, Foo) + +-- >>> :i Foo diff --git a/plugins/hls-eval-plugin/test/testdata/TInfo.hs b/plugins/hls-eval-plugin/test/testdata/TInfo.hs new file mode 100644 index 0000000000..5562ff8d6a --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TInfo.hs @@ -0,0 +1,5 @@ +module TInfo (Eq, Ord, Foo) where + +import InfoUtil (Eq, Ord, Foo) + +-- >>> :info Foo diff --git a/plugins/hls-eval-plugin/test/testdata/TInfoBang.hs b/plugins/hls-eval-plugin/test/testdata/TInfoBang.hs new file mode 100644 index 0000000000..dae2550716 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TInfoBang.hs @@ -0,0 +1,5 @@ +module TInfoBang (Eq, Ord, Foo) where + +import InfoUtil (Eq, Ord, Foo) + +-- >>> :info! Foo diff --git a/plugins/hls-eval-plugin/test/testdata/TInfoBangMany.hs b/plugins/hls-eval-plugin/test/testdata/TInfoBangMany.hs new file mode 100644 index 0000000000..b547960a5b --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TInfoBangMany.hs @@ -0,0 +1,5 @@ +module TInfoBangMany (Eq, Ord, Foo, Bar) where + +import InfoUtil (Eq, Ord, Foo, Bar) + +-- >>> :info! 
Foo Bar diff --git a/plugins/hls-eval-plugin/test/testdata/TInfoMany.hs b/plugins/hls-eval-plugin/test/testdata/TInfoMany.hs new file mode 100644 index 0000000000..39d7da6fa4 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TInfoMany.hs @@ -0,0 +1,5 @@ +module TInfoMany (Eq, Ord, Foo, Bar) where + +import InfoUtil (Eq, Ord, Foo, Bar) + +-- >>> :info Foo Bar diff --git a/plugins/hls-eval-plugin/test/testdata/TIt.expected.hs b/plugins/hls-eval-plugin/test/testdata/TIt.expected.hs new file mode 100644 index 0000000000..940fb1a23d --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TIt.expected.hs @@ -0,0 +1,11 @@ +module TIt where + +-- >>> "test" +-- >>> it +-- "test" +-- "test" + +-- >>> pure "test2" +-- >>> it +-- "test2" +-- "test2" diff --git a/plugins/hls-eval-plugin/test/testdata/TIt.hs b/plugins/hls-eval-plugin/test/testdata/TIt.hs new file mode 100644 index 0000000000..9430fbe477 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TIt.hs @@ -0,0 +1,7 @@ +module TIt where + +-- >>> "test" +-- >>> it + +-- >>> pure "test2" +-- >>> it diff --git a/plugins/hls-eval-plugin/test/testdata/TLHS.expected.lhs b/plugins/hls-eval-plugin/test/testdata/TLHS.expected.lhs new file mode 100644 index 0000000000..f62ad893e2 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLHS.expected.lhs @@ -0,0 +1,25 @@ +An example LHS + +> {-# LANGUAGE OverloadedStrings #-} + +> module TLHS where + +More comments, here. + +> -- | >>> 2+2 +> -- 4 + +> -- | >>> x+a +> -- 14 + +> {- | +> >>> 5+5 +> WAS 11 +> NOW 10 +> -} +> x :: Integer +> x = 3 + +> a :: Integer +> a = 11 + diff --git a/plugins/hls-eval-plugin/test/testdata/TLHS.lhs b/plugins/hls-eval-plugin/test/testdata/TLHS.lhs new file mode 100644 index 0000000000..4572772384 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLHS.lhs @@ -0,0 +1,22 @@ +An example LHS + +> {-# LANGUAGE OverloadedStrings #-} + +> module TLHS where + +More comments, here. + +> -- | >>> 2+2 + +> -- | >>> x+a + +> {- | +> >>> 5+5 +> 11 +> -} +> x :: Integer +> x = 3 + +> a :: Integer +> a = 11 + diff --git a/plugins/hls-eval-plugin/test/testdata/TLHSLaTeX.expected.lhs b/plugins/hls-eval-plugin/test/testdata/TLHSLaTeX.expected.lhs new file mode 100644 index 0000000000..be1b15feb7 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLHSLaTeX.expected.lhs @@ -0,0 +1,17 @@ +\documentstyle{article} + +\begin{document} + +\section{Introduction} + +This is a trivial program that prints the first 20 factorials. + +\begin{code} +module TLHSLaTeX where + +-- >>> prod +-- [(1,1),(2,2),(3,6)] +prod = [ (n, product [1..n]) | n <- [1..3]] +\end{code} + +\end{document} diff --git a/plugins/hls-eval-plugin/test/testdata/TLHSLaTeX.lhs b/plugins/hls-eval-plugin/test/testdata/TLHSLaTeX.lhs new file mode 100644 index 0000000000..39d12d4caf --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLHSLaTeX.lhs @@ -0,0 +1,16 @@ +\documentstyle{article} + +\begin{document} + +\section{Introduction} + +This is a trivial program that prints the first 20 factorials. 
+ +\begin{code} +module TLHSLaTeX where + +-- >>> prod +prod = [ (n, product [1..n]) | n <- [1..3]] +\end{code} + +\end{document} diff --git a/plugins/hls-eval-plugin/test/testdata/TLanguageOptionsTupleSections.expected.hs b/plugins/hls-eval-plugin/test/testdata/TLanguageOptionsTupleSections.expected.hs new file mode 100644 index 0000000000..c5f9e0ae60 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLanguageOptionsTupleSections.expected.hs @@ -0,0 +1,9 @@ +-- This works fine: {-# LANGUAGE TupleSections #-} +module TLanguageOptionsTupleSections where + +-- Why oh why is this not working? +-- What is special about TupleSections? +-- >>> :set -XTupleSections +-- >>> ("a",) "b" +-- ("a","b") + diff --git a/plugins/hls-eval-plugin/test/testdata/TLanguageOptionsTupleSections.hs b/plugins/hls-eval-plugin/test/testdata/TLanguageOptionsTupleSections.hs new file mode 100644 index 0000000000..c5f9e0ae60 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLanguageOptionsTupleSections.hs @@ -0,0 +1,9 @@ +-- This works fine: {-# LANGUAGE TupleSections #-} +module TLanguageOptionsTupleSections where + +-- Why oh why is this not working? +-- What is special about TupleSections? +-- >>> :set -XTupleSections +-- >>> ("a",) "b" +-- ("a","b") + diff --git a/plugins/hls-eval-plugin/test/testdata/TLastLine.expected.hs b/plugins/hls-eval-plugin/test/testdata/TLastLine.expected.hs new file mode 100644 index 0000000000..c91988cc4c --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLastLine.expected.hs @@ -0,0 +1,4 @@ +module TLastLine where + +-- >>> take 3 [1..] +-- [1,2,3] diff --git a/plugins/hls-eval-plugin/test/testdata/TLastLine.hs b/plugins/hls-eval-plugin/test/testdata/TLastLine.hs new file mode 100644 index 0000000000..779fb1230a --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLastLine.hs @@ -0,0 +1,3 @@ +module TLastLine where + +-- >>> take 3 [1..] 
\ No newline at end of file diff --git a/plugins/hls-eval-plugin/test/testdata/TLocalImport.expected.hs b/plugins/hls-eval-plugin/test/testdata/TLocalImport.expected.hs new file mode 100644 index 0000000000..39d10a9a4c --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLocalImport.expected.hs @@ -0,0 +1,9 @@ +module TLocalImport where + +import qualified Util + +-- >>> Util.tst 11 11 +-- True + +tst' :: Eq a => a -> a -> Bool +tst' = Util.tst diff --git a/plugins/hls-eval-plugin/test/testdata/TLocalImport.hs b/plugins/hls-eval-plugin/test/testdata/TLocalImport.hs new file mode 100644 index 0000000000..73e327c489 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLocalImport.hs @@ -0,0 +1,8 @@ +module TLocalImport where + +import qualified Util + +-- >>> Util.tst 11 11 + +tst' :: Eq a => a -> a -> Bool +tst' = Util.tst diff --git a/plugins/hls-eval-plugin/test/testdata/TLocalImportInTest.expected.hs b/plugins/hls-eval-plugin/test/testdata/TLocalImportInTest.expected.hs new file mode 100644 index 0000000000..7f4d68cbf3 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLocalImportInTest.expected.hs @@ -0,0 +1,5 @@ +module TLocalImportInTest where + +-- >>> import qualified Util +-- >>> Util.a +-- 'a' diff --git a/plugins/hls-eval-plugin/test/testdata/TLocalImportInTest.hs b/plugins/hls-eval-plugin/test/testdata/TLocalImportInTest.hs new file mode 100644 index 0000000000..55a692bd0c --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TLocalImportInTest.hs @@ -0,0 +1,4 @@ +module TLocalImportInTest where + +-- >>> import qualified Util +-- >>> Util.a diff --git a/plugins/hls-eval-plugin/test/testdata/TMulti.expected.hs b/plugins/hls-eval-plugin/test/testdata/TMulti.expected.hs new file mode 100644 index 0000000000..7c35e06d12 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TMulti.expected.hs @@ -0,0 +1,25 @@ +{- | Multi line comments are parsed correctly. +-} +module TMulti () where + +{- ORMOLU_DISABLE -} + +-- this should work fine if previous multi comment is parsed correctly +-- >>> "a"++"b" +-- "ab" + + {- >>> 3+3 +6 + -} + +-- this should work fine if previous multi comment is parsed correctly +-- >>> "a"++"b" +-- "ab" + + {-| >>> "NOT IGNORED" +"NOT IGNORED" +-} + +-- this should work fine if previous multi comment is parsed correctly +-- >>> "a"++"b" +-- "ab" diff --git a/plugins/hls-eval-plugin/test/testdata/TMulti.hs b/plugins/hls-eval-plugin/test/testdata/TMulti.hs new file mode 100644 index 0000000000..35ea1f9bd8 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TMulti.hs @@ -0,0 +1,19 @@ +{- | Multi line comments are parsed correctly. 
+-} +module TMulti () where + +{- ORMOLU_DISABLE -} + +-- this should work fine if previous multi comment is parsed correctly +-- >>> "a"++"b" + + {- >>> 3+3 + -} + +-- this should work fine if previous multi comment is parsed correctly +-- >>> "a"++"b" + + {-| >>> "NOT IGNORED" -} + +-- this should work fine if previous multi comment is parsed correctly +-- >>> "a"++"b" diff --git a/plugins/hls-eval-plugin/test/testdata/TMultiResult.expected.hs b/plugins/hls-eval-plugin/test/testdata/TMultiResult.expected.hs new file mode 100644 index 0000000000..a8ed57a2ce --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TMultiResult.expected.hs @@ -0,0 +1,13 @@ +module TMultiResult where +-- test multiline show instance (see #2907) + +data Multiline = M {l1 :: String, l2 :: String} deriving Read + +instance Show Multiline where + show m = "M {\n l1=" <> show (l1 m) <> ",\n l2=" <> show (l2 m) <> "\n}" + +-- >>> M "first line" "second line" +-- M { +-- l1="first line", +-- l2="second line" +-- } diff --git a/plugins/hls-eval-plugin/test/testdata/TMultiResult.hs b/plugins/hls-eval-plugin/test/testdata/TMultiResult.hs new file mode 100644 index 0000000000..11037e516f --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TMultiResult.hs @@ -0,0 +1,9 @@ +module TMultiResult where +-- test multiline show instance (see #2907) + +data Multiline = M {l1 :: String, l2 :: String} deriving Read + +instance Show Multiline where + show m = "M {\n l1=" <> show (l1 m) <> ",\n l2=" <> show (l2 m) <> "\n}" + +-- >>> M "first line" "second line" diff --git a/plugins/hls-eval-plugin/test/testdata/TNested.expected.hs b/plugins/hls-eval-plugin/test/testdata/TNested.expected.hs new file mode 100644 index 0000000000..ea5aa78423 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TNested.expected.hs @@ -0,0 +1,15 @@ +module TNested () where +{- +>>> 54 +54 +{- +Nested +-} +-} + +{- +{- +>>> 42 +42 +-} +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TNested.hs b/plugins/hls-eval-plugin/test/testdata/TNested.hs new file mode 100644 index 0000000000..8089bdb29b --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TNested.hs @@ -0,0 +1,13 @@ +module TNested () where +{- +>>> 54 +{- +Nested +-} +-} + +{- +{- +>>> 42 +-} +-} diff --git a/plugins/hls-eval-plugin/test/testdata/TNoImplicitPrelude.expected.hs b/plugins/hls-eval-plugin/test/testdata/TNoImplicitPrelude.expected.hs new file mode 100644 index 0000000000..c0dfae5983 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TNoImplicitPrelude.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +module TNoImplicitPrelude where + +import Data.List (unwords) +import Data.String (String) + +-- >>> unwords example +-- "This is an example of evaluation" +example :: [String] +example = ["This","is","an","example","of","evaluation"] diff --git a/plugins/hls-eval-plugin/test/testdata/TNoImplicitPrelude.hs b/plugins/hls-eval-plugin/test/testdata/TNoImplicitPrelude.hs new file mode 100644 index 0000000000..521fb6a87a --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TNoImplicitPrelude.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +module TNoImplicitPrelude where + +import Data.List (unwords) +import Data.String (String) + +-- >>> unwords example +example :: [String] +example = ["This","is","an","example","of","evaluation"] diff --git a/plugins/hls-eval-plugin/test/testdata/TPlainComment.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPlainComment.expected.hs new file mode 100644 index 0000000000..0243a715d8 --- 
/dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPlainComment.expected.hs @@ -0,0 +1,29 @@ +{- | +Tests in plain comments in both single line or multi line format. +Tests are ignored if: + * do not start on the first column + * are in multi line comments that open and close on the same line +-} +module TPlainComment where + +{- ORMOLU_DISABLE -} + +-- Single line comment +-- >>> "a"++"b" +-- "ab" + +{- Multi line comment + +>>> "b"++"c" +"bc" +-} + +{- >>> 2+five +7 + + ^-- This works, as it starts at the first column after the header. + + >>> IGNORED as it does not start on the first column +-} +five :: Integer +five = 5 diff --git a/plugins/hls-eval-plugin/test/testdata/TPlainComment.hs b/plugins/hls-eval-plugin/test/testdata/TPlainComment.hs new file mode 100644 index 0000000000..2455d26a86 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPlainComment.hs @@ -0,0 +1,26 @@ +{- | +Tests in plain comments in both single line or multi line format. +Tests are ignored if: + * do not start on the first column + * are in multi line comments that open and close on the same line +-} +module TPlainComment where + +{- ORMOLU_DISABLE -} + +-- Single line comment +-- >>> "a"++"b" + +{- Multi line comment + +>>> "b"++"c" +-} + +{- >>> 2+five + + ^-- This works, as it starts at the first column after the header. + + >>> IGNORED as it does not start on the first column +-} +five :: Integer +five = 5 diff --git a/plugins/hls-eval-plugin/test/testdata/TPrelude.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPrelude.expected.hs new file mode 100644 index 0000000000..850ac987dd --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPrelude.expected.hs @@ -0,0 +1,11 @@ +-- Prelude has no special treatment, it is imported as stated in the module. +module TPrelude where + +import Prelude hiding (foldr) + +-- >>> foldr (+) 10 [2,3,5] +-- 20 +foldr :: (a -> z -> z) -> z -> [a] -> z +foldr f z bs = + (foldl (\g a -> g . f a) id bs) z + diff --git a/plugins/hls-eval-plugin/test/testdata/TPrelude.hs b/plugins/hls-eval-plugin/test/testdata/TPrelude.hs new file mode 100644 index 0000000000..2c573325b0 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPrelude.hs @@ -0,0 +1,10 @@ +-- Prelude has no special treatment, it is imported as stated in the module. +module TPrelude where + +import Prelude hiding (foldr) + +-- >>> foldr (+) 10 [2,3,5] +foldr :: (a -> z -> z) -> z -> [a] -> z +foldr f z bs = + (foldl (\g a -> g . f a) id bs) z + diff --git a/plugins/hls-eval-plugin/test/testdata/TProperty.expected.hs b/plugins/hls-eval-plugin/test/testdata/TProperty.expected.hs new file mode 100644 index 0000000000..28336f1e29 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TProperty.expected.hs @@ -0,0 +1,6 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> reverse (reverse l) == l +-- +++ OK, passed 100 tests. 
+ diff --git a/plugins/hls-eval-plugin/test/testdata/TProperty.hs b/plugins/hls-eval-plugin/test/testdata/TProperty.hs new file mode 100644 index 0000000000..8a1bb166bf --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TProperty.hs @@ -0,0 +1,5 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> reverse (reverse l) == l + diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs new file mode 100644 index 0000000000..089779ea2b --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs @@ -0,0 +1,21 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> head l +-- *** Failed! (after 1 test): +-- Exception: +-- Prelude.head: empty list +-- CallStack (from HasCallStack): +-- error, called at libraries/ghc-internal/src/GHC/Internal/List.hs:2030:3 in ghc-internal:GHC.Internal.List +-- errorEmptyList, called at libraries/ghc-internal/src/GHC/Internal/List.hs:96:11 in ghc-internal:GHC.Internal.List +-- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List +-- head, called at :1:27 in interactive:Ghci2 +-- HasCallStack backtrace: +-- collectBacktraces, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:169:13 in ghc-internal:GHC.Internal.Exception +-- toExceptionWithBacktrace, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:204:5 in ghc-internal:GHC.Internal.Exception +-- error, called at libraries/ghc-internal/src/GHC/Internal/List.hs:2030:3 in ghc-internal:GHC.Internal.List +-- errorEmptyList, called at libraries/ghc-internal/src/GHC/Internal/List.hs:96:11 in ghc-internal:GHC.Internal.List +-- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List +-- head, called at :1:27 in interactive:Ghci2 +-- +-- [] diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs new file mode 100644 index 0000000000..46359c86ab --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc912.expected.hs @@ -0,0 +1,6 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> head l +-- *** Failed! Exception: 'Prelude.head: empty list' (after 1 test): +-- [] diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc94.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc94.expected.hs new file mode 100644 index 0000000000..5699e7517e --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc94.expected.hs @@ -0,0 +1,13 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> head l +-- *** Failed! 
(after 1 test): +-- Exception: +-- Prelude.head: empty list +-- CallStack (from HasCallStack): +-- error, called at libraries/base/GHC/List.hs:1646:3 in base:GHC.List +-- errorEmptyList, called at libraries/base/GHC/List.hs:85:11 in base:GHC.List +-- badHead, called at libraries/base/GHC/List.hs:81:28 in base:GHC.List +-- head, called at :1:27 in interactive:Ghci2 +-- [] diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc96.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc96.expected.hs new file mode 100644 index 0000000000..a90fd16600 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc96.expected.hs @@ -0,0 +1,13 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> head l +-- *** Failed! (after 1 test): +-- Exception: +-- Prelude.head: empty list +-- CallStack (from HasCallStack): +-- error, called at libraries/base/GHC/List.hs:1644:3 in base:GHC.List +-- errorEmptyList, called at libraries/base/GHC/List.hs:87:11 in base:GHC.List +-- badHead, called at libraries/base/GHC/List.hs:83:28 in base:GHC.List +-- head, called at :1:27 in interactive:Ghci2 +-- [] diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc98.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc98.expected.hs new file mode 100644 index 0000000000..55b606f0cb --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc98.expected.hs @@ -0,0 +1,13 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> head l +-- *** Failed! (after 1 test): +-- Exception: +-- Prelude.head: empty list +-- CallStack (from HasCallStack): +-- error, called at libraries/base/GHC/List.hs:2004:3 in base:GHC.List +-- errorEmptyList, called at libraries/base/GHC/List.hs:90:11 in base:GHC.List +-- badHead, called at libraries/base/GHC/List.hs:84:28 in base:GHC.List +-- head, called at :1:27 in interactive:Ghci2 +-- [] diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.hs new file mode 100644 index 0000000000..4d70e738f3 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.hs @@ -0,0 +1,4 @@ +-- Support for property checking +module TProperty where + +-- prop> \(l::[Bool]) -> head l diff --git a/plugins/hls-eval-plugin/test/testdata/TSameDefaultLanguageExtensionsAsGhci.expected.hs b/plugins/hls-eval-plugin/test/testdata/TSameDefaultLanguageExtensionsAsGhci.expected.hs new file mode 100644 index 0000000000..ac69b2a0ef --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TSameDefaultLanguageExtensionsAsGhci.expected.hs @@ -0,0 +1,27 @@ +-- The default language extensions for the eval plugin are the same as those for ghci + +module TSameDefaultLanguageExtensionsAsGhci where + +{- +Running `:showi language` within ghci currently lists NoDatatypeContexts, ExtendedDefaultRules, NoMonomorphismRestriction and NondecreasingIndentation. + +The flags NoDatatypeContexts and NondecreasingIndentation are globally set in Haskell2021, whereas ExtendedDefaultRules and NoMonomorphismRestriction are set manually within ghci. +(see https://github.com/ghc/ghc/blob/5abf59976c7335df760e5d8609d9488489478173/ghc/GHCi/UI.hs#L473-L483) + +It therefore suffices to test for ExtendedDefaultRules and NoMonomorphismRestriction only. 
+-} + + +-- ExtendedDefaultRules + +-- >>> [] +-- [] + +-- >>> reverse [] +-- [] + +-- NoMonomorphismRestriction + +-- >>> plus = (+) +-- >>> :t plus +-- plus :: Num a => a -> a -> a diff --git a/plugins/hls-eval-plugin/test/testdata/TSameDefaultLanguageExtensionsAsGhci.hs b/plugins/hls-eval-plugin/test/testdata/TSameDefaultLanguageExtensionsAsGhci.hs new file mode 100644 index 0000000000..148f0f86a8 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TSameDefaultLanguageExtensionsAsGhci.hs @@ -0,0 +1,24 @@ +-- The default language extensions for the eval plugin are the same as those for ghci + +module TSameDefaultLanguageExtensionsAsGhci where + +{- +Running `:showi language` within ghci currently lists NoDatatypeContexts, ExtendedDefaultRules, NoMonomorphismRestriction and NondecreasingIndentation. + +The flags NoDatatypeContexts and NondecreasingIndentation are globally set in Haskell2021, whereas ExtendedDefaultRules and NoMonomorphismRestriction are set manually within ghci. +(see https://github.com/ghc/ghc/blob/5abf59976c7335df760e5d8609d9488489478173/ghc/GHCi/UI.hs#L473-L483) + +It therefore suffices to test for ExtendedDefaultRules and NoMonomorphismRestriction only. +-} + + +-- ExtendedDefaultRules + +-- >>> [] + +-- >>> reverse [] + +-- NoMonomorphismRestriction + +-- >>> plus = (+) +-- >>> :t plus diff --git a/plugins/hls-eval-plugin/test/testdata/TSectionEval.expected.hs b/plugins/hls-eval-plugin/test/testdata/TSectionEval.expected.hs new file mode 100644 index 0000000000..30f3756b76 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TSectionEval.expected.hs @@ -0,0 +1,17 @@ +-- Tests in the same document section are executed together +module TSectionEval where + +{- +>>> 1+1 +2 + +>>> 2+2 +4 +-} + +{- +>>> 3+3 + +>>> 4+4 +-} + diff --git a/plugins/hls-eval-plugin/test/testdata/TSectionEval.hs b/plugins/hls-eval-plugin/test/testdata/TSectionEval.hs new file mode 100644 index 0000000000..9d0226d9ae --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TSectionEval.hs @@ -0,0 +1,15 @@ +-- Tests in the same document section are executed together +module TSectionEval where + +{- +>>> 1+1 + +>>> 2+2 +-} + +{- +>>> 3+3 + +>>> 4+4 +-} + diff --git a/plugins/hls-eval-plugin/test/testdata/TSetup.expected.hs b/plugins/hls-eval-plugin/test/testdata/TSetup.expected.hs new file mode 100644 index 0000000000..12d3ccf966 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TSetup.expected.hs @@ -0,0 +1,12 @@ +-- The setup section is executed before any other test +module TSetup where + +-- $setup +-- >>> x=11 +-- >>> y=22 + +-- >>> x+y +-- 33 + +-- >>> x*y +-- 242 diff --git a/plugins/hls-eval-plugin/test/testdata/TSetup.hs b/plugins/hls-eval-plugin/test/testdata/TSetup.hs new file mode 100644 index 0000000000..e29c3dd7fa --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TSetup.hs @@ -0,0 +1,10 @@ +-- The setup section is executed before any other test +module TSetup where + +-- $setup +-- >>> x=11 +-- >>> y=22 + +-- >>> x+y + +-- >>> x*y diff --git a/plugins/hls-eval-plugin/test/testdata/TTransitive.expected.hs b/plugins/hls-eval-plugin/test/testdata/TTransitive.expected.hs new file mode 100644 index 0000000000..13dc64b913 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TTransitive.expected.hs @@ -0,0 +1,6 @@ +module TTransitive where + +import TLocalImport + +-- >>> tst' 11 11 +-- True diff --git a/plugins/hls-eval-plugin/test/testdata/TTransitive.hs b/plugins/hls-eval-plugin/test/testdata/TTransitive.hs new file mode 100644 index 
0000000000..39d54c3ed0 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TTransitive.hs @@ -0,0 +1,5 @@ +module TTransitive where + +import TLocalImport + +-- >>> tst' 11 11 diff --git a/plugins/hls-eval-plugin/test/testdata/TUNPACK.expected.hs b/plugins/hls-eval-plugin/test/testdata/TUNPACK.expected.hs new file mode 100644 index 0000000000..407202f20f --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TUNPACK.expected.hs @@ -0,0 +1,14 @@ +{- | Won't panic on UNPACKs -} +module TUNPACK (THStatus(..)) where + +type ByteString = String +type BSEndo = ByteString -> ByteString +type BSEndoList = [ByteString] -> [ByteString] + +data THStatus = THStatus + {-# UNPACK #-} !Int -- running total byte count + BSEndoList -- previously parsed lines + BSEndo -- bytestrings to be prepended + +-- >>> "Yay! UNPACK pragma didn't do bad things!" +-- "Yay! UNPACK pragma didn't do bad things!" diff --git a/plugins/hls-eval-plugin/test/testdata/TUNPACK.hs b/plugins/hls-eval-plugin/test/testdata/TUNPACK.hs new file mode 100644 index 0000000000..a16a07ef1a --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/TUNPACK.hs @@ -0,0 +1,13 @@ +{- | Won't panic on UNPACKs -} +module TUNPACK (THStatus(..)) where + +type ByteString = String +type BSEndo = ByteString -> ByteString +type BSEndoList = [ByteString] -> [ByteString] + +data THStatus = THStatus + {-# UNPACK #-} !Int -- running total byte count + BSEndoList -- previously parsed lines + BSEndo -- bytestrings to be prepended + +-- >>> "Yay! UNPACK pragma didn't do bad things!" diff --git a/plugins/hls-eval-plugin/test/testdata/Util.hs b/plugins/hls-eval-plugin/test/testdata/Util.hs new file mode 100644 index 0000000000..a0191ea783 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/Util.hs @@ -0,0 +1,13 @@ +-- Used for testing local imports +module Util + ( tst + , a + , b + ) +where + +tst a b = a == b + +a = 'a' + +b = 'b' diff --git a/plugins/hls-eval-plugin/test/testdata/hie.yaml b/plugins/hls-eval-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..5aa5d712ea --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/hie.yaml @@ -0,0 +1,4 @@ +cradle: + cabal: + - path: "./" + component: "lib:test" diff --git a/plugins/hls-eval-plugin/test/testdata/info-util/InfoUtil.hs b/plugins/hls-eval-plugin/test/testdata/info-util/InfoUtil.hs new file mode 100644 index 0000000000..4d69121335 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/info-util/InfoUtil.hs @@ -0,0 +1,20 @@ +module InfoUtil + ( Eq + , Ord + , Foo (..) + , Bar (..) 
+ , Baz + ) +where + +import Prelude (Eq, Ord) + +data Foo = Foo1 | Foo2 + deriving (Eq, Ord) + +data Bar = Bar1 | Bar2 | Bar3 + deriving (Eq, Ord) + +class Baz t +instance Baz Foo +instance Baz Bar diff --git a/plugins/hls-eval-plugin/test/testdata/info-util/info-util.cabal b/plugins/hls-eval-plugin/test/testdata/info-util/info-util.cabal new file mode 100644 index 0000000000..8c766a58c4 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/info-util/info-util.cabal @@ -0,0 +1,18 @@ +name: info-util +version: 0.1.0.0 +-- synopsis: +-- description: +license: BSD3 +author: Author name here +maintainer: example@example.com +copyright: 2017 Author name here +category: Web +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: + InfoUtil + build-depends: base >= 4.7 && < 5 + default-language: Haskell2010 + ghc-options: -Wall -fwarn-unused-imports diff --git a/plugins/hls-eval-plugin/test/testdata/test.cabal b/plugins/hls-eval-plugin/test/testdata/test.cabal new file mode 100644 index 0000000000..00ed0aaf18 --- /dev/null +++ b/plugins/hls-eval-plugin/test/testdata/test.cabal @@ -0,0 +1,25 @@ +name: test +version: 0.1.0.0 +-- synopsis: +-- description: +license: BSD3 +author: Author name here +maintainer: example@example.com +copyright: 2017 Author name here +category: Web +build-type: Simple +cabal-version: >=1.10 + +library + exposed-modules: + TProperty + TPropertyError + TInfo + TInfoMany + TInfoBang + TInfoBangMany + TI_Info + + build-depends: base >= 4.7 && < 5, QuickCheck, info-util + default-language: Haskell2010 + ghc-options: -Wall -fwarn-unused-imports diff --git a/plugins/hls-explicit-fixity-plugin/README.md b/plugins/hls-explicit-fixity-plugin/README.md new file mode 100644 index 0000000000..409ff7f3dc --- /dev/null +++ b/plugins/hls-explicit-fixity-plugin/README.md @@ -0,0 +1,13 @@ +# Explicit Fixity Plugin + +The hls-explicit-fixity-plugin will show available fixity explicitly while hovering. + +## Demo + +![fixity1](./fixity1.png) + +![fixity2](./fixity2.png) + +## Change log +### 1.0.0.0 +- Released! 
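+
+## Example
+
+As an illustration only (the module name `FixityDemo` and the `+++` operator below are made up for this example and are not part of the plugin or its test suite), a minimal module the plugin can be used on looks like this:
+
+```haskell
+module FixityDemo where
+
+-- A custom operator with an explicit fixity declaration.
+infixr 5 +++
+
+(+++) :: [a] -> [a] -> [a]
+(+++) = (++)
+
+-- With the plugin enabled, hovering over `+++` is expected to add
+-- "infixr 5 `+++`" to the usual hover information.
+example :: [Int]
+example = [1, 2] +++ [3]
+```
+
+Hovering over an occurrence of `+++` appends a line of the form ``infixr 5 `+++``` to the hover contents: the fixity direction, the precedence, and the backticked name, matching how the plugin renders fixity information.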
diff --git a/plugins/hls-explicit-fixity-plugin/fixity1.png b/plugins/hls-explicit-fixity-plugin/fixity1.png new file mode 100644 index 0000000000..0f88fd53d5 Binary files /dev/null and b/plugins/hls-explicit-fixity-plugin/fixity1.png differ diff --git a/plugins/hls-explicit-fixity-plugin/fixity2.png b/plugins/hls-explicit-fixity-plugin/fixity2.png new file mode 100644 index 0000000000..7798192a39 Binary files /dev/null and b/plugins/hls-explicit-fixity-plugin/fixity2.png differ diff --git a/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs b/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs new file mode 100644 index 0000000000..92bc37f743 --- /dev/null +++ b/plugins/hls-explicit-fixity-plugin/src/Ide/Plugin/ExplicitFixity.hs @@ -0,0 +1,121 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS_GHC -Wno-orphans #-} +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} + +module Ide.Plugin.ExplicitFixity(descriptor, Log) where + +import Control.DeepSeq +import Control.Monad.IO.Class (MonadIO, liftIO) +import Data.Either.Extra +import Data.Hashable +import qualified Data.Map.Strict as M +import Data.Maybe +import qualified Data.Set as S +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers, + pluginRules) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (idDelta) +import Development.IDE.Core.Shake (addPersistentRule) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.LSP.Notifications (ghcideNotificationsPluginPriority) +import Development.IDE.Spans.AtPoint +import GHC.Generics (Generic) +import Ide.Plugin.Error +import Ide.Types hiding (pluginId) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder pluginId = (defaultPluginDescriptor pluginId "Provides fixity information in hovers") + { pluginRules = fixityRule recorder + , pluginHandlers = mkPluginHandler SMethod_TextDocumentHover hover + -- Make this plugin has a lower priority than ghcide's plugin to ensure + -- type info display first. 
+ , pluginPriority = ghcideNotificationsPluginPriority - 1 + } + +hover :: PluginMethodHandler IdeState Method_TextDocumentHover +hover state _ (HoverParams (TextDocumentIdentifier uri) pos _) = do + nfp <- getNormalizedFilePathE uri + runIdeActionE "ExplicitFixity" (shakeExtras state) $ do + (FixityMap fixmap, _) <- useWithStaleFastE GetFixity nfp + (HAR{hieAst}, mapping) <- useWithStaleFastE GetHieAst nfp + let ns = getNamesAtPoint hieAst pos mapping + fs = mapMaybe (\n -> (n,) <$> M.lookup n fixmap) ns + pure $ maybeToNull $ toHover fs + where + toHover :: [(Name, Fixity)] -> Maybe Hover + toHover [] = Nothing + toHover fixities = + let -- Splicing fixity info + contents = T.intercalate "\n\n" $ fixityText <$> fixities + -- Append to the previous hover content + contents' = "\n" <> sectionSeparator <> contents + in Just $ Hover (InL (mkPlainText contents')) Nothing + + fixityText :: (Name, Fixity) -> T.Text +#if MIN_VERSION_GLASGOW_HASKELL(9,12,0,0) + fixityText (name, Fixity precedence direction) = +#else + fixityText (name, Fixity _ precedence direction) = +#endif + printOutputable direction <> " " <> printOutputable precedence <> " `" <> printOutputable name <> "`" + +newtype FixityMap = FixityMap (M.Map Name Fixity) +instance Show FixityMap where + show _ = "FixityMap" + +instance NFData FixityMap where + rnf (FixityMap xs) = rnf xs + +instance NFData Fixity where + rnf = rwhnf + +newtype Log = LogShake Shake.Log + +instance Pretty Log where + pretty = \case + LogShake log -> pretty log + +data GetFixity = GetFixity deriving (Show, Eq, Generic) + +instance Hashable GetFixity +instance NFData GetFixity + +type instance RuleResult GetFixity = FixityMap + +-- | Convert a HieAST to FixityTree with fixity info gathered +lookupFixities :: MonadIO m => HscEnv -> TcGblEnv -> S.Set Name -> m (M.Map Name Fixity) +lookupFixities hscEnv tcGblEnv names + = liftIO + $ fmap (fromMaybe M.empty . 
snd) + $ initTcWithGbl hscEnv tcGblEnv (realSrcLocSpan $ mkRealSrcLoc "" 1 1) + $ M.traverseMaybeWithKey (\_ v -> v) + $ M.fromSet lookupFixity names + where + lookupFixity name = do + f <- Util.handleGhcException + (const $ pure Nothing) + (Just <$> lookupFixityRn name) + if f == Just defaultFixity + then pure Nothing + else pure f + +fixityRule :: Recorder (WithPriority Log) -> Rules () +fixityRule recorder = do + define (cmapWithPrio LogShake recorder) $ \GetFixity nfp -> do + HAR{refMap} <- use_ GetHieAst nfp + env <- hscEnv <$> use_ GhcSessionDeps nfp -- deps necessary so that we can consult already loaded in ifaces instead of loading in duplicates + tcGblEnv <- tmrTypechecked <$> use_ TypeCheck nfp + fs <- lookupFixities env tcGblEnv (S.mapMonotonic (\(Right n) -> n) $ S.filter isRight $ M.keysSet refMap) + pure ([], Just (FixityMap fs)) + + -- Ensure that this plugin doesn't block on startup + addPersistentRule GetFixity $ \_ -> pure $ Just (FixityMap M.empty, idDelta, Nothing) diff --git a/plugins/hls-explicit-fixity-plugin/test/Main.hs b/plugins/hls-explicit-fixity-plugin/test/Main.hs new file mode 100644 index 0000000000..26e94091cd --- /dev/null +++ b/plugins/hls-explicit-fixity-plugin/test/Main.hs @@ -0,0 +1,84 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main where + +import qualified Data.Text as T +import Ide.Plugin.ExplicitFixity (Log, descriptor) +import System.FilePath +import Test.Hls + +plugin :: PluginTestDescriptor Log +plugin = mkPluginTestDescriptor descriptor "explicit-fixity" + +main :: IO () +main = defaultTestRunner tests + +tests :: TestTree +tests = testGroup "Explicit fixity" + [ hoverTest "(++)" (Position 5 7) "infixr 5 `++`" + , hoverTest "($)" (Position 6 7) "infixr 0 `$`" + , hoverTest "(.)" (Position 7 7) "infixr 9 `.`" + , hoverTest "(+)" (Position 8 7) "infixl 6 `+`" + , hoverTest "(-)" (Position 9 8) "infixl 6 `-`" + , hoverTest "(<>)" (Position 10 7) "infixr 6 `<>`" + , hoverTest "(>>=)" (Position 11 7) "infixl 1 `>>=`" + , hoverTest "(>=>)" (Position 12 7) "infixr 1 `>=>`" + , hoverTest "elem" (Position 13 7) "infix 4 `elem`" + , hoverTest "on" (Position 14 7) "infixl 0 `on`" + , hoverTest "(||)" (Position 15 8) "infixr 2 `||`" + , hoverTest "mod" (Position 16 8) "infixl 7 `mod`" + , hoverTest "(**)" (Position 17 8) "infixr 8 `**`" + , hoverTest "(^)" (Position 18 8) "infixr 8 `^`" + , hoverTest "(<$)" (Position 19 8) "infixl 4 `<$`" + , hoverTest "seq" (Position 20 9) "infixr 0 `seq`" + , hoverTest "(<|>)" (Position 21 8) "infixl 3 `<|>`" + , hoverTest "fixity define" (Position 23 11) "infixr 7 `>>:`" + , hoverTest "record" (Position 28 10) "infix 9 `>>::`" + , hoverTest "wildcards1" (Position 30 5) "infixr 7 `>>:`" + , hoverTest "wildcards2" (Position 30 5) "infix 9 `>>::`" + , hoverTest "function" (Position 32 11) "infixl 1 `f`" + , hoverTest "signature" (Position 35 2) "infixr 9 `>>>:`" + , hoverTest "operator" (Position 36 2) "infixr 9 `>>>:`" + , hoverTest "escape" (Position 39 2) "infixl 3 `~\\:`" + -- TODO: Ensure that there is no one extra new line in import statement + , hoverTestExpectFail + "import" + (Position 2 18) + (BrokenIdeal "Control.Monad***") + (BrokenCurrent "Control.Monad\n\n") + , hoverTestImport "import" (Position 4 7) "infixr 9 `>>>:`" + ] + +hoverTest :: TestName -> Position -> T.Text -> TestTree +hoverTest = hoverTest' "Hover.hs" + +hoverTestImport :: TestName -> Position -> T.Text -> TestTree +hoverTestImport = hoverTest' "HoverImport.hs" + +hoverTestExpectFail + :: TestName + -> Position 
+ -> ExpectBroken 'Ideal T.Text + -> ExpectBroken 'Current T.Text + -> TestTree +hoverTestExpectFail title pos _ = + hoverTest title pos . unCurrent + +hoverTest' :: String -> TestName -> Position -> T.Text -> TestTree +hoverTest' docName title pos expected = testCase title $ runSessionWithServer def plugin testDataDir $ do + doc <- openDoc docName "haskell" + waitForKickDone + h <- getHover doc pos + case h of + Nothing -> liftIO $ assertFailure "No hover" + Just (Hover contents _) -> case contents of + InL (MarkupContent _ txt) -> do + liftIO + $ assertBool ("Failed to find `" <> T.unpack expected <> "` in hover message: " <> T.unpack txt) + $ expected `T.isInfixOf` txt + _ -> liftIO $ assertFailure "Unexpected content type" + closeDoc doc + +testDataDir :: FilePath +testDataDir = "plugins" "hls-explicit-fixity-plugin" "test" "testdata" diff --git a/plugins/hls-explicit-fixity-plugin/test/testdata/Hover.hs b/plugins/hls-explicit-fixity-plugin/test/testdata/Hover.hs new file mode 100644 index 0000000000..f5fd50a501 --- /dev/null +++ b/plugins/hls-explicit-fixity-plugin/test/testdata/Hover.hs @@ -0,0 +1,40 @@ +{-# LANGUAGE RecordWildCards #-} +module Hover where +import Control.Monad +import Data.Function (on) +import Control.Applicative ((<|>)) +f1 = (++) +f2 = ($) +f3 = (.) +f4 = (+) +f5 = 1 - 2 +f6 = (<>) +f7 = (>>=) +f8 = (>=>) +f9 = elem +f10 = on +f11 = (||) +f12 = mod +f13 = (**) +f14 = (^) +f15 = (<$) +f16 = seq +f17 = (<|>) + +infixr 7 >>: +infix 9 >>:: +data F = G + { (>>:) :: Int -> Int -> Int + , c :: Int + , (>>::) :: Char + } +f G{..} = undefined + +infixl 1 `f` + +infixr 9 >>>: +(>>>:) :: Int -> Int +(>>>:) x = 3 + +infixl 3 ~\: +(~\:) x y = 3 diff --git a/plugins/hls-explicit-fixity-plugin/test/testdata/HoverImport.hs b/plugins/hls-explicit-fixity-plugin/test/testdata/HoverImport.hs new file mode 100644 index 0000000000..e3474eb0c3 --- /dev/null +++ b/plugins/hls-explicit-fixity-plugin/test/testdata/HoverImport.hs @@ -0,0 +1,5 @@ +module HoverImport where + +import Hover + +g = (>>>:) diff --git a/plugins/hls-explicit-fixity-plugin/test/testdata/hie.yaml b/plugins/hls-explicit-fixity-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-explicit-fixity-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs b/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs new file mode 100644 index 0000000000..17634491fe --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/src/Ide/Plugin/ExplicitImports.hs @@ -0,0 +1,566 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.ExplicitImports + ( descriptor + , descriptorForModules + , abbreviateImportTitle + , abbreviateImportTitleWithoutModule + , Log(..) 
+ ) where + +import Control.DeepSeq +import Control.Lens (_Just, (&), (?~), (^?)) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except (ExceptT) +import Control.Monad.Trans.Maybe +import qualified Data.Aeson as A (ToJSON (toJSON)) +import Data.Aeson.Types (FromJSON) +import Data.Char (isSpace) +import qualified Data.IntMap as IM (IntMap, elems, + fromList, (!?)) +import Data.IORef (readIORef) +import Data.List (singleton) +import qualified Data.Map.Strict as Map +import Data.Maybe (isJust, isNothing, + mapMaybe) +import qualified Data.Set as S +import Data.String (fromString) +import qualified Data.Text as T +import qualified Data.Text as Text +import Data.Traversable (for) +import qualified Data.Unique as U (hashUnique, + newUnique) +import Development.IDE hiding (pluginHandlers, + pluginRules) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat hiding ((<+>)) +import Development.IDE.Graph.Classes +import GHC.Generics (Generic) +import Ide.Plugin.Error (PluginError (..), + getNormalizedFilePathE, + handleMaybe) +import qualified Ide.Plugin.RangeMap as RM (RangeMap, + filterByRange, + fromList) +import Ide.Plugin.Resolve +import Ide.PluginUtils +import Ide.Types +import Language.LSP.Protocol.Lens (HasInlayHint (inlayHint), + HasTextDocument (textDocument)) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +-- This plugin is named explicit-imports for historical reasons. Besides +-- providing code actions and lenses to make imports explicit it also provides +-- code actions and lens to refine imports. + +importCommandId :: CommandId +importCommandId = "ImportLensCommand" + +data Log + = LogShake Shake.Log + | LogWAEResponseError (TResponseError Method_WorkspaceApplyEdit) + | forall a. 
(Pretty a) => LogResolve a + + +instance Pretty Log where + pretty = \case + LogShake logMsg -> pretty logMsg + LogWAEResponseError rspErr -> "RequestWorkspaceApplyEdit Failed with " <+> pretty rspErr + LogResolve msg -> pretty msg + +-- | The "main" function of a plugin +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder = + -- (almost) no one wants to see an explicit import list for Prelude + descriptorForModules recorder (/= pRELUDE_NAME) + +descriptorForModules + :: Recorder (WithPriority Log) + -> (ModuleName -> Bool) + -- ^ Predicate to select modules that will be annotated + -> PluginId + -> PluginDescriptor IdeState +descriptorForModules recorder modFilter plId = + let resolveRecorder = cmapWithPrio LogResolve recorder + codeActionHandlers = mkCodeActionHandlerWithResolve resolveRecorder (codeActionProvider recorder) (codeActionResolveProvider recorder) + in (defaultPluginDescriptor plId "Provides a code action to make imports explicit") + { + -- This plugin provides a command handler + pluginCommands = [PluginCommand importCommandId "Explicit import command" (runImportCommand recorder)], + -- This plugin defines a new rule + pluginRules = minimalImportsRule recorder modFilter, + pluginHandlers = + -- This plugin provides code lenses + mkPluginHandler SMethod_TextDocumentCodeLens (lensProvider recorder) + <> mkResolveHandler SMethod_CodeLensResolve (lensResolveProvider recorder) + -- This plugin provides inlay hints + <> mkPluginHandler SMethod_TextDocumentInlayHint (inlayHintProvider recorder) + -- This plugin provides code actions + <> codeActionHandlers + } + +isInlayHintsSupported :: IdeState -> Bool +isInlayHintsSupported ideState = + let clientCaps = Shake.clientCapabilities $ shakeExtras ideState + in isJust $ clientCaps ^? textDocument . _Just . inlayHint . _Just + +-- | The actual command handler +runImportCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState IAResolveData +runImportCommand recorder ideState _ eird@(ResolveOne _ _) = do + wedit <- resolveWTextEdit ideState eird + _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wedit) logErrors + return $ InR Null + where logErrors (Left re) = do + logWith recorder Error (LogWAEResponseError re) + pure () + logErrors (Right _) = pure () +runImportCommand _ _ _ rd = do + throwError $ PluginInvalidParams (T.pack $ "Unexpected argument for command handler:" <> show rd) + + +-- | We provide two code lenses for imports. The first lens makes imports +-- explicit. For example, for the module below: +-- > import Data.List +-- > f = intercalate " " . sortBy length +-- the provider should produce one code lens associated to the import statement: +-- > import Data.List (intercalate, sortBy) +-- +-- The second one allows us to import functions directly from the original +-- module. 
For example, for the following import +-- > import Random.ReExporting.Module (liftIO) +-- the provider should produce one code lens associated to the import statement: +-- > Refine imports to import Control.Monad.IO.Class (liftIO) +lensProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentCodeLens +lensProvider _ state _ CodeLensParams {_textDocument = TextDocumentIdentifier {_uri}} = do + nfp <- getNormalizedFilePathE _uri + (ImportActionsResult{forLens}, pm) <- runActionE "ImportActions" state $ useWithStaleE ImportActions nfp + let lens = [ generateLens _uri newRange int + -- provide ExplicitImport only if the client does not support inlay hints + | not (isInlayHintsSupported state) + , (range, (int, ExplicitImport)) <- forLens + , Just newRange <- [toCurrentRange pm range]] <> + -- RefineImport is always provided because inlay hints cannot + [ generateLens _uri newRange int + | (range, (int, RefineImport)) <- forLens + , Just newRange <- [toCurrentRange pm range]] + pure $ InL lens + where -- because these are non resolved lenses we only need the range and a + -- unique id to later resolve them with. These are for both refine + -- import lenses and for explicit import lenses. + generateLens :: Uri -> Range -> Int -> CodeLens + generateLens uri range int = + CodeLens { _data_ = Just $ A.toJSON $ ResolveOne uri int + , _range = range + , _command = Nothing } + + +lensResolveProvider :: Recorder (WithPriority Log) -> ResolveFunction IdeState IAResolveData 'Method_CodeLensResolve +lensResolveProvider _ ideState plId cl uri rd@(ResolveOne _ uid) = do + nfp <- getNormalizedFilePathE uri + (ImportActionsResult{forResolve}, _) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp + target <- handleMaybe PluginStaleResolve $ forResolve IM.!? uid + let updatedCodeLens = cl & L.command ?~ mkCommand plId target + pure updatedCodeLens + where mkCommand :: PluginId -> ImportEdit -> Command + mkCommand pId (ImportEdit{ieResType, ieText}) = + let -- The only new thing we need to provide to resolve a lens is the + -- title, as the unique Id is the same to resolve the lens title + -- as it is to apply the lens through a command. + -- The title is written differently depending on what type of lens + -- it is. + title ExplicitImport = abbreviateImportTitle ieText + title RefineImport = "Refine imports to " <> T.intercalate ", " (T.lines ieText) + in mkLspCommand pId importCommandId (title ieResType) (Just [A.toJSON rd]) +lensResolveProvider _ _ _ _ _ rd = do + throwError $ PluginInvalidParams (T.pack $ "Unexpected argument for lens resolve handler: " <> show rd) + + +-- | Provide explicit imports in inlay hints. +-- Applying textEdits can make the import explicit. +-- There is currently no need to resolve inlay hints, +-- as no tooltips or commands are provided in the label. +inlayHintProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentInlayHint +inlayHintProvider _ state _ InlayHintParams {_textDocument = TextDocumentIdentifier {_uri}, _range = visibleRange} = + if isInlayHintsSupported state + then do + nfp <- getNormalizedFilePathE _uri + (ImportActionsResult {forLens, forResolve}, pm) <- runActionE "ImportActions" state $ useWithStaleE ImportActions nfp + let inlayHints = [ inlayHint + | (range, (int, _)) <- forLens + , Just newRange <- [toCurrentRange pm range] + , isSubrangeOf newRange visibleRange + , Just ie <- [forResolve IM.!? 
int] + , Just inlayHint <- [generateInlayHints newRange ie pm]] + pure $ InL inlayHints + -- When the client does not support inlay hints, fallback to the code lens, + -- so there is nothing to response here. + -- `[]` is no different from `null`, we chose to use all `[]` to indicate "no information" + else pure $ InL [] + where + -- The appropriate and intended position for the hint hints to begin + -- is the end of the range for the code lens. + -- import Data.Char (isSpace) + -- |--- range ----|-- IH ---| + -- |^-_paddingLeft + -- ^-_position + generateInlayHints :: Range -> ImportEdit -> PositionMapping -> Maybe InlayHint + generateInlayHints (Range _ end) ie pm = do + label <- mkLabel ie + currentEnd <- toCurrentPosition pm end + return InlayHint { _position = currentEnd + , _label = InL label + , _kind = Nothing -- neither a type nor a parameter + , _textEdits = fmap singleton $ toTEdit pm ie + , _tooltip = Just $ InL "Make this import explicit" -- simple enough, no need to resolve + , _paddingLeft = Just True -- show an extra space before the inlay hint + , _paddingRight = Nothing + , _data_ = Nothing + } + mkLabel :: ImportEdit -> Maybe T.Text + mkLabel (ImportEdit{ieResType, ieText}) = + let title ExplicitImport = Just $ abbreviateImportTitleWithoutModule ieText + title RefineImport = Nothing -- does not provide imports statements that can be refined via inlay hints + in title ieResType + + +-- |For explicit imports: If there are any implicit imports, provide both one +-- code action per import to make that specific import explicit, and one code +-- action to turn them all into explicit imports. For refine imports: If there +-- are any reexported imports, provide both one code action per import to refine +-- that specific import, and one code action to refine all imports. +codeActionProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionProvider _ ideState _pId (CodeActionParams _ _ TextDocumentIdentifier {_uri} range _context) = do + nfp <- getNormalizedFilePathE _uri + (ImportActionsResult{forCodeActions}, pm) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp + newRange <- toCurrentRangeE pm range + let relevantCodeActions = RM.filterByRange newRange forCodeActions + allExplicit = + [InR $ mkCodeAction "Make all imports explicit" (Just $ A.toJSON $ ExplicitAll _uri) + -- We should only provide this code action if there are any code + -- of this type + | any (\x -> iaResType x == ExplicitImport) relevantCodeActions] + allRefine = + [InR $ mkCodeAction "Refine all imports" (Just $ A.toJSON $ RefineAll _uri) + -- We should only provide this code action if there are any code + -- of this type + | any (\x -> iaResType x == RefineImport) relevantCodeActions] + -- The only thing different in making the two types of code actions, is + -- the title. The actual resolve data type, ResolveOne is used by both + -- of them + toCodeAction uri (ImportAction _ int ExplicitImport) = + mkCodeAction "Make this import explicit" (Just $ A.toJSON $ ResolveOne uri int) + toCodeAction uri (ImportAction _ int RefineImport) = + mkCodeAction "Refine this import" (Just $ A.toJSON $ ResolveOne uri int) + pure $ InL ((InR . 
toCodeAction _uri <$> relevantCodeActions) <> allExplicit <> allRefine) + where mkCodeAction title data_ = + CodeAction + { _title = title + , _kind = Just CodeActionKind_QuickFix + , _command = Nothing + , _edit = Nothing + , _diagnostics = Nothing + , _isPreferred = Nothing + , _disabled = Nothing + , _data_ = data_} + +codeActionResolveProvider :: Recorder (WithPriority Log) -> ResolveFunction IdeState IAResolveData 'Method_CodeActionResolve +codeActionResolveProvider _ ideState _ ca _ rd = do + wedit <- resolveWTextEdit ideState rd + pure $ ca & L.edit ?~ wedit +-------------------------------------------------------------------------------- + +resolveWTextEdit :: IdeState -> IAResolveData -> ExceptT PluginError (HandlerM Config) WorkspaceEdit +-- Providing the edit for the command, or the resolve for the code action is +-- completely generic, as all we need is the unique id and the text edit. +resolveWTextEdit ideState (ResolveOne uri int) = do + nfp <- getNormalizedFilePathE uri + (ImportActionsResult{forResolve}, pm) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp + iEdit <- handleMaybe PluginStaleResolve $ forResolve IM.!? int + pure $ mkWorkspaceEdit uri [iEdit] pm +resolveWTextEdit ideState (ExplicitAll uri) = do + nfp <- getNormalizedFilePathE uri + (ImportActionsResult{forResolve}, pm) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp + let edits = [ ie | ie@ImportEdit{ieResType = ExplicitImport} <- IM.elems forResolve] + pure $ mkWorkspaceEdit uri edits pm +resolveWTextEdit ideState (RefineAll uri) = do + nfp <- getNormalizedFilePathE uri + (ImportActionsResult{forResolve}, pm) <- runActionE "ImportActions" ideState $ useWithStaleE ImportActions nfp + let edits = [ re | re@ImportEdit{ieResType = RefineImport} <- IM.elems forResolve] + pure $ mkWorkspaceEdit uri edits pm +mkWorkspaceEdit :: Uri -> [ImportEdit] -> PositionMapping -> WorkspaceEdit +mkWorkspaceEdit uri edits pm = + WorkspaceEdit {_changes = Just $ Map.singleton uri (mapMaybe (toTEdit pm) edits) + , _documentChanges = Nothing + , _changeAnnotations = Nothing} + +toTEdit :: PositionMapping -> ImportEdit -> Maybe TextEdit +toTEdit pm ImportEdit{ieRange, ieText} = + let newRange = toCurrentRange pm ieRange + in (\r -> TextEdit r ieText) <$> newRange + +data ImportActions = ImportActions + deriving (Show, Generic, Eq, Ord) + +instance Hashable ImportActions + +instance NFData ImportActions + +type instance RuleResult ImportActions = ImportActionsResult + +data ResultType = ExplicitImport | RefineImport + deriving Eq + +data ImportActionsResult = ImportActionsResult + { -- |For providing the code lenses we need to have a range, and a unique id + -- that is later resolved to the new text for each import. It is stored in + -- a list, because we always need to provide all the code lens in a file. 
+ forLens :: [(Range, (Int, ResultType))] + -- |For the code actions we have the same data as for the code lenses, but + -- we store it in a RangeMap, because that allows us to filter on a specific + -- range with better performance, and code actions are almost always only + -- requested for a specific range + , forCodeActions :: RM.RangeMap ImportAction + -- |For resolve we have an intMap where for every previously provided unique id + -- we provide a textEdit to allow our code actions or code lens to be resolved + , forResolve :: IM.IntMap ImportEdit } + +-- |For resolving code lenses and code actions we need standard text edit stuff, +-- such as range and text, and then we need the result type, because we use this +-- for code lenses which need to create a appropriate title +data ImportEdit = ImportEdit { ieRange :: Range, ieText :: T.Text, ieResType :: ResultType} + +-- |The necessary data for providing code actions: the range, a unique ID for +-- later resolving the action, and the type of action for giving a proper name. +data ImportAction = ImportAction { iaRange :: Range, iaUniqueId :: Int, iaResType :: ResultType} + +instance Show ImportActionsResult where show _ = "" + +instance NFData ImportActionsResult where rnf = rwhnf + +data IAResolveData = ResolveOne + { uri :: Uri + , importId :: Int } + | ExplicitAll + { uri :: Uri } + | RefineAll + { uri :: Uri } + deriving (Generic, Show, A.ToJSON, FromJSON) + +exportedModuleStrings :: ParsedModule -> [String] +exportedModuleStrings ParsedModule{pm_parsed_source = L _ HsModule{..}} + | Just export <- hsmodExports, + exports <- unLoc export + = map (T.unpack . printOutputable) exports +exportedModuleStrings _ = [] + +minimalImportsRule :: Recorder (WithPriority Log) -> (ModuleName -> Bool) -> Rules () +minimalImportsRule recorder modFilter = defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \ImportActions nfp -> runMaybeT $ do + -- Get the typechecking artifacts from the module + tmr <- MaybeT $ use TypeCheck nfp + -- We also need a GHC session with all the dependencies + hsc <- MaybeT $ use GhcSessionDeps nfp + + -- refine imports: 2 layer map ModuleName -> ModuleName -> [Avails] (exports) + import2Map <- do + -- first layer is from current(editing) module to its imports + ImportMap currIm <- MaybeT $ use GetImportMap nfp + for currIm $ \path -> do + -- second layer is from the imports of first layer to their imports + ImportMap importIm <- MaybeT $ use GetImportMap path + for importIm $ \imp_path -> do + imp_hir <- MaybeT $ use GetModIface imp_path + return $ mi_exports $ hirModIface imp_hir + + -- Use the GHC api to extract the "minimal" imports + locationImportWithMinimal <- MaybeT $ liftIO $ extractMinimalImports hsc tmr + + let minimalImportsResult = + [ (range, (printOutputable minImport, ExplicitImport)) + | (location, impDecl, minImport) <- locationImportWithMinimal + , not (isQualifiedImport impDecl) + , not (isExplicitImport impDecl) + , let L _ moduleName = ideclName impDecl + , modFilter moduleName + , let range = realSrcSpanToRange location] + + refineImportsResult = + [ (range, (T.intercalate "\n" + . map (printOutputable . constructImport origImport minImport) + . 
Map.toList + $ filteredInnerImports, RefineImport)) + -- for every minimal imports + | (location, origImport, minImport@(ImportDecl{ideclName = L _ mn})) <- locationImportWithMinimal + -- (almost) no one wants to see an refine import list for Prelude + , mn /= pRELUDE_NAME + -- we check for the inner imports + , Just innerImports <- [Map.lookup mn import2Map] + -- and only get those symbols used + , Just filteredInnerImports <- [filterByImport minImport innerImports] + -- if no symbols from this modules then don't need to generate new import + , not $ null filteredInnerImports + -- and then convert that to a Range + , let range = realSrcSpanToRange location + ] + uniqueAndRangeAndText <- liftIO $ for (minimalImportsResult ++ refineImportsResult) $ \rt -> do + u <- U.hashUnique <$> U.newUnique + pure (u, rt) + let rangeAndUnique = [ ImportAction r u rt | (u, (r, (_, rt))) <- uniqueAndRangeAndText ] + pure ImportActionsResult + { forLens = (\ImportAction{..} -> (iaRange, (iaUniqueId, iaResType))) <$> rangeAndUnique + , forCodeActions = RM.fromList iaRange rangeAndUnique + , forResolve = IM.fromList ((\(u, (r, (te, ty))) -> (u, ImportEdit r te ty)) <$> uniqueAndRangeAndText) } + +-------------------------------------------------------------------------------- + +-- | Use the ghc api to extract a minimal, explicit set of imports for this module +extractMinimalImports :: + HscEnvEq -> + TcModuleResult -> + IO (Maybe [(RealSrcSpan, ImportDecl GhcRn, ImportDecl GhcRn)]) +extractMinimalImports hsc TcModuleResult {..} = runMaybeT $ do + -- extract the original imports and the typechecking environment + let tcEnv = tmrTypechecked +#if MIN_VERSION_ghc(9,9,0) + (_, imports, _, _, _) = tmrRenamed +#else + (_, imports, _, _) = tmrRenamed +#endif + ParsedModule {pm_parsed_source = L loc _} = tmrParsed + emss = exportedModuleStrings tmrParsed + Just srcSpan <- pure $ realSpan loc + -- Don't make suggestions for modules which are also exported, the user probably doesn't want this! + -- See https://github.com/haskell/haskell-language-server/issues/2079 + let notExportedImports = filter (notExported emss) imports + + -- GHC is secretly full of mutable state + gblElts <- liftIO $ readIORef (tcg_used_gres tcEnv) + + -- call findImportUsage does exactly what we need + -- GHC is full of treats like this + let usage = findImportUsage notExportedImports gblElts + (_, Just minimalImports) <- liftIO $ + initTcWithGbl (hscEnv hsc) tcEnv srcSpan $ getMinimalImports usage + + let minimalImportsMap = + Map.fromList + [ (realSrcSpanStart l, impDecl) + | L (locA -> RealSrcSpan l _) impDecl <- minimalImports + ] + results = + [ (location, imp, minImport) + | L (locA -> RealSrcSpan location _) imp <- imports + , Just minImport <- [Map.lookup (realSrcSpanStart location) minimalImportsMap]] + -- return both the original imports and the computed minimal ones + return results + where + notExported :: [String] -> LImportDecl GhcRn -> Bool + notExported [] _ = True + notExported exports (L _ ImportDecl{ideclName = L _ name}) = + not $ any (\e -> ("module " ++ moduleNameString name) == e) exports + +isExplicitImport :: ImportDecl GhcRn -> Bool +isExplicitImport ImportDecl {ideclImportList = Just (Exactly, _)} = True +isExplicitImport _ = False + +-- This number is somewhat arbitrarily chosen. Ideally the protocol would tell us these things, +-- but at the moment I don't believe we know it. 
+-- 80 columns is traditional, but Haskellers tend to use longer lines (citation needed) and it's +-- probably not too bad if the lens is a *bit* longer than normal lines. +maxColumns :: Int +maxColumns = 120 + +-- we don't want to create a really massive code lens (and the decl can be extremely large!). +-- So we abbreviate it to fit a max column size, and indicate how many more items are in the list +-- after the abbreviation +abbreviateImportTitle :: T.Text -> T.Text +abbreviateImportTitle input = + let + -- For starters, we only want one line in the title + -- we also need to compress multiple spaces into one + oneLineText = T.unwords $ filter (not . T.null) $ T.split isSpace input + -- Now, split at the max columns, leaving space for the summary text we're going to add + -- (conservatively assuming we won't need to print a number larger than 100) + (prefix, suffix) = T.splitAt (maxColumns - T.length (summaryText 100)) oneLineText + -- We also want to truncate the last item so we get a "clean" break, rather than half way through + -- something. The conditional here is just because 'breakOnEnd' doesn't give us quite the right thing + -- if there are actually no commas. + (actualPrefix, extraSuffix) = if T.count "," prefix > 0 then T.breakOnEnd "," prefix else (prefix, "") + actualSuffix = extraSuffix <> suffix + + -- The number of additional items is the number of commas+1 + numAdditionalItems = T.count "," actualSuffix + 1 + -- We want to make text like this: import Foo (AImport, BImport, ... (30 items)) + -- We also want it to look sensible if we end up splitting in the module name itself, + summaryText :: Int -> T.Text + summaryText n = " ... (" <> fromString (show n) <> " items)" + -- so we only add a trailing paren if we've split in the export list + suffixText = summaryText numAdditionalItems <> if T.count "(" prefix > 0 then ")" else "" + title = + -- If the original text fits, just use it + if T.length oneLineText <= maxColumns + then oneLineText + else actualPrefix <> suffixText + in title + +-- Create an abbreviated import title without the module part, for inlay hints +abbreviateImportTitleWithoutModule :: T.Text -> T.Text +abbreviateImportTitleWithoutModule = abbreviateImportTitle . T.dropWhile (/= '(') + +-- | The title of the command is ideally the minimal explicit import decl, but +-------------------------------------------------------------------------------- + + +filterByImport :: ImportDecl GhcRn -> Map.Map ModuleName [AvailInfo] -> Maybe (Map.Map ModuleName [AvailInfo]) +filterByImport (ImportDecl{ideclImportList = Just (_, L _ names)}) + avails = + -- if there is a function defined in the current module and is used + -- i.e. if a function is not reexported but defined in the current + -- module then this import cannot be refined + if importedNames `S.isSubsetOf` allFilteredAvailsNames + then Just res + else Nothing + where importedNames = S.fromList $ map (ieName . unLoc) names + res = Map.filter (any (any (`S.member` importedNames) .
getAvailNames)) avails + allFilteredAvailsNames = S.fromList + $ concatMap getAvailNames + $ mconcat + $ Map.elems res +filterByImport _ _ = Nothing + +constructImport :: ImportDecl GhcRn -> ImportDecl GhcRn -> (ModuleName, [AvailInfo]) -> ImportDecl GhcRn +constructImport ImportDecl{ideclQualified = qualified, ideclImportList = origHiding} imd@ImportDecl{ideclImportList = Just (hiding, L _ names)} + (newModuleName, avails) = imd + { ideclName = noLocA newModuleName + , ideclImportList = if isNothing origHiding && qualified /= NotQualified + then Nothing + else Just (hiding, noLocA newNames) + } + where newNames = filter (\n -> any (n `containsAvail`) avails) names + -- Check if a name is exposed by AvailInfo (the available information of a module) + containsAvail :: LIE GhcRn -> AvailInfo -> Bool + containsAvail name avail = + any (\an -> printOutputable an == (printOutputable . ieName . unLoc $ name)) + $ getAvailNames avail + +constructImport _ lim _ = lim + +getAvailNames :: AvailInfo -> [Name] +getAvailNames = +#if MIN_VERSION_ghc(9,7,0) + availNames +#else + availNamesWithSelectors +#endif diff --git a/plugins/hls-explicit-imports-plugin/test/Main.hs b/plugins/hls-explicit-imports-plugin/test/Main.hs new file mode 100644 index 0000000000..01fe1d469e --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/Main.hs @@ -0,0 +1,290 @@ +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +module Main + ( main + ) where + +import Control.Lens ((^.)) +import Control.Monad (unless) +import Data.Either.Extra +import Data.Foldable (find) +import Data.Text (Text) +import qualified Data.Text as T +import Data.Traversable (for) +import qualified Ide.Plugin.ExplicitImports as ExplicitImports +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import System.FilePath ((</>)) +import Test.Hls + +explicitImportsPlugin :: PluginTestDescriptor ExplicitImports.Log +explicitImportsPlugin = mkPluginTestDescriptor ExplicitImports.descriptor "explicitImports" + +main :: IO () +main = defaultTestRunner $ testGroup "import-actions" + [testGroup + "Refine Imports" + [ codeActionGoldenTest "RefineWithOverride" 3 1 + -- Although the client has inlay hints caps, refine is always provided by the code lens + , codeLensGoldenTest codeActionNoResolveCaps isRefineImports "RefineUsualCase" 1 + , codeLensGoldenTest codeActionNoResolveCaps isRefineImports "RefineQualified" 0 + , codeLensGoldenTest codeActionNoResolveCaps isRefineImports "RefineQualifiedExplicit" 0 + ], + testGroup + "Make imports explicit" + [ codeActionAllGoldenTest "ExplicitUsualCase" 3 0 + , codeActionAllResolveGoldenTest "ExplicitUsualCase" 3 0 + , inlayHintsTestWithCap "ExplicitUsualCase" 2 $ (@=?) + [mkInlayHint (Position 2 16) "( a1 )" + (TextEdit (Range (Position 2 0) (Position 2 16)) "import ExplicitA ( a1 )")] + , inlayHintsTestWithoutCap "ExplicitUsualCase" 2 $ (@=?) [] + , codeActionOnlyGoldenTest "ExplicitOnlyThis" 3 0 + , codeActionOnlyResolveGoldenTest "ExplicitOnlyThis" 3 0 + , inlayHintsTestWithCap "ExplicitOnlyThis" 3 $ (@=?) + [mkInlayHint (Position 3 16) "( b1 )" + (TextEdit (Range (Position 3 0) (Position 3 16)) "import ExplicitB ( b1 )")] + , inlayHintsTestWithoutCap "ExplicitOnlyThis" 3 $ (@=?)
[] + -- Only when the client does not support inlay hints, explicit will be provided by code lens + , codeLensGoldenTest codeActionNoInlayHintsCaps notRefineImports "ExplicitUsualCase" 0 + , noCodeLensTest codeActionNoResolveCaps "ExplicitUsualCase" + , codeActionBreakFile "ExplicitBreakFile" 4 0 + , inlayHintsTestWithCap "ExplicitBreakFile" 3 $ (@=?) + [mkInlayHint (Position 3 16) "( a1 )" + (TextEdit (Range (Position 3 0) (Position 3 16)) "import ExplicitA ( a1 )")] + , inlayHintsTestWithoutCap "ExplicitBreakFile" 3 $ (@=?) [] + , codeActionStaleAction "ExplicitStaleAction" 4 0 + , testCase "No CodeAction when exported" $ + runSessionWithServer def explicitImportsPlugin testDataDir $ do + doc <- openDoc "ExplicitExported.hs" "haskell" + action <- getCodeActions doc (pointRange 3 0) + liftIO $ action @?= [] + , testCase "No CodeLens when exported" $ + runSessionWithServer def explicitImportsPlugin testDataDir $ do + doc <- openDoc "ExplicitExported.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ lenses @?= [] + , testCase "No InlayHints when exported" $ + runSessionWithServer def explicitImportsPlugin testDataDir $ do + doc <- openDoc "ExplicitExported.hs" "haskell" + inlayHints <- getInlayHints doc (pointRange 3 0) + liftIO $ inlayHints @?= [] + , testGroup "Title abbreviation" + [ testCase "not abbreviated" $ + let i = "import " <> T.replicate 70 "F" <> " (Athing, Bthing, Cthing)" + in ExplicitImports.abbreviateImportTitle i @?= i + , testCase "abbreviated in module name" $ + let i = "import " <> T.replicate 120 "F" <> " (Athing, Bthing, Cthing)" + o = "import " <> T.replicate 97 "F" <> " ... (3 items)" + in ExplicitImports.abbreviateImportTitle i @?= o + , testCase "abbreviated in import list" $ + let i = "import " <> T.replicate 78 "F" <> " (Athing, Bthing, Cthing, Dthing, Ething)" + o = "import " <> T.replicate 78 "F" <> " (Athing, Bthing, ... (3 items))" + in ExplicitImports.abbreviateImportTitle i @?= o + -- This one breaks earlier in the same import item, but still splits the list in the same place + , testCase "abbreviated in import list (slightly shorter module)" $ + let i = "import " <> T.replicate 76 "F" <> " (Athing, Bthing, Cthing, Dthing, Ething)" + o = "import " <> T.replicate 76 "F" <> " (Athing, Bthing, ... (3 items))" + in ExplicitImports.abbreviateImportTitle i @?= o + -- This one breaks later in the same import item, but still splits the list in the same place + , testCase "abbreviated in import list (slightly longer module)" $ + let i = "import " <> T.replicate 80 "F" <> " (Athing, Bthing, Cthing, Dthing, Ething)" + o = "import " <> T.replicate 80 "F" <> " (Athing, Bthing, ... (3 items))" + in ExplicitImports.abbreviateImportTitle i @?= o + ] + , testGroup "Title abbreviation without module" + [ testCase "not abbreviated" $ + let i = "import M (" <> T.replicate 70 "F" <> ", Athing, Bthing, Cthing)" + o = "(FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, Athing, Bthing, Cthing)" + in ExplicitImports.abbreviateImportTitleWithoutModule i @?= o + , testCase "abbreviated that drop module name" $ + let i = "import " <> T.replicate 120 "F" <> " (Athing, Bthing, Cthing)" + o = "(Athing, Bthing, Cthing)" + in ExplicitImports.abbreviateImportTitleWithoutModule i @?= o + , testCase "abbreviated in import list" $ + let i = "import M (Athing, Bthing, " <> T.replicate 100 "F" <> ", Cthing, Dthing, Ething)" + o = "(Athing, Bthing, ... 
(4 items))" + in ExplicitImports.abbreviateImportTitleWithoutModule i @?= o + ] + ]] + +-- code action tests + +codeActionAllGoldenTest :: FilePath -> Int -> Int -> TestTree +codeActionAllGoldenTest fp l c = goldenWithImportActions " code action" fp codeActionNoResolveCaps $ \doc -> do + actions <- getCodeActions doc (pointRange l c) + case find ((== Just "Make all imports explicit") . caTitle) actions of + Just (InR x) -> executeCodeAction x + _ -> liftIO $ assertFailure "Unable to find CodeAction" + +codeActionBreakFile :: FilePath -> Int -> Int -> TestTree +-- If use `codeActionNoResolveCaps` instead of `codeActionNoInlayHintsCaps` here, +-- we will get a puzzling error: https://github.com/haskell/haskell-language-server/pull/4235#issuecomment-2189048997 +codeActionBreakFile fp l c = goldenWithImportActions " code action" fp codeActionNoInlayHintsCaps $ \doc -> do + _ <- getCodeLenses doc + changeDoc doc [edit] + actions <- getCodeActions doc (pointRange l c) + case find ((== Just "Make all imports explicit") . caTitle) actions of + Just (InR x) -> executeCodeAction x + _ -> liftIO $ assertFailure "Unable to find CodeAction" + where edit = TextDocumentContentChangeEvent $ InL + TextDocumentContentChangePartial + { _range = pointRange 2 29 + , _rangeLength = Nothing + , _text = "x" + } + +codeActionStaleAction :: FilePath -> Int -> Int -> TestTree +codeActionStaleAction fp l c = goldenWithImportActions " code action" fp codeActionResolveCaps $ \doc -> do + _ <- waitForDiagnostics + actions <- getCodeActions doc (pointRange l c) + changeDoc doc [edit] + _ <- waitForDiagnostics + case find ((== Just "Make this import explicit") . caTitle) actions of + Just (InR x) -> + maybeResolveCodeAction x >>= + \case Just _ -> liftIO $ assertFailure "Code action still valid" + Nothing -> pure () + _ -> liftIO $ assertFailure "Unable to find CodeAction" + where edit = TextDocumentContentChangeEvent $ InL + TextDocumentContentChangePartial + { _range = Range (Position 6 0) (Position 6 0) + , _rangeLength = Nothing + , _text = "\ntesting = undefined" + } + +codeActionAllResolveGoldenTest :: FilePath -> Int -> Int -> TestTree +codeActionAllResolveGoldenTest fp l c = goldenWithImportActions " code action resolve" fp codeActionResolveCaps $ \doc -> do + actions <- getCodeActions doc (pointRange l c) + Just (InR x) <- pure $ find ((== Just "Make all imports explicit") . caTitle) actions + resolved <- resolveCodeAction x + executeCodeAction resolved + +codeActionOnlyGoldenTest :: FilePath -> Int -> Int -> TestTree +codeActionOnlyGoldenTest fp l c = goldenWithImportActions " code action" fp codeActionNoResolveCaps $ \doc -> do + actions <- getCodeActions doc (pointRange l c) + case find ((== Just "Make this import explicit") . caTitle) actions of + Just (InR x) -> executeCodeAction x + _ -> liftIO $ assertFailure "Unable to find CodeAction" + +codeActionOnlyResolveGoldenTest :: FilePath -> Int -> Int -> TestTree +codeActionOnlyResolveGoldenTest fp l c = goldenWithImportActions " code action resolve" fp codeActionResolveCaps $ \doc -> do + actions <- getCodeActions doc (pointRange l c) + Just (InR x) <- pure $ find ((== Just "Make this import explicit") . caTitle) actions + resolved <- resolveCodeAction x + executeCodeAction resolved + +maybeResolveCodeAction :: CodeAction -> Session (Maybe CodeAction) +maybeResolveCodeAction ca = do + resolveResponse <- request SMethod_CodeActionResolve ca + let resolved = resolveResponse ^. 
L.result + pure $ eitherToMaybe resolved + +caTitle :: (Command |? CodeAction) -> Maybe Text +caTitle (InR CodeAction {_title}) = Just _title +caTitle _ = Nothing + +-- code lens tests + +codeLensGoldenTest :: ClientCapabilities -> (CodeLens -> Bool) -> FilePath -> Int -> TestTree +codeLensGoldenTest caps predicate fp i = goldenWithImportActions " code lens" fp caps $ \doc -> do + codeLenses <- getCodeLenses doc + resolvedCodeLenses <- for codeLenses resolveCodeLens + (CodeLens {_command = Just c}) <- pure (filter predicate resolvedCodeLenses !! i) + executeCmd c + +noCodeLensTest :: ClientCapabilities -> FilePath -> TestTree +noCodeLensTest caps fp = do + testCase (fp ++ " no code lens") $ run $ \_ -> do + doc <- openDoc (fp ++ ".hs") "haskell" + codeLenses <- getCodeLenses doc + resolvedCodeLenses <- for codeLenses resolveCodeLens + unless (null resolvedCodeLenses) $ + liftIO (assertFailure "Unexpected code lens") + where + run = runSessionWithTestConfig def + { testDirLocation = Left testDataDir + , testConfigCaps = caps + , testLspConfig = def + , testPluginDescriptor = explicitImportsPlugin + } + + +notRefineImports :: CodeLens -> Bool +notRefineImports (CodeLens _ (Just (Command text _ _)) _) + | "Refine imports to" `T.isPrefixOf` text = False +notRefineImports _ = True + +-- inlay hints tests + +inlayHintsTest :: ClientCapabilities -> String -> FilePath -> UInt -> ([InlayHint] -> Assertion) -> TestTree +inlayHintsTest configCaps postfix fp line assert = testCase (fp ++ postfix) $ run $ \_ -> do + doc <- openDoc (fp ++ ".hs") "haskell" + inlayHints <- getInlayHints doc (lineRange line) + liftIO $ assert inlayHints + where + -- zero-based position + lineRange line = Range (Position line 0) (Position line 1000) + run = runSessionWithTestConfig def + { testDirLocation = Left testDataDir + , testPluginDescriptor = explicitImportsPlugin + , testConfigCaps = configCaps + } + +inlayHintsTestWithCap :: FilePath -> UInt -> ([InlayHint] -> Assertion) -> TestTree +inlayHintsTestWithCap = inlayHintsTest fullLatestClientCaps " inlay hints with client caps" + +inlayHintsTestWithoutCap :: FilePath -> UInt -> ([InlayHint] -> Assertion) -> TestTree +inlayHintsTestWithoutCap = inlayHintsTest codeActionNoInlayHintsCaps " inlay hints without client caps" + + +mkInlayHint :: Position -> Text -> TextEdit -> InlayHint +mkInlayHint pos label textEdit = + InlayHint + { _position = pos + , _label = InL label + , _kind = Nothing + , _textEdits = Just [textEdit] + , _tooltip = Just $ InL "Make this import explicit" + , _paddingLeft = Just True + , _paddingRight = Nothing + , _data_ = Nothing + } + +-- Execute command and wait for result +executeCmd :: Command -> Session () +executeCmd cmd = do + executeCommand cmd + _resp <- skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + -- liftIO $ print _resp + return () + +-- helpers + +goldenWithImportActions :: String -> FilePath -> ClientCapabilities -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithImportActions title fp caps = goldenWithHaskellAndCaps def caps explicitImportsPlugin (fp <> title <> " (golden)") testDataDir fp "expected" "hs" + +testDataDir :: String +testDataDir = "plugins" </> "hls-explicit-imports-plugin" </> "test" </> "testdata" + +pointRange :: Int -> Int -> Range +pointRange + (subtract 1 -> fromIntegral -> line) + (subtract 1 -> fromIntegral -> col) = + Range (Position line col) (Position line $ col + 1) + +------------------------------------------------------------------------------- +-- code action tests + 
+codeActionGoldenTest :: FilePath -> Int -> Int -> TestTree +codeActionGoldenTest fp l c = goldenWithImportActions "" fp codeActionNoResolveCaps $ \doc -> do + actions <- getCodeActions doc (pointRange l c) + case find ((== Just "Refine all imports") . caTitle) actions of + Just (InR x) -> executeCodeAction x + _ -> liftIO $ assertFailure "Unable to find CodeAction" + +isRefineImports :: CodeLens -> Bool +isRefineImports (CodeLens _ (Just (Command txt _ _)) _) + | "Refine imports to" `T.isInfixOf` txt = True +isRefineImports _ = False diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitA.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitA.hs new file mode 100644 index 0000000000..8c69f8d84b --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitA.hs @@ -0,0 +1,7 @@ +module ExplicitA where + +a1 :: String +a1 = "a1" + +a2 :: String +a2 = "a2" diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitB.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitB.hs new file mode 100644 index 0000000000..7eb07baca6 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitB.hs @@ -0,0 +1,7 @@ +module ExplicitB where + +b1 :: String +b1 = "b1" + +b2 :: String +b2 = "b2" diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitBreakFile.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitBreakFile.expected.hs new file mode 100644 index 0000000000..3ff53cddac --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitBreakFile.expected.hs @@ -0,0 +1,6 @@ +{-# OPTIONS_GHC -Wall #-} +module ExplicitBreakFile whexe + +import ExplicitA ( a1 ) + +main = putStrLn $ "hello " ++ a1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitBreakFile.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitBreakFile.hs new file mode 100644 index 0000000000..bb20a5f1b0 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitBreakFile.hs @@ -0,0 +1,6 @@ +{-# OPTIONS_GHC -Wall #-} +module ExplicitBreakFile where + +import ExplicitA + +main = putStrLn $ "hello " ++ a1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitExported.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitExported.hs new file mode 100644 index 0000000000..ef6591ef3b --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitExported.hs @@ -0,0 +1,6 @@ +module ExplicitExported (module ExplicitA) where + +import ExplicitA + +main :: IO () +main = putStrLn $ "hello " ++ a1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitOnlyThis.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitOnlyThis.expected.hs new file mode 100644 index 0000000000..5dc9602676 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitOnlyThis.expected.hs @@ -0,0 +1,7 @@ +module ExplicitOnlyThis where + +import ExplicitA ( a1 ) +import ExplicitB + +main :: IO () +main = putStrLn $ "hello " ++ a1 ++ b1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitOnlyThis.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitOnlyThis.hs new file mode 100644 index 0000000000..eab53a795d --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitOnlyThis.hs @@ -0,0 +1,7 @@ +module ExplicitOnlyThis where + +import ExplicitA +import ExplicitB + +main :: IO () +main = putStrLn $ "hello " ++ a1 ++ b1 diff --git 
a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitStaleAction.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitStaleAction.expected.hs new file mode 100644 index 0000000000..4433837a8e --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitStaleAction.expected.hs @@ -0,0 +1,8 @@ +{-# OPTIONS_GHC -Wall #-} +module ExplicitStaleAction where + +import ExplicitA + +main = putStrLn $ "hello " ++ a1 + +testing = undefined \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitStaleAction.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitStaleAction.hs new file mode 100644 index 0000000000..864fbbc2c3 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitStaleAction.hs @@ -0,0 +1,6 @@ +{-# OPTIONS_GHC -Wall #-} +module ExplicitStaleAction where + +import ExplicitA + +main = putStrLn $ "hello " ++ a1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitUsualCase.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitUsualCase.expected.hs new file mode 100644 index 0000000000..f96b6b2322 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitUsualCase.expected.hs @@ -0,0 +1,6 @@ +module ExplicitUsualCase where + +import ExplicitA ( a1 ) + +main :: IO () +main = putStrLn $ "hello " ++ a1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitUsualCase.hs b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitUsualCase.hs new file mode 100644 index 0000000000..6ca72a9d31 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/ExplicitUsualCase.hs @@ -0,0 +1,6 @@ +module ExplicitUsualCase where + +import ExplicitA + +main :: IO () +main = putStrLn $ "hello " ++ a1 diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineA.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineA.hs new file mode 100644 index 0000000000..eac694c211 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineA.hs @@ -0,0 +1,7 @@ +module RefineA + ( module RefineB + , module RefineC + ) where + +import RefineB +import RefineC \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineB.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineB.hs new file mode 100644 index 0000000000..aace4c226f --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineB.hs @@ -0,0 +1,7 @@ +module RefineB where + +b1 :: String +b1 = "b1" + +b2 :: String +b2 = "b2" \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineC.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineC.hs new file mode 100644 index 0000000000..7af22d912f --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineC.hs @@ -0,0 +1,4 @@ +module RefineC where + +c1 :: String +c1 = "c1" \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineD.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineD.hs new file mode 100644 index 0000000000..0c6d8aba16 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineD.hs @@ -0,0 +1,7 @@ +module RefineD (module RefineE, module RefineD) where + +import RefineE hiding (e1) +import qualified RefineE + +e1 :: String +e1 = RefineE.e1 <> " but overrided" \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineE.hs 
b/plugins/hls-explicit-imports-plugin/test/testdata/RefineE.hs new file mode 100644 index 0000000000..07a3125ca7 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineE.hs @@ -0,0 +1,7 @@ +module RefineE where + +e1 :: String +e1 = "e1" + +e2 :: String +e2 = "e2" \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineF.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineF.hs new file mode 100644 index 0000000000..dd264e3ddf --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineF.hs @@ -0,0 +1,7 @@ +module RefineF (module RefineF, module RefineG) where + +import RefineG + +f1 :: String +f1 = "f1" + diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineG.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineG.hs new file mode 100644 index 0000000000..f38731ba03 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineG.hs @@ -0,0 +1,4 @@ +module RefineG where + +g1 :: String +g1 = "g1" \ No newline at end of file diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualified.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualified.expected.hs new file mode 100644 index 0000000000..e94827bccd --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualified.expected.hs @@ -0,0 +1,11 @@ +module Main where + +import qualified RefineB as RA +import qualified RefineC as RA +import RefineD +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [RA.b1, RA.c1, e2] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualified.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualified.hs new file mode 100644 index 0000000000..78e7f4566b --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualified.hs @@ -0,0 +1,10 @@ +module Main where + +import qualified RefineA as RA +import RefineD +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [RA.b1, RA.c1, e2] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualifiedExplicit.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualifiedExplicit.expected.hs new file mode 100644 index 0000000000..11e38aabfc --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualifiedExplicit.expected.hs @@ -0,0 +1,11 @@ +module Main where + +import qualified RefineB as RA ( b1 ) +import qualified RefineC as RA ( c1 ) +import RefineD +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [RA.b1, RA.c1, e2] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualifiedExplicit.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualifiedExplicit.hs new file mode 100644 index 0000000000..03e65d5d88 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineQualifiedExplicit.hs @@ -0,0 +1,10 @@ +module Main where + +import qualified RefineA as RA (b1, c1) +import RefineD +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [RA.b1, RA.c1, e2] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineUsualCase.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineUsualCase.expected.hs new file mode 100644 index 0000000000..1e0c36661b --- /dev/null +++ 
b/plugins/hls-explicit-imports-plugin/test/testdata/RefineUsualCase.expected.hs @@ -0,0 +1,10 @@ +module Main where + +import RefineA +import RefineE ( e2 ) +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [b1, c1, e2] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineUsualCase.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineUsualCase.hs new file mode 100644 index 0000000000..2d59232063 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineUsualCase.hs @@ -0,0 +1,10 @@ +module Main where + +import RefineA +import RefineD +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [b1, c1, e2] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineWithOverride.expected.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineWithOverride.expected.hs new file mode 100644 index 0000000000..06e195d639 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineWithOverride.expected.hs @@ -0,0 +1,12 @@ +module Main where + +import RefineB ( b1 ) +import RefineC ( c1 ) +import RefineD +import RefineF +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [b1, c1, e1, f1, g1] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/RefineWithOverride.hs b/plugins/hls-explicit-imports-plugin/test/testdata/RefineWithOverride.hs new file mode 100644 index 0000000000..dba1ab7fbb --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/RefineWithOverride.hs @@ -0,0 +1,11 @@ +module Main where + +import RefineA +import RefineD +import RefineF +import Data.List (intercalate) + +main :: IO () +main = putStrLn + $ "hello " + <> intercalate ", " [b1, c1, e1, f1, g1] diff --git a/plugins/hls-explicit-imports-plugin/test/testdata/hie.yaml b/plugins/hls-explicit-imports-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..369b6e1dd3 --- /dev/null +++ b/plugins/hls-explicit-imports-plugin/test/testdata/hie.yaml @@ -0,0 +1,20 @@ +cradle: + direct: + arguments: + - ExplicitOnlyThis.hs + - ExplicitStaleAction.hs + - ExplicitUsualCase.hs + - ExplicitExported.hs + - ExplicitA.hs + - ExplicitB.hs + - RefineUsualCase.hs + - RefineQualified.hs + - RefineQualifiedExplicit.hs + - RefineWithOverride.hs + - RefineA.hs + - RefineB.hs + - RefineC.hs + - RefineD.hs + - RefineE.hs + - RefineF.hs + - RefineG.hs diff --git a/plugins/hls-explicit-record-fields-plugin/README.md b/plugins/hls-explicit-record-fields-plugin/README.md new file mode 100644 index 0000000000..d4330c3445 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/README.md @@ -0,0 +1,18 @@ +# Explicit Record Fields Plugin + +`hls-explicit-record-fields-plugin` is a plugin to expand record wildcards, explicitly listing all record fields as field puns. It works in both record construction and pattern binding scenarios, and it works as you would expect regardless of whether there are explicitly provided fields or puns in addition to the wildcard. + + +## Demo + +![Expand Wildcard Demo](wildcard.gif) + + +## Known limitations + +One of the shortcomings of the current approach is that all fields of the record are expanded, whether they are actually used or not. This results in warnings of unused bindings, if the corresponding warning flag is enabled. 
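+
+As a rough illustration of both the expansion and the limitation above (a sketch only; `MyRec` and its fields mirror the plugin's test data rather than any real API):
+
+```haskell
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NamedFieldPuns #-} -- inserted by the code action if not already enabled
+
+data MyRec = MyRec { foo :: Int, bar :: Int, baz :: Int }
+
+-- Before: the wildcard implicitly binds every field of MyRec
+sumFooBar :: MyRec -> Int
+sumFooBar MyRec{..} = foo + bar
+
+-- After "Expand record wildcard": every field is listed as a pun, including
+-- the unused `baz`, which is what can trigger an unused-binding warning
+sumFooBar' :: MyRec -> Int
+sumFooBar' MyRec{foo, bar, baz} = foo + bar
+```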
+ + +## Change log +### 1.0.0.0 +- Release diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs new file mode 100644 index 0000000000..a111e9062b --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs @@ -0,0 +1,648 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.ExplicitFields + ( descriptor + , Log + ) where + +import Control.Arrow ((&&&)) +import Control.Lens ((&), (?~), (^.)) +import Control.Monad (replicateM) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Maybe +import Data.Aeson (ToJSON (toJSON)) +import Data.Generics (GenericQ, everything, + everythingBut, extQ, mkQ) +import qualified Data.IntMap.Strict as IntMap +import Data.List (find, intersperse) +import qualified Data.Map as Map +import Data.Maybe (fromMaybe, isJust, + mapMaybe, maybeToList) +import Data.Text (Text) +import qualified Data.Text as T +import Data.Unique (hashUnique, newUnique) +import Development.IDE (IdeState, + Location (Location), + Pretty (..), + Range (Range, _end, _start), + Recorder (..), Rules, + WithPriority (..), + defineNoDiagnostics, + getDefinition, hsep, + printName, + realSrcSpanToRange, + shakeExtras, + srcSpanToLocation, + srcSpanToRange, viaShow) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (PositionMapping, + toCurrentPosition, + toCurrentRange) +import Development.IDE.Core.RuleTypes (TcModuleResult (..), + TypeCheck (..)) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat (FieldLabel (flSelector), + FieldOcc (FieldOcc), + GenLocated (L), GhcPass, + GhcTc, + HasSrcSpan (getLoc), + HsConDetails (RecCon), + HsExpr (HsApp, HsVar, XExpr), + HsFieldBind (hfbLHS), + HsRecFields (..), + HsWrap (HsWrap), LPat, + Located, + NamedThing (getName), + Outputable, + TcGblEnv (tcg_binds), + Var (varName), + XXExprGhcTc (..), + conLikeFieldLabels, + nameSrcSpan, + pprNameUnqualified, + recDotDot, unLoc) +import Development.IDE.GHC.Compat.Core (Extension (NamedFieldPuns), + HsExpr (RecordCon, rcon_flds), + HsRecField, LHsExpr, + LocatedA, Name, Pat (..), + RealSrcSpan, UniqFM, + conPatDetails, emptyUFM, + hfbPun, hfbRHS, + lookupUFM, + mapConPatDetail, mapLoc, + pattern RealSrcSpan, + plusUFM_C, unitUFM) +import Development.IDE.GHC.Util (getExtensions, + printOutputable, + stripOccNamePrefix) +import Development.IDE.Graph (RuleResult) +import Development.IDE.Graph.Classes (Hashable, NFData) +import Development.IDE.Spans.Pragmas (NextPragmaInfo (..), + getFirstPragma, + insertNewPragma) +import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (Identifier) +import Ide.Logger (Priority (..), + cmapWithPrio, logWith, + (<+>)) +import Ide.Plugin.Error (PluginError (PluginInternalError, PluginStaleResolve), + getNormalizedFilePathE, + handleMaybe) +import Ide.Plugin.RangeMap (RangeMap) +import qualified Ide.Plugin.RangeMap as RangeMap +import Ide.Plugin.Resolve (mkCodeActionWithResolveAndCommand) +import Ide.Types (PluginDescriptor (..), + PluginId (..), + PluginMethodHandler, + ResolveFunction, + defaultPluginDescriptor, + mkPluginHandler) +import qualified Language.LSP.Protocol.Lens as L 
+import Language.LSP.Protocol.Message (Method (..), + SMethod (SMethod_TextDocumentInlayHint)) +import Language.LSP.Protocol.Types (CodeAction (..), + CodeActionKind (CodeActionKind_RefactorRewrite), + CodeActionParams (CodeActionParams), + Command, InlayHint (..), + InlayHintLabelPart (InlayHintLabelPart), + InlayHintParams (InlayHintParams, _range, _textDocument), + TextDocumentIdentifier (TextDocumentIdentifier), + TextEdit (TextEdit), + WorkspaceEdit (WorkspaceEdit), + type (|?) (InL, InR)) + +#if __GLASGOW_HASKELL__ < 910 +import Development.IDE.GHC.Compat (HsExpansion (HsExpanded)) +#endif + +data Log + = LogShake Shake.Log + | LogCollectedRecords [RecordInfo] + | LogRenderedRecords [TextEdit] + | forall a. (Pretty a) => LogResolve a + + +instance Pretty Log where + pretty = \case + LogShake shakeLog -> pretty shakeLog + LogCollectedRecords recs -> "Collected records with wildcards:" <+> pretty recs + LogRenderedRecords recs -> "Rendered records:" <+> viaShow recs + LogResolve msg -> pretty msg + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + let resolveRecorder = cmapWithPrio LogResolve recorder + (carCommands, caHandlers) = mkCodeActionWithResolveAndCommand resolveRecorder plId codeActionProvider codeActionResolveProvider + ihDotdotHandler = mkPluginHandler SMethod_TextDocumentInlayHint (inlayHintDotdotProvider recorder) + ihPosRecHandler = mkPluginHandler SMethod_TextDocumentInlayHint (inlayHintPosRecProvider recorder) + in (defaultPluginDescriptor plId "Provides a code action to make record wildcards explicit") + { pluginHandlers = caHandlers <> ihDotdotHandler <> ihPosRecHandler + , pluginCommands = carCommands + , pluginRules = collectRecordsRule recorder *> collectNamesRule + } + +codeActionProvider :: PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionProvider ideState _ (CodeActionParams _ _ docId range _) = do + nfp <- getNormalizedFilePathE (docId ^. L.uri) + CRR {crCodeActions, crCodeActionResolve, enabledExtensions} <- runActionE "ExplicitFields.CollectRecords" ideState $ useE CollectRecords nfp + -- All we need to build a code action is the list of extensions, and a int to + -- allow us to resolve it later. + let recordUids = [ uid + | uid <- RangeMap.filterByRange range crCodeActions + , Just record <- [IntMap.lookup uid crCodeActionResolve] + -- Only fully saturated constructor applications can be + -- converted to the record syntax through the code action + , isConvertible record + ] + let actions = map (mkCodeAction enabledExtensions) recordUids + pure $ InL actions + where + mkCodeAction :: [Extension] -> Int -> Command |? 
CodeAction + mkCodeAction exts uid = InR CodeAction + { _title = mkTitle exts -- TODO: `Expand positional record` without NamedFieldPuns if RecordInfoApp + , _kind = Just CodeActionKind_RefactorRewrite + , _diagnostics = Nothing + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Nothing + , _data_ = Just $ toJSON uid + } + + isConvertible :: RecordInfo -> Bool + isConvertible = \case + RecordInfoApp _ (RecordAppExpr Unsaturated _ _) -> False + _ -> True + +codeActionResolveProvider :: ResolveFunction IdeState Int 'Method_CodeActionResolve +codeActionResolveProvider ideState pId ca uri uid = do + nfp <- getNormalizedFilePathE uri + pragma <- getFirstPragma pId ideState nfp + CRR {crCodeActionResolve, nameMap, enabledExtensions} <- runActionE "ExplicitFields.CollectRecords" ideState $ useE CollectRecords nfp + -- If we are unable to find the unique id in our IntMap of records, it means + -- that this resolve is stale. + record <- handleMaybe PluginStaleResolve $ IntMap.lookup uid crCodeActionResolve + -- We should never fail to render + rendered <- handleMaybe (PluginInternalError "Failed to render") $ renderRecordInfoAsTextEdit nameMap record + let shouldInsertNamedFieldPuns (RecordInfoApp _ _) = False + shouldInsertNamedFieldPuns _ = True + whenMaybe True x = x + whenMaybe False _ = Nothing + edits = [rendered] + <> maybeToList (whenMaybe (shouldInsertNamedFieldPuns record) (pragmaEdit enabledExtensions pragma)) + pure $ ca & L.edit ?~ mkWorkspaceEdit edits + where + mkWorkspaceEdit ::[TextEdit] -> WorkspaceEdit + mkWorkspaceEdit edits = WorkspaceEdit (Just $ Map.singleton uri edits) Nothing Nothing + +inlayHintDotdotProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentInlayHint +inlayHintDotdotProvider _ state pId InlayHintParams {_textDocument = TextDocumentIdentifier uri, _range = visibleRange} = do + nfp <- getNormalizedFilePathE uri + pragma <- getFirstPragma pId state nfp + runIdeActionE "ExplicitFields.CollectRecords" (shakeExtras state) $ do + (crr@CRR {crCodeActions, crCodeActionResolve}, pm) <- useWithStaleFastE CollectRecords nfp + let -- Get all records with dotdot in current nfp + records = [ record + | Just range <- [toCurrentRange pm visibleRange] + , uid <- RangeMap.elementsInRange range crCodeActions + , Just record <- [IntMap.lookup uid crCodeActionResolve] ] + -- Get the definition of each dotdot of record + locations = [ fmap (,record) (getDefinition nfp pos) + | record <- records + , pos <- maybeToList $ fmap _start $ recordInfoToDotDotRange record ] + defnLocsList <- lift $ sequence locations + pure $ InL $ mapMaybe (mkInlayHint crr pragma pm) defnLocsList + where + mkInlayHint :: CollectRecordsResult -> NextPragmaInfo -> PositionMapping -> (Maybe [(Location, Identifier)], RecordInfo) -> Maybe InlayHint + mkInlayHint CRR {enabledExtensions, nameMap} pragma pm (defnLocs, record) = + let range = recordInfoToDotDotRange record + textEdits = maybeToList (renderRecordInfoAsTextEdit nameMap record) + <> maybeToList (pragmaEdit enabledExtensions pragma) + names = renderRecordInfoAsDotdotLabelName record + in do + currentEnd <- range >>= toCurrentPosition pm . _end + names' <- names + defnLocs' <- defnLocs + let excludeDotDot (Location _ (Range _ end)) = end /= currentEnd + -- find location from dotdot definitions that name equal to label name + findLocation name locations = + let -- filter locations not within dotdot range + filteredLocations = filter (excludeDotDot . 
fst) locations + -- checks if 'a' is equal to 'Name' if the 'Either' is 'Right a', otherwise return 'False' + nameEq = either (const False) ((==) name) + in fmap fst $ find (nameEq . snd) filteredLocations + valueWithLoc = [ (stripOccNamePrefix $ T.pack $ printName name, findLocation name defnLocs') | name <- names' ] + -- use `, ` to separate labels with definition location + label = intersperse (mkInlayHintLabelPart (", ", Nothing)) $ fmap mkInlayHintLabelPart valueWithLoc + pure $ InlayHint { _position = currentEnd -- at the end of dotdot + , _label = InR label + , _kind = Nothing -- neither a type nor a parameter + , _textEdits = Just textEdits -- same as CodeAction + , _tooltip = Just $ InL (mkTitle enabledExtensions) -- same as CodeAction + , _paddingLeft = Just True -- padding after dotdot + , _paddingRight = Nothing + , _data_ = Nothing + } + mkInlayHintLabelPart (value, loc) = InlayHintLabelPart value Nothing loc Nothing + + +inlayHintPosRecProvider :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentInlayHint +inlayHintPosRecProvider _ state _pId InlayHintParams {_textDocument = TextDocumentIdentifier uri, _range = visibleRange} = do + nfp <- getNormalizedFilePathE uri + runIdeActionE "ExplicitFields.CollectRecords" (shakeExtras state) $ do + (CRR {crCodeActions, nameMap, crCodeActionResolve}, pm) <- useWithStaleFastE CollectRecords nfp + let records = [ record + | Just range <- [toCurrentRange pm visibleRange] + , uid <- RangeMap.elementsInRange range crCodeActions + , Just record <- [IntMap.lookup uid crCodeActionResolve] ] + pure $ InL (concatMap (mkInlayHints nameMap pm) records) + where + mkInlayHints :: UniqFM Name [Name] -> PositionMapping -> RecordInfo -> [InlayHint] + mkInlayHints nameMap pm record@(RecordInfoApp _ (RecordAppExpr _ _ fla)) = + let textEdits = renderRecordInfoAsTextEdit nameMap record + in mapMaybe (mkInlayHint textEdits pm) fla + mkInlayHints _ _ _ = [] + + mkInlayHint :: Maybe TextEdit -> PositionMapping -> (Located FieldLabel, HsExpr GhcTc) -> Maybe InlayHint + mkInlayHint te pm (label, _) = + let (name, loc) = ((flSelector . unLoc) &&& (srcSpanToLocation . 
getLoc)) label + fieldDefLoc = srcSpanToLocation (nameSrcSpan name) + in do + (Location _ recRange) <- loc + currentStart <- toCurrentPosition pm (_start recRange) + pure InlayHint { _position = currentStart + , _label = InR $ pure (mkInlayHintLabelPart name fieldDefLoc) + , _kind = Nothing -- neither a type nor a parameter + , _textEdits = Just (maybeToList te) -- same as CodeAction + , _tooltip = Just $ InL "Expand positional record" -- same as CodeAction + , _paddingLeft = Nothing + , _paddingRight = Nothing + , _data_ = Nothing + } + + mkInlayHintLabelPart name loc = InlayHintLabelPart (printFieldName (pprNameUnqualified name) <> "=") Nothing loc Nothing + +mkTitle :: [Extension] -> Text +mkTitle exts = "Expand record wildcard" + <> if NamedFieldPuns `elem` exts + then mempty + else " (needs extension: NamedFieldPuns)" + + +pragmaEdit :: [Extension] -> NextPragmaInfo -> Maybe TextEdit +pragmaEdit exts pragma = if NamedFieldPuns `elem` exts + then Nothing + else Just $ insertNewPragma pragma NamedFieldPuns + + +collectRecordsRule :: Recorder (WithPriority Log) -> Rules () +collectRecordsRule recorder = + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \CollectRecords nfp -> runMaybeT $ do + tmr <- useMT TypeCheck nfp + (CNR nameMap) <- useMT CollectNames nfp + let recs = getRecords tmr + logWith recorder Debug (LogCollectedRecords recs) + -- We want a list of unique numbers to link our the original code action we + -- give out, with the actual record info that we resolve it to. + uniques <- liftIO $ replicateM (length recs) (hashUnique <$> newUnique) + let recsWithUniques = zip uniques recs + -- For creating the code actions, a RangeMap of unique ids + crCodeActions = RangeMap.fromList' (toRangeAndUnique <$> recsWithUniques) + -- For resolving the code actions, a IntMap which links the unique id to + -- the relevant record info. + crCodeActionResolve = IntMap.fromList recsWithUniques + enabledExtensions = getEnabledExtensions tmr + pure CRR {crCodeActions, crCodeActionResolve, nameMap, enabledExtensions} + where + getEnabledExtensions :: TcModuleResult -> [Extension] + getEnabledExtensions = getExtensions . tmrParsed + toRangeAndUnique (uid, recordInfo) = (recordInfoToRange recordInfo, uid) + +getRecords :: TcModuleResult -> [RecordInfo] +getRecords (tcg_binds . tmrTypechecked -> valBinds) = collectRecords valBinds + +collectNamesRule :: Rules () +collectNamesRule = defineNoDiagnostics mempty $ \CollectNames nfp -> runMaybeT $ do + tmr <- useMT TypeCheck nfp + pure (CNR (getNames tmr)) + +-- | Collects all 'Name's of a given source file, to be used +-- in the variable usage analysis. +getNames :: TcModuleResult -> UniqFM Name [Name] +#if __GLASGOW_HASKELL__ < 910 +getNames (tmrRenamed -> (group,_,_,_)) = collectNames group +#else +getNames (tmrRenamed -> (group,_,_,_,_)) = collectNames group +#endif + +data CollectRecords = CollectRecords + deriving (Eq, Show, Generic) + +instance Hashable CollectRecords +instance NFData CollectRecords + +-- |The result of our map, this record includes everything we need to provide +-- code actions and resolve them later +data CollectRecordsResult = CRR + { -- |For providing the code action we need the unique id (Int) in a RangeMap + crCodeActions :: RangeMap Int + -- |For resolving the code action we need to link the unique id we + -- previously gave out with the record info that we use to make the edit + -- with. 
+ , crCodeActionResolve :: IntMap.IntMap RecordInfo + -- |The name map allows us to prune unused record fields (some of the time) + , nameMap :: UniqFM Name [Name] + -- |We need to make sure NamedFieldPuns is enabled, if it's not we need to + -- add that to the text edit. (In addition we use it in creating the code + -- action title) + , enabledExtensions :: [Extension] + } + deriving (Generic) + +instance NFData CollectRecordsResult +instance NFData RecordInfo +instance NFData RecordAppExpr + +instance Show CollectRecordsResult where + show _ = "" + +type instance RuleResult CollectRecords = CollectRecordsResult + +data CollectNames = CollectNames + deriving (Eq, Show, Generic) + +instance Hashable CollectNames +instance NFData CollectNames + +data CollectNamesResult = CNR (UniqFM Name [Name]) + deriving (Generic) + +instance NFData CollectNamesResult + +instance Show CollectNamesResult where + show _ = "" + +type instance RuleResult CollectNames = CollectNamesResult + +data Saturated = Saturated | Unsaturated + deriving (Generic) + +instance NFData Saturated + +data RecordAppExpr + = RecordAppExpr + Saturated -- ^ Is the DataCon application fully saturated or partially applied? + (LHsExpr GhcTc) + [(Located FieldLabel, HsExpr GhcTc)] + deriving (Generic) + +data RecordInfo + = RecordInfoPat RealSrcSpan (Pat GhcTc) + | RecordInfoCon RealSrcSpan (HsExpr GhcTc) + | RecordInfoApp RealSrcSpan RecordAppExpr + deriving (Generic) + +instance Pretty RecordInfo where + pretty (RecordInfoPat ss p) = pretty (printFieldName ss) <> ":" <+> pretty (printOutputable p) + pretty (RecordInfoCon ss e) = pretty (printFieldName ss) <> ":" <+> pretty (printOutputable e) + pretty (RecordInfoApp ss (RecordAppExpr _ _ fla)) + = pretty (printFieldName ss) <> ":" <+> hsep (map (pretty . printOutputable) fla) + +recordInfoToRange :: RecordInfo -> Range +recordInfoToRange (RecordInfoPat ss _) = realSrcSpanToRange ss +recordInfoToRange (RecordInfoCon ss _) = realSrcSpanToRange ss +recordInfoToRange (RecordInfoApp ss _) = realSrcSpanToRange ss + +recordInfoToDotDotRange :: RecordInfo -> Maybe Range +recordInfoToDotDotRange (RecordInfoPat _ (ConPat _ _ (RecCon flds))) = srcSpanToRange . getLoc =<< rec_dotdot flds +recordInfoToDotDotRange (RecordInfoCon _ (RecordCon _ _ flds)) = srcSpanToRange . getLoc =<< rec_dotdot flds +recordInfoToDotDotRange _ = Nothing + +renderRecordInfoAsTextEdit :: UniqFM Name [Name] -> RecordInfo -> Maybe TextEdit +renderRecordInfoAsTextEdit names (RecordInfoPat ss pat) = TextEdit (realSrcSpanToRange ss) <$> showRecordPat names pat +renderRecordInfoAsTextEdit _ (RecordInfoCon ss expr) = TextEdit (realSrcSpanToRange ss) <$> showRecordCon expr +renderRecordInfoAsTextEdit _ (RecordInfoApp ss appExpr) = TextEdit (realSrcSpanToRange ss) <$> showRecordApp appExpr + +renderRecordInfoAsDotdotLabelName :: RecordInfo -> Maybe [Name] +renderRecordInfoAsDotdotLabelName (RecordInfoPat _ pat) = showRecordPatFlds pat +renderRecordInfoAsDotdotLabelName (RecordInfoCon _ expr) = showRecordConFlds expr +renderRecordInfoAsDotdotLabelName _ = Nothing + + +-- | Checks if a 'Name' is referenced in the given map of names. The +-- 'hasNonBindingOcc' check is necessary in order to make sure that only the +-- references at the use-sites are considered (i.e. the binding occurence +-- is excluded). For more information regarding the structure of the map, +-- refer to the documentation of 'collectNames'. 
+referencedIn :: Name -> UniqFM Name [Name] -> Bool +referencedIn name names = maybe True hasNonBindingOcc $ lookupUFM names name + where + hasNonBindingOcc :: [Name] -> Bool + hasNonBindingOcc = (> 1) . length + +-- Default to leaving the element in if somehow a name can't be extracted (i.e. +-- `getName` returns `Nothing`). +filterReferenced :: (a -> Maybe Name) -> UniqFM Name [Name] -> [a] -> [a] +filterReferenced getName names = filter (\x -> maybe True (`referencedIn` names) (getName x)) + + +preprocessRecordPat + :: p ~ GhcTc + => UniqFM Name [Name] + -> HsRecFields p (LPat p) + -> HsRecFields p (LPat p) +preprocessRecordPat = preprocessRecord (fmap varName . getFieldName . unLoc) + where getFieldName x = case unLoc (hfbRHS x) of + VarPat _ x' -> Just $ unLoc x' + _ -> Nothing + +-- No need to check the name usage in the record construction case +preprocessRecordCon :: HsRecFields (GhcPass c) arg -> HsRecFields (GhcPass c) arg +preprocessRecordCon = preprocessRecord (const Nothing) emptyUFM + +-- This function does two things: +-- 1) Tweak the AST type so that the pretty-printed record is in the +-- expanded form +-- 2) Determine the unused record fields so that they are filtered out +-- of the final output +-- +-- Regarding first point: +-- We make use of the `Outputable` instances on AST types to pretty-print +-- the renamed and expanded records back into source form, to be substituted +-- with the original record later. However, `Outputable` instance of +-- `HsRecFields` does smart things to print the records that originally had +-- wildcards in their original form (i.e. with dots, without field names), +-- even after the wildcard is removed by the renamer pass. This is undesirable, +-- as we want to print the records in their fully expanded form. +-- Here `rec_dotdot` is set to `Nothing` so that fields are printed without +-- such post-processing. +preprocessRecord + :: p ~ GhcPass c + => (LocatedA (HsRecField p arg) -> Maybe Name) + -> UniqFM Name [Name] + -> HsRecFields p arg + -> HsRecFields p arg +preprocessRecord getName names flds = flds { rec_dotdot = Nothing , rec_flds = rec_flds' } + where + no_pun_count = fromMaybe (length (rec_flds flds)) (recDotDot flds) + -- Field binds of the explicit form (e.g. `{ a = a' }`) should be + -- left as is, hence the split. + (no_puns, puns) = splitAt no_pun_count (rec_flds flds) + -- `hsRecPun` is set to `True` in order to pretty-print the fields as field + -- puns (since there is similar mechanism in the `Outputable` instance as + -- explained above). + puns' = map (mapLoc (\fld -> fld { hfbPun = True })) puns + -- Unused fields are filtered out so that they don't end up in the expanded + -- form. + punsUsed = filterReferenced getName names puns' + rec_flds' = no_puns <> punsUsed + +processRecordFlds + :: p ~ GhcPass c + => HsRecFields p arg + -> HsRecFields p arg +processRecordFlds flds = flds { rec_dotdot = Nothing , rec_flds = puns' } + where + no_pun_count = fromMaybe (length (rec_flds flds)) (recDotDot flds) + -- Field binds of the explicit form (e.g. `{ a = a' }`) should be drop + puns = drop no_pun_count (rec_flds flds) + -- `hsRecPun` is set to `True` in order to pretty-print the fields as field + -- puns (since there is similar mechanism in the `Outputable` instance as + -- explained above). + puns' = map (mapLoc (\fld -> fld { hfbPun = True })) puns + + +showRecordPat :: Outputable (Pat GhcTc) => UniqFM Name [Name] -> Pat GhcTc -> Maybe Text +showRecordPat names = fmap printFieldName . 
mapConPatDetail (\case + RecCon flds -> Just $ RecCon (preprocessRecordPat names flds) + _ -> Nothing) + +showRecordPatFlds :: Pat GhcTc -> Maybe [Name] +showRecordPatFlds (ConPat _ _ args) = do + fields <- processRecCon args + names <- mapM getFieldName (rec_flds fields) + pure names + where + processRecCon (RecCon flds) = Just $ processRecordFlds flds + processRecCon _ = Nothing +#if __GLASGOW_HASKELL__ < 911 + getOccName (FieldOcc x _) = Just $ getName x +#else + getOccName (FieldOcc _ x) = Just $ getName (unLoc x) +#endif + getOccName _ = Nothing + getFieldName = getOccName . unLoc . hfbLHS . unLoc +showRecordPatFlds _ = Nothing + +showRecordCon :: Outputable (HsExpr (GhcPass c)) => HsExpr (GhcPass c) -> Maybe Text +showRecordCon expr@(RecordCon _ _ flds) = + Just $ printOutputable $ + expr { rcon_flds = preprocessRecordCon flds } +showRecordCon _ = Nothing + +showRecordConFlds :: p ~ GhcTc => HsExpr p -> Maybe [Name] +showRecordConFlds (RecordCon _ _ flds) = + mapM getFieldName (rec_flds $ processRecordFlds flds) + where + getVarName (HsVar _ lidp) = Just $ getName lidp + getVarName _ = Nothing + getFieldName = getVarName . unLoc . hfbRHS . unLoc +showRecordConFlds _ = Nothing + +showRecordApp :: RecordAppExpr -> Maybe Text +showRecordApp (RecordAppExpr _ recConstr fla) + = Just $ printOutputable recConstr <> " { " + <> T.intercalate ", " (showFieldWithArg <$> fla) + <> " }" + where showFieldWithArg (field, arg) = printFieldName field <> " = " <> printOutputable arg + +collectRecords :: GenericQ [RecordInfo] +collectRecords = everythingBut (<>) (([], False) `mkQ` getRecPatterns `extQ` getRecCons) + +-- | Collect 'Name's into a map, indexed by the names' unique identifiers. +-- The 'Eq' instance of 'Name's makes use of their unique identifiers, hence +-- any 'Name' referring to the same entity is considered equal. In effect, +-- each individual list of names contains the binding occurrence, along with +-- all the occurrences at the use-sites (if there are any). +-- +-- @UniqFM Name [Name]@ is morally the same as @Map Unique [Name]@. +-- Using 'UniqFM' gains us a bit of performance (in theory) since it +-- internally uses 'IntMap'. More information regarding 'UniqFM' can be found in +-- the GHC source. +collectNames :: GenericQ (UniqFM Name [Name]) +collectNames = everything (plusUFM_C (<>)) (emptyUFM `mkQ` (\x -> unitUFM x [x])) + +getRecCons :: LHsExpr GhcTc -> ([RecordInfo], Bool) +-- When we stumble upon an occurrence of HsExpanded, we only want to follow a +-- single branch. We do this here, by explicitly returning occurrences from +-- traversing the original branch, and returning True, which keeps syb from +-- implicitly continuing to traverse. 
In addition, we have to return a list, +-- because there is a possibility that there were be more than one result per +-- branch + +#if __GLASGOW_HASKELL__ >= 910 +getRecCons (unLoc -> XExpr (ExpandedThingTc a _)) = (collectRecords a, False) +#else +getRecCons (unLoc -> XExpr (ExpansionExpr (HsExpanded _ a))) = (collectRecords a, True) +#endif +getRecCons e@(unLoc -> RecordCon _ _ flds) + | isJust (rec_dotdot flds) = (mkRecInfo e, False) + where + mkRecInfo :: LHsExpr GhcTc -> [RecordInfo] + mkRecInfo expr = + [ RecordInfoCon realSpan' (unLoc expr) | RealSrcSpan realSpan' _ <- [ getLoc expr ]] +getRecCons expr@(unLoc -> app@(HsApp _ _ _)) = + let fieldss = maybeToList $ getFields app [] + recInfo = concatMap mkRecInfo fieldss + in (recInfo, not (null recInfo)) + where + mkRecInfo :: RecordAppExpr -> [RecordInfo] + mkRecInfo appExpr = + [ RecordInfoApp realSpan' appExpr | RealSrcSpan realSpan' _ <- [ getLoc expr ] ] + + getFields :: HsExpr GhcTc -> [LHsExpr GhcTc] -> Maybe RecordAppExpr + getFields (HsApp _ constr@(unLoc -> expr) arg) args + | not (null fls) = Just $ + -- Code action is only valid if the constructor application is fully + -- saturated, but we still want to display the inlay hints for partially + -- applied constructors + RecordAppExpr + (if length fls <= length args + 1 then Saturated else Unsaturated) + constr + labelWithArgs + where fls = getExprFields expr + labelWithArgs = zipWith mkLabelWithArg fls (arg : args) + mkLabelWithArg label arg = (L (getLoc arg) label, unLoc arg) + getFields (HsApp _ constr arg) args = getFields (unLoc constr) (arg : args) + getFields _ _ = Nothing + + getExprFields :: HsExpr GhcTc -> [FieldLabel] + getExprFields (XExpr (ConLikeTc (conLikeFieldLabels -> fls) _ _)) = fls +#if __GLASGOW_HASKELL__ >= 911 + getExprFields (XExpr (WrapExpr _ expr)) = getExprFields expr +#else + getExprFields (XExpr (WrapExpr (HsWrap _ expr))) = getExprFields expr +#endif + getExprFields _ = [] +getRecCons _ = ([], False) + +getRecPatterns :: LPat GhcTc -> ([RecordInfo], Bool) +getRecPatterns conPat@(conPatDetails . unLoc -> Just (RecCon flds)) + | isJust (rec_dotdot flds) = (mkRecInfo conPat, False) + where + mkRecInfo :: LPat GhcTc -> [RecordInfo] + mkRecInfo pat = + [ RecordInfoPat realSpan' (unLoc pat) | RealSrcSpan realSpan' _ <- [ getLoc pat ]] +getRecPatterns _ = ([], False) + +printFieldName :: Outputable a => a -> Text +printFieldName = stripOccNamePrefix . 
printOutputable + diff --git a/plugins/hls-explicit-record-fields-plugin/test/Main.hs b/plugins/hls-explicit-record-fields-plugin/test/Main.hs new file mode 100644 index 0000000000..82ef449a25 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/Main.hs @@ -0,0 +1,398 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main ( main ) where + +import Data.Either (rights) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (filePathToUri', + toNormalizedFilePath') +import Development.IDE.Test (canonicalizeUri) +import qualified Ide.Plugin.ExplicitFields as ExplicitFields +import System.FilePath ((<.>), ()) +import Test.Hls + +main :: IO () +main = defaultTestRunner test + +plugin :: PluginTestDescriptor ExplicitFields.Log +plugin = mkPluginTestDescriptor ExplicitFields.descriptor "explicit-fields" + +test :: TestTree +test = testGroup "explicit-fields" + [ testGroup "code actions" + [ mkTest "WildcardOnly" "WildcardOnly" 12 10 12 20 + , mkTest "Unused" "Unused" 12 10 12 20 + , mkTest "Unused2" "Unused2" 12 10 12 20 + , mkTest "WithPun" "WithPun" 13 10 13 25 + , mkTest "WithExplicitBind" "WithExplicitBind" 12 10 12 32 + , mkTest "Mixed" "Mixed" 14 10 14 37 + , mkTest "Construction" "Construction" 16 5 16 15 + , mkTest "PositionalConstruction" "PositionalConstruction" 15 5 15 15 + , mkTest "HsExpanded1" "HsExpanded1" 17 10 17 20 + , mkTest "HsExpanded2" "HsExpanded2" 23 10 23 22 + , mkTestNoAction "ExplicitBinds" "ExplicitBinds" 11 10 11 52 + , mkTestNoAction "Puns" "Puns" 12 10 12 31 + , mkTestNoAction "Infix" "Infix" 11 11 11 31 + , mkTestNoAction "Prefix" "Prefix" 10 11 10 28 + , mkTestNoAction "PartiallyAppliedCon" "PartiallyAppliedCon" 7 8 7 12 + , mkTest "PolymorphicRecordConstruction" "PolymorphicRecordConstruction" 15 5 15 15 + ] + , testGroup "inlay hints" + [ mkInlayHintsTest "Construction" Nothing 16 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Construction" + foo <- mkLabelPart' 13 6 "foo" + bar <- mkLabelPart' 14 6 "bar" + baz <- mkLabelPart' 15 6 "baz" + (@?=) ih + [defInlayHint { _position = Position 16 14 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 16 5 15 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "ConstructionDuplicateRecordFields" Nothing 16 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "ConstructionDuplicateRecordFields" + foo <- mkLabelPart' 13 6 "foo" + bar <- mkLabelPart' 14 6 "bar" + baz <- mkLabelPart' 15 6 "baz" + (@?=) ih + [defInlayHint { _position = Position 16 14 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 16 5 15 + , mkPragmaTextEdit 3 -- Not 2 of the DuplicateRecordFields pragma + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + + , mkInlayHintsTest "PositionalConstruction" Nothing 15 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PositionalConstruction" + foo <- mkLabelPart' 5 4 "foo=" + bar <- mkLabelPart' 6 4 "bar=" + baz <- mkLabelPart' 7 4 "baz=" + (@?=) ih + [ defInlayHint { _position = Position 15 11 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft 
= Nothing + } + , defInlayHint { _position = Position 15 13 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 15 + , _label = InR [ baz ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + ] + , mkInlayHintsTest "PositionalConstructionDuplicateRecordFields" Nothing 15 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PositionalConstructionDuplicateRecordFields" + foo <- mkLabelPart' 5 4 "foo=" + bar <- mkLabelPart' 6 4 "bar=" + baz <- mkLabelPart' 7 4 "baz=" + (@?=) ih + [ defInlayHint { _position = Position 15 11 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 13 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 15 + , _label = InR [ baz ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + ] + , mkInlayHintsTest "HsExpanded1" Nothing 17 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "HsExpanded1" + foo <- mkLabelPart' 11 4 "foo" + (@?=) ih + [defInlayHint { _position = Position 17 19 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo}" 17 10 20 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "HsExpanded1" (Just " (positional)") 13 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "HsExpanded1" + foo <- mkLabelPart' 11 4 "foo=" + (@?=) ih + [defInlayHint { _position = Position 13 21 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = 5 }" 13 15 22 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + }] + , mkInlayHintsTest "HsExpanded1DuplicateRecordFields" (Just " (positional)") 13 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "HsExpanded1DuplicateRecordFields" + foo <- mkLabelPart' 11 4 "foo=" + (@?=) ih + [defInlayHint { _position = Position 13 21 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = 5 }" 13 15 22 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + }] + , mkInlayHintsTest "HsExpanded2" Nothing 23 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "HsExpanded2" + bar <- mkLabelPart' 14 4 "bar" + (@?=) ih + [defInlayHint { _position = Position 23 21 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "YourRec {bar}" 23 10 22 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "HsExpanded2" (Just " (positional)") 16 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "HsExpanded2" + foo <- mkLabelPart' 11 4 "foo=" + (@?=) ih + [defInlayHint { _position = Position 16 21 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = 5 }" 16 15 22 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + }] + , 
mkInlayHintsTest "Mixed" Nothing 14 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Mixed" + baz <- mkLabelPart' 9 4 "baz" + quux <- mkLabelPart' 10 4 "quux" + (@?=) ih + [defInlayHint { _position = Position 14 36 + , _label = InR [ baz, commaPart + , quux + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar = bar', baz}" 14 10 37 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "Unused" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Unused" + foo <- mkLabelPart' 6 4 "foo" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 19 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar}" 12 10 20 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "Unused2" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "Unused2" + foo <- mkLabelPart' 6 4 "foo" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 19 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar}" 12 10 20 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "WildcardOnly" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "WildcardOnly" + foo <- mkLabelPart' 6 4 "foo" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 19 + , _label = InR [ foo, commaPart + , bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 12 10 20 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "WithExplicitBind" Nothing 12 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "WithExplicitBind" + bar <- mkLabelPart' 7 4 "bar" + baz <- mkLabelPart' 8 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 12 31 + , _label = InR [ bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo = foo', bar, baz}" 12 10 32 + , mkPragmaTextEdit 2 + ] + , _tooltip = Just $ InL "Expand record wildcard (needs extension: NamedFieldPuns)" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "WithPun" Nothing 13 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLength "WithPun" + bar <- mkLabelPart' 8 4 "bar" + baz <- mkLabelPart' 9 4 "baz" + (@?=) ih + [defInlayHint { _position = Position 13 24 + , _label = InR [ bar, commaPart + , baz + ] + , _textEdits = Just [ mkLineTextEdit "MyRec {foo, bar, baz}" 13 10 25 ] + , _tooltip = Just $ InL "Expand record wildcard" + , _paddingLeft = Just True + }] + , mkInlayHintsTest "PolymorphicRecordConstruction" Nothing 15 $ \ih -> do + let mkLabelPart' = mkLabelPartOffsetLengthSub1 "PolymorphicRecordConstruction" + foo <- mkLabelPart' 5 4 "foo=" + bar <- mkLabelPart' 6 4 "bar=" + baz <- mkLabelPart' 7 4 "baz=" + (@?=) ih + [ defInlayHint { _position = Position 15 11 + , _label = InR [ foo ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { 
_position = Position 15 13 + , _label = InR [ bar ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + , defInlayHint { _position = Position 15 15 + , _label = InR [ baz ] + , _textEdits = Just [ mkLineTextEdit "MyRec { foo = a, bar = b, baz = c }" 15 5 16 ] + , _tooltip = Just $ InL "Expand positional record" + , _paddingLeft = Nothing + } + ] + ] + ] + +mkInlayHintsTest :: FilePath -> Maybe TestName -> UInt -> ([InlayHint] -> Assertion) -> TestTree +mkInlayHintsTest fp postfix line assert = + testCase (fp ++ concat postfix) $ + runSessionWithServer def plugin testDataDir $ do + doc <- openDoc (fp ++ ".hs") "haskell" + inlayHints <- getInlayHints doc (lineRange line) + liftIO $ assert inlayHints + where + lineRange line = Range (Position line 0) (Position line 1000) + +mkTestNoAction :: TestName -> FilePath -> UInt -> UInt -> UInt -> UInt -> TestTree +mkTestNoAction title fp x1 y1 x2 y2 = + testCase title $ + runSessionWithServer def plugin (testDataDir "noop") $ do + doc <- openDoc (fp <.> "hs") "haskell" + actions <- getExplicitFieldsActions doc x1 y1 x2 y2 + liftIO $ actions @?= [] + +mkTestWithCount :: Int -> TestName -> FilePath -> UInt -> UInt -> UInt -> UInt -> TestTree +mkTestWithCount cnt title fp x1 y1 x2 y2 = + goldenWithHaskellAndCaps def codeActionResolveCaps plugin title testDataDir fp "expected" "hs" $ \doc -> do + acts@(act:_) <- getExplicitFieldsActions doc x1 y1 x2 y2 + liftIO $ length acts @?= cnt + executeCodeAction act + +mkTest :: TestName -> FilePath -> UInt -> UInt -> UInt -> UInt -> TestTree +mkTest = mkTestWithCount 1 + +getExplicitFieldsActions + :: TextDocumentIdentifier + -> UInt -> UInt -> UInt -> UInt + -> Session [CodeAction] +getExplicitFieldsActions doc x1 y1 x2 y2 = + findExplicitFieldsAction <$> getAndResolveCodeActions doc range + where + range = Range (Position x1 y1) (Position x2 y2) + +findExplicitFieldsAction :: [a |? CodeAction] -> [CodeAction] +findExplicitFieldsAction = filter isExplicitFieldsCodeAction . rights . map toEither + +isExplicitFieldsCodeAction :: CodeAction -> Bool +isExplicitFieldsCodeAction CodeAction {_title} = + "Expand record wildcard" `T.isPrefixOf` _title + +defInlayHint :: InlayHint +defInlayHint = + InlayHint + { _position = Position 0 0 + , _label = InR [] + , _kind = Nothing + , _textEdits = Nothing + , _tooltip = Nothing + , _paddingLeft = Nothing + , _paddingRight = Nothing + , _data_ = Nothing + } + +mkLabelPart :: (Text -> UInt) -> FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart +mkLabelPart offset fp line start value = do + uri' <- uri + pure $ InlayHintLabelPart { _location = Just (location uri' line start) + , _value = value + , _tooltip = Nothing + , _command = Nothing + } + where + toUri = fromNormalizedUri . filePathToUri' . toNormalizedFilePath' + uri = canonicalizeUri $ toUri (testDataDir (fp ++ ".hs")) + location uri line char = Location uri (Range (Position line char) (Position line (char + offset value))) + +mkLabelPartOffsetLength :: FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart +mkLabelPartOffsetLength = mkLabelPart (fromIntegral . T.length) + +mkLabelPartOffsetLengthSub1 :: FilePath -> UInt -> UInt -> Text -> IO InlayHintLabelPart +mkLabelPartOffsetLengthSub1 = mkLabelPart (fromIntegral . subtract 1 . 
T.length) + +commaPart :: InlayHintLabelPart +commaPart = + InlayHintLabelPart + { _location = Nothing + , _value = ", " + , _tooltip = Nothing + , _command = Nothing + } + +mkLineTextEdit :: Text -> UInt -> UInt -> UInt -> TextEdit +mkLineTextEdit newText line x y = + TextEdit + { _range = Range (Position line x) (Position line y) + , _newText = newText + } + +mkPragmaTextEdit :: UInt -> TextEdit +mkPragmaTextEdit line = + TextEdit + { _range = Range (Position line 0) (Position line 0) + , _newText = "{-# LANGUAGE NamedFieldPuns #-}\n" + } + +testDataDir :: FilePath +testDataDir = "plugins" "hls-explicit-record-fields-plugin" "test" "testdata" diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Construction.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Construction.expected.hs new file mode 100644 index 0000000000..d1376c084d --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Construction.expected.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module Construction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let foo = 3 + bar = 5 + baz = 'a' + in MyRec {foo, bar, baz} diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Construction.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Construction.hs new file mode 100644 index 0000000000..5e18c66209 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Construction.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} + +module Construction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let foo = 3 + bar = 5 + baz = 'a' + in MyRec {..} diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/ConstructionDuplicateRecordFields.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/ConstructionDuplicateRecordFields.hs new file mode 100644 index 0000000000..420711f0da --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/ConstructionDuplicateRecordFields.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE DuplicateRecordFields #-} +module Construction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let foo = 3 + bar = 5 + baz = 'a' + in MyRec {..} diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1.expected.hs new file mode 100644 index 0000000000..2e970a5f35 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1.expected.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RebindableSyntax #-} +{-# LANGUAGE NamedFieldPuns #-} + +module HsExpanded1 where +import Prelude + +ifThenElse :: Int -> Int -> Int -> Int +ifThenElse x y z = x + y + z + +data MyRec = MyRec + { foo :: Int } + +myRecExample = MyRec 5 + +convertMe :: Int +convertMe = + if (let MyRec {foo} = myRecExample + in foo) then 1 else 2 \ No newline at end of file diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1.hs new file mode 100644 index 0000000000..8c0f2c341e --- /dev/null +++ 
b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RebindableSyntax #-} +{-# LANGUAGE NamedFieldPuns #-} + +module HsExpanded1 where +import Prelude + +ifThenElse :: Int -> Int -> Int -> Int +ifThenElse x y z = x + y + z + +data MyRec = MyRec + { foo :: Int } + +myRecExample = MyRec 5 + +convertMe :: Int +convertMe = + if (let MyRec {..} = myRecExample + in foo) then 1 else 2 \ No newline at end of file diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1DuplicateRecordFields.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1DuplicateRecordFields.hs new file mode 100644 index 0000000000..1e37d14668 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded1DuplicateRecordFields.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RebindableSyntax #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE DuplicateRecordFields #-} +module HsExpanded1DuplicateRecordFields where +import Prelude + +ifThenElse :: Int -> Int -> Int -> Int +ifThenElse x y z = x + y + z + +data MyRec = MyRec + { foo :: Int } + +myRecExample = MyRec 5 + +convertMe :: Int +convertMe = + if (let MyRec {..} = myRecExample + in foo) then 1 else 2 diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded2.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded2.expected.hs new file mode 100644 index 0000000000..497752867c --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded2.expected.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RebindableSyntax #-} +{-# LANGUAGE NamedFieldPuns #-} + +module HsExpanded2 where +import Prelude + +ifThenElse :: Int -> Int -> Int -> Int +ifThenElse x y z = x + y + z + +data MyRec = MyRec + { foo :: Int } + +data YourRec = YourRec + { bar :: Int } + +myRecExample = MyRec 5 + +yourRecExample = YourRec 3 + +convertMe :: Int +convertMe = + if (let MyRec {..} = myRecExample + YourRec {bar} = yourRecExample + in bar) then 1 else 2 \ No newline at end of file diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded2.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded2.hs new file mode 100644 index 0000000000..7126fc0199 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/HsExpanded2.hs @@ -0,0 +1,25 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RebindableSyntax #-} +{-# LANGUAGE NamedFieldPuns #-} + +module HsExpanded2 where +import Prelude + +ifThenElse :: Int -> Int -> Int -> Int +ifThenElse x y z = x + y + z + +data MyRec = MyRec + { foo :: Int } + +data YourRec = YourRec + { bar :: Int } + +myRecExample = MyRec 5 + +yourRecExample = YourRec 3 + +convertMe :: Int +convertMe = + if (let MyRec {..} = myRecExample + YourRec {..} = yourRecExample + in bar) then 1 else 2 \ No newline at end of file diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Mixed.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Mixed.expected.hs new file mode 100644 index 0000000000..fa7f32ab25 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Mixed.expected.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module Mixed where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + , quux :: Double + } + +convertMe :: MyRec -> String 
+convertMe MyRec {foo, bar = bar', baz} = show foo ++ show bar' ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Mixed.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Mixed.hs new file mode 100644 index 0000000000..ccf56cd3ab --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Mixed.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module Mixed where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + , quux :: Double + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, bar = bar', ..} = show foo ++ show bar' ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs new file mode 100644 index 0000000000..f289508524 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.expected.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PolymorphicRecordConstruction where + +data MyRec m = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec () +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec { foo = a, bar = b, baz = c } diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs new file mode 100644 index 0000000000..f8b9791da5 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PolymorphicRecordConstruction.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PolymorphicRecordConstruction where + +data MyRec m = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec () +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec a b c diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.expected.hs new file mode 100644 index 0000000000..667fc25fe0 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.expected.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PositionalConstruction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec { foo = a, bar = b, baz = c } diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.hs new file mode 100644 index 0000000000..0b2f8d9f86 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstruction.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE Haskell2010 #-} + +module PositionalConstruction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec a b c diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstructionDuplicateRecordFields.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstructionDuplicateRecordFields.hs new file mode 100644 index 0000000000..5227af9a83 --- /dev/null +++ 
b/plugins/hls-explicit-record-fields-plugin/test/testdata/PositionalConstructionDuplicateRecordFields.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE DuplicateRecordFields #-} +module PositionalConstruction where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: () -> MyRec +convertMe _ = + let a = 3 + b = 5 + c = 'a' + in MyRec a b c + diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused.expected.hs new file mode 100644 index 0000000000..29abba1cfa --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module Unused where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, bar} = show foo ++ show bar diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused.hs new file mode 100644 index 0000000000..40b98e9403 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} + +module Unused where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {..} = show foo ++ show bar diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused2.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused2.expected.hs new file mode 100644 index 0000000000..5befab1ce8 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused2.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module Unused2 where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, bar} = let baz = "baz" in show foo ++ show bar ++ baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused2.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused2.hs new file mode 100644 index 0000000000..e66f880072 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/Unused2.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} + +module Unused2 where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {..} = let baz = "baz" in show foo ++ show bar ++ baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/WildcardOnly.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/WildcardOnly.expected.hs new file mode 100644 index 0000000000..4b196f27fd --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/WildcardOnly.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module WildcardOnly where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, bar, baz} = show foo ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/WildcardOnly.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/WildcardOnly.hs new file mode 100644 
index 0000000000..f339895df4 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/WildcardOnly.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} + +module WildcardOnly where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {..} = show foo ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/WithExplicitBind.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithExplicitBind.expected.hs new file mode 100644 index 0000000000..fff4d306cf --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithExplicitBind.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module WithExplicitBind where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo = foo', bar, baz} = show foo' ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/WithExplicitBind.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithExplicitBind.hs new file mode 100644 index 0000000000..b416a624f5 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithExplicitBind.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} + +module WithExplicitBind where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo = foo', ..} = show foo' ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/WithPun.expected.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithPun.expected.hs new file mode 100644 index 0000000000..c4285b629b --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithPun.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module WithPun where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, bar, baz} = show foo ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/WithPun.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithPun.hs new file mode 100644 index 0000000000..4b34cfa652 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/WithPun.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NamedFieldPuns #-} + +module WithPun where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, ..} = show foo ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/hie.yaml b/plugins/hls-explicit-record-fields-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..ee67c73150 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: []}} diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/ExplicitBinds.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/ExplicitBinds.hs new file mode 100644 index 0000000000..de44a8a57d --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/ExplicitBinds.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE 
Haskell2010 #-} + +module ExplicitBinds where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo = foo', bar = bar', baz = baz'} = show foo' ++ show bar' ++ show baz' diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Infix.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Infix.hs new file mode 100644 index 0000000000..c361e9f2fd --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Infix.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE Haskell2010 #-} + +module Infix where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + } + +convertMe :: MyRec -> String +convertMe (foo' `MyRec` bar') = show foo' ++ show bar' diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/PartiallyAppliedCon.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/PartiallyAppliedCon.hs new file mode 100644 index 0000000000..2f6f52e30b --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/PartiallyAppliedCon.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE Haskell2010 #-} + +module PartiallyAppliedCon where + +data T = MkT { fa :: Int, fb :: Char } + +foo :: Int -> Char -> T +foo x = MkT x diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Prefix.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Prefix.hs new file mode 100644 index 0000000000..c34ba0a389 --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Prefix.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE Haskell2010 #-} + +module Prefix where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + } + +convertMe :: MyRec -> String +convertMe (foo' `MyRec` bar') = show foo' ++ show bar' diff --git a/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Puns.hs b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Puns.hs new file mode 100644 index 0000000000..c81e66666d --- /dev/null +++ b/plugins/hls-explicit-record-fields-plugin/test/testdata/noop/Puns.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE Haskell2010 #-} +{-# LANGUAGE NamedFieldPuns #-} + +module Puns where + +data MyRec = MyRec + { foo :: Int + , bar :: Int + , baz :: Char + } + +convertMe :: MyRec -> String +convertMe MyRec {foo, bar, baz} = show foo ++ show bar ++ show baz diff --git a/plugins/hls-explicit-record-fields-plugin/wildcard.gif b/plugins/hls-explicit-record-fields-plugin/wildcard.gif new file mode 100644 index 0000000000..2cf10d6bf1 Binary files /dev/null and b/plugins/hls-explicit-record-fields-plugin/wildcard.gif differ diff --git a/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs b/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs new file mode 100644 index 0000000000..f78761958c --- /dev/null +++ b/plugins/hls-floskell-plugin/src/Ide/Plugin/Floskell.hs @@ -0,0 +1,62 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Floskell + ( descriptor + , provider + ) where + +import Control.Monad.Except (throwError) +import Control.Monad.IO.Class +import Data.List (find) +import qualified Data.Text as T +import qualified Data.Text.Lazy as TL +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Floskell +import Ide.Plugin.Error +import Ide.PluginUtils +import Ide.Types +import Language.LSP.Protocol.Types + +-- --------------------------------------------------------------------- + +descriptor :: PluginId -> PluginDescriptor IdeState +descriptor plId = 
(defaultPluginDescriptor plId desc) + { pluginHandlers = mkFormattingHandlers provider + } + where + desc = "Provides formatting of Haskell files via floskell. Built with floskell-" <> VERSION_floskell + +-- --------------------------------------------------------------------- + +-- | Format provider of Floskell. +-- Formats the given source in either a given Range or the whole Document. +-- If the provider fails an error is returned that can be displayed to the user. +provider :: FormattingHandler IdeState +provider _ideState _token typ contents fp _ = do + let file = fromNormalizedFilePath fp + config <- liftIO $ findConfigOrDefault file + let (range, selectedContents) = case typ of + FormatText -> (fullRange contents, contents) + FormatRange r -> (normalize r, extractTextInRange (extendToFullLines r) contents) + result = reformat config (Just file) $ TL.fromStrict selectedContents + case result of + Left err -> throwError $ PluginInternalError $ T.pack $ "floskellCmd: " ++ err + Right new -> pure $ InL [TextEdit range $ TL.toStrict new] + +-- | Find Floskell Config, user and system wide or provides a default style. +-- Every directory of the filepath will be searched to find a user configuration. +-- Also looks into places such as XDG_CONFIG_DIRECTORY. +-- This function may not throw an exception and returns a default config. +findConfigOrDefault :: FilePath -> IO AppConfig +findConfigOrDefault file = do + mbConf <- findAppConfigIn file + case mbConf of + Just confFile -> readAppConfig confFile + Nothing -> + pure $ case find (\s -> styleName s == "gibiansky") styles of + Just gibiansky -> defaultAppConfig { appStyle = gibiansky } + Nothing -> defaultAppConfig + +-- --------------------------------------------------------------------- diff --git a/plugins/hls-floskell-plugin/test/Main.hs b/plugins/hls-floskell-plugin/test/Main.hs new file mode 100644 index 0000000000..ba4c707130 --- /dev/null +++ b/plugins/hls-floskell-plugin/test/Main.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import qualified Ide.Plugin.Floskell as Floskell +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +floskellPlugin :: PluginTestDescriptor () +floskellPlugin = mkPluginTestDescriptor' Floskell.descriptor "floskell" + +tests :: TestTree +tests = testGroup "floskell" + [ goldenWithFloskell "formats a document" "Floskell" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) + + , goldenWithFloskell "formats a range" "Floskell" "formatted_range" $ \doc -> do + let range = Range (Position 1 0) (Position 4 22) + formatRange doc (FormattingOptions 4 True Nothing Nothing Nothing) range + ] + +goldenWithFloskell :: TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithFloskell title path desc = goldenWithHaskellDocFormatter def floskellPlugin "floskell" def title testDataDir path desc "hs" + +testDataDir :: FilePath +testDataDir = "plugins" "hls-floskell-plugin" "test" "testdata" diff --git a/plugins/hls-floskell-plugin/test/testdata/Floskell.formatted_document.hs b/plugins/hls-floskell-plugin/test/testdata/Floskell.formatted_document.hs new file mode 100644 index 0000000000..f97c49662c --- /dev/null +++ b/plugins/hls-floskell-plugin/test/testdata/Floskell.formatted_document.hs @@ -0,0 +1,17 @@ +module Floskell where + +import Data.List +import Prelude +import Data.Int + +foo :: Int -> Int +foo 3 = 2 +foo x = x + +bar :: String -> IO String 
+bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/plugins/hls-floskell-plugin/test/testdata/Floskell.formatted_range.hs b/plugins/hls-floskell-plugin/test/testdata/Floskell.formatted_range.hs new file mode 100644 index 0000000000..dfeb343600 --- /dev/null +++ b/plugins/hls-floskell-plugin/test/testdata/Floskell.formatted_range.hs @@ -0,0 +1,14 @@ +module Floskell where +import Data.List +import Prelude +import Data.Int +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/plugins/hls-floskell-plugin/test/testdata/Floskell.hs b/plugins/hls-floskell-plugin/test/testdata/Floskell.hs new file mode 100644 index 0000000000..d93b3d9964 --- /dev/null +++ b/plugins/hls-floskell-plugin/test/testdata/Floskell.hs @@ -0,0 +1,15 @@ +module Floskell where +import Data.List + +import Prelude +import Data.Int +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/plugins/hls-floskell-plugin/test/testdata/hie.yaml b/plugins/hls-floskell-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-floskell-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs b/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs new file mode 100644 index 0000000000..c12866d7f3 --- /dev/null +++ b/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs @@ -0,0 +1,231 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Fourmolu ( + descriptor, + provider, + LogEvent, +) where + +import Control.Exception +import Control.Lens ((^.)) +import Control.Monad (guard) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..), runExceptT) +import Data.Bifunctor (bimap) +import Data.List (intercalate) +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Data.Version (showVersion) +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Development.IDE.GHC.Compat as Compat hiding (Cpp, + Warning, hang, + vcat) +import qualified Development.IDE.GHC.Compat.Util as S +import GHC.LanguageExtensions.Type (Extension (Cpp)) +import Ide.Plugin.Error +import Ide.Plugin.Properties +import Ide.PluginUtils (makeDiffTextEdit) +import Ide.Types +import Language.LSP.Protocol.Lens (HasTabSize (tabSize)) +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import Language.LSP.Server hiding (defaultConfig) +import Ormolu +import Ormolu.Config +import qualified Paths_fourmolu as Fourmolu +import System.Exit +import System.FilePath +import System.Process.Run (cwd, proc) +import System.Process.Text (readCreateProcessWithExitCode) +import Text.Read (readMaybe) + +#if MIN_VERSION_fourmolu(0,16,0) +import qualified Data.Yaml as Yaml +#endif + +descriptor :: Recorder (WithPriority LogEvent) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor 
plId desc) + { pluginHandlers = mkFormattingHandlers $ provider recorder plId + , pluginConfigDescriptor = defaultConfigDescriptor{configCustomConfig = mkCustomConfig properties} + } + where + desc = T.pack $ "Provides formatting of Haskell files via fourmolu. Built with fourmolu-" <> showVersion Fourmolu.version + +properties :: Properties '[ 'PropertyKey "external" 'TBoolean, 'PropertyKey "path" 'TString] +properties = + emptyProperties + & defineStringProperty + #path + "Set path to executable (for \"external\" mode)." + "fourmolu" + & defineBooleanProperty + #external + "Call out to an external \"fourmolu\" executable, rather than using the bundled library." + False + +provider :: Recorder (WithPriority LogEvent) -> PluginId -> FormattingHandler IdeState +provider recorder plId ideState token typ contents fp fo = ExceptT $ pluginWithIndefiniteProgress title token Cancellable $ \_updater -> runExceptT $ do + fileOpts <- + maybe [] (convertDynFlags . hsc_dflags . hscEnv) + <$> liftIO (runAction "Fourmolu" ideState $ use GhcSession fp) + useCLI <- liftIO $ runAction "Fourmolu" ideState $ usePropertyAction #external plId properties + fourmoluExePath <- fmap T.unpack $ liftIO $ runAction "Fourmolu" ideState $ usePropertyAction #path plId properties + if useCLI + then ExceptT . liftIO $ + handle @IOException (pure . Left . PluginInternalError . T.pack . show) $ + runExceptT (cliHandler fourmoluExePath fileOpts) + else do + logWith recorder Debug $ LogCompiledInVersion (showVersion Fourmolu.version) + FourmoluConfig{..} <- loadConfig recorder fp' + let config = + refineConfig ModuleSource Nothing Nothing Nothing $ + defaultConfig + { cfgDynOptions = map DynOption fileOpts + , cfgFixityOverrides = cfgFileFixities + , cfgRegion = region + , cfgDebug = False + , cfgPrinterOpts = resolvePrinterOpts [lspPrinterOpts, cfgFilePrinterOpts] + } + ExceptT . liftIO $ + bimap (PluginInternalError . T.pack . show) (InL . makeDiffTextEdit contents) + <$> try @OrmoluException (ormolu config fp' contents) + where + fp' = fromNormalizedFilePath fp + title = "Formatting " <> T.pack (takeFileName fp') + lspPrinterOpts = mempty{poIndentation = Just $ fromIntegral $ fo ^. tabSize} + region = case typ of + FormatText -> + RegionIndices Nothing Nothing + FormatRange (Range (Position sl _) (Position el _)) -> + RegionIndices (Just $ fromIntegral $ sl + 1) (Just $ fromIntegral $ el + 1) + cliHandler :: FilePath -> [String] -> ExceptT PluginError IO ([TextEdit] |? Null) + cliHandler path fileOpts = do + CLIVersionInfo{noCabal} <- do -- check Fourmolu version so that we know which flags to use + (exitCode, out, _err) <- liftIO $ readCreateProcessWithExitCode ( proc path ["-v"] ) "" + let version = do + guard $ exitCode == ExitSuccess + "fourmolu" : v : _ <- pure $ T.words out + traverse (readMaybe @Int . T.unpack) $ T.splitOn "." v + case version of + Just v -> do + logWith recorder Debug $ LogExternalVersion v + pure CLIVersionInfo + { noCabal = v >= [0, 7] + } + Nothing -> do + logWith recorder Debug $ LogExternalVersion [] + logWith recorder Warning $ NoVersion out + pure CLIVersionInfo + { noCabal = True + } + (exitCode, out, err) <- -- run Fourmolu + liftIO $ readCreateProcessWithExitCode + ( proc path $ + map ("-o" <>) fileOpts + <> mwhen noCabal ["--no-cabal"] + <> catMaybes + [ ("--start-line=" <>) . show <$> regionStartLine region + , ("--end-line=" <>) . 
show <$> regionEndLine region + ] + ){cwd = Just $ takeDirectory fp'} + contents + case exitCode of + ExitSuccess -> do + logWith recorder Debug $ StdErr err + pure $ InL $ makeDiffTextEdit contents out + ExitFailure n -> do + logWith recorder Info $ StdErr err + throwError $ PluginInternalError $ "Fourmolu failed with exit code " <> T.pack (show n) + +loadConfig :: + Recorder (WithPriority LogEvent) -> + FilePath -> + ExceptT PluginError (HandlerM Ide.Types.Config) FourmoluConfig +#if MIN_VERSION_fourmolu(0,16,0) +loadConfig recorder fp = do + liftIO (findConfigFile fp) >>= \case + Left (ConfigNotFound searchDirs) -> do + logWith recorder Info $ NoConfigPath searchDirs + pure emptyConfig + Right file -> do + logWith recorder Info $ ConfigPath file + liftIO (Yaml.decodeFileEither file) >>= \case + Left err -> do + let errorMessage = "Failed to load " <> T.pack file <> ": " <> T.pack (show err) + lift $ pluginSendNotification SMethod_WindowShowMessage $ + ShowMessageParams + { _type_ = MessageType_Error + , _message = errorMessage + } + throwError $ PluginInternalError errorMessage + Right cfg -> do + pure cfg +#else +loadConfig recorder fp = do + liftIO (loadConfigFile fp) >>= \case + ConfigLoaded file opts -> do + logWith recorder Info $ ConfigPath file + pure opts + ConfigNotFound searchDirs -> do + logWith recorder Info $ NoConfigPath searchDirs + pure emptyConfig + ConfigParseError f err -> do + lift $ pluginSendNotification SMethod_WindowShowMessage $ + ShowMessageParams + { _type_ = MessageType_Error + , _message = errorMessage + } + throwError $ PluginInternalError errorMessage + where + errorMessage = "Failed to load " <> T.pack f <> ": " <> T.pack (show err) +#endif + +data LogEvent + = NoVersion Text + | ConfigPath FilePath + | NoConfigPath [FilePath] + | StdErr Text + | LogCompiledInVersion String + | LogExternalVersion [Int] + deriving (Show) + +instance Pretty LogEvent where + pretty = \case + NoVersion t -> "Couldn't get Fourmolu version:" <> line <> indent 2 (pretty t) + ConfigPath p -> "Loaded Fourmolu config from: " <> pretty (show p) + NoConfigPath ps -> "No " <> pretty configFileName <> " found in any of:" + <> line <> indent 2 (vsep (map (pretty . show) ps)) + StdErr t -> "Fourmolu stderr:" <> line <> indent 2 (pretty t) + LogCompiledInVersion v -> "Using compiled in fourmolu-" <> pretty v + LogExternalVersion v -> + "Using external fourmolu" + <> if null v then "" else "-" + <> pretty (intercalate "." $ map show v) + +convertDynFlags :: DynFlags -> [String] +convertDynFlags df = + let pp = ["-pgmF=" <> p | not (null p)] + p = sPgm_F $ Compat.settings df + pm = map (("-fplugin=" <>) . 
moduleNameString) $ pluginModNames df + ex = map showExtension $ S.toList $ extensionFlags df + showExtension = \case + Cpp -> "-XCPP" + x -> "-X" ++ show x + in pp <> pm <> ex + +newtype CLIVersionInfo = CLIVersionInfo + { noCabal :: Bool + } + +mwhen :: Monoid a => Bool -> a -> a +mwhen b x = if b then x else mempty diff --git a/plugins/hls-fourmolu-plugin/test/Main.hs b/plugins/hls-fourmolu-plugin/test/Main.hs new file mode 100644 index 0000000000..483fae8ac8 --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/Main.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import Data.Aeson +import qualified Data.Aeson.KeyMap as KM +import Data.Functor +import Ide.Plugin.Config +import qualified Ide.Plugin.Fourmolu as Fourmolu +import Language.LSP.Protocol.Types +import Language.LSP.Test +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +fourmoluPlugin :: PluginTestDescriptor Fourmolu.LogEvent +fourmoluPlugin = mkPluginTestDescriptor Fourmolu.descriptor "fourmolu" + +tests :: TestTree +tests = + testGroup "fourmolu" $ + [False, True] <&> \cli -> + testGroup + (if cli then "cli" else "lib") + [ goldenWithFourmolu cli "formats correctly" "Fourmolu" "formatted" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) + , goldenWithFourmolu cli "formats imports correctly" "Fourmolu2" "formatted" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) + , goldenWithFourmolu cli "uses correct operator fixities" "Fourmolu3" "formatted" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) + ] + +goldenWithFourmolu :: Bool -> TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithFourmolu cli title path desc = goldenWithHaskellDocFormatter def fourmoluPlugin "fourmolu" conf title testDataDir path desc "hs" + where + conf = def{plcConfig = KM.fromList ["external" .= cli]} + +testDataDir :: FilePath +testDataDir = "plugins" "hls-fourmolu-plugin" "test" "testdata" diff --git a/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu.formatted.hs b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu.formatted.hs new file mode 100644 index 0000000000..bdc198b408 --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu.formatted.hs @@ -0,0 +1,16 @@ +module Fourmolu where + +import Data.List + +import Data.Int +import Prelude + +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz {a :: Int, b :: String} diff --git a/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu.hs b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu.hs new file mode 100644 index 0000000000..14eabbd69a --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu.hs @@ -0,0 +1,15 @@ +module Fourmolu where +import Data.List + +import Prelude +import Data.Int +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu2.formatted.hs b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu2.formatted.hs new file mode 100644 index 0000000000..b3d867e700 --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu2.formatted.hs @@ -0,0 +1,5 @@ +import Data.Bool +import Data.Char +import Data.Data +import Data.Either +import Data.Int diff --git 
a/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu2.hs b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu2.hs new file mode 100644 index 0000000000..bb011b5638 --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu2.hs @@ -0,0 +1,5 @@ +import Data.Char +import Data.Either +import Data.Int +import Data.Data +import Data.Bool diff --git a/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu3.formatted.hs b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu3.formatted.hs new file mode 100644 index 0000000000..ca766959cc --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu3.formatted.hs @@ -0,0 +1,7 @@ +b :: Bool +b = + id $ + id $ + case True && True of + True -> True + False -> False diff --git a/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu3.hs b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu3.hs new file mode 100644 index 0000000000..fafe4da859 --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/Fourmolu3.hs @@ -0,0 +1,6 @@ +b :: Bool +b = + id $ id $ + case True && True of + True -> True + False -> False diff --git a/plugins/hls-fourmolu-plugin/test/testdata/hie.yaml b/plugins/hls-fourmolu-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-fourmolu-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/plugins/hls-gadt-plugin/README.md b/plugins/hls-gadt-plugin/README.md new file mode 100644 index 0000000000..de37ea9b5c --- /dev/null +++ b/plugins/hls-gadt-plugin/README.md @@ -0,0 +1,22 @@ +# GADT Converter Plugin + +The hls-gadt-plugin provides a code action that converts a datatype to GADT syntax. + +## Demo + +![GADT](gadt.gif) + +## Design +The plugin works in the following steps: +1. Get data declarations and enabled pragmas from the parsed source. +2. Respond with a code action that carries a command to convert to GADT syntax if the given position is an H98 data declaration. +3. Convert every part of the H98 declaration to its GADT counterpart. +4. Print the converted declaration. (See the `prettyGADTDecl` source code for details.) +5. Send an edit request to the LSP client; the edit replaces the original data declaration with the GADT version and inserts a `GADTs` pragma if necessary. + +## Known limitations +- Currently all comments are lost while converting to GADT syntax. 
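To make the Design steps above concrete, here is a rough before/after sketch. It uses a hypothetical `Person` type rather than one of the plugin's test cases, and the exact layout of the generated declaration is decided by `prettyGADTDecl`:

```haskell
{-# LANGUAGE GADTs #-}

module PersonExample where

-- Before (Haskell 98 syntax), the shape of declaration the code action matches:
--
--   data Person = Person { name :: String, age :: Int }
--
-- After running the "Convert \"Person\" to GADT syntax" code action, the
-- declaration is replaced in place, and a GADTs pragma is inserted when
-- neither GADTs nor GADTSyntax is already enabled:
data Person where
  Person :: { name :: String, age :: Int } -> Person
```
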
+ +## Change log +### 1.0.0.0 +- Release diff --git a/plugins/hls-gadt-plugin/gadt.gif b/plugins/hls-gadt-plugin/gadt.gif new file mode 100644 index 0000000000..4378c5d5b8 Binary files /dev/null and b/plugins/hls-gadt-plugin/gadt.gif differ diff --git a/plugins/hls-gadt-plugin/src/Ide/Plugin/GADT.hs b/plugins/hls-gadt-plugin/src/Ide/Plugin/GADT.hs new file mode 100644 index 0000000000..7aefa2c524 --- /dev/null +++ b/plugins/hls-gadt-plugin/src/Ide/Plugin/GADT.hs @@ -0,0 +1,141 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.GADT (descriptor) where + +import Control.Lens ((^.)) + +import Control.Monad.Error.Class (MonadError (throwError), + liftEither) +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT, withExceptT) +import Data.Aeson (FromJSON, ToJSON, toJSON) +import Data.Either.Extra (maybeToEither) +import qualified Data.Map as Map +import qualified Data.Text as T +import Development.IDE +import Development.IDE.GHC.Compat + +import Data.Maybe (mapMaybe) +import Development.IDE.Core.PluginUtils +import Development.IDE.Spans.Pragmas (getFirstPragma, + insertNewPragma) +import GHC.Generics (Generic) +import Ide.Plugin.Error +import Ide.Plugin.GHC +import Ide.PluginUtils +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +descriptor :: PluginId -> PluginDescriptor IdeState +descriptor plId = (defaultPluginDescriptor plId "Provides a code action to convert datatypes to GADT syntax") + { Ide.Types.pluginHandlers = + mkPluginHandler SMethod_TextDocumentCodeAction codeActionHandler + , pluginCommands = + [PluginCommand toGADTSyntaxCommandId "convert data decl to GADT syntax" (toGADTCommand plId)] + } + +-- | Parameter used in the command +data ToGADTParams = ToGADTParams + { uri :: Uri + , range :: Range + } deriving (Generic, ToJSON, FromJSON) + +toGADTSyntaxCommandId :: CommandId +toGADTSyntaxCommandId = "GADT.toGADT" + +-- | A command replaces H98 data decl with GADT decl in place +toGADTCommand :: PluginId -> CommandFunction IdeState ToGADTParams +toGADTCommand pId@(PluginId pId') state _ ToGADTParams{..} = withExceptT handleGhcidePluginError $ do + nfp <- withExceptT GhcidePluginErrors $ getNormalizedFilePathE uri + (decls, exts) <- getInRangeH98DeclsAndExts state range nfp + (L ann decl) <- case decls of + [d] -> pure d + _ -> throwError $ UnexpectedNumberOfDeclarations (Prelude.length decls) + deps <- withExceptT GhcidePluginErrors + $ runActionE (T.unpack pId' <> ".GhcSessionDeps") state + $ useE GhcSessionDeps nfp + (hsc_dflags . hscEnv -> df) <- pure deps + txt <- withExceptT (PrettyGadtError . T.pack) $ liftEither $ T.pack <$> (prettyGADTDecl df . 
h98ToGADTDecl) decl + range <- liftEither + $ maybeToEither FailedToFindDataDeclRange + $ srcSpanToRange $ locA ann + pragma <- withExceptT GhcidePluginErrors $ getFirstPragma pId state nfp + let insertEdit = [insertNewPragma pragma GADTs | all (`notElem` exts) [GADTSyntax, GADTs]] + + _ <- lift $ pluginSendRequest + SMethod_WorkspaceApplyEdit + (ApplyWorkspaceEditParams Nothing (workSpaceEdit nfp (TextEdit range txt : insertEdit))) + (\_ -> pure ()) + + pure $ InR Null + where + workSpaceEdit nfp edits = WorkspaceEdit + (pure $ Map.fromList + [(filePathToUri $ fromNormalizedFilePath nfp, + edits)]) + Nothing Nothing + +codeActionHandler :: PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeActionHandler state plId (CodeActionParams _ _ doc range _) = withExceptT handleGhcidePluginError $ do + nfp <- withExceptT GhcidePluginErrors $ getNormalizedFilePathE (doc ^. L.uri) + (inRangeH98Decls, _) <- getInRangeH98DeclsAndExts state range nfp + let actions = map (mkAction . printOutputable . tcdLName . unLoc) inRangeH98Decls + pure $ InL actions + where + mkAction :: T.Text -> Command |? CodeAction + mkAction name = InR CodeAction{..} + where + _title = "Convert \"" <> name <> "\" to GADT syntax" + _kind = Just CodeActionKind_RefactorRewrite + _diagnostics = Nothing + _isPreferred = Nothing + _disabled = Nothing + _edit = Nothing + _command = Just + $ mkLspCommand plId toGADTSyntaxCommandId _title (Just [toJSON mkParam]) + _data_ = Nothing + + mkParam = ToGADTParams (doc ^. L.uri) range + +-- | Get all H98 decls in the given range, and enabled extensions +getInRangeH98DeclsAndExts :: (MonadIO m) => + IdeState + -> Range + -> NormalizedFilePath + -> ExceptT GadtPluginError m ([LTyClDecl GP], [Extension]) +getInRangeH98DeclsAndExts state range nfp = do + pm <- withExceptT GhcidePluginErrors + $ runActionE "GADT.GetParsedModuleWithComments" state + $ useE GetParsedModuleWithComments nfp + let (L _ hsDecls) = hsmodDecls <$> pm_parsed_source pm + decls = filter isH98DataDecl + $ mapMaybe getDataDecl + $ filter (inRange range) hsDecls + exts = getExtensions pm + pure (decls, exts) + +data GadtPluginError + = UnexpectedNumberOfDeclarations Int + | FailedToFindDataDeclRange + | PrettyGadtError T.Text + | GhcidePluginErrors PluginError + +handleGhcidePluginError :: + GadtPluginError -> + PluginError +handleGhcidePluginError = \case + UnexpectedNumberOfDeclarations nums -> do + PluginInternalError $ "Expected one declaration but found: " <> T.pack (show nums) + FailedToFindDataDeclRange -> + PluginInternalError "Unable to get data decl range" + PrettyGadtError errMsg -> + PluginInternalError errMsg + GhcidePluginErrors errors -> + errors diff --git a/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs b/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs new file mode 100644 index 0000000000..f5687a9db3 --- /dev/null +++ b/plugins/hls-gadt-plugin/src/Ide/Plugin/GHC.hs @@ -0,0 +1,291 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-missing-signatures #-} +{-# OPTIONS_GHC -Wno-overlapping-patterns #-} +module Ide.Plugin.GHC where + +#if !MIN_VERSION_ghc(9,11,0) +import Data.Functor ((<&>)) +#endif +import Data.List.Extra (stripInfix) +import qualified Data.Text as T +import Development.IDE +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.ExactPrint +import GHC.Parser.Annotation (DeltaPos (..), + EpAnn (..), + EpAnnComments (EpaComments)) +#if MIN_VERSION_ghc(9,11,0) 
+import GHC.Parser.Annotation (EpToken (..)) +#endif +import Ide.PluginUtils (subRange) +import Language.Haskell.GHC.ExactPrint.Parsers (parseDecl) + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +import qualified Data.List.NonEmpty as NE + +#if !MIN_VERSION_ghc(9,9,0) +import GHC.Parser.Annotation (Anchor (Anchor), + AnchorOperation (MovedAnchor), + SrcSpanAnn' (SrcSpanAnn), + TokenLocation (..), + spanAsAnchor) +#endif + +#if MIN_VERSION_ghc(9,9,0) +import GHC.Parser.Annotation (EpUniToken (..), + EpaLocation' (..), + noAnn) +import Language.Haskell.GHC.ExactPrint.Utils (showAst) +#endif + +#if MIN_VERSION_ghc(9,11,0) +import GHC.Types.SrcLoc (UnhelpfulSpanReason (..)) +#else +import GHC.Parser.Annotation (AddEpAnn (..)) +#endif + +type GP = GhcPass Parsed + +-- | Check if a given range is in the range of located item +inRange :: HasSrcSpan a => Range -> a -> Bool +inRange range s = maybe False (subRange range) (srcSpanToRange (getLoc s)) + +-- | Get data decl and its location +getDataDecl :: LHsDecl GP -> Maybe (LTyClDecl GP) +getDataDecl (L l (TyClD _ d@DataDecl{})) = Just (L l d) +getDataDecl _ = Nothing + +isConDeclH98 :: ConDecl GP -> Bool +isConDeclH98 ConDeclH98{} = True +isConDeclH98 _ = False + +isH98DataDecl :: LTyClDecl GP -> Bool +isH98DataDecl (L _ decl@DataDecl{}) = + any (isConDeclH98 . (\(L _ r) -> r)) (dd_cons $ tcdDataDefn decl) +isH98DataDecl _ = False + +-- | Convert H98 data type definition to GADT's +h98ToGADTDecl :: TyClDecl GP -> TyClDecl GP +h98ToGADTDecl = \case + DataDecl{..} -> DataDecl + { tcdDataDefn = updateDefn tcdLName tcdTyVars tcdDataDefn + , .. + } + x -> x + where + updateDefn dataName tyVars = \case + HsDataDefn{..} -> HsDataDefn + { dd_cons = + mapX (h98ToGADTConDecl dataName tyVars (wrapCtxt dd_ctxt)) <$> dd_cons + , dd_ctxt = emptyCtxt -- Context can't appear at the data name in GADT + , .. 
+ } + x -> x + +-- | Convert H98 data constructor to GADT data constructor +h98ToGADTConDecl :: + LIdP GP -- ^Type constructor name, + -- used for constructing final result type in GADT + -> LHsQTyVars GP + -- ^Type variable names + -- used for constructing final result type in GADT + -> Maybe (LHsContext GP) + -- ^Data type context + -> ConDecl GP + -> ConDecl GP +h98ToGADTConDecl dataName tyVars ctxt = \case + ConDeclH98{..} -> + ConDeclGADT + +#if MIN_VERSION_ghc(9,11,0) + (AnnConDeclGADT [] [] NoEpUniTok) +#elif MIN_VERSION_ghc(9,9,0) + (NoEpUniTok, con_ext) +#else + con_ext +#endif + + (NE.singleton con_name) + +#if !MIN_VERSION_ghc(9,9,0) + (L NoTokenLoc HsNormalTok) +#endif + -- Ignore all existential type variable since GADT not needed + implicitTyVars + (mergeContext ctxt con_mb_cxt) + (renderDetails con_args) + renderResultTy + con_doc + x -> x + where + -- Parameters in the data constructor + renderDetails :: HsConDeclH98Details GP -> HsConDeclGADTDetails GP +#if MIN_VERSION_ghc(9,9,0) + renderDetails (PrefixCon _ args) = PrefixConGADT noExtField args +#else + renderDetails (PrefixCon _ args) = PrefixConGADT args +#endif +#if MIN_VERSION_ghc(9,9,0) + renderDetails (InfixCon arg1 arg2) = PrefixConGADT noExtField [arg1, arg2] +#else + renderDetails (InfixCon arg1 arg2) = PrefixConGADT [arg1, arg2] +#endif +#if MIN_VERSION_ghc(9,9,0) + renderDetails (RecCon recs) = RecConGADT NoEpUniTok recs +#else + renderDetails (RecCon recs) = RecConGADT recs noHsUniTok +#endif + + + -- | Construct GADT result type + renderResultTy :: LHsType GP + renderResultTy = case tyVars of + -- Without type variable + HsQTvs _ [] -> wrappedDataName + -- With type variable + HsQTvs _ vars -> foldl go wrappedDataName vars + _ -> wrappedDataName + where + wrappedDataName = wrap (HsTyVar noUsed NotPromoted dataName) + + -- Bundle data name with type vars by `HsAppTy` + go acc (L _(UserTyVar' var)) = + wrap + (HsAppTy noExtField acc + (wrap (HsTyVar noUsed NotPromoted var))) + go acc _ = acc + + -- Merge data type context and constructor type context + mergeContext :: Maybe (LHsContext GP) -> Maybe (LHsContext GP) -> Maybe (LHsContext GP) + mergeContext ctxt1 ctxt2 = + (wrap . map wrap) . map unParTy + <$> (getContextType ctxt1 <> getContextType ctxt2) + where + getContextType :: Maybe (LHsContext GP) -> Maybe [HsType GP] + getContextType ctxt = map unWrap . unWrap <$> ctxt + + -- Unparen the outmost, it only occurs at the outmost + -- for a valid type. + -- + -- Note for context paren rule: + -- + -- If only one element, it __can__ have a paren type. + -- If not, there can't have a parent type. + unParTy :: HsType GP -> HsType GP + unParTy (HsParTy _ ty) = unWrap ty + unParTy x = x +{- | +We use `printOutputable` to print H98 data decl as GADT syntax, +this print is not perfect, it will: + +1. Make data name and the `where` key word in different lines. +2. Make the whole data decl prints in one line if there is only one data constructor. +3. The ident size of every data constructor depends on its origin + format, and may have different ident size between constructors. + +Hence, we first use `printOutputable` to get an initial GADT syntax, +then use `ghc-exactprint` to parse the initial result, and finally +adjust the details that mentioned above. + +The adjustment includes: + +1. Make the `where` key word at the same line of data name. +2. Remove the extra blank line caused by adjustment of `where`. +3. 
Make every data constructor start with a new line and 2 spaces +-} +prettyGADTDecl :: DynFlags -> TyClDecl GP -> Either String String +prettyGADTDecl df decl = + let old = printOutputable decl + hsDecl = parseDecl df "unused" (T.unpack old) + tycld = adjustTyClD hsDecl + in removeExtraEmptyLine . exactPrint <$> tycld + where + adjustTyClD = \case + Right (L _ (TyClD _ tycld)) -> Right $ adjustDataDecl tycld + Right x -> Left $ "Expect TyClD but got " <> showAst x + Left err -> Left $ printWithoutUniques err + + adjustDataDecl DataDecl{..} = DataDecl + { tcdDExt = adjustWhere tcdDExt + , tcdDataDefn = tcdDataDefn + { +#if MIN_VERSION_ghc(9,11,0) + dd_ext = adjustDefnWhere (dd_ext tcdDataDefn), +#endif + dd_cons = + fmap adjustCon (dd_cons tcdDataDefn) + } + , .. + } + adjustDataDecl x = x + + -- Make every data constructor start with a new line and 2 spaces + adjustCon :: LConDecl GP -> LConDecl GP +#if MIN_VERSION_ghc(9,11,0) + adjustCon (L _ r) = + let delta = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) (DifferentLine 1 2) [] + in L (EpAnn delta (AnnListItem []) (EpaComments [])) r +#elif MIN_VERSION_ghc(9,9,0) + adjustCon (L _ r) = + let delta = EpaDelta (DifferentLine 1 3) [] + in L (EpAnn delta (AnnListItem []) (EpaComments [])) r +#else + adjustCon (L (SrcSpanAnn _ loc) r) = + let go (Anchor a _) = Anchor a (MovedAnchor (DifferentLine 1 2)) + in L (SrcSpanAnn (EpAnn (go (spanAsAnchor loc)) (AnnListItem []) (EpaComments [])) loc) r +#endif + + -- Adjust where annotation to the same line of the type constructor +#if MIN_VERSION_ghc(9,11,0) + -- tcdDext is just a placeholder in ghc-9.12 + adjustWhere = id +#else + adjustWhere tcdDExt = tcdDExt <&> +#if !MIN_VERSION_ghc(9,9,0) + map +#endif + (\(AddEpAnn ann l) -> + if ann == AnnWhere + then AddEpAnn AnnWhere d1 + else AddEpAnn ann l + ) +#endif + +#if MIN_VERSION_ghc(9,11,0) + adjustDefnWhere annDataDefn + | andd_where annDataDefn == NoEpTok = annDataDefn + | otherwise = annDataDefn {andd_where = andd_where'} + where + (EpTok (EpaSpan aw)) = andd_where annDataDefn + andd_where' = EpTok (EpaDelta aw (SameLine 1) []) +#endif + -- Remove the first extra line if exist + removeExtraEmptyLine s = case stripInfix "\n\n" s of + Just (x, xs) -> x <> "\n" <> xs + Nothing -> s + +wrap :: forall a. 
WrapXRec GP a => a -> XRec GP a +wrap = wrapXRec @GP +wrapCtxt = id +emptyCtxt = Nothing +unWrap = unXRec @GP +mapX = mapXRec @GP +#if MIN_VERSION_ghc(9,9,0) +noUsed = noAnn +#else +noUsed = EpAnnNotUsed +#endif + +pattern UserTyVar' :: LIdP pass -> HsTyVarBndr flag pass +#if MIN_VERSION_ghc(9,11,0) +pattern UserTyVar' s <- HsTvb _ _ (HsBndrVar _ s) _ +#else +pattern UserTyVar' s <- UserTyVar _ _ s +#endif + +implicitTyVars = wrapXRec @GP mkHsOuterImplicit diff --git a/plugins/hls-gadt-plugin/test/Main.hs b/plugins/hls-gadt-plugin/test/Main.hs new file mode 100644 index 0000000000..e71c19aa28 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/Main.hs @@ -0,0 +1,74 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wall #-} + +module Main where + +import Control.Monad (void) +import Data.Either (rights) +import qualified Data.Text as T +import qualified Ide.Plugin.GADT as GADT +import System.FilePath (()) +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +gadtPlugin :: PluginTestDescriptor () +gadtPlugin = mkPluginTestDescriptor' GADT.descriptor "GADT" + +tests :: TestTree +tests = testGroup "GADT" + [ runTest "range" "SimpleData" 2 0 2 1 + , runTest "SimpleData" "SimpleData" 2 0 2 10 + , runTest "SimpleNewtype" "SimpleNewtype" 2 0 2 17 + , runTest "Data" "Data" 2 0 2 36 + , runTest "Newtype" "Newtype" 2 0 2 21 + , runTest "Deriving" "Deriving" 2 0 2 56 + , runTest "Infix" "Infix" 2 0 2 35 + , runTest "Record" "Record" 2 0 5 1 + , runTest "TypeVariable" "TypeVariable" 2 0 2 32 + , runTest "DataContext" "DataContext" 2 0 2 31 + , runTest "DataContextParen" "DataContextParen" 2 0 3 6 + , runTest "Forall" "Forall" 2 0 2 44 + , runTest "ConstructorContext" "ConstructorContext" 2 0 2 38 + , runTest "Context" "Context" 2 0 4 41 + , runTest "Pragma" "Pragma" 2 0 3 29 + , runTest "SingleDerivingGHC92" "SingleDerivingGHC92" 2 0 3 14 + , gadtPragmaTest "ghc-9.2 don't need to insert GADTs pragma" False + ] + +gadtPragmaTest :: TestName -> Bool -> TestTree +gadtPragmaTest title hasGADT = testCase title + $ withCanonicalTempDir + $ \dir -> runSessionWithServer def gadtPlugin dir $ do + doc <- createDoc "A.hs" "haskell" (T.unlines ["module A where", "data Foo = Bar"]) + _ <- waitForProgressDone + (act:_) <- findGADTAction <$> getCodeActions doc (Range (Position 1 0) (Position 1 1)) + executeCodeAction act + let expected = T.unlines $ + ["{-# LANGUAGE GADTs #-}" | hasGADT] ++ + ["module A where", "data Foo where", " Bar :: Foo"] + contents <- skipManyTill anyMessage (getDocumentEdit doc) + liftIO $ contents @?= expected + +runTest :: TestName -> FilePath -> UInt -> UInt -> UInt -> UInt -> TestTree +runTest title fp x1 y1 x2 y2 = + goldenWithHaskellDoc def gadtPlugin title testDataDir fp "expected" "hs" $ \doc -> do + _ <- waitForProgressDone + (act:_) <- findGADTAction <$> getCodeActions doc (Range (Position x1 y1) (Position x2 y2)) + executeCodeAction act + void $ skipManyTill anyMessage (getDocumentEdit doc) + +findGADTAction :: [a |? CodeAction] -> [CodeAction] +findGADTAction = filter isGADTCodeAction . rights . 
map toEither + +isGADTCodeAction :: CodeAction -> Bool +isGADTCodeAction CodeAction{..} = case _kind of + Nothing -> False + Just kind -> case kind of + CodeActionKind_RefactorRewrite -> True + _ -> False + +testDataDir :: FilePath +testDataDir = "plugins" "hls-gadt-plugin" "test" "testdata" diff --git a/plugins/hls-gadt-plugin/test/testdata/ConstructorContext.expected.hs b/plugins/hls-gadt-plugin/test/testdata/ConstructorContext.expected.hs new file mode 100644 index 0000000000..1a23495395 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/ConstructorContext.expected.hs @@ -0,0 +1,4 @@ +module ConstructorContext where + +data Foo where + Bar :: Show a => a -> Foo diff --git a/plugins/hls-gadt-plugin/test/testdata/ConstructorContext.hs b/plugins/hls-gadt-plugin/test/testdata/ConstructorContext.hs new file mode 100644 index 0000000000..2becd24bbb --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/ConstructorContext.hs @@ -0,0 +1,3 @@ +module ConstructorContext where + +data Foo = forall a. (Show a) => Bar a diff --git a/plugins/hls-gadt-plugin/test/testdata/Context.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Context.expected.hs new file mode 100644 index 0000000000..b6a8e6e468 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Context.expected.hs @@ -0,0 +1,5 @@ +module Context where + +data Foo a where + Bar :: (Eq a, Show b, Show a) => a -> b -> Foo a + Baz :: (Eq a, Show c) => c -> c -> Foo a diff --git a/plugins/hls-gadt-plugin/test/testdata/Context.hs b/plugins/hls-gadt-plugin/test/testdata/Context.hs new file mode 100644 index 0000000000..a8eff3ee06 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Context.hs @@ -0,0 +1,5 @@ +module Context where + +data (Eq a) => Foo a = + forall b c. (Show b, Show a) => + Bar a b | forall c. 
(Show c) => Baz c c diff --git a/plugins/hls-gadt-plugin/test/testdata/Data.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Data.expected.hs new file mode 100644 index 0000000000..37f42e7453 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Data.expected.hs @@ -0,0 +1,5 @@ +module Data where + +data Foo where + Bar :: Int -> Foo + Baz :: Char -> String -> Foo diff --git a/plugins/hls-gadt-plugin/test/testdata/Data.hs b/plugins/hls-gadt-plugin/test/testdata/Data.hs new file mode 100644 index 0000000000..4c2c4fd7b9 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Data.hs @@ -0,0 +1,3 @@ +module Data where + +data Foo = Bar Int | Baz Char String diff --git a/plugins/hls-gadt-plugin/test/testdata/DataContext.expected.hs b/plugins/hls-gadt-plugin/test/testdata/DataContext.expected.hs new file mode 100644 index 0000000000..2cadc9d0ae --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/DataContext.expected.hs @@ -0,0 +1,5 @@ +module DataContext where + +data T a b where + F :: Ord a => a -> T a b + G :: Ord a => b -> T a b diff --git a/plugins/hls-gadt-plugin/test/testdata/DataContext.hs b/plugins/hls-gadt-plugin/test/testdata/DataContext.hs new file mode 100644 index 0000000000..44a6970670 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/DataContext.hs @@ -0,0 +1,3 @@ +module DataContext where + +data Ord a => T a b = F a | G b diff --git a/plugins/hls-gadt-plugin/test/testdata/DataContextParen.expected.hs b/plugins/hls-gadt-plugin/test/testdata/DataContextParen.expected.hs new file mode 100644 index 0000000000..2a0e998e50 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/DataContextParen.expected.hs @@ -0,0 +1,4 @@ +module DataContextParen where + +data F a where + G :: Eq a => a -> F a diff --git a/plugins/hls-gadt-plugin/test/testdata/DataContextParen.hs b/plugins/hls-gadt-plugin/test/testdata/DataContextParen.hs new file mode 100644 index 0000000000..63e245d734 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/DataContextParen.hs @@ -0,0 +1,4 @@ +module DataContextParen where + +data (Eq a) => F a + = G a diff --git a/plugins/hls-gadt-plugin/test/testdata/Deriving.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Deriving.expected.hs new file mode 100644 index 0000000000..1c039ef80b --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Deriving.expected.hs @@ -0,0 +1,6 @@ +module Deriving where + +data Foo where + Bar :: Int -> Foo + Baz :: Char -> String -> Foo + deriving (Show, Eq) diff --git a/plugins/hls-gadt-plugin/test/testdata/Deriving.hs b/plugins/hls-gadt-plugin/test/testdata/Deriving.hs new file mode 100644 index 0000000000..8a75a7d7db --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Deriving.hs @@ -0,0 +1,3 @@ +module Deriving where + +data Foo = Bar Int | Baz Char String deriving (Show, Eq) diff --git a/plugins/hls-gadt-plugin/test/testdata/Forall.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Forall.expected.hs new file mode 100644 index 0000000000..f410a9da5d --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Forall.expected.hs @@ -0,0 +1,4 @@ +module Forall where + +data Foo where + Bar :: Show a => a -> b -> a -> Foo diff --git a/plugins/hls-gadt-plugin/test/testdata/Forall.hs b/plugins/hls-gadt-plugin/test/testdata/Forall.hs new file mode 100644 index 0000000000..ebf163029a --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Forall.hs @@ -0,0 +1,3 @@ +module Forall where + +data Foo = forall a b. 
(Show a) => Bar a b a diff --git a/plugins/hls-gadt-plugin/test/testdata/Infix.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Infix.expected.hs new file mode 100644 index 0000000000..0f1c0838d9 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Infix.expected.hs @@ -0,0 +1,5 @@ +module Infix where + +data Foo where + (:->) :: Int -> Char -> Foo + deriving () diff --git a/plugins/hls-gadt-plugin/test/testdata/Infix.hs b/plugins/hls-gadt-plugin/test/testdata/Infix.hs new file mode 100644 index 0000000000..45d6707f7c --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Infix.hs @@ -0,0 +1,3 @@ +module Infix where + +data Foo = Int :-> Char deriving () diff --git a/plugins/hls-gadt-plugin/test/testdata/Newtype.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Newtype.expected.hs new file mode 100644 index 0000000000..bd2a4edf71 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Newtype.expected.hs @@ -0,0 +1,4 @@ +module Newtype where + +newtype Foo where + Bar :: Int -> Foo diff --git a/plugins/hls-gadt-plugin/test/testdata/Newtype.hs b/plugins/hls-gadt-plugin/test/testdata/Newtype.hs new file mode 100644 index 0000000000..fb2765f2f6 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Newtype.hs @@ -0,0 +1,3 @@ +module Newtype where + +newtype Foo = Bar Int diff --git a/plugins/hls-gadt-plugin/test/testdata/Pragma.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Pragma.expected.hs new file mode 100644 index 0000000000..fe95040558 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Pragma.expected.hs @@ -0,0 +1,5 @@ +module Pragma where + +data F where + G :: {-# UNPACK #-}Int -> F + H :: {-# NOUNPACK #-}Char -> F diff --git a/plugins/hls-gadt-plugin/test/testdata/Pragma.hs b/plugins/hls-gadt-plugin/test/testdata/Pragma.hs new file mode 100644 index 0000000000..5624a6a6e6 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Pragma.hs @@ -0,0 +1,4 @@ +module Pragma where + +data F = G{-# UNPACK #-}Int + | H {-# NOUNPACK #-} Char diff --git a/plugins/hls-gadt-plugin/test/testdata/Record.expected.hs b/plugins/hls-gadt-plugin/test/testdata/Record.expected.hs new file mode 100644 index 0000000000..db599fa3a0 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Record.expected.hs @@ -0,0 +1,4 @@ +module Record where + +data Foo where + Foo :: {bar :: Char, baz :: Int} -> Foo diff --git a/plugins/hls-gadt-plugin/test/testdata/Record.hs b/plugins/hls-gadt-plugin/test/testdata/Record.hs new file mode 100644 index 0000000000..cc46115e52 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/Record.hs @@ -0,0 +1,6 @@ +module Record where + +data Foo = Foo { + bar :: Char, + baz :: Int +} diff --git a/plugins/hls-gadt-plugin/test/testdata/SimpleData.expected.hs b/plugins/hls-gadt-plugin/test/testdata/SimpleData.expected.hs new file mode 100644 index 0000000000..f8c4714c23 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SimpleData.expected.hs @@ -0,0 +1,4 @@ +module SimpleData where + +data A where + B :: A diff --git a/plugins/hls-gadt-plugin/test/testdata/SimpleData.hs b/plugins/hls-gadt-plugin/test/testdata/SimpleData.hs new file mode 100644 index 0000000000..0112eb99fc --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SimpleData.hs @@ -0,0 +1,3 @@ +module SimpleData where + +data A = B diff --git a/plugins/hls-gadt-plugin/test/testdata/SimpleNewtype.expected.hs b/plugins/hls-gadt-plugin/test/testdata/SimpleNewtype.expected.hs new file mode 100644 index 0000000000..718599b6ca --- /dev/null +++ 
b/plugins/hls-gadt-plugin/test/testdata/SimpleNewtype.expected.hs @@ -0,0 +1,4 @@ +module SimpleNewtype where + +newtype A where + B :: Int -> A diff --git a/plugins/hls-gadt-plugin/test/testdata/SimpleNewtype.hs b/plugins/hls-gadt-plugin/test/testdata/SimpleNewtype.hs new file mode 100644 index 0000000000..e5bd154766 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SimpleNewtype.hs @@ -0,0 +1,3 @@ +module SimpleNewtype where + +newtype A = B Int diff --git a/plugins/hls-gadt-plugin/test/testdata/SingleDeriving.expected.hs b/plugins/hls-gadt-plugin/test/testdata/SingleDeriving.expected.hs new file mode 100644 index 0000000000..5a8d088c5a --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SingleDeriving.expected.hs @@ -0,0 +1,5 @@ +module SingleDeriving where + +data Foo a b where + Bar :: b -> a -> Foo a b + deriving Eq diff --git a/plugins/hls-gadt-plugin/test/testdata/SingleDeriving.hs b/plugins/hls-gadt-plugin/test/testdata/SingleDeriving.hs new file mode 100644 index 0000000000..00cff15e95 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SingleDeriving.hs @@ -0,0 +1,4 @@ +module SingleDeriving where + +data Foo a b = Bar b a + deriving (Eq) diff --git a/plugins/hls-gadt-plugin/test/testdata/SingleDerivingGHC92.expected.hs b/plugins/hls-gadt-plugin/test/testdata/SingleDerivingGHC92.expected.hs new file mode 100644 index 0000000000..46ea2c7b4d --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SingleDerivingGHC92.expected.hs @@ -0,0 +1,5 @@ +module SingleDerivingGHC92 where + +data Foo a b where + Bar :: b -> a -> Foo a b + deriving (Eq) diff --git a/plugins/hls-gadt-plugin/test/testdata/SingleDerivingGHC92.hs b/plugins/hls-gadt-plugin/test/testdata/SingleDerivingGHC92.hs new file mode 100644 index 0000000000..d9ff28ae84 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/SingleDerivingGHC92.hs @@ -0,0 +1,4 @@ +module SingleDerivingGHC92 where + +data Foo a b = Bar b a + deriving (Eq) diff --git a/plugins/hls-gadt-plugin/test/testdata/TypeVariable.expected.hs b/plugins/hls-gadt-plugin/test/testdata/TypeVariable.expected.hs new file mode 100644 index 0000000000..5c2442be31 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/TypeVariable.expected.hs @@ -0,0 +1,5 @@ +module TypeVariable where + +data Foo a f where + Foo :: a -> Foo a f + Bar :: (f a) -> Foo a f diff --git a/plugins/hls-gadt-plugin/test/testdata/TypeVariable.hs b/plugins/hls-gadt-plugin/test/testdata/TypeVariable.hs new file mode 100644 index 0000000000..d7458ae4ad --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/TypeVariable.hs @@ -0,0 +1,3 @@ +module TypeVariable where + +data Foo a f = Foo a | Bar (f a) diff --git a/plugins/hls-gadt-plugin/test/testdata/hie.yaml b/plugins/hls-gadt-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..e678c92239 --- /dev/null +++ b/plugins/hls-gadt-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: ["-XHaskell2010", "-XExistentialQuantification", "-XGADTs"] diff --git a/plugins/hls-hlint-plugin/.hlint.yaml b/plugins/hls-hlint-plugin/.hlint.yaml new file mode 100644 index 0000000000..7bcab2e941 --- /dev/null +++ b/plugins/hls-hlint-plugin/.hlint.yaml @@ -0,0 +1,2 @@ +# This is here so that the tests in this package don't +# pick up the configuration from HLS's own .hlint.yaml diff --git a/plugins/hls-hlint-plugin/README.md b/plugins/hls-hlint-plugin/README.md new file mode 100644 index 0000000000..4fc4ebbf78 --- /dev/null +++ b/plugins/hls-hlint-plugin/README.md @@ -0,0 +1,11 @@ +# HLint Plugin 
for the [Haskell Language Server](https://github.com/haskell/haskell-language-server#readme) + +## Configuration + +This is typically done through an [HLint configuration file](https://github.com/ndmitchell/hlint#customizing-the-hints). +You can also change the behavior of HLint by adding a list of flags to `haskell.plugin.hlint.config.flags` +if your configuration is in a non-standard location or you want to change settings globally. + +## Known Differences from the `hlint` executable + +- The `hlint` executable by default turns on many extensions when parsing a file because it is not certain about the exact extensions that apply to the file (they may come from project files). This differs from HLS which uses only the extensions the file needs to parse the file. Hence it is possible for the `hlint` executable to report a parse error on a file, but the `hlint` plugin to work just fine on the same file. This does mean that the turning on/off of extensions in the hlint config may be ignored by the `hlint` plugin. diff --git a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs new file mode 100644 index 0000000000..210e9f3910 --- /dev/null +++ b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs @@ -0,0 +1,634 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StrictData #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +#ifdef GHC_LIB +#define MIN_GHC_API_VERSION(x,y,z) MIN_VERSION_ghc_lib_parser(x,y,z) +#else +#define MIN_GHC_API_VERSION(x,y,z) MIN_VERSION_ghc(x,y,z) +#endif + +module Ide.Plugin.Hlint + ( + descriptor + , Log(..) 
+ ) where +import Control.Arrow ((&&&)) +import Control.Concurrent.STM +import Control.DeepSeq +import Control.Exception +import Control.Lens ((?~), (^.)) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Except (ExceptT (..), + runExceptT) +import Data.Aeson.Types (FromJSON (..), + ToJSON (..), + Value (..)) +import qualified Data.ByteString as BS +import Data.Hashable +import qualified Data.HashMap.Strict as Map +import qualified Data.Map as M +import Data.Maybe +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Data.Typeable +import Development.IDE hiding + (Error, + getExtensions) +import Development.IDE.Core.Compile (sourceParser) +import Development.IDE.Core.FileStore (getVersionedTextDoc) +import Development.IDE.Core.Rules (defineNoFile, + getParsedModuleWithComments) +import Development.IDE.Core.Shake (getDiagnostics) + +#if APPLY_REFACT +import qualified Refact.Apply as Refact +import qualified Refact.Types as Refact +#if !MIN_VERSION_apply_refact(0,12,0) +import System.Environment (setEnv, + unsetEnv) +#endif +#endif + +import Development.IDE.GHC.Compat (DynFlags, + extensionFlags, + ms_hspp_opts, + topDir) +import qualified Development.IDE.GHC.Compat.Util as EnumSet + +#if MIN_GHC_API_VERSION(9,4,0) +import qualified GHC.Data.Strict as Strict +#endif +#if MIN_GHC_API_VERSION(9,0,0) +import GHC.Types.SrcLoc hiding + (RealSrcSpan) +import qualified GHC.Types.SrcLoc as GHC +#else +import qualified SrcLoc as GHC +import SrcLoc hiding + (RealSrcSpan) +#endif +import GHC.LanguageExtensions (Extension) +import Language.Haskell.GhclibParserEx.GHC.Driver.Session as GhclibParserEx (readExtension) +import System.FilePath (takeFileName) +import System.IO (IOMode (WriteMode), + hClose, + hPutStr, + hSetEncoding, + hSetNewlineMode, + noNewlineTranslation, + utf8, + withFile) +import System.IO.Temp + +import Ide.Plugin.Config hiding + (Config) +import Ide.Plugin.Error +import Ide.Plugin.Properties +import Ide.Plugin.Resolve +import Ide.PluginUtils +import Ide.Types hiding + (Config) +import Language.Haskell.HLint as Hlint +import qualified Language.LSP.Protocol.Lens as LSP +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (Null) +import qualified Language.LSP.Protocol.Types as LSP + +import Development.IDE.Core.PluginUtils as PluginUtils +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Spans.Pragmas (LineSplitTextEdits (LineSplitTextEdits), + NextPragmaInfo (NextPragmaInfo), + getNextPragmaInfo, + lineSplitDeleteTextEdit, + lineSplitInsertTextEdit, + lineSplitTextEdits, + nextPragmaLine) +import GHC.Generics (Generic) +import Text.Regex.TDFA.Text () + +-- --------------------------------------------------------------------- + +data Log + = LogShake Shake.Log + | LogApplying NormalizedFilePath (Either String WorkspaceEdit) +#if APPLY_REFACT + | LogGeneratedIdeas NormalizedFilePath [[Refact.Refactoring Refact.SrcSpan]] +#endif + | LogGetIdeas NormalizedFilePath + | LogUsingExtensions NormalizedFilePath [String] -- Extension is only imported conditionally, so we just stringify them + | forall a. 
(Pretty a) => LogResolve a + +instance Pretty Log where + pretty = \case + LogShake log -> pretty log + LogApplying fp res -> "Applying hint(s) for" <+> viaShow fp <> ":" <+> viaShow res +#if APPLY_REFACT + LogGeneratedIdeas fp ideas -> "Generated hlint ideas for for" <+> viaShow fp <> ":" <+> viaShow ideas +#endif + LogUsingExtensions fp exts -> "Using extensions for " <+> viaShow fp <> ":" <> line <> indent 4 (pretty exts) + LogGetIdeas fp -> "Getting hlint ideas for " <+> viaShow fp + LogResolve msg -> pretty msg + +-- Reimplementing this, since the one in Development.IDE.GHC.Compat isn't for ghc-lib +#if !MIN_GHC_API_VERSION(9,0,0) +type BufSpan = () +#endif +pattern RealSrcSpan :: GHC.RealSrcSpan -> Maybe BufSpan -> GHC.SrcSpan +#if MIN_GHC_API_VERSION(9,4,0) +pattern RealSrcSpan x y <- GHC.RealSrcSpan x (fromStrictMaybe -> y) +#elif MIN_GHC_API_VERSION(9,0,0) +pattern RealSrcSpan x y = GHC.RealSrcSpan x y +#else +pattern RealSrcSpan x y <- ((,Nothing) -> (GHC.RealSrcSpan x, y)) +#endif +{-# COMPLETE RealSrcSpan, UnhelpfulSpan #-} + +#if MIN_GHC_API_VERSION(9,4,0) +fromStrictMaybe :: Strict.Maybe a -> Maybe a +fromStrictMaybe (Strict.Just a ) = Just a +fromStrictMaybe Strict.Nothing = Nothing +#endif + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + let resolveRecorder = cmapWithPrio LogResolve recorder + (pluginCommands, pluginHandlers) = mkCodeActionWithResolveAndCommand resolveRecorder plId codeActionProvider (resolveProvider recorder) + desc = "Provides HLint diagnostics and code actions. Built with hlint-" <> VERSION_hlint + in (defaultPluginDescriptor plId desc) + { pluginRules = rules recorder plId + , pluginCommands = pluginCommands + , pluginHandlers = pluginHandlers + , pluginConfigDescriptor = defaultConfigDescriptor + { configHasDiagnostics = True + , configCustomConfig = mkCustomConfig properties + } + } + +-- This rule only exists for generating file diagnostics +-- so the RuleResult is empty +data GetHlintDiagnostics = GetHlintDiagnostics + deriving (Eq, Show, Generic) +instance Hashable GetHlintDiagnostics +instance NFData GetHlintDiagnostics + +type instance RuleResult GetHlintDiagnostics = () + +-- | Hlint rules to generate file diagnostics based on hlint hints +-- This rule is recomputed when: +-- - A file has been edited via +-- - `getIdeas` -> `getParsedModule` in any case +-- - `getIdeas` -> `getFileContents` if the hls ghc does not match the hlint default ghc +-- - The client settings have changed, to honour the `hlintOn` setting, via `getClientConfigAction` +-- - The hlint specific settings have changed, via `getHlintSettingsRule` +rules :: Recorder (WithPriority Log) -> PluginId -> Rules () +rules recorder plugin = do + define (cmapWithPrio LogShake recorder) $ \GetHlintDiagnostics file -> do + config <- getPluginConfigAction plugin + let hlintOn = plcGlobalOn config && plcDiagnosticsOn config + ideas <- if hlintOn then getIdeas recorder file else return (Right []) + return (diagnostics file ideas, Just ()) + + defineNoFile (cmapWithPrio LogShake recorder) $ \GetHlintSettings -> do + (Config flags) <- getHlintConfig plugin + liftIO $ argsSettings flags + + action $ do + files <- Map.keys <$> getFilesOfInterestUntracked + Shake.runWithSignal (Proxy @"kick/start/hlint") (Proxy @"kick/done/hlint") files GetHlintDiagnostics + + where + + diagnostics :: NormalizedFilePath -> Either ParseError [Idea] -> [FileDiagnostic] + diagnostics file (Right ideas) = + [ideErrorFromLspDiag diag file Nothing | i <- 
ideas, Just diag <- [ideaToDiagnostic i]] + diagnostics file (Left parseErr) = + [ideErrorFromLspDiag (parseErrorToDiagnostic parseErr) file Nothing] + + + ideaToDiagnostic :: Idea -> Maybe Diagnostic + ideaToDiagnostic idea = do + diagnosticSeverity <- ideaSeverityToDiagnosticSeverity (ideaSeverity idea) + pure $ + LSP.Diagnostic { + _range = srcSpanToRange $ ideaSpan idea + , _severity = Just diagnosticSeverity + -- we are encoding the fact that idea has refactorings in diagnostic code + , _code = Just (InR $ T.pack $ codePre ++ ideaHint idea) + , _source = Just "hlint" + , _message = idea2Message idea + , _relatedInformation = Nothing + , _tags = Nothing + , _codeDescription = Nothing + , _data_ = Nothing + } + + where + codePre = if null $ ideaRefactoring idea then "" else "refact:" + + -- We only propogate error severity of hlint and downgrade other severities to Info. + -- Currently, there are just 2 error level serverities present in hlint by default: https://github.com/ndmitchell/hlint/issues/1549#issuecomment-1892701824. + -- And according to ndmitchell: The default error level severities of the two hints are justified and it's fairly uncommon to happen. + -- GH Issue about discussion on this: https://github.com/ndmitchell/hlint/issues/1549 + ideaSeverityToDiagnosticSeverity :: Hlint.Severity -> Maybe LSP.DiagnosticSeverity + ideaSeverityToDiagnosticSeverity Hlint.Ignore = Nothing + ideaSeverityToDiagnosticSeverity Hlint.Suggestion = Just LSP.DiagnosticSeverity_Information + ideaSeverityToDiagnosticSeverity Hlint.Warning = Just LSP.DiagnosticSeverity_Information + ideaSeverityToDiagnosticSeverity Hlint.Error = Just LSP.DiagnosticSeverity_Error + + idea2Message :: Idea -> T.Text + idea2Message idea = T.unlines $ [T.pack $ ideaHint idea, "Found:", " " <> T.pack (ideaFrom idea)] + <> toIdea <> map (T.pack . 
show) (ideaNote idea) + where + toIdea :: [T.Text] + toIdea = case ideaTo idea of + Nothing -> [] + Just i -> [T.pack "Why not:", T.pack $ " " ++ i] + + + parseErrorToDiagnostic :: ParseError -> Diagnostic + parseErrorToDiagnostic (Hlint.ParseError l msg contents) = + LSP.Diagnostic { + _range = srcSpanToRange l + , _severity = Just LSP.DiagnosticSeverity_Information + , _code = Just (InR sourceParser) + , _source = Just "hlint" + , _message = T.unlines [T.pack msg,T.pack contents] + , _relatedInformation = Nothing + , _tags = Nothing + , _codeDescription = Nothing + , _data_ = Nothing + } + + -- This one is defined in Development.IDE.GHC.Error but here + -- the types could come from ghc-lib or ghc + srcSpanToRange :: SrcSpan -> LSP.Range + srcSpanToRange (RealSrcSpan span _) = Range { + _start = LSP.Position { + _line = fromIntegral $ srcSpanStartLine span - 1 + , _character = fromIntegral $ srcSpanStartCol span - 1} + , _end = LSP.Position { + _line = fromIntegral $ srcSpanEndLine span - 1 + , _character = fromIntegral $ srcSpanEndCol span - 1} + } + srcSpanToRange (UnhelpfulSpan _) = noRange + +getIdeas :: Recorder (WithPriority Log) -> NormalizedFilePath -> Action (Either ParseError [Idea]) +getIdeas recorder nfp = do + logWith recorder Debug $ LogGetIdeas nfp + (flags, classify, hint) <- useNoFile_ GetHlintSettings + + let applyHints' (Just (Right modEx)) = Right $ applyHints classify hint [modEx] + applyHints' (Just (Left err)) = Left err + applyHints' Nothing = Right [] + + fmap applyHints' (moduleEx flags) + + where moduleEx :: ParseFlags -> Action (Maybe (Either ParseError ModuleEx)) + moduleEx flags = do + mbpm <- getParsedModuleWithComments nfp + -- If ghc was not able to parse the module, we disable hlint diagnostics + if isNothing mbpm + then return Nothing + else do + flags' <- setExtensions flags + contents <- getFileContents nfp + let fp = fromNormalizedFilePath nfp + let contents' = T.unpack . Rope.toText <$> contents + Just <$> liftIO (parseModuleEx flags' fp contents') + + setExtensions flags = do + hlintExts <- getExtensions nfp + logWith recorder Debug $ LogUsingExtensions nfp (fmap show hlintExts) + return $ flags { enabledExtensions = hlintExts } + +-- Gets extensions from ModSummary dynflags for the file. +-- Previously this would union extensions from both hlint's parsedFlags +-- and the ModSummary dynflags. However using the parsedFlags extensions +-- can sometimes interfere with the hlint parsing of the file. +-- See https://github.com/haskell/haskell-language-server/issues/1279 +getExtensions :: NormalizedFilePath -> Action [Extension] +getExtensions nfp = do + dflags <- getFlags + let hscExts = EnumSet.toList (extensionFlags dflags) + let hscExts' = mapMaybe (GhclibParserEx.readExtension . 
show) hscExts + return hscExts' + where getFlags :: Action DynFlags + getFlags = do + modsum <- use_ GetModSummary nfp + return $ ms_hspp_opts $ msrModSummary modsum + +-- --------------------------------------------------------------------- + +data GetHlintSettings = GetHlintSettings + deriving (Eq, Show, Generic) +instance Hashable GetHlintSettings +instance NFData GetHlintSettings +instance NFData Hint where rnf = rwhnf +instance NFData Classify where rnf = rwhnf +instance NFData ParseFlags where rnf = rwhnf +instance Show Hint where show = const "" +instance Show ParseFlags where show = const "" + +type instance RuleResult GetHlintSettings = (ParseFlags, [Classify], Hint) + +-- --------------------------------------------------------------------- + +newtype Config = Config [String] + +properties :: Properties '[ 'PropertyKey "flags" ('TArray String)] +properties = emptyProperties + & defineArrayProperty #flags + "Flags used by hlint" [] + +-- | Get the plugin config +getHlintConfig :: PluginId -> Action Config +getHlintConfig pId = + Config + <$> usePropertyAction #flags pId properties + +-- --------------------------------------------------------------------- +codeActionProvider :: PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeActionProvider ideState _pluginId (CodeActionParams _ _ documentId _ context) + | let TextDocumentIdentifier uri = documentId + , Just docNormalizedFilePath <- uriToNormalizedFilePath (toNormalizedUri uri) + = do + verTxtDocId <- + liftIO $ + runAction "Hlint.getVersionedTextDoc" ideState $ + getVersionedTextDoc documentId + liftIO $ fmap (InL . map LSP.InR) $ do + allDiagnostics <- atomically $ getDiagnostics ideState + + let numHintsInDoc = length + [lspDiagnostic + | diag <- allDiagnostics + , let lspDiagnostic = fdLspDiagnostic diag + , validCommand lspDiagnostic + , fdFilePath diag == docNormalizedFilePath + ] + let numHintsInContext = length + [diagnostic | diagnostic <- diags + , validCommand diagnostic + ] + let singleHintCodeActions = diags >>= diagnosticToCodeActions verTxtDocId + if numHintsInDoc > 1 && numHintsInContext > 0 then do + pure $ singleHintCodeActions ++ [applyAllAction verTxtDocId] + else + pure singleHintCodeActions + | otherwise + = pure $ InL [] + + where + applyAllAction verTxtDocId = + let args = Just $ toJSON (ApplyHint verTxtDocId Nothing) + in LSP.CodeAction "Apply all hints" (Just LSP.CodeActionKind_QuickFix) Nothing Nothing Nothing Nothing Nothing args + + -- |Some hints do not have an associated refactoring + validCommand (LSP.Diagnostic _ _ (Just (InR code)) _ (Just "hlint") _ _ _ _) = + "refact:" `T.isPrefixOf` code + validCommand _ = + False + + diags = context ^. 
LSP.diagnostics + +resolveProvider :: Recorder (WithPriority Log) -> ResolveFunction IdeState HlintResolveCommands Method_CodeActionResolve +resolveProvider recorder ideState _plId ca uri resolveValue = do + file <- getNormalizedFilePathE uri + case resolveValue of + (ApplyHint verTxtDocId oneHint) -> do + edit <- ExceptT $ liftIO $ applyHint recorder ideState file oneHint verTxtDocId + pure $ ca & LSP.edit ?~ edit + (IgnoreHint verTxtDocId hintTitle ) -> do + edit <- ExceptT $ liftIO $ ignoreHint recorder ideState file verTxtDocId hintTitle + pure $ ca & LSP.edit ?~ edit + +applyRefactAvailable :: Bool +#if APPLY_REFACT +applyRefactAvailable = True +#else +applyRefactAvailable = False +#endif + +-- | Convert a hlint diagnostic into an apply and an ignore code action +-- if applicable +diagnosticToCodeActions :: VersionedTextDocumentIdentifier -> LSP.Diagnostic -> [LSP.CodeAction] +diagnosticToCodeActions verTxtDocId diagnostic + | LSP.Diagnostic{ _source = Just "hlint", _code = Just (InR code), _range = LSP.Range start _ } <- diagnostic + , let isHintApplicable = "refact:" `T.isPrefixOf` code && applyRefactAvailable + , let hint = T.replace "refact:" "" code + , let suppressHintTitle = "Ignore hint \"" <> hint <> "\" in this module" + , let suppressHintArguments = IgnoreHint verTxtDocId hint + = catMaybes + -- Applying the hint is marked preferred because it addresses the underlying error. + -- Disabling the rule isn't, because less often used and configuration can be adapted. + [ if | isHintApplicable + , let applyHintTitle = "Apply hint \"" <> hint <> "\"" + applyHintArguments = ApplyHint verTxtDocId (Just $ OneHint start hint) -> + Just (mkCodeAction applyHintTitle diagnostic (Just (toJSON applyHintArguments)) True) + | otherwise -> Nothing + , Just (mkCodeAction suppressHintTitle diagnostic (Just (toJSON suppressHintArguments)) False) + ] + | otherwise = [] + +mkCodeAction :: T.Text -> LSP.Diagnostic -> Maybe Value -> Bool -> LSP.CodeAction +mkCodeAction title diagnostic data_ isPreferred = + LSP.CodeAction + { _title = title + , _kind = Just LSP.CodeActionKind_QuickFix + , _diagnostics = Just [diagnostic] + , _isPreferred = Just isPreferred + , _disabled = Nothing + , _edit = Nothing + , _command = Nothing + , _data_ = data_ + } + +mkSuppressHintTextEdits :: DynFlags -> Rope -> T.Text -> [LSP.TextEdit] +mkSuppressHintTextEdits dynFlags fileContents hint = + let + NextPragmaInfo{ nextPragmaLine, lineSplitTextEdits } = getNextPragmaInfo dynFlags (Just fileContents) + nextPragmaLinePosition = Position (fromIntegral nextPragmaLine) 0 + nextPragmaRange = Range nextPragmaLinePosition nextPragmaLinePosition + textEdit = LSP.TextEdit nextPragmaRange $ "{- HLINT ignore \"" <> hint <> "\" -}\n" + lineSplitTextEditList = maybe [] (\LineSplitTextEdits{..} -> [lineSplitInsertTextEdit, lineSplitDeleteTextEdit]) lineSplitTextEdits + in + textEdit : lineSplitTextEditList +-- --------------------------------------------------------------------- + +ignoreHint :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> VersionedTextDocumentIdentifier -> HintTitle -> IO (Either PluginError WorkspaceEdit) +ignoreHint _recorder ideState nfp verTxtDocId ignoreHintTitle = runExceptT $ do + (_, fileContents) <- runActionE "Hlint.GetFileContents" ideState $ useE GetFileContents nfp + (msr, _) <- runActionE "Hlint.GetModSummaryWithoutTimestamps" ideState $ useWithStaleE GetModSummaryWithoutTimestamps nfp + case fileContents of + Just contents -> do + let dynFlags = ms_hspp_opts $ msrModSummary msr + 
textEdits = mkSuppressHintTextEdits dynFlags contents ignoreHintTitle + workspaceEdit = + LSP.WorkspaceEdit + (Just (M.singleton (verTxtDocId ^. LSP.uri) textEdits)) + Nothing + Nothing + pure workspaceEdit + Nothing -> throwError $ PluginInternalError "Unable to get fileContents" + +-- --------------------------------------------------------------------- +data HlintResolveCommands = + ApplyHint + { verTxtDocId :: VersionedTextDocumentIdentifier + -- |If Nothing, apply all hints, otherise only apply + -- the given hint + , oneHint :: Maybe OneHint + } + | IgnoreHint + { verTxtDocId :: VersionedTextDocumentIdentifier + , ignoreHintTitle :: HintTitle + } deriving (Generic, ToJSON, FromJSON) + +type HintTitle = T.Text + +data OneHint = + OneHint + { oneHintPos :: Position + , oneHintTitle :: HintTitle + } deriving (Generic, Eq, Show, ToJSON, FromJSON) + +applyHint :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> Maybe OneHint -> VersionedTextDocumentIdentifier -> IO (Either PluginError WorkspaceEdit) +#if !APPLY_REFACT +applyHint _ _ _ _ _ = + -- https://github.com/ndmitchell/hlint/pull/1594#issuecomment-2338898673 + evaluate $ error "Cannot apply refactoring: apply-refact does not work on GHC 9.10" +#else +applyHint recorder ide nfp mhint verTxtDocId = + runExceptT $ do + let runAction' :: Action a -> IO a + runAction' = runAction "applyHint" ide + let errorHandlers = [ Handler $ \e -> return (Left (show (e :: IOException))) + , Handler $ \e -> return (Left (show (e :: ErrorCall))) + ] + ideas <- bimapExceptT (PluginInternalError . T.pack . showParseError) id $ ExceptT $ runAction' $ getIdeas recorder nfp + let ideas' = maybe ideas (`filterIdeas` ideas) mhint + let commands = map ideaRefactoring ideas' + logWith recorder Debug $ LogGeneratedIdeas nfp commands + let fp = fromNormalizedFilePath nfp + mbOldContent <- fmap (fmap Rope.toText) $ liftIO $ runAction' $ getFileContents nfp + oldContent <- maybe (liftIO $ fmap T.decodeUtf8 (BS.readFile fp)) return mbOldContent + modsum <- liftIO $ runAction' $ use_ GetModSummary nfp + let dflags = ms_hspp_opts $ msrModSummary modsum + + -- set Nothing as "position" for "applyRefactorings" because + -- applyRefactorings expects the provided position to be _within_ the scope + -- of each refactoring it will apply. + -- But "Idea"s returned by HLint point to starting position of the expressions + -- that contain refactorings, so they are often outside the refactorings' boundaries. 
+ let position = Nothing + let writeFileUTF8NoNewLineTranslation file txt = + withFile file WriteMode $ \h -> do + hSetEncoding h utf8 + hSetNewlineMode h noNewlineTranslation + hPutStr h (T.unpack txt) + res <- + liftIO $ withSystemTempFile (takeFileName fp) $ \temp h -> do + hClose h + writeFileUTF8NoNewLineTranslation temp oldContent + exts <- runAction' $ getExtensions nfp + -- We have to reparse extensions to remove the invalid ones + let (enabled, disabled, _invalid) = Refact.parseExtensions $ map show exts + let refactExts = map show $ enabled ++ disabled + (Right <$> applyRefactorings (topDir dflags) position commands temp refactExts) + `catches` errorHandlers + case res of + Right appliedFile -> do + let wsEdit = diffText' True (verTxtDocId, oldContent) (T.pack appliedFile) IncludeDeletions + ExceptT $ return (Right wsEdit) + Left err -> + throwError $ PluginInternalError $ T.pack err + where + -- | If we are only interested in applying a particular hint then + -- let's filter out all the irrelevant ideas + filterIdeas :: OneHint -> [Idea] -> [Idea] + filterIdeas (OneHint (Position l c) title) ideas = + let title' = T.unpack title + ideaPos = (srcSpanStartLine &&& srcSpanStartCol) . toRealSrcSpan . ideaSpan + in filter (\i -> ideaHint i == title' && ideaPos i == (fromIntegral $ l+1, fromIntegral $ c+1)) ideas + + toRealSrcSpan (RealSrcSpan real _) = real + toRealSrcSpan (UnhelpfulSpan x) = error $ "No real source span: " ++ show x + + showParseError :: Hlint.ParseError -> String + showParseError (Hlint.ParseError location message content) = + unlines [show location, message, content] + +-- | Map over both failure and success. +bimapExceptT :: Functor m => (e -> f) -> (a -> b) -> ExceptT e m a -> ExceptT f m b +bimapExceptT f g (ExceptT m) = ExceptT (fmap h m) where + h (Left e) = Left (f e) + h (Right a) = Right (g a) +{-# INLINE bimapExceptT #-} + +-- --------------------------------------------------------------------------- +-- Apply-refact compatability, documentation copied from upstream apply-refact +-- --------------------------------------------------------------------------- + +-- | Apply a set of refactorings as supplied by HLint +-- +-- This compatibility function abstracts over https://github.com/mpickering/apply-refact/issues/133 +-- for backwards compatability. +applyRefactorings :: + -- | FilePath to [GHC's libdir](https://downloads.haskell.org/ghc/latest/docs/users_guide/using.html#ghc-flag---print-libdir). + -- + -- It is possible to use @libdir@ from [ghc-paths package](https://hackage.haskell.org/package/ghc-paths), but note + -- this will make it difficult to provide a binary distribution of your program. + FilePath -> + -- | Apply hints relevant to a specific position + Maybe (Int, Int) -> + -- | 'Refactoring's to apply. Each inner list corresponds to an HLint + -- . + -- An @Idea@ may have more than one 'Refactoring'. + -- + -- The @Idea@s are sorted in ascending order of starting location, and are applied + -- in that order. If two @Idea@s start at the same location, the one with the larger + -- source span comes first. An @Idea@ is filtered out (ignored) if there is an @Idea@ + -- prior to it which has an overlapping source span and is not filtered out. + [[Refact.Refactoring Refact.SrcSpan]] -> + -- | Target file + FilePath -> + -- | GHC extensions, e.g., @LambdaCase@, @NoStarIsType@. The list is processed from left + -- to right. 
An extension (e.g., @StarIsType@) may be overridden later (e.g., by @NoStarIsType@). + -- + -- These are in addition to the @LANGUAGE@ pragmas in the target file. When they conflict + -- with the @LANGUAGE@ pragmas, pragmas win. + [String] -> + IO String +applyRefactorings = +#if MIN_VERSION_apply_refact(0,12,0) + Refact.applyRefactorings +#else + \libdir pos refacts fp exts -> withRuntimeLibdir libdir (Refact.applyRefactorings pos refacts fp exts) + + where + -- Setting a environment variable with the libdir used by ghc-exactprint. + -- It is a workaround for an error caused by the use of a hardcoded at compile time libdir + -- in ghc-exactprint that makes dependent executables non portables. + -- See https://github.com/alanz/ghc-exactprint/issues/96. + -- WARNING: this code is not thread safe, so if you try to apply several async refactorings + -- it could fail. That case is not very likely so we assume the risk. + withRuntimeLibdir :: FilePath -> IO a -> IO a + withRuntimeLibdir libdir = bracket_ (setEnv key libdir) (unsetEnv key) + where key = "GHC_EXACTPRINT_GHC_LIBDIR" +#endif +#endif diff --git a/plugins/hls-hlint-plugin/test/Main.hs b/plugins/hls-hlint-plugin/test/Main.hs new file mode 100644 index 0000000000..360a9c0c01 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/Main.hs @@ -0,0 +1,490 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +module Main + ( main + ) where + +import Control.Lens ((^.)) +import Control.Monad (guard, when) +import Data.Aeson (Value (..), object, (.=)) +import Data.Functor (void) +import Data.List (find) +import qualified Data.Map as Map +import Data.Maybe (fromJust, isJust) +import Data.Proxy (Proxy (Proxy)) +import qualified Data.Text as T +import Ide.Plugin.Config (Config (..)) +import qualified Ide.Plugin.Config as Plugin +import qualified Ide.Plugin.Hlint as HLint +import qualified Language.LSP.Protocol.Lens as L +import System.FilePath ((<.>), ()) +import Test.Hls +import Test.Hls.FileSystem + +main :: IO () +main = defaultTestRunner tests + +hlintPlugin :: PluginTestDescriptor HLint.Log +hlintPlugin = mkPluginTestDescriptor HLint.descriptor "hlint" + +tests :: TestTree +tests = testGroup "hlint" [ + suggestionsTests + , configTests + , ignoreHintTests + , applyHintTests + , resolveTests + ] + +getIgnoreHintText :: T.Text -> T.Text +getIgnoreHintText name = "Ignore hint \"" <> name <> "\" in this module" + +getApplyHintText :: T.Text -> T.Text +getApplyHintText name = "Apply hint \"" <> name <> "\"" + +resolveTests :: TestTree +resolveTests = knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testGroup "hlint resolve tests" + [ + ignoreHintGoldenResolveTest + "Resolve version of: Ignore hint in this module inserts hlint ignore pragma" + "IgnoreHintAction" + (Point 2 8) + "Eta reduce" + , applyHintGoldenResolveTest + "Resolve version of: [#2612] Apply hint works when operator fixities go right-to-left" + "RightToLeftFixities" + (Point 6 13) + "Avoid reverse" + ] + + +ignoreHintTests :: TestTree +ignoreHintTests = testGroup "hlint ignore hint tests" + [ + ignoreHintGoldenTest + "Ignore hint in this module inserts hlint ignore pragma" + "IgnoreHintAction" + (Point 2 8) + "Eta reduce" + ] + +applyHintTests :: TestTree +applyHintTests = knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testGroup "hlint apply hint tests" + [ + applyHintGoldenTest + "[#2612] Apply hint works when operator fixities go 
right-to-left" + "RightToLeftFixities" + (Point 6 13) + "Avoid reverse" + ] + +suggestionsTests :: TestTree +suggestionsTests = + testGroup "hlint suggestions" [ + knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "provides 3.8 code actions including apply all" $ runHlintSession "" $ do + doc <- openDoc "Base.hs" "haskell" + diags@(reduceDiag:_) <- hlintCaptureKick + + liftIO $ do + length diags @?= 2 -- "Eta Reduce" and "Redundant Id" + reduceDiag ^. L.range @?= Range (Position 1 0) (Position 1 12) + reduceDiag ^. L.severity @?= Just DiagnosticSeverity_Information + reduceDiag ^. L.code @?= Just (InR "refact:Eta reduce") + reduceDiag ^. L.source @?= Just "hlint" + + cas <- map fromAction <$> getAllCodeActions doc + + let redundantIdHintName = "Redundant id" + let etaReduceHintName = "Eta reduce" + let applyAll = find (\ca -> "Apply all hints" `T.isSuffixOf` (ca ^. L.title)) cas + let redId = find (\ca -> redundantIdHintName `T.isInfixOf` (ca ^. L.title)) cas + let redEta = find (\ca -> etaReduceHintName `T.isInfixOf` (ca ^. L.title)) cas + let ignoreRedundantIdInThisModule = find (\ca -> getIgnoreHintText redundantIdHintName == (ca ^.L.title)) cas + let ignoreEtaReduceThisModule = find (\ca -> getIgnoreHintText etaReduceHintName == (ca ^.L.title)) cas + + liftIO $ isJust applyAll @? "There is Apply all hints code action" + liftIO $ isJust redId @? "There is Redundant id code action" + liftIO $ isJust redEta @? "There is Eta reduce code action" + liftIO $ isJust ignoreRedundantIdInThisModule @? "There is ignore Redundant id code action" + liftIO $ isJust ignoreEtaReduceThisModule @? "There is ignore Eta reduce code action" + + executeCodeAction (fromJust redId) + + contents <- skipManyTill anyMessage $ getDocumentEdit doc + liftIO $ contents @?= "main = undefined\nfoo x = x\n" + + , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "falls back to pre 3.8 code actions" $ + runSessionWithTestConfig def + { testConfigCaps = noLiteralCaps + , testDirLocation = Left testDir + , testPluginDescriptor = hlintPlugin + , testShiftRoot = True} $ const $ do + doc <- openDoc "Base.hs" "haskell" + + _ <- hlintCaptureKick + + cars <- getAllCodeActions doc + etaReduce <- liftIO $ inspectCommand cars ["Eta reduce"] + + executeCommand etaReduce + + contents <- skipManyTill anyMessage $ getDocumentEdit doc + liftIO $ contents @?= "main = undefined\nfoo = id\n" + + , testCase ".hlint.yaml fixity rules are applied" $ runHlintSession "fixity" $ do + doc <- openDoc "FixityUse.hs" "haskell" + testNoHlintDiagnostics doc + + , testCase "changing document contents updates hlint diagnostics" $ runHlintSession "" $ do + doc <- openDoc "Base.hs" "haskell" + testHlintDiagnostics doc + + let change = TextDocumentContentChangeEvent $ InL + TextDocumentContentChangePartial + { _range = Range (Position 1 8) (Position 1 12) + , _rangeLength = Nothing + , _text = "x" + } + + changeDoc doc [change] + -- We need to wait until hlint has been rerun and clears the diagnostic + [] <- waitForDiagnosticsFrom doc + + let change' = TextDocumentContentChangeEvent $ InL + TextDocumentContentChangePartial + { _range = Range (Position 1 8) (Position 1 12) + , _rangeLength = Nothing + , _text = "id x" + } + changeDoc doc [change'] + testHlintDiagnostics doc + + , testCase "[#554] hlint diagnostics works with CPP via ghc -XCPP argument" $ runHlintSession "cpp" $ do + doc <- openDoc "CppCond.hs" "haskell" + testHlintDiagnostics doc + + , knownBrokenForHlintOnGhcLib "hlint doesn't 
take in account cpp flag as ghc -D argument" $ + testCase "[#554] hlint diagnostics works with CPP via language pragma" $ runHlintSession "cpp" $ do + doc <- openDoc "CppCond.hs" "haskell" + testHlintDiagnostics doc + + , testCase "[#554] hlint diagnostics works with CPP via -XCPP argument and flag via #include header" $ runHlintSession "cpp" $ do + doc <- openDoc "CppHeader.hs" "haskell" + testHlintDiagnostics doc + + , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "[#590] apply-refact works with -XLambdaCase argument" $ runHlintSession "lambdacase" $ do + testRefactor "LambdaCase.hs" "Redundant bracket" + expectedLambdaCase + + , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "[#1242] apply-refact works with -XTypeApplications argument" $ runHlintSession "typeapps" $ do + testRefactor "TypeApplication.hs" "Redundant bracket" + expectedTypeApp + + , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "apply hints works with LambdaCase via language pragma" $ runHlintSession "" $ do + testRefactor "LambdaCase.hs" "Redundant bracket" + ("{-# LANGUAGE LambdaCase #-}" : expectedLambdaCase) + + , ignoreTestBecause "apply-refact doesn't work with cpp" $ + testCase "apply hints works with CPP via -XCPP argument" $ runHlintSession "cpp" $ do + testRefactor "CppCond.hs" "Redundant bracket" + expectedCPP + + , ignoreTestBecause "apply-refact doesn't work with cpp" $ + testCase "apply hints works with CPP via language pragma" $ runHlintSession "" $ do + testRefactor "CppCond.hs" "Redundant bracket" + ("{-# LANGUAGE CPP #-}" : expectedCPP) + + , testCase "hlint diagnostics ignore hints honouring .hlint.yaml" $ runHlintSession "ignore" $ do + doc <- openDoc "CamelCase.hs" "haskell" + testNoHlintDiagnostics doc + + , testCase "hlint diagnostics ignore hints honouring ANN annotations" $ runHlintSession "" $ do + doc <- openDoc "IgnoreAnn.hs" "haskell" + testNoHlintDiagnostics doc + + , testCase "hlint diagnostics ignore hints honouring HLINT annotations" $ runHlintSession "" $ do + doc <- openDoc "IgnoreAnnHlint.hs" "haskell" + testNoHlintDiagnostics doc + + , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "apply-refact preserve regular comments" $ runHlintSession "" $ do + testRefactor "Comments.hs" "Redundant bracket" expectedComments + + , knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testCase "[#2290] apply all hints works with a trailing comment" $ runHlintSession "" $ do + testRefactor "TwoHintsAndComment.hs" "Apply all hints" expectedComments2 + + , testCase "applyAll is shown only when there is at least one diagnostic in range" $ runHlintSession "" $ do + doc <- openDoc "TwoHints.hs" "haskell" + _ <- hlintCaptureKick + + firstLine <- map fromAction <$> getCodeActions doc (mkRange 0 0 0 0) + secondLine <- map fromAction <$> getCodeActions doc (mkRange 1 0 1 0) + thirdLine <- map fromAction <$> getCodeActions doc (mkRange 2 0 2 0) + multiLine <- map fromAction <$> getCodeActions doc (mkRange 0 0 2 0) + + let hasApplyAll = isJust . find (\ca -> "Apply all hints" `T.isSuffixOf` (ca ^. L.title)) + + liftIO $ hasApplyAll firstLine @? "Missing apply all code action" + liftIO $ hasApplyAll secondLine @? "Missing apply all code action" + liftIO $ not (hasApplyAll thirdLine) @? "Unexpected apply all code action" + liftIO $ hasApplyAll multiLine @? 
"Missing apply all code action" + + , testCase "hlint should warn about unused extensions" $ runHlintSession "unusedext" $ do + _ <- openDoc "UnusedExtension.hs" "haskell" + diags@(unusedExt:_) <- hlintCaptureKick + + liftIO $ do + length diags @?= 1 + unusedExt ^. L.code @?= Just (InR "refact:Unused LANGUAGE pragma") + + , testCase "[#1279] hlint should not activate extensions like PatternSynonyms" $ runHlintSession "" $ do + doc <- openDoc "PatternKeyword.hs" "haskell" + -- hlint will report a parse error if PatternSynonyms is enabled + testNoHlintDiagnostics doc + , testCase "hlint should not warn about redundant irrefutable pattern with LANGUAGE Strict" $ runHlintSession "" $ do + doc <- openDoc "StrictData.hs" "haskell" + testNoHlintDiagnostics doc + ] + where + testRefactor file caTitle expected = do + doc <- openDoc file "haskell" + testHlintDiagnostics doc + + cas <- map fromAction <$> getAllCodeActions doc + let ca = find (\ca -> caTitle `T.isInfixOf` (ca ^. L.title)) cas + liftIO $ isJust ca @? ("There is '" ++ T.unpack caTitle ++"' code action") + + executeCodeAction (fromJust ca) + + contents <- skipManyTill anyMessage $ getDocumentEdit doc + liftIO $ contents @?= T.unlines expected + + expectedLambdaCase = [ "module LambdaCase where", "" + , "f = \\case \"true\" -> True" + , " _ -> False" + ] + expectedCPP = [ "module CppCond where", "" + , "#ifdef FLAG" + , "f = 1" + , "#else" + , "g = 2" + , "#endif", "" + ] + expectedComments = case ghcVersion of + GHC912 -> [ "-- comment before header" + , "module Comments where", "" + , "{-# standalone annotation #-}", "" + , "-- standalone comment", "" + , "-- | haddock comment" + , "f = {- inline comment -}{- inline comment inside refactored code -} 1 -- ending comment", "" + , "-- final comment" + ] + + _ -> [ "-- comment before header" + , "module Comments where", "" + , "{-# standalone annotation #-}", "" + , "-- standalone comment", "" + , "-- | haddock comment" + , "f = {- inline comment -} {- inline comment inside refactored code -}1 -- ending comment", "" + , "-- final comment" + ] + expectedComments2 = [ "module TwoHintsAndComment where" + , "biggest = foldr1 max -- the line above will show two hlint hints, \"eta reduce\" and \"use maximum\"" + ] + expectedTypeApp = [ "module TypeApplication where", "" + , "a = id @Int 1" + ] + + +configTests :: TestTree +configTests = testGroup "hlint plugin config" [ + + testCase "changing hlint plugin configuration enables or disables hlint diagnostics" $ runHlintSession "" $ do + setIgnoringConfigurationRequests False + enableHlint + + doc <- openDoc "Base.hs" "haskell" + testHlintDiagnostics doc + + disableHlint + + testNoHlintDiagnostics doc + + , testCase "adding hlint flags to plugin configuration removes hlint diagnostics" $ runHlintSession "" $ do + setIgnoringConfigurationRequests False + enableHlint + + doc <- openDoc "Base.hs" "haskell" + testHlintDiagnostics doc + + let config' = hlintConfigWithFlags ["--ignore=Redundant id", "--hint=test-hlint-config.yaml"] + setHlsConfig config' + + testNoHlintDiagnostics doc + + , testCase "adding hlint flags to plugin configuration adds hlint diagnostics" $ runHlintSession "" $ do + setIgnoringConfigurationRequests False + enableHlint + + doc <- openDoc "Generalise.hs" "haskell" + + testNoHlintDiagnostics doc + + let config' = hlintConfigWithFlags ["--with-group=generalise"] + setHlsConfig config' + + diags' <- hlintCaptureKick + d <- liftIO $ inspectDiagnostic diags' ["Use <>"] + + liftIO $ do + length diags' @?= 1 + d ^. 
L.range @?= Range (Position 1 10) (Position 1 21) + d ^. L.severity @?= Just DiagnosticSeverity_Information + ] + +testDir :: FilePath +testDir = "plugins/hls-hlint-plugin/test/testdata" + +runHlintSession :: FilePath -> Session a -> IO a +runHlintSession subdir = failIfSessionTimeout . + runSessionWithTestConfig def + { testConfigCaps = codeActionNoResolveCaps + , testShiftRoot = True + , testDirLocation = Left (testDir subdir) + , testPluginDescriptor = hlintPlugin + } + . const + +hlintKickDone :: Session () +hlintKickDone = kick (Proxy @"kick/done/hlint") >>= guard . not . null + +hlintKickStart :: Session () +hlintKickStart = kick (Proxy @"kick/start/hlint") >>= guard . not . null + +hlintCaptureKick :: Session [Diagnostic] +hlintCaptureKick = captureKickDiagnostics hlintKickStart hlintKickDone + +noHlintDiagnostics :: HasCallStack => [Diagnostic] -> Assertion +noHlintDiagnostics diags = + all (not . isHlintDiagnostic) diags @? "There are hlint diagnostics" + +isHlintDiagnostic :: Diagnostic -> Bool +isHlintDiagnostic diag = + Just "hlint" == diag ^. L.source + +testHlintDiagnostics :: HasCallStack => TextDocumentIdentifier -> Session () +testHlintDiagnostics doc = do + diags <- captureKickNonEmptyDiagnostics doc + liftIO $ length diags > 0 @? "There are no hlint diagnostics" + +captureKickNonEmptyDiagnostics :: HasCallStack => TextDocumentIdentifier -> Session [Diagnostic] +captureKickNonEmptyDiagnostics doc = do + diags <- hlintCaptureKick + if null diags + then captureKickNonEmptyDiagnostics doc + else pure diags + +testNoHlintDiagnostics :: HasCallStack => TextDocumentIdentifier -> Session () +testNoHlintDiagnostics _doc = do + diags <- hlintCaptureKick + liftIO $ noHlintDiagnostics diags + +hlintConfigWithFlags :: [T.Text] -> Config +hlintConfigWithFlags flags = + def + { Plugin.plugins = Map.fromList [("hlint", + def { Plugin.plcGlobalOn = True, Plugin.plcConfig = unObject $ object ["flags" .= flags] } + )] } + where + unObject (Object obj) = obj + unObject _ = undefined + +enableHlint :: Session () +enableHlint = setHlsConfig $ def { Plugin.plugins = Map.fromList [ ("hlint", def { Plugin.plcGlobalOn = True }) ] } + +disableHlint :: Session () +disableHlint = setHlsConfig $ def { Plugin.plugins = Map.fromList [ ("hlint", def { Plugin.plcGlobalOn = False }) ] } + +-- We have two main code paths in the plugin depending on how hlint interacts with ghc: +-- * One when hlint uses ghc-lib (all ghc versions but the last version supported by hlint) +-- * Another one when hlint uses directly ghc (only one version, which not have to be the last version supported by ghcide) +-- As we always are using ghc through ghcide the code to get the ghc parsed AST differs +-- So the issues and bugs usually only affects to one code path or the other. +-- Although a given hlint version supports one direct ghc, we could use several versions of hlint +-- each one supporting a different ghc version. It should be a temporary situation though. +knownBrokenForHlintOnGhcLib :: String -> TestTree -> TestTree +knownBrokenForHlintOnGhcLib = ignoreTestBecause + +-- 1's based +data Point = Point { + line :: !Int, + column :: !Int +} + +pointToRange :: Point -> Range +pointToRange Point {..} + | line <- fromIntegral $ subtract 1 line + , column <- fromIntegral $ subtract 1 column = + Range (Position line column) (Position line $ column + 1) + +getCodeActionTitle :: (Command |? 
CodeAction) -> Maybe T.Text +getCodeActionTitle commandOrCodeAction + | InR CodeAction {_title} <- commandOrCodeAction = Just _title + | otherwise = Nothing + +makeCodeActionNotFoundAtString :: Point -> String +makeCodeActionNotFoundAtString Point {..} = + "CodeAction not found at line: " <> show line <> ", column: " <> show column + +-- ------------------------------------------------------------------------ +-- Test runner helpers +-- ------------------------------------------------------------------------ + +ignoreHintGoldenTest :: TestName -> FilePath -> Point -> T.Text -> TestTree +ignoreHintGoldenTest testCaseName goldenFilename point hintName = + goldenTest testCaseName goldenFilename point (getIgnoreHintText hintName) + +applyHintGoldenTest :: TestName -> FilePath -> Point -> T.Text -> TestTree +applyHintGoldenTest testCaseName goldenFilename point hintName = do + goldenTest testCaseName goldenFilename point (getApplyHintText hintName) + +goldenTest :: TestName -> FilePath -> Point -> T.Text -> TestTree +goldenTest testCaseName goldenFilename point hintText = + setupGoldenHlintTest testCaseName goldenFilename codeActionNoResolveCaps $ \document -> do + _ <- hlintCaptureKick + actions <- getCodeActions document $ pointToRange point + case find ((== Just hintText) . getCodeActionTitle) actions of + Just (InR codeAction) -> do + executeCodeAction codeAction + when (isJust (codeAction ^. L.command)) $ + void $ skipManyTill anyMessage $ getDocumentEdit document + _ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point + + +setupGoldenHlintTest :: TestName -> FilePath -> ClientCapabilities -> (TextDocumentIdentifier -> Session ()) -> TestTree +setupGoldenHlintTest testName path config = + goldenWithTestConfig def + { testConfigCaps = config + , testShiftRoot = True + , testPluginDescriptor = hlintPlugin + , testDirLocation = Right tree + } testName tree path "expected" "hs" + where tree = mkVirtualFileTree testDir (directProject (path <.> "hs")) + +ignoreHintGoldenResolveTest :: TestName -> FilePath -> Point -> T.Text -> TestTree +ignoreHintGoldenResolveTest testCaseName goldenFilename point hintName = + goldenResolveTest testCaseName goldenFilename point (getIgnoreHintText hintName) + +applyHintGoldenResolveTest :: TestName -> FilePath -> Point -> T.Text -> TestTree +applyHintGoldenResolveTest testCaseName goldenFilename point hintName = do + goldenResolveTest testCaseName goldenFilename point (getApplyHintText hintName) + +goldenResolveTest :: TestName -> FilePath -> Point -> T.Text -> TestTree +goldenResolveTest testCaseName goldenFilename point hintText = + setupGoldenHlintTest testCaseName goldenFilename codeActionResolveCaps $ \document -> do + _ <- hlintCaptureKick + actions <- getAndResolveCodeActions document $ pointToRange point + case find ((== Just hintText) . 
getCodeActionTitle) actions of + Just (InR codeAction) -> executeCodeAction codeAction + _ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point diff --git a/test/testdata/ApplyRefact2.hs b/plugins/hls-hlint-plugin/test/testdata/Base.hs similarity index 100% rename from test/testdata/ApplyRefact2.hs rename to plugins/hls-hlint-plugin/test/testdata/Base.hs diff --git a/plugins/hls-hlint-plugin/test/testdata/Comments.hs b/plugins/hls-hlint-plugin/test/testdata/Comments.hs new file mode 100644 index 0000000000..849ebecf95 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/Comments.hs @@ -0,0 +1,11 @@ +-- comment before header +module Comments where + +{-# standalone annotation #-} + +-- standalone comment + +-- | haddock comment +f = {- inline comment -} ({- inline comment inside refactored code -}1) -- ending comment + +-- final comment diff --git a/plugins/hls-hlint-plugin/test/testdata/CppCond.hs b/plugins/hls-hlint-plugin/test/testdata/CppCond.hs new file mode 100644 index 0000000000..6ba9eadba3 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/CppCond.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE CPP #-} +module CppCond where + +#ifdef FLAG +f = (1) +#else +g = 2 +#endif diff --git a/plugins/hls-hlint-plugin/test/testdata/Generalise.hs b/plugins/hls-hlint-plugin/test/testdata/Generalise.hs new file mode 100644 index 0000000000..eefcc77013 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/Generalise.hs @@ -0,0 +1,2 @@ +main = undefined +foo x y = [x, x] ++ y diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreAnn.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreAnn.hs new file mode 100644 index 0000000000..402e0bb090 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreAnn.hs @@ -0,0 +1,5 @@ +module IgnoreAnn where + +{-# ANN module "HLint: ignore Redundant bracket" #-} +f = (1) + diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreAnnHlint.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreAnnHlint.hs new file mode 100644 index 0000000000..27cd7f4851 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreAnnHlint.hs @@ -0,0 +1,7 @@ +module IgnoreHlintAnn where + +{- HLINT ignore "Redundant bracket" -} +f = (1) + +{-# HLINT ignore "Use camelCase" #-} +camel_case = undefined diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs new file mode 100644 index 0000000000..b3ae28995e --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs @@ -0,0 +1,3 @@ +{- HLINT ignore "Eta reduce" -} +module IgnoreHintAction where +foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs new file mode 100644 index 0000000000..7fb147a40f --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs @@ -0,0 +1,2 @@ +module IgnoreHintAction where +foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/LambdaCase.hs b/plugins/hls-hlint-plugin/test/testdata/LambdaCase.hs new file mode 100644 index 0000000000..b0f36a258c --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/LambdaCase.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE LambdaCase #-} +module LambdaCase where + +f = \case "true" -> (True) + _ -> False diff --git a/plugins/hls-hlint-plugin/test/testdata/PatternKeyword.hs b/plugins/hls-hlint-plugin/test/testdata/PatternKeyword.hs new file mode 100644 index 0000000000..21f430e104 --- /dev/null +++ 
b/plugins/hls-hlint-plugin/test/testdata/PatternKeyword.hs @@ -0,0 +1,3 @@ +module Foo (pattern) where + +pattern = 42 diff --git a/plugins/hls-hlint-plugin/test/testdata/RightToLeftFixities.expected.hs b/plugins/hls-hlint-plugin/test/testdata/RightToLeftFixities.expected.hs new file mode 100644 index 0000000000..32483bef6f --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/RightToLeftFixities.expected.hs @@ -0,0 +1,6 @@ +module RightToLeftFixities where +import Data.List (sortOn) +import Control.Arrow ((&&&)) +import Data.Ord (Down(Down)) +functionB :: [String] -> [(Char,Int)] +functionB = sortOn (Down . snd) . map (head &&& length) . id diff --git a/plugins/hls-hlint-plugin/test/testdata/RightToLeftFixities.hs b/plugins/hls-hlint-plugin/test/testdata/RightToLeftFixities.hs new file mode 100644 index 0000000000..a9b5d141b3 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/RightToLeftFixities.hs @@ -0,0 +1,6 @@ +module RightToLeftFixities where +import Data.List (sortOn) +import Control.Arrow ((&&&)) +import Data.Ord (Down(Down)) +functionB :: [String] -> [(Char,Int)] +functionB = reverse . sortOn snd . map (head &&& length) . id diff --git a/plugins/hls-hlint-plugin/test/testdata/StrictData.hs b/plugins/hls-hlint-plugin/test/testdata/StrictData.hs new file mode 100644 index 0000000000..0865b611e3 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/StrictData.hs @@ -0,0 +1,2 @@ +{-# LANGUAGE Strict #-} +f ~x = x diff --git a/plugins/hls-hlint-plugin/test/testdata/TwoHints.hs b/plugins/hls-hlint-plugin/test/testdata/TwoHints.hs new file mode 100644 index 0000000000..64c57da7b0 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/TwoHints.hs @@ -0,0 +1,2 @@ +f = (1) +g = (1) diff --git a/plugins/hls-hlint-plugin/test/testdata/TwoHintsAndComment.hs b/plugins/hls-hlint-plugin/test/testdata/TwoHintsAndComment.hs new file mode 100644 index 0000000000..f017bd0762 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/TwoHintsAndComment.hs @@ -0,0 +1,2 @@ +module TwoHintsAndComment where +biggest items = foldr1 max items -- the line above will show two hlint hints, "eta reduce" and "use maximum" diff --git a/plugins/hls-hlint-plugin/test/testdata/cpp/CppCond.hs b/plugins/hls-hlint-plugin/test/testdata/cpp/CppCond.hs new file mode 100644 index 0000000000..cb2cc70c60 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/cpp/CppCond.hs @@ -0,0 +1,7 @@ +module ApplyRefact3 where + +#ifdef FLAG +f = (1) +#else +g = 2 +#endif diff --git a/plugins/hls-hlint-plugin/test/testdata/cpp/CppHeader.hs b/plugins/hls-hlint-plugin/test/testdata/cpp/CppHeader.hs new file mode 100644 index 0000000000..b4c3f9d69f --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/cpp/CppHeader.hs @@ -0,0 +1,9 @@ +module CppHeader where + +#include "test.h" + +#ifdef TEST +f = (1) +#else +f = 1 +#endif diff --git a/plugins/hls-hlint-plugin/test/testdata/cpp/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/cpp/hie.yaml new file mode 100644 index 0000000000..845a503179 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/cpp/hie.yaml @@ -0,0 +1,7 @@ +cradle: + direct: + arguments: + - "-XCPP" + - "-DFLAG" + - "CppCond" + - "CppHeader" diff --git a/plugins/hls-hlint-plugin/test/testdata/cpp/test.h b/plugins/hls-hlint-plugin/test/testdata/cpp/test.h new file mode 100644 index 0000000000..9fca16e358 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/cpp/test.h @@ -0,0 +1 @@ +#define TEST diff --git a/plugins/hls-hlint-plugin/test/testdata/fixity/.hlint.yaml 
b/plugins/hls-hlint-plugin/test/testdata/fixity/.hlint.yaml new file mode 100644 index 0000000000..5d90b0d9eb --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/fixity/.hlint.yaml @@ -0,0 +1 @@ +- fixity: "infixl 3 " diff --git a/plugins/hls-hlint-plugin/test/testdata/fixity/FixityDef.hs b/plugins/hls-hlint-plugin/test/testdata/fixity/FixityDef.hs new file mode 100644 index 0000000000..729f8c6e84 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/fixity/FixityDef.hs @@ -0,0 +1,5 @@ +module FixityDef where + +infixl 3 +() :: Maybe a -> Maybe (Maybe b) -> Maybe String +() a b = Nothing diff --git a/plugins/hls-hlint-plugin/test/testdata/fixity/FixityUse.hs b/plugins/hls-hlint-plugin/test/testdata/fixity/FixityUse.hs new file mode 100644 index 0000000000..646240dab5 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/fixity/FixityUse.hs @@ -0,0 +1,6 @@ +module FixityUse where + +import FixityDef + +foo :: Char -> Maybe Int -> Maybe String +foo c mInt = show <$> mInt pure <$> Just c diff --git a/plugins/hls-hlint-plugin/test/testdata/fixity/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/fixity/hie.yaml new file mode 100644 index 0000000000..81e7f9be75 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/fixity/hie.yaml @@ -0,0 +1,6 @@ +cradle: + direct: + arguments: + - "FixityDef" + - "FixityUse" + diff --git a/plugins/hls-hlint-plugin/test/testdata/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..28e1a7bbc1 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/hie.yaml @@ -0,0 +1,16 @@ +cradle: + direct: + arguments: + - "-DFLAG" + - "-Wno-unrecognised-pragmas" + - "Base" + - "Comments" + - "CppCond" + - "Generalise" + - "IgnoreAnn" + - "IgnoreAnnHlint" + - "LambdaCase" + - "TwoHints" + - "PatternKeyword" + - "StrictData" + - "TwoHintsAndComment" diff --git a/plugins/hls-hlint-plugin/test/testdata/ignore/.hlint.yaml b/plugins/hls-hlint-plugin/test/testdata/ignore/.hlint.yaml new file mode 100644 index 0000000000..f76f860aa9 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/ignore/.hlint.yaml @@ -0,0 +1,2 @@ +- ignore: { name: "Redundant bracket" } +- ignore: { name: "Use camelCase" } diff --git a/plugins/hls-hlint-plugin/test/testdata/ignore/CamelCase.hs b/plugins/hls-hlint-plugin/test/testdata/ignore/CamelCase.hs new file mode 100644 index 0000000000..29cc08f961 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/ignore/CamelCase.hs @@ -0,0 +1,5 @@ +module CamelCase where + +f = (1) + +camel_case = undefined diff --git a/plugins/hls-hlint-plugin/test/testdata/ignore/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/ignore/hie.yaml new file mode 100644 index 0000000000..f792a84060 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/ignore/hie.yaml @@ -0,0 +1,4 @@ +cradle: + direct: + arguments: + - "CamelCase" diff --git a/plugins/hls-hlint-plugin/test/testdata/lambdacase/LambdaCase.hs b/plugins/hls-hlint-plugin/test/testdata/lambdacase/LambdaCase.hs new file mode 100644 index 0000000000..555c764162 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/lambdacase/LambdaCase.hs @@ -0,0 +1,4 @@ +module LambdaCase where + +f = \case "true" -> (True) + _ -> False diff --git a/plugins/hls-hlint-plugin/test/testdata/lambdacase/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/lambdacase/hie.yaml new file mode 100644 index 0000000000..c9c69a13b5 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/lambdacase/hie.yaml @@ -0,0 +1,5 @@ +cradle: + direct: + arguments: + - 
"-XLambdaCase" + - "LambdaCase" diff --git a/plugins/hls-hlint-plugin/test/testdata/test-hlint-config.yaml b/plugins/hls-hlint-plugin/test/testdata/test-hlint-config.yaml new file mode 100644 index 0000000000..23b72d5fad --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/test-hlint-config.yaml @@ -0,0 +1 @@ +- ignore: { name: Eta reduce } diff --git a/plugins/hls-hlint-plugin/test/testdata/typeapps/TypeApplication.hs b/plugins/hls-hlint-plugin/test/testdata/typeapps/TypeApplication.hs new file mode 100644 index 0000000000..5fecd5cf6e --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/typeapps/TypeApplication.hs @@ -0,0 +1,3 @@ +module TypeApplication where + +a = (id @Int 1) diff --git a/plugins/hls-hlint-plugin/test/testdata/typeapps/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/typeapps/hie.yaml new file mode 100644 index 0000000000..17788bcb70 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/typeapps/hie.yaml @@ -0,0 +1,5 @@ +cradle: + direct: + arguments: + - "-XTypeApplications" + - "TypeApplications" diff --git a/plugins/hls-hlint-plugin/test/testdata/unusedext/UnusedExtension.hs b/plugins/hls-hlint-plugin/test/testdata/unusedext/UnusedExtension.hs new file mode 100644 index 0000000000..f652e62f05 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/unusedext/UnusedExtension.hs @@ -0,0 +1 @@ +{-# LANGUAGE BangPatterns #-} diff --git a/plugins/hls-hlint-plugin/test/testdata/unusedext/hie.yaml b/plugins/hls-hlint-plugin/test/testdata/unusedext/hie.yaml new file mode 100644 index 0000000000..5d6dfc5138 --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/unusedext/hie.yaml @@ -0,0 +1,4 @@ +cradle: + direct: + arguments: + - "UnusedExtension" diff --git a/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs b/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs new file mode 100644 index 0000000000..5dc053f47d --- /dev/null +++ b/plugins/hls-module-name-plugin/src/Ide/Plugin/ModuleName.hs @@ -0,0 +1,196 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} + +{- | Keep the module name in sync with its file path. 
+ +Provide CodeLenses to: +* Add a module header ("module /moduleName/ where") to empty Haskell files +* Fix the module name if incorrect +-} +module Ide.Plugin.ModuleName ( + descriptor, + Log, +) where + +import Control.Monad (forM_, void) +import Control.Monad.IO.Class (liftIO) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except +import Control.Monad.Trans.Maybe +import Data.Aeson (toJSON) +import Data.Char (isLower, isUpper) +import Data.List (intercalate, minimumBy, + stripPrefix) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as Map +import Data.Maybe (mapMaybe) +import Data.Ord (comparing) +import Data.String (IsString) +import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE (GetParsedModule (GetParsedModule), + GhcSession (GhcSession), + IdeState, Pretty, + Priority (Debug), + Recorder, WithPriority, + colon, evalGhcEnv, + hscEnv, logWith, + realSrcSpanToRange, + rootDir, runAction, + useWithStale, (<+>)) +import Development.IDE.Core.FileStore (getFileContents) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (toCurrentRange) +import Development.IDE.GHC.Compat (GenLocated (L), + getSessionDynFlags, + hsmodName, importPaths, + locA, moduleNameString, + pattern RealSrcSpan, + pm_parsed_source, unLoc) +import Ide.Logger (Pretty (..)) +import Ide.Plugin.Error +import Ide.PluginUtils (toAbsolute) +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types +import System.FilePath (dropExtension, normalise, + pathSeparator, + splitDirectories, + takeFileName) + +-- |Plugin descriptor +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides a code action to alter the module name if it is wrong") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeLens (codeLens recorder) + , pluginCommands = [PluginCommand updateModuleNameCommand "set name of module to match with file path" (command recorder)] + } + +updateModuleNameCommand :: IsString p => p +updateModuleNameCommand = "updateModuleName" + +-- | Generate code lenses +codeLens :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'Method_TextDocumentCodeLens +codeLens recorder state pluginId CodeLensParams{_textDocument=TextDocumentIdentifier uri} = do + res <- action recorder state uri + pure $ InL (asCodeLens <$> res) + where + asCodeLens :: Action -> CodeLens + asCodeLens Replace{..} = CodeLens aRange (Just cmd) Nothing + where + cmd = mkLspCommand pluginId updateModuleNameCommand aTitle (Just [toJSON aUri]) + +-- | (Quasi) Idempotent command execution: recalculate action to execute on command request +command :: Recorder (WithPriority Log) -> CommandFunction IdeState Uri +command recorder state _ uri = do + actMaybe <- action recorder state uri + forM_ actMaybe $ \Replace{..} -> + let + -- | Convert an Action to the corresponding edit operation + edit = WorkspaceEdit (Just $ Map.singleton aUri [TextEdit aRange aCode]) Nothing Nothing + in + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (const (pure ())) + pure $ InR Null + +-- | A source code change +data Action = Replace + { aUri :: Uri + , aRange :: Range + , aTitle :: T.Text + , aCode :: T.Text + } + deriving (Show) + +-- | Required action (that can be converted to either CodeLenses or CodeActions) +action :: Recorder (WithPriority Log) -> IdeState -> 
Uri -> ExceptT PluginError (HandlerM c) [Action] +action recorder state uri = do + nfp <- getNormalizedFilePathE uri + fp <- uriToFilePathE uri + + contents <- liftIO $ runAction "ModuleName.getFileContents" state $ getFileContents nfp + let emptyModule = maybe True (T.null . T.strip . Rope.toText) contents + + correctNames <- mapExceptT liftIO $ pathModuleNames recorder state nfp fp + logWith recorder Debug (CorrectNames correctNames) + let bestName = minimumBy (comparing T.length) <$> NE.nonEmpty correctNames + logWith recorder Debug (BestName bestName) + + statedNameMaybe <- liftIO $ codeModuleName state nfp + logWith recorder Debug (ModuleName $ snd <$> statedNameMaybe) + case (bestName, statedNameMaybe) of + (Just bestName, Just (nameRange, statedName)) + | statedName `notElem` correctNames -> + pure [Replace uri nameRange ("Set module name to " <> bestName) bestName] + (Just bestName, Nothing) + | emptyModule -> + let code = "module " <> bestName <> " where\n" + in pure [Replace uri (Range (Position 0 0) (Position 0 0)) code code] + _ -> pure [] + +-- | Possible module names, as derived by the position of the module in the +-- source directories. There may be more than one possible name, if the source +-- directories are nested inside each other. +pathModuleNames :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FilePath -> ExceptT PluginError IO [T.Text] +pathModuleNames recorder state normFilePath filePath + | firstLetter isLower $ takeFileName filePath = return ["Main"] + | otherwise = do + (session, _) <- runActionE "ModuleName.ghcSession" state $ useWithStaleE GhcSession normFilePath + srcPaths <- liftIO $ evalGhcEnv (hscEnv session) $ importPaths <$> getSessionDynFlags + logWith recorder Debug (SrcPaths srcPaths) + + -- Append a `pathSeparator` to make the path looks like a directory, + -- and then we can drop it uniformly. + -- See https://github.com/haskell/haskell-language-server/pull/3092 for details. + let paths = map (normalise . (<> pure pathSeparator)) srcPaths + logWith recorder Debug (NormalisedPaths paths) + + -- TODO, this can be avoid if the filePath is already absolute, + -- we can avoid the toAbsolute call in the future. + -- see Note [Root Directory] + let mdlPath = (toAbsolute $ rootDir state) filePath + logWith recorder Debug (AbsoluteFilePath mdlPath) + + let suffixes = mapMaybe (`stripPrefix` mdlPath) paths + pure (map moduleNameFrom suffixes) + where + firstLetter :: (Char -> Bool) -> FilePath -> Bool + firstLetter _ [] = False + firstLetter pred (c:_) = pred c + + moduleNameFrom = + T.pack + . intercalate "." + -- Do not suggest names whose components start from a lower-case char, + -- they are guaranteed to be malformed. + . filter (firstLetter isUpper) + . splitDirectories + . dropExtension + +-- | The module name, as stated in the module +codeModuleName :: IdeState -> NormalizedFilePath -> IO (Maybe (Range, T.Text)) +codeModuleName state nfp = runMaybeT $ do + (pm, mp) <- MaybeT . runAction "ModuleName.GetParsedModule" state $ useWithStale GetParsedModule nfp + L (locA -> (RealSrcSpan l _)) m <- MaybeT . pure . hsmodName . unLoc $ pm_parsed_source pm + range <- MaybeT . pure $ toCurrentRange mp (realSrcSpanToRange l) + pure (range, T.pack $ moduleNameString m) + +data Log = + CorrectNames [T.Text] + | BestName (Maybe T.Text) + | ModuleName (Maybe T.Text) + | SrcPaths [FilePath] + | NormalisedPaths [FilePath] + | AbsoluteFilePath FilePath + deriving Show + +instance Pretty Log where + pretty log = "ModuleName." 
<> case log of + CorrectNames log -> "CorrectNames" <> colon <+> pretty log + BestName log -> "BestName" <> colon <+> pretty log + ModuleName log -> "StatedNameMaybe" <> colon <+> pretty log + SrcPaths log -> "SrcPaths" <> colon <+> pretty log + NormalisedPaths log -> "NormalisedPaths" <> colon <+> pretty log + AbsoluteFilePath log -> "AbsoluteFilePath" <> colon <+> pretty log diff --git a/plugins/hls-module-name-plugin/test/Main.hs b/plugins/hls-module-name-plugin/test/Main.hs new file mode 100644 index 0000000000..ba1ed756e5 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/Main.hs @@ -0,0 +1,66 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import Control.Monad (void) +import qualified Ide.Plugin.ModuleName as ModuleName +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +moduleNamePlugin :: PluginTestDescriptor ModuleName.Log +moduleNamePlugin = mkPluginTestDescriptor ModuleName.descriptor "moduleName" + +tests :: TestTree +tests = + testGroup "moduleName" + [ goldenWithModuleName "Add module header to empty module" "TEmptyModule" $ \doc -> do + [CodeLens { _command = Just c }] <- getCodeLenses doc + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + + , goldenWithModuleName "Fix wrong module name" "TWrongModuleName" $ \doc -> do + [CodeLens { _command = Just c }] <- getCodeLenses doc + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + + , goldenWithModuleName "Must infer module name as Main, if the file name starts with a lowercase" "mainlike" $ \doc -> do + [CodeLens { _command = Just c }] <- getCodeLenses doc + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + + , goldenWithModuleName "Fix wrong module name in nested directory" "subdir/TWrongModuleName" $ \doc -> do + [CodeLens { _command = Just c }] <- getCodeLenses doc + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + , testCase "Should not show code lens if the module name is correct" $ + runSessionWithServer def moduleNamePlugin testDataDir $ do + doc <- openDoc "CorrectName.hs" "haskell" + lenses <- getCodeLenses doc + liftIO $ lenses @?= [] + closeDoc doc + -- https://github.com/haskell/haskell-language-server/issues/3047 + , goldenWithModuleName "Fix#3047" "canonicalize/Lib/A" $ \doc -> do + [CodeLens { _command = Just c }] <- getCodeLenses doc + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + , testCase "Keep stale lens even if parse failed" $ do + runSessionWithServer def moduleNamePlugin testDataDir $ do + doc <- openDoc "Stale.hs" "haskell" + oldLens <- getCodeLenses doc + let edit = TextEdit (mkRange 1 0 1 0) "f =" + _ <- applyEdit doc edit + newLens <- getCodeLenses doc + liftIO $ newLens @?= oldLens + closeDoc doc + ] + +goldenWithModuleName :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithModuleName title path = goldenWithHaskellDoc def moduleNamePlugin title testDataDir path "expected" "hs" + +testDataDir :: FilePath +testDataDir = "plugins" "hls-module-name-plugin" "test" "testdata" diff --git a/plugins/hls-module-name-plugin/test/testdata/CorrectName.hs b/plugins/hls-module-name-plugin/test/testdata/CorrectName.hs new file mode 100644 index 0000000000..e78f2247a1 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/CorrectName.hs @@ 
-0,0 +1 @@ +module CorrectName where diff --git a/plugins/hls-module-name-plugin/test/testdata/Stale.hs b/plugins/hls-module-name-plugin/test/testdata/Stale.hs new file mode 100644 index 0000000000..efbf93bbde --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/Stale.hs @@ -0,0 +1 @@ +module Foo where diff --git a/plugins/hls-module-name-plugin/test/testdata/TEmptyModule.expected.hs b/plugins/hls-module-name-plugin/test/testdata/TEmptyModule.expected.hs new file mode 100644 index 0000000000..214c20b678 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/TEmptyModule.expected.hs @@ -0,0 +1,3 @@ +module TEmptyModule where + + diff --git a/plugins/hls-module-name-plugin/test/testdata/TEmptyModule.hs b/plugins/hls-module-name-plugin/test/testdata/TEmptyModule.hs new file mode 100644 index 0000000000..139597f9cb --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/TEmptyModule.hs @@ -0,0 +1,2 @@ + + diff --git a/plugins/hls-module-name-plugin/test/testdata/TWrongModuleName.expected.hs b/plugins/hls-module-name-plugin/test/testdata/TWrongModuleName.expected.hs new file mode 100644 index 0000000000..87fb0f5b10 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/TWrongModuleName.expected.hs @@ -0,0 +1,7 @@ +module TWrongModuleName + ( x + ) +where + +x :: Integer +x = 11 diff --git a/plugins/hls-module-name-plugin/test/testdata/TWrongModuleName.hs b/plugins/hls-module-name-plugin/test/testdata/TWrongModuleName.hs new file mode 100644 index 0000000000..ede67750f5 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/TWrongModuleName.hs @@ -0,0 +1,7 @@ +module BadName + ( x + ) +where + +x :: Integer +x = 11 diff --git a/plugins/hls-module-name-plugin/test/testdata/cabal.project b/plugins/hls-module-name-plugin/test/testdata/cabal.project new file mode 100644 index 0000000000..1406cd0907 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/cabal.project @@ -0,0 +1 @@ +packages: ./canonicalize diff --git a/plugins/hls-module-name-plugin/test/testdata/canonicalize/Lib/A.expected.hs b/plugins/hls-module-name-plugin/test/testdata/canonicalize/Lib/A.expected.hs new file mode 100644 index 0000000000..c5877f7100 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/canonicalize/Lib/A.expected.hs @@ -0,0 +1 @@ +module Lib.A where diff --git a/plugins/hls-module-name-plugin/test/testdata/canonicalize/Lib/A.hs b/plugins/hls-module-name-plugin/test/testdata/canonicalize/Lib/A.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-module-name-plugin/test/testdata/canonicalize/canonicalize.cabal b/plugins/hls-module-name-plugin/test/testdata/canonicalize/canonicalize.cabal new file mode 100644 index 0000000000..dc0e099ed3 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/canonicalize/canonicalize.cabal @@ -0,0 +1,7 @@ +cabal-version: 2.4 +name: canonicalize +version: 0.1.0.0 + +library + build-depends: base + hs-source-dirs: ./ diff --git a/plugins/hls-module-name-plugin/test/testdata/hie.yaml b/plugins/hls-module-name-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..fb1c7521c3 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/hie.yaml @@ -0,0 +1,17 @@ +cradle: + multi: + - path: "./" + config: + cradle: + direct: + arguments: + - "-isubdir" + - "TEmptyModule" + - "TWrongModuleName" + - "CorrectName" + - path: "./canonicalize" + config: + cradle: + cabal: + - path: "./" + component: "lib:canonicalize" diff --git 
a/plugins/hls-module-name-plugin/test/testdata/mainlike.expected.hs b/plugins/hls-module-name-plugin/test/testdata/mainlike.expected.hs new file mode 100644 index 0000000000..6ca9a1fce6 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/mainlike.expected.hs @@ -0,0 +1 @@ +module Main where diff --git a/plugins/hls-module-name-plugin/test/testdata/mainlike.hs b/plugins/hls-module-name-plugin/test/testdata/mainlike.hs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plugins/hls-module-name-plugin/test/testdata/subdir/TWrongModuleName.expected.hs b/plugins/hls-module-name-plugin/test/testdata/subdir/TWrongModuleName.expected.hs new file mode 100644 index 0000000000..87fb0f5b10 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/subdir/TWrongModuleName.expected.hs @@ -0,0 +1,7 @@ +module TWrongModuleName + ( x + ) +where + +x :: Integer +x = 11 diff --git a/plugins/hls-module-name-plugin/test/testdata/subdir/TWrongModuleName.hs b/plugins/hls-module-name-plugin/test/testdata/subdir/TWrongModuleName.hs new file mode 100644 index 0000000000..ede67750f5 --- /dev/null +++ b/plugins/hls-module-name-plugin/test/testdata/subdir/TWrongModuleName.hs @@ -0,0 +1,7 @@ +module BadName + ( x + ) +where + +x :: Integer +x = 11 diff --git a/plugins/hls-notes-plugin/README.md b/plugins/hls-notes-plugin/README.md new file mode 100644 index 0000000000..7b05669d46 --- /dev/null +++ b/plugins/hls-notes-plugin/README.md @@ -0,0 +1,32 @@ +# Note plugin + +The [Note convention](https://gitlab.haskell.org/ghc/ghc/-/wikis/commentary/coding-style#2-using-notes) is a nice way to hoist and share big chunks of documentation out of the body of functions. This is done by referencing a long form note from within the function. This plugin extends goto-definition to jump from the reference to the note. + +# Example + +Main.hs +```haskell +module Main where + +main :: IO +main = do + doSomething -- We need this here, see Note [Do Something] in Foo + -- Using at-signs around the note works as well: + -- see @Note [Do Something]@ in Foo +``` + +Foo.hs +```haskell +module Foo where + +doSomething :: IO () +doSomething = undefined + +{- +Note [Do Something] +~~~~~~~~~~~~~~~~~~~ +Some very important explanation +-} +``` + +Using "Go-to-definition" on the Note reference in `Main.hs` will jump to the beginning of the note in `Foo.hs`. 
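Based on the matching logic in the plugin source below, a definition is only picked up when the `~~~` underline sits directly under the `Note [...]` header; matching is case-insensitive, and the single-line `--` comment style works as well. A minimal sketch of what is and is not indexed (the module name `Bar` is made up for illustration):

```haskell
module Bar where

-- Note [Recognised note]
-- ~~~~~~~~~~~~~~~~~~~~~~
-- The underline is directly under the header, so goto-definition can find it.

{- Note [Unrecognised note]

~~~~~~~~~~~~~~~~~~~~~~~~~~~
The blank line between the header and the underline means this one is not indexed.
-}
```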
diff --git a/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs new file mode 100644 index 0000000000..db1696d94b --- /dev/null +++ b/plugins/hls-notes-plugin/src/Ide/Plugin/Notes.hs @@ -0,0 +1,197 @@ +module Ide.Plugin.Notes (descriptor, Log) where + +import Control.Lens ((^.)) +import Control.Monad.Except (ExceptT, MonadError, + throwError) +import Control.Monad.IO.Class (liftIO) +import qualified Data.Array as A +import Data.Foldable (foldl') +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HM +import qualified Data.HashSet as HS +import Data.List (uncons) +import Data.Maybe (catMaybes, listToMaybe, + mapMaybe) +import Data.Text (Text, intercalate) +import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Data.Traversable (for) +import Development.IDE hiding (line) +import Development.IDE.Core.PluginUtils (runActionE, useE) +import Development.IDE.Core.Shake (toKnownFiles) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph.Classes (Hashable, NFData) +import GHC.Generics (Generic) +import Ide.Plugin.Error (PluginError (..)) +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (Method (Method_TextDocumentDefinition, Method_TextDocumentReferences), + SMethod (SMethod_TextDocumentDefinition, SMethod_TextDocumentReferences)) +import Language.LSP.Protocol.Types +import Text.Regex.TDFA (Regex, caseSensitive, + defaultCompOpt, + defaultExecOpt, + makeRegexOpts, matchAllText) + +data Log + = LogShake Shake.Log + | LogNotesFound NormalizedFilePath [(Text, [Position])] + | LogNoteReferencesFound NormalizedFilePath [(Text, [Position])] + deriving Show + +data GetNotesInFile = MkGetNotesInFile + deriving (Show, Generic, Eq, Ord) + deriving anyclass (Hashable, NFData) +-- The GetNotesInFile action scans the source file and extracts a map of note +-- definitions (note name -> position) and a map of note references +-- (note name -> [position]). +type instance RuleResult GetNotesInFile = (HM.HashMap Text Position, HM.HashMap Text [Position]) + +data GetNotes = MkGetNotes + deriving (Show, Generic, Eq, Ord) + deriving anyclass (Hashable, NFData) +-- GetNotes collects all note definition across all files in the +-- project. It returns a map from note name to pair of (filepath, position). +type instance RuleResult GetNotes = HashMap Text (NormalizedFilePath, Position) + +data GetNoteReferences = MkGetNoteReferences + deriving (Show, Generic, Eq, Ord) + deriving anyclass (Hashable, NFData) +-- GetNoteReferences collects all note references across all files in the +-- project. It returns a map from note name to list of (filepath, position). +type instance RuleResult GetNoteReferences = HashMap Text [(NormalizedFilePath, Position)] + +instance Pretty Log where + pretty = \case + LogShake l -> pretty l + LogNoteReferencesFound file refs -> "Found note references in " <> prettyNotes file refs + LogNotesFound file notes -> "Found notes in " <> prettyNotes file notes + where prettyNotes file hm = pretty (show file) <> ": [" + <> pretty (intercalate ", " (fmap (\(s, p) -> "\"" <> s <> "\" at " <> intercalate ", " (map (T.pack . show) p)) hm)) <> "]" + +{- +The first time the user requests a jump-to-definition on a note reference, the +project is indexed and searched for all note definitions. Their location and +title is then saved in the HLS database to be retrieved for all future requests. 
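For illustration, every later request then only consults that cached index; a
simplified sketch of the lookup, mirroring 'jumpToNote' further down in this
module:

    notes <- runActionE "notes.definedNotes" state $ useE MkGetNotes nfp
    (noteFp, pos) <- err "Note definition not found" (HM.lookup note notes)
    pure $ Location (fromNormalizedUri $ normalizedFilePathToUri noteFp) (Range pos pos)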
+-} +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId "Provides goto definition support for GHC-style notes") + { Ide.Types.pluginRules = findNotesRules recorder + , Ide.Types.pluginHandlers = + mkPluginHandler SMethod_TextDocumentDefinition jumpToNote + <> mkPluginHandler SMethod_TextDocumentReferences listReferences + } + +findNotesRules :: Recorder (WithPriority Log) -> Rules () +findNotesRules recorder = do + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \MkGetNotesInFile nfp -> do + findNotesInFile nfp recorder + + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \MkGetNotes _ -> do + targets <- toKnownFiles <$> useNoFile_ GetKnownTargets + definedNotes <- catMaybes <$> mapM (\nfp -> fmap (HM.map (nfp,) . fst) <$> use MkGetNotesInFile nfp) (HS.toList targets) + pure $ Just $ HM.unions definedNotes + + defineNoDiagnostics (cmapWithPrio LogShake recorder) $ \MkGetNoteReferences _ -> do + targets <- toKnownFiles <$> useNoFile_ GetKnownTargets + definedReferences <- catMaybes <$> for (HS.toList targets) (\nfp -> do + references <- fmap snd <$> use MkGetNotesInFile nfp + pure $ fmap (HM.map (fmap (nfp,))) references + ) + pure $ Just $ foldl' (HM.unionWith (<>)) HM.empty definedReferences + +err :: MonadError PluginError m => Text -> Maybe a -> m a +err s = maybe (throwError $ PluginInternalError s) pure + +getNote :: NormalizedFilePath -> IdeState -> Position -> ExceptT PluginError (HandlerM c) (Maybe Text) +getNote nfp state (Position l c) = do + contents <- + err "Error getting file contents" + =<< liftIO (runAction "notes.getfileContents" state (getFileContents nfp)) + line <- err "Line not found in file" (listToMaybe $ Rope.lines $ fst + (Rope.splitAtLine 1 $ snd $ Rope.splitAtLine (fromIntegral l) contents)) + pure $ listToMaybe $ mapMaybe (atPos $ fromIntegral c) $ matchAllText noteRefRegex line + where + atPos c arr = case arr A.! 0 of + -- We check if the line we are currently at contains a note + -- reference. However, we need to know if the cursor is within the + -- match or somewhere else. The second entry of the array contains + -- the title of the note as extracted by the regex. + (_, (c', len)) -> if c' <= c && c <= c' + len + then Just (fst (arr A.! 1)) else Nothing + +listReferences :: PluginMethodHandler IdeState Method_TextDocumentReferences +listReferences state _ param + | Just nfp <- uriToNormalizedFilePath uriOrig + = do + let pos@(Position l _) = param ^. L.position + noteOpt <- getNote nfp state pos + case noteOpt of + Nothing -> pure (InR Null) + Just note -> do + notes <- runActionE "notes.definedNoteReferencess" state $ useE MkGetNoteReferences nfp + poss <- err ("Note reference (a comment of the form `{- Note [" <> note <> "] -}`) not found") (HM.lookup note notes) + pure $ InL (mapMaybe (\(noteFp, pos@(Position l' _)) -> if l' == l then Nothing else Just ( + Location (fromNormalizedUri $ normalizedFilePathToUri noteFp) (Range pos pos))) poss) + where + uriOrig = toNormalizedUri $ param ^. (L.textDocument . L.uri) +listReferences _ _ _ = throwError $ PluginInternalError "conversion to normalized file path failed" + +jumpToNote :: PluginMethodHandler IdeState Method_TextDocumentDefinition +jumpToNote state _ param + | Just nfp <- uriToNormalizedFilePath uriOrig + = do + noteOpt <- getNote nfp state (param ^. 
L.position) + case noteOpt of + Nothing -> pure (InR (InR Null)) + Just note -> do + notes <- runActionE "notes.definedNotes" state $ useE MkGetNotes nfp + (noteFp, pos) <- err ("Note definition (a comment of the form `{- Note [" <> note <> "]\\n~~~ ... -}`) not found") (HM.lookup note notes) + pure $ InL (Definition (InL + (Location (fromNormalizedUri $ normalizedFilePathToUri noteFp) (Range pos pos)) + )) + where + uriOrig = toNormalizedUri $ param ^. (L.textDocument . L.uri) +jumpToNote _ _ _ = throwError $ PluginInternalError "conversion to normalized file path failed" + +findNotesInFile :: NormalizedFilePath -> Recorder (WithPriority Log) -> Action (Maybe (HM.HashMap Text Position, HM.HashMap Text [Position])) +findNotesInFile file recorder = do + -- GetFileContents only returns a value if the file is open in the editor of + -- the user. If not, we need to read it from disk. + contentOpt <- (snd =<<) <$> use GetFileContents file + content <- case contentOpt of + Just x -> pure $ Rope.toText x + Nothing -> liftIO $ readFileUtf8 $ fromNormalizedFilePath file + let noteMatches = (A.! 1) <$> matchAllText noteRegex content + notes = toPositions noteMatches content + logWith recorder Debug $ LogNotesFound file (HM.toList notes) + let refMatches = (A.! 1) <$> matchAllText noteRefRegex content + refs = toPositions refMatches content + logWith recorder Debug $ LogNoteReferencesFound file (HM.toList refs) + pure $ Just (HM.mapMaybe (fmap fst . uncons) notes, refs) + where + uint = fromIntegral . toInteger + -- the regex library returns the character index of the match. However + -- to return the position from HLS we need it as a (line, character) + -- tuple. To convert between the two we count the newline characters and + -- reset the current character index every time. For every regex match, + -- once we have counted up to their character index, we save the current + -- line and character values instead. + toPositions matches = snd . fst . 
T.foldl' (\case + (([], m), _) -> const (([], m), (0, 0, 0)) + ((x@(name, (char, _)):xs, m), (n, nc, c)) -> \char' -> + let !c' = c + 1 + (!n', !nc') = if char' == '\n' then (n + 1, c') else (n, nc) + p@(!_, !_) = if char == c then + (xs, HM.insertWith (<>) name [Position (uint n') (uint (char - nc'))] m) + else (x:xs, m) + in (p, (n', nc', c')) + ) ((matches, HM.empty), (0, 0, 0)) + +noteRefRegex, noteRegex :: Regex +(noteRefRegex, noteRegex) = + ( mkReg ("note \\[(.+)\\]" :: String) + , mkReg ("note \\[([[:print:]]+)\\][[:blank:]]*\r?\n[[:blank:]]*(--)?[[:blank:]]*~~~" :: String) + ) + where + mkReg = makeRegexOpts (defaultCompOpt { caseSensitive = False }) defaultExecOpt diff --git a/plugins/hls-notes-plugin/test/NotesTest.hs b/plugins/hls-notes-plugin/test/NotesTest.hs new file mode 100644 index 0000000000..f84bed9731 --- /dev/null +++ b/plugins/hls-notes-plugin/test/NotesTest.hs @@ -0,0 +1,77 @@ +module Main (main) where + +import Ide.Plugin.Notes (Log, descriptor) +import System.FilePath (()) +import Test.Hls + +plugin :: PluginTestDescriptor Log +plugin = mkPluginTestDescriptor descriptor "notes" + +main :: IO () +main = defaultTestRunner $ + testGroup "Notes" + [ gotoNoteTests + , noteReferenceTests + ] + +runSessionWithServer' :: FilePath -> (FilePath -> Session a) -> IO a +runSessionWithServer' fp act = + runSessionWithTestConfig def + { testLspConfig = def + , testPluginDescriptor = plugin + , testDirLocation = Left fp + } act + +noteReferenceTests :: TestTree +noteReferenceTests = testGroup "Note References" + [ + testCase "multi_file" $ runSessionWithServer' testDataDir $ \dir -> do + doc <- openDoc "NoteDef.hs" "haskell" + waitForKickDone + refs <- getReferences doc (Position 21 15) False + let fp = dir "NoteDef.hs" + liftIO $ refs @?= [ + Location (filePathToUri (dir "Other.hs")) (Range (Position 6 13) (Position 6 13)), + Location (filePathToUri fp) (Range (Position 9 9) (Position 9 9)), + Location (filePathToUri fp) (Range (Position 5 67) (Position 5 67)) + ] + ] + +gotoNoteTests :: TestTree +gotoNoteTests = testGroup "Goto Note Definition" + [ + testCase "single_file" $ runSessionWithServer' testDataDir $ \dir -> do + doc <- openDoc "NoteDef.hs" "haskell" + waitForKickDone + defs <- getDefinitions doc (Position 3 41) + let fp = dir "NoteDef.hs" + liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 11 9) (Position 11 9))])) + , testCase "liberal_format" $ runSessionWithServer' testDataDir $ \dir -> do + doc <- openDoc "NoteDef.hs" "haskell" + waitForKickDone + defs <- getDefinitions doc (Position 5 64) + let fp = dir "NoteDef.hs" + liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 21 11) (Position 21 11))])) + + , testCase "invalid_note" $ runSessionWithServer' testDataDir $ const $ do + doc <- openDoc "NoteDef.hs" "haskell" + waitForKickDone + defs <- getDefinitions doc (Position 6 54) + liftIO $ defs @?= InL (Definition (InR [])) + + , testCase "no_note" $ runSessionWithServer' testDataDir $ const $ do + doc <- openDoc "NoteDef.hs" "haskell" + waitForKickDone + defs <- getDefinitions doc (Position 1 0) + liftIO $ defs @?= InL (Definition (InR [])) + + , testCase "unopened_file" $ runSessionWithServer' testDataDir $ \dir -> do + doc <- openDoc "Other.hs" "haskell" + waitForKickDone + defs <- getDefinitions doc (Position 5 20) + let fp = dir "NoteDef.hs" + liftIO $ defs @?= InL (Definition (InR [Location (filePathToUri fp) (Range (Position 15 6) (Position 15 6))])) + ] + +testDataDir :: FilePath 
+testDataDir = "plugins" "hls-notes-plugin" "test" "testdata" diff --git a/plugins/hls-notes-plugin/test/testdata/NoteDef.hs b/plugins/hls-notes-plugin/test/testdata/NoteDef.hs new file mode 100644 index 0000000000..c4b450ced4 --- /dev/null +++ b/plugins/hls-notes-plugin/test/testdata/NoteDef.hs @@ -0,0 +1,31 @@ +module NoteDef (foo) where + +foo :: Int -> Int +foo _ = 0 -- We always return zero, see Note [Returning zero from foo] + +-- The plugin is more liberal with the note definitions, see Note [Single line comments] +-- It does not work on wrong note definitions, see Note [Not a valid Note] + +-- We can also have multiple references to the same note, see +-- Note [Single line comments] + +{- Note [Returning zero from foo] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +This is a big long form note, with very important info + +Note [Multiple notes in comment] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +This is also a very common thing to do for GHC + +-} + + -- Note [Single line comments] + -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + -- GHC's notes script only allows multiline comments to define notes, but in the + -- HLS codebase this single line style can be found as well. + +{- Note [Not a valid Note] + +~~~~~~~~~~~~ +The underline needs to be directly under the Note header +-} diff --git a/plugins/hls-notes-plugin/test/testdata/Other.hs b/plugins/hls-notes-plugin/test/testdata/Other.hs new file mode 100644 index 0000000000..aa64e19a79 --- /dev/null +++ b/plugins/hls-notes-plugin/test/testdata/Other.hs @@ -0,0 +1,7 @@ +module Other where + +import NoteDef + +bar :: Int +bar = 4 -- See @Note [Multiple notes in comment]@ in NoteDef +-- See Note [Single line comments] diff --git a/plugins/hls-notes-plugin/test/testdata/hie.yaml b/plugins/hls-notes-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..59cc740ee8 --- /dev/null +++ b/plugins/hls-notes-plugin/test/testdata/hie.yaml @@ -0,0 +1,5 @@ +cradle: + direct: + arguments: + - Other + - NoteDef diff --git a/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs b/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs new file mode 100644 index 0000000000..90c5214d8e --- /dev/null +++ b/plugins/hls-ormolu-plugin/src/Ide/Plugin/Ormolu.hs @@ -0,0 +1,200 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +module Ide.Plugin.Ormolu + ( descriptor + , provider + , LogEvent + ) +where + +import Control.Exception (Handler (..), IOException, + SomeException (..), catches, + handle) +import Control.Monad.Except (runExceptT, throwError) +import Control.Monad.Extra +import Control.Monad.Trans +import Control.Monad.Trans.Except (ExceptT (..), mapExceptT) +import Data.Functor ((<&>)) +import Data.List (intercalate) +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.PluginUtils (mkFormattingHandlers) +import Development.IDE.GHC.Compat (hsc_dflags, moduleNameString) +import qualified Development.IDE.GHC.Compat as D +import qualified Development.IDE.GHC.Compat.Util as S +import GHC.LanguageExtensions.Type +import Ide.Plugin.Error (PluginError (PluginInternalError)) +import Ide.Plugin.Properties +import Ide.PluginUtils +import Ide.Types hiding (Config) +import qualified Ide.Types as Types +import Language.LSP.Protocol.Types +import Language.LSP.Server hiding (defaultConfig) +import Ormolu +import System.Exit +import System.FilePath +import System.Process.Run (cwd, 
proc) +import System.Process.Text (readCreateProcessWithExitCode) +import Text.Read (readMaybe) + +-- --------------------------------------------------------------------- + +descriptor :: Recorder (WithPriority LogEvent) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId desc) + { pluginHandlers = mkFormattingHandlers $ provider recorder plId, + pluginConfigDescriptor = defaultConfigDescriptor {configCustomConfig = mkCustomConfig properties} + } + where + desc = "Provides formatting of Haskell files via ormolu. Built with ormolu-" <> VERSION_ormolu + +properties :: Properties '[ 'PropertyKey "external" 'TBoolean] +properties = + emptyProperties + & defineBooleanProperty + #external + "Call out to an external \"ormolu\" executable, rather than using the bundled library" + False + +-- --------------------------------------------------------------------- + +provider :: Recorder (WithPriority LogEvent) -> PluginId -> FormattingHandler IdeState +provider recorder plId ideState token typ contents fp _ = ExceptT $ pluginWithIndefiniteProgress title token Cancellable $ \_updater -> runExceptT $ do + fileOpts <- + maybe [] (fromDyn . hsc_dflags . hscEnv) + <$> liftIO (runAction "Ormolu" ideState $ use GhcSession fp) + useCLI <- liftIO $ runAction "Ormolu" ideState $ usePropertyAction #external plId properties + + if useCLI + then mapExceptT liftIO $ ExceptT + $ handle @IOException + (pure . Left . PluginInternalError . T.pack . show) + $ runExceptT $ cliHandler fileOpts + else do + logWith recorder Debug $ LogCompiledInVersion VERSION_ormolu + + let + fmt :: T.Text -> Config RegionIndices -> IO (Either SomeException T.Text) + fmt cont conf = flip catches handlers $ do +#if MIN_VERSION_ormolu(0,5,3) + cabalInfo <- getCabalInfoForSourceFile fp' <&> \case + CabalNotFound -> Nothing + CabalDidNotMention cabalInfo -> Just cabalInfo + CabalFound cabalInfo -> Just cabalInfo +#if MIN_VERSION_ormolu(0,7,0) + (fixityOverrides, moduleReexports) <- getDotOrmoluForSourceFile fp' + let conf' = refineConfig ModuleSource cabalInfo (Just fixityOverrides) (Just moduleReexports) conf +#else + fixityOverrides <- traverse getFixityOverridesForSourceFile cabalInfo + let conf' = refineConfig ModuleSource cabalInfo fixityOverrides conf +#endif + let cont' = cont +#else + let conf' = conf + cont' = T.unpack cont +#endif + Right <$> ormolu conf' fp' cont' + handlers = + [ Handler $ pure . Left . SomeException @OrmoluException + , Handler $ pure . Left . SomeException @IOException + ] + + res <- liftIO $ fmt contents defaultConfig { cfgDynOptions = map DynOption fileOpts, cfgRegion = region } + ret res + where + fp' = fromNormalizedFilePath fp + + region :: RegionIndices + region = case typ of + FormatText -> + RegionIndices Nothing Nothing + FormatRange (Range (Position sl _) (Position el _)) -> + RegionIndices (Just $ fromIntegral $ sl + 1) (Just $ fromIntegral $ el + 1) + + title = T.pack $ "Formatting " <> takeFileName (fromNormalizedFilePath fp) + + ret :: Either SomeException T.Text -> ExceptT PluginError (HandlerM Types.Config) ([TextEdit] |? Null) + ret (Left err) = throwError $ PluginInternalError . T.pack $ "ormoluCmd: " ++ show err + ret (Right new) = pure $ InL $ makeDiffTextEdit contents new + + fromDyn :: D.DynFlags -> [String] + fromDyn df = + let + pp = + let p = D.sPgm_F $ D.settings df + in ["-pgmF=" <> p | not (null p)] + pm = ("-fplugin=" <>) . 
moduleNameString <$> D.pluginModNames df + ex = showExtension <$> S.toList (D.extensionFlags df) + in pp <> pm <> ex + + cliHandler :: [String] -> ExceptT PluginError IO ([TextEdit] |? Null) + cliHandler fileOpts = do + CLIVersionInfo{noCabal} <- do -- check Ormolu version so that we know which flags to use + (exitCode, out, _err) <- liftIO $ readCreateProcessWithExitCode ( proc "ormolu" ["--version"] ) "" + let version = do + guard $ exitCode == ExitSuccess + "ormolu" : v : _ <- pure $ T.words out + traverse (readMaybe @Int . T.unpack) $ T.splitOn "." v + case version of + Just v -> do + logWith recorder Debug $ LogExternalVersion v + pure CLIVersionInfo + { noCabal = v >= [0, 7] + } + Nothing -> do + logWith recorder Debug $ LogExternalVersion [] + logWith recorder Warning $ NoVersion out + pure CLIVersionInfo + { noCabal = True + } + (exitCode, out, err) <- do -- run Ormolu + let commandArgs = map ("-o" <>) fileOpts + -- "The --stdin-input-file option is necessary when using input from + -- stdin and accounting for .cabal files" as per Ormolu documentation + <> (if noCabal then ["--no-cabal"] else ["--stdin-input-file", fp']) + <> catMaybes + [ ("--start-line=" <>) . show <$> regionStartLine region + , ("--end-line=" <>) . show <$> regionEndLine region + ] + cwd = takeDirectory fp' + logWith recorder Debug $ LogOrmoluCommand commandArgs cwd + liftIO $ readCreateProcessWithExitCode (proc "ormolu" commandArgs) {cwd = Just cwd} contents + case exitCode of + ExitSuccess -> do + when (not $ T.null err) $ logWith recorder Debug $ StdErr err + pure $ InL $ makeDiffTextEdit contents out + ExitFailure n -> do + logWith recorder Info $ StdErr err + throwError $ PluginInternalError $ "Ormolu failed with exit code " <> T.pack (show n) + +newtype CLIVersionInfo = CLIVersionInfo + { noCabal :: Bool + } + +data LogEvent + = NoVersion Text + | StdErr Text + | LogCompiledInVersion String + | LogExternalVersion [Int] + | LogOrmoluCommand [String] FilePath + deriving (Show) + +instance Pretty LogEvent where + pretty = \case + NoVersion t -> "Couldn't get Ormolu version:" <> line <> indent 2 (pretty t) + StdErr t -> "Ormolu stderr:" <> line <> indent 2 (pretty t) + LogCompiledInVersion v -> "Using compiled in ormolu-" <> pretty v + LogExternalVersion v -> + "Using external ormolu" + <> if null v then "" else "-" + <> pretty (intercalate "." 
$ map show v) + LogOrmoluCommand commandArgs cwd -> "Running: `ormolu " <> pretty (unwords commandArgs) <> "` in directory " <> pretty cwd + +showExtension :: Extension -> String +showExtension Cpp = "-XCPP" +showExtension other = "-X" ++ show other diff --git a/plugins/hls-ormolu-plugin/test/Main.hs b/plugins/hls-ormolu-plugin/test/Main.hs new file mode 100644 index 0000000000..05f7a2a115 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/Main.hs @@ -0,0 +1,43 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import Data.Aeson +import qualified Data.Aeson.KeyMap as KM +import Data.Functor +import Ide.Plugin.Config +import qualified Ide.Plugin.Ormolu as Ormolu +import Language.LSP.Protocol.Types +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +ormoluPlugin :: PluginTestDescriptor Ormolu.LogEvent +ormoluPlugin = mkPluginTestDescriptor Ormolu.descriptor "ormolu" + +tests :: TestTree +tests = testGroup "ormolu" $ + [False, True] <&> \cli -> + testGroup (if cli then "cli" else "lib") + [ goldenWithOrmolu cli "formats correctly" "Ormolu" "formatted" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) + , goldenWithOrmolu cli "formats imports correctly" "Ormolu2" "formatted" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) +#if MIN_VERSION_ormolu(0,5,3) + , goldenWithOrmolu cli "formats operators correctly" "Ormolu3" "formatted" $ \doc -> do + formatDoc doc (FormattingOptions 4 True Nothing Nothing Nothing) +#endif + ] + +goldenWithOrmolu :: Bool -> TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithOrmolu cli title path desc = + goldenWithHaskellDocFormatter def ormoluPlugin "ormolu" conf title testDataDir path desc "hs" + where + conf = def{plcConfig = KM.fromList ["external" .= cli]} + +testDataDir :: FilePath +testDataDir = "plugins" "hls-ormolu-plugin" "test" "testdata" diff --git a/plugins/hls-ormolu-plugin/test/testdata/.ormolu b/plugins/hls-ormolu-plugin/test/testdata/.ormolu new file mode 100644 index 0000000000..8710f312a9 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/.ormolu @@ -0,0 +1 @@ +infixl 7 .=? 
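For reference, the external-formatter path above probes `ormolu --version` and only passes `--no-cabal` when the detected version supports it. The following is a minimal standalone sketch of that decision, not part of the diff: it assumes only base and text, and the names parseOrmoluVersion and supportsNoCabal are illustrative.

{-# LANGUAGE OverloadedStrings #-}
module OrmoluVersionSketch where

import Data.Text (Text)
import qualified Data.Text as T
import Text.Read (readMaybe)

-- Parse output such as "ormolu 0.7.2.0" into Just [0,7,2,0];
-- anything unrecognised gives Nothing.
parseOrmoluVersion :: Text -> Maybe [Int]
parseOrmoluVersion out = case T.words out of
  ("ormolu" : v : _) -> traverse (readMaybe . T.unpack) (T.splitOn "." v)
  _                  -> Nothing

-- ormolu >= 0.7 understands --no-cabal; when the version cannot be parsed,
-- default to True, mirroring the fallback in the plugin code above.
supportsNoCabal :: Text -> Bool
supportsNoCabal = maybe True (>= [0, 7]) . parseOrmoluVersion

For example, supportsNoCabal "ormolu 0.7.2.0" evaluates to True, while supportsNoCabal "ormolu 0.5.0.1" is False.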
diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu.expected.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu.expected.hs new file mode 100644 index 0000000000..c986937b74 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu.expected.hs @@ -0,0 +1,16 @@ +module Ormolu where + +import Data.Int +import Data.List +import Prelude + +foo :: Int -> Int +foo 3 = 2 +foo x = x + +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz {a :: Int, b :: String} diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu.formatted.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu.formatted.hs new file mode 100644 index 0000000000..c986937b74 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu.formatted.hs @@ -0,0 +1,16 @@ +module Ormolu where + +import Data.Int +import Data.List +import Prelude + +foo :: Int -> Int +foo 3 = 2 +foo x = x + +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz {a :: Int, b :: String} diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu.hs new file mode 100644 index 0000000000..7a181537b8 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu.hs @@ -0,0 +1,15 @@ +module Ormolu where +import Data.List + +import Prelude +import Data.Int +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.expected.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.expected.hs new file mode 100644 index 0000000000..b3d867e700 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.expected.hs @@ -0,0 +1,5 @@ +import Data.Bool +import Data.Char +import Data.Data +import Data.Either +import Data.Int diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.formatted.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.formatted.hs new file mode 100644 index 0000000000..b3d867e700 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.formatted.hs @@ -0,0 +1,5 @@ +import Data.Bool +import Data.Char +import Data.Data +import Data.Either +import Data.Int diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.hs new file mode 100644 index 0000000000..bb011b5638 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu2.hs @@ -0,0 +1,5 @@ +import Data.Char +import Data.Either +import Data.Int +import Data.Data +import Data.Bool diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.expected.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.expected.hs new file mode 100644 index 0000000000..386166490e --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.expected.hs @@ -0,0 +1,5 @@ +foo :: String +foo = + "a" .=? "b" + <> "c" .=? "d" + <> "e" .=? "f" diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.formatted.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.formatted.hs new file mode 100644 index 0000000000..386166490e --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.formatted.hs @@ -0,0 +1,5 @@ +foo :: String +foo = + "a" .=? "b" + <> "c" .=? "d" + <> "e" .=? 
"f" diff --git a/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.hs b/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.hs new file mode 100644 index 0000000000..fd7e1f4270 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/Ormolu3.hs @@ -0,0 +1,3 @@ +foo :: String +foo = "a" .=? "b" + <> "c" .=? "d" <> "e" .=? "f" diff --git a/plugins/hls-ormolu-plugin/test/testdata/hie.yaml b/plugins/hls-ormolu-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/plugins/hls-ormolu-plugin/test/testdata/test.cabal b/plugins/hls-ormolu-plugin/test/testdata/test.cabal new file mode 100644 index 0000000000..004fc9e694 --- /dev/null +++ b/plugins/hls-ormolu-plugin/test/testdata/test.cabal @@ -0,0 +1,3 @@ +cabal-version: 3.0 +name: test +version: 0 diff --git a/plugins/hls-overloaded-record-dot-plugin/README.md b/plugins/hls-overloaded-record-dot-plugin/README.md new file mode 100644 index 0000000000..7b15d09911 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/README.md @@ -0,0 +1,18 @@ +# Explicit Record Fields Plugin + +`hls-overloaded-record-dot-plugin` is a plugin to convert record selectors to record dot syntax in GHC 9.2 and above. + + +## Demo + +![Convert Record Selector Demo](example.gif) + + +## Known limitations + +hls-overloaded-record-dot-plugin currently only converts record selectors to the record dot syntax, and will not help you convert your record updaters to overloaded record update syntax. + + +## Change log +### 1.0.0.0 +- Release diff --git a/plugins/hls-overloaded-record-dot-plugin/example.gif b/plugins/hls-overloaded-record-dot-plugin/example.gif new file mode 100644 index 0000000000..e0fbd192bb Binary files /dev/null and b/plugins/hls-overloaded-record-dot-plugin/example.gif differ diff --git a/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs b/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs new file mode 100644 index 0000000000..8ead286b67 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/src/Ide/Plugin/OverloadedRecordDot.hs @@ -0,0 +1,318 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.OverloadedRecordDot + ( descriptor + , Log + ) where + +-- based off of Berk Okzuturk's hls-explicit-records-fields-plugin + +import Control.Lens ((^.)) +import Control.Monad (replicateM) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except (ExceptT) +import Data.Aeson (FromJSON, ToJSON, toJSON) +import Data.Generics (GenericQ, everythingBut, + mkQ) +import qualified Data.IntMap.Strict as IntMap +import qualified Data.Map as Map +import Data.Maybe (mapMaybe, maybeToList) +import Data.Text (Text) +import Data.Unique (hashUnique, newUnique) +import Development.IDE (IdeState, + NormalizedFilePath, + Pretty (..), Range, + Recorder (..), Rules, + WithPriority (..), + realSrcSpanToRange) +import Development.IDE.Core.RuleTypes (TcModuleResult (..), + TypeCheck (..)) +import Development.IDE.Core.Shake (define, useWithStale) +import qualified Development.IDE.Core.Shake as Shake + +import Control.DeepSeq (rwhnf) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping (PositionMapping, + 
toCurrentRange) +import Development.IDE.GHC.Compat (Extension (OverloadedRecordDot), + GhcPass, HsExpr (..), + LHsExpr, Pass (..), + appPrec, dollarName, + getLoc, hs_valds, + parenthesizeHsExpr, + pattern RealSrcSpan, + unLoc) +import Development.IDE.GHC.Util (getExtensions, + printOutputable) +import Development.IDE.Graph (RuleResult) +import Development.IDE.Graph.Classes (Hashable, NFData (rnf)) +import Development.IDE.Spans.Pragmas (NextPragmaInfo (..), + getFirstPragma, + insertNewPragma) +import GHC.Generics (Generic) +import Ide.Logger (Priority (..), + cmapWithPrio, logWith, + (<+>)) +import Ide.Plugin.Error (PluginError (..), + getNormalizedFilePathE, + handleMaybe) +import Ide.Plugin.RangeMap (RangeMap) +import qualified Ide.Plugin.RangeMap as RangeMap +import Ide.Plugin.Resolve (mkCodeActionHandlerWithResolve) +import Ide.Types (PluginDescriptor (..), + PluginId (..), + PluginMethodHandler, + ResolveFunction, + defaultPluginDescriptor) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (Method (..)) +import Language.LSP.Protocol.Types (CodeAction (..), + CodeActionKind (CodeActionKind_RefactorRewrite), + CodeActionParams (..), + TextEdit (..), Uri (..), + WorkspaceEdit (WorkspaceEdit, _changeAnnotations, _changes, _documentChanges), + type (|?) (..)) + + +#if __GLASGOW_HASKELL__ < 910 +import Development.IDE.GHC.Compat (HsExpansion (HsExpanded)) +#else +import Development.IDE.GHC.Compat (XXExprGhcRn (..)) +#endif + + +data Log + = LogShake Shake.Log + | LogCollectedRecordSelectors [RecordSelectorExpr] + | forall a. (Pretty a) => LogResolve a + +instance Pretty Log where + pretty = \case + LogShake shakeLog -> pretty shakeLog + LogCollectedRecordSelectors recs -> "Collected record selectors:" + <+> pretty recs + LogResolve msg -> pretty msg + +data CollectRecordSelectors = CollectRecordSelectors + deriving (Eq, Show, Generic) + +instance Hashable CollectRecordSelectors +instance NFData CollectRecordSelectors + +data CollectRecordSelectorsResult = CRSR + { -- |We store everything in here that we need to create the unresolved + -- codeAction: the range, an uniquely identifiable int, and the selector + --selector expression (HSExpr) that we use to generate the name + records :: RangeMap (Int, HsExpr (GhcPass 'Renamed)) + -- |This is for when we need to fully generate a textEdit. It contains the + -- whole expression we are interested in indexed to the unique id we got + -- from the previous field + , recordInfos :: IntMap.IntMap RecordSelectorExpr + , enabledExtensions :: [Extension] + } + deriving (Generic) + +instance NFData CollectRecordSelectorsResult + +instance Show CollectRecordSelectorsResult where + show _ = "" + +type instance RuleResult CollectRecordSelectors = CollectRecordSelectorsResult + +-- |Where we store our collected record selectors +data RecordSelectorExpr = RecordSelectorExpr + { -- |The location of the matched expression + location :: Range, + -- |The record selector, this is found in front of recordExpr, but get's + -- placed after it when converted into record dot syntax + selectorExpr :: LHsExpr (GhcPass 'Renamed), + -- |The record expression. 
The only requirement is that it evaluates to a + -- record in the end + recordExpr :: LHsExpr (GhcPass 'Renamed) } + +instance Pretty RecordSelectorExpr where + pretty (RecordSelectorExpr _ rs se) = pretty (printOutputable rs) <> ":" + <+> pretty (printOutputable se) + +instance NFData RecordSelectorExpr where + rnf = rwhnf + +-- |The data that is serialized and placed in the data field of resolvable +-- code actions +data ORDResolveData = ORDRD { + -- |We need the uri to get shake results + uri :: Uri + -- |The unique id that allows us to find the specific codeAction we want +, uniqueID :: Int +} deriving (Generic, Show) +instance ToJSON ORDResolveData +instance FromJSON ORDResolveData + +descriptor :: Recorder (WithPriority Log) -> PluginId + -> PluginDescriptor IdeState +descriptor recorder plId = + let resolveRecorder = cmapWithPrio LogResolve recorder + pluginHandler = mkCodeActionHandlerWithResolve resolveRecorder codeActionProvider resolveProvider + in (defaultPluginDescriptor plId "Provides a code action to convert record selector usage to use overloaded record dot syntax") + { pluginHandlers = pluginHandler + , pluginRules = collectRecSelsRule recorder + } + +resolveProvider :: ResolveFunction IdeState ORDResolveData 'Method_CodeActionResolve +resolveProvider ideState plId ca uri (ORDRD _ int) = + do + nfp <- getNormalizedFilePathE uri + CRSR _ crsDetails exts <- collectRecSelResult ideState nfp + pragma <- getFirstPragma plId ideState nfp + rse <- handleMaybe PluginStaleResolve $ IntMap.lookup int crsDetails + pure $ ca {_edit = mkWorkspaceEdit uri rse exts pragma} + +codeActionProvider :: PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionProvider ideState _ (CodeActionParams _ _ caDocId caRange _) = + do + nfp <- getNormalizedFilePathE (caDocId ^. L.uri) + CRSR crsMap _ exts <- collectRecSelResult ideState nfp + let mkCodeAction (crsM, nse) = InR CodeAction + { -- We pass the record selector to the title function, so that + -- we can have the name of the record selector in the title of + -- the codeAction. This allows the user can easily distinguish + -- between the different codeActions when using nested record + -- selectors, the disadvantage is we need to print out the + -- name of the record selector which will decrease performance + _title = mkCodeActionTitle exts nse + , _kind = Just CodeActionKind_RefactorRewrite + , _diagnostics = Nothing + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Nothing + , _data_ = Just $ toJSON $ ORDRD (caDocId ^. 
L.uri) crsM + } + actions = map mkCodeAction (RangeMap.filterByRange caRange crsMap) + pure $ InL actions + where + mkCodeActionTitle :: [Extension] -> HsExpr (GhcPass 'Renamed) -> Text + mkCodeActionTitle exts se = + if OverloadedRecordDot `elem` exts + then title + else title <> " (needs extension: OverloadedRecordDot)" + where + title = "Convert `" <> printOutputable se <> "` to record dot syntax" + +mkWorkspaceEdit:: Uri -> RecordSelectorExpr -> [Extension] -> NextPragmaInfo-> Maybe WorkspaceEdit +mkWorkspaceEdit uri recSel exts pragma = + Just $ WorkspaceEdit + { _changes = + Just (Map.singleton uri (convertRecordSelectors recSel : maybeToList pragmaEdit)) + , _documentChanges = Nothing + , _changeAnnotations = Nothing} + where pragmaEdit = + if OverloadedRecordDot `elem` exts + then Nothing + else Just $ insertNewPragma pragma OverloadedRecordDot + +collectRecSelsRule :: Recorder (WithPriority Log) -> Rules () +collectRecSelsRule recorder = define (cmapWithPrio LogShake recorder) $ + \CollectRecordSelectors nfp -> + useWithStale TypeCheck nfp >>= \case + -- `useWithStale` here allows us to be able to return codeActions even + -- if the file does not typecheck. The disadvantage being that we + -- sometimes will end up corrupting code. This is most obvious in that + -- used code actions will continue to be presented, and when applied + -- multiple times will almost always cause code corruption. + Nothing -> pure ([], Nothing) + Just (tmr, pm) -> do + let -- We need the file's extensions to check whether we need to add + -- the OverloadedRecordDot pragma + exts = getEnabledExtensions tmr + recSels = mapMaybe (rewriteRange pm) (getRecordSelectors tmr) + -- We are creating a list as long as our rec selectors of unique int s + -- created by calling hashUnique on a Unique. The reason why we are + -- extracting the ints is because they don't need any work to serialize. + uniques <- liftIO $ replicateM (length recSels) (hashUnique <$> newUnique) + logWith recorder Debug (LogCollectedRecordSelectors recSels) + let crsUniquesAndDetails = zip uniques recSels + -- We need the rangeMap to be able to filter by range later + rangeAndUnique = toRangeAndUnique <$> crsUniquesAndDetails + crsMap :: RangeMap (Int, HsExpr (GhcPass 'Renamed)) + crsMap = RangeMap.fromList' rangeAndUnique + pure ([], CRSR <$> Just crsMap <*> Just (IntMap.fromList crsUniquesAndDetails) <*> Just exts) + where getEnabledExtensions :: TcModuleResult -> [Extension] + getEnabledExtensions = getExtensions . tmrParsed + getRecordSelectors :: TcModuleResult -> [RecordSelectorExpr] +#if __GLASGOW_HASKELL__ >= 910 + getRecordSelectors (tmrRenamed -> (hs_valds -> valBinds,_,_,_,_)) = collectRecordSelectors valBinds +#else + getRecordSelectors (tmrRenamed -> (hs_valds -> valBinds,_,_,_)) = collectRecordSelectors valBinds +#endif + rewriteRange :: PositionMapping -> RecordSelectorExpr + -> Maybe RecordSelectorExpr + rewriteRange pm recSel = + case toCurrentRange pm (location recSel) of + Just newLoc -> Just $ recSel{location = newLoc} + Nothing -> Nothing + toRangeAndUnique (uid, RecordSelectorExpr l (unLoc -> se) _) = (l, (uid, se)) + +convertRecordSelectors :: RecordSelectorExpr -> TextEdit +convertRecordSelectors RecordSelectorExpr{..} = + TextEdit location $ convertRecSel selectorExpr recordExpr + +-- |Converts a record selector expression into record dot syntax, currently we +-- are using printOutputable to do it. 
We are also letting GHC decide when to +-- parenthesize the record expression +convertRecSel :: LHsExpr (GhcPass 'Renamed) -> LHsExpr (GhcPass 'Renamed) -> Text +convertRecSel se re = printOutputable (parenthesizeHsExpr appPrec re) <> "." + <> printOutputable se + +collectRecordSelectors :: GenericQ [RecordSelectorExpr] +-- It's important that we use everthingBut here, because if we used everything +-- we would get duplicates for every case that occurs inside a HsExpanded +-- expression. Please see the test MultilineExpanded.hs +collectRecordSelectors = everythingBut (<>) (([], False) `mkQ` getRecSels) + +-- |We want to return a list here, because on the occasion that we encounter a +-- HsExpanded expression, we want to return all the results from recursing on +-- one branch, which could be multiple matches. Again see MultilineExpanded.hs +getRecSels :: LHsExpr (GhcPass 'Renamed) -> ([RecordSelectorExpr], Bool) +-- When we stumble upon an occurrence of HsExpanded, we only want to follow one +-- branch. We do this here, by explicitly returning occurrences from traversing +-- the original branch, and returning True, which keeps syb from implicitly +-- continuing to traverse. +#if __GLASGOW_HASKELL__ >= 910 +getRecSels (unLoc -> XExpr (ExpandedThingRn a _)) = (collectRecordSelectors a, True) +#else +getRecSels (unLoc -> XExpr (HsExpanded a _)) = (collectRecordSelectors a, True) +#endif +-- applied record selection: "selector record" or "selector (record)" or +-- "selector selector2.record2" +#if __GLASGOW_HASKELL__ >= 911 +getRecSels e@(unLoc -> HsApp _ se@(unLoc -> XExpr (HsRecSelRn _)) re) = +#else +getRecSels e@(unLoc -> HsApp _ se@(unLoc -> HsRecSel _ _) re) = +#endif + ( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re + | RealSrcSpan realSpan' _ <- [ getLoc e ] ], False ) +-- Record selection where the field is being applied with the "$" operator: +-- "selector $ record" +#if __GLASGOW_HASKELL__ >= 911 +getRecSels e@(unLoc -> OpApp _ se@(unLoc -> XExpr (HsRecSelRn _)) +#else +getRecSels e@(unLoc -> OpApp _ se@(unLoc -> HsRecSel _ _) +#endif + (unLoc -> HsVar _ (unLoc -> d)) re) | d == dollarName = + ( [ RecordSelectorExpr (realSrcSpanToRange realSpan') se re + | RealSrcSpan realSpan' _ <- [ getLoc e ] ], False ) +getRecSels _ = ([], False) + +collectRecSelResult :: MonadIO m => IdeState -> NormalizedFilePath + -> ExceptT PluginError m CollectRecordSelectorsResult +collectRecSelResult ideState = + runActionE "overloadedRecordDot.collectRecordSelectors" ideState + . 
useE CollectRecordSelectors + diff --git a/plugins/hls-overloaded-record-dot-plugin/test/Main.hs b/plugins/hls-overloaded-record-dot-plugin/test/Main.hs new file mode 100644 index 0000000000..bcbdfe184d --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/Main.hs @@ -0,0 +1,76 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main ( main ) where + +import Data.Either (rights) +import qualified Data.Text as T +import qualified Ide.Plugin.OverloadedRecordDot as OverloadedRecordDot +import System.FilePath (()) +import Test.Hls + +main :: IO () +main = + defaultTestRunner test + +plugin :: PluginTestDescriptor OverloadedRecordDot.Log +plugin = mkPluginTestDescriptor OverloadedRecordDot.descriptor "overloaded-record-dot" + +test :: TestTree +test = testGroup "overloaded-record-dot" + (mkTest "Simple" "Simple" "name" 10 7 10 15 + <> mkTest "NoPragmaNeeded" "NoPragmaNeeded" "name" 11 7 11 15 + <> mkTest "NestedParens" "NestedParens" "name" 15 7 15 24 + <> mkTest "NestedDot" "NestedDot" "name" 17 7 17 22 + <> mkTest "NestedDollar" "NestedDollar" "name" 15 7 15 24 + <> mkTest "MultilineCase" "MultilineCase" "name" 10 7 12 15 + <> mkTest "Multiline" "Multiline" "name" 10 7 11 15 + <> mkTest "MultilineExpanded" "MultilineExpanded" "owner" 28 8 28 19) + +mkTest :: TestName -> FilePath -> T.Text -> UInt -> UInt -> UInt -> UInt -> [TestTree] +mkTest title fp selectorName x1 y1 x2 y2 = + [mkNoResolveTest (title <> " without resolve") fp selectorName x1 y1 x2 y2, + mkResolveTest (title <> " with resolve") fp selectorName x1 y1 x2 y2] + +mkNoResolveTest :: TestName -> FilePath -> T.Text -> UInt -> UInt -> UInt -> UInt -> TestTree +mkNoResolveTest title fp selectorName x1 y1 x2 y2 = + goldenWithHaskellAndCaps def codeActionNoResolveCaps plugin title testDataDir fp "expected" "hs" $ \doc -> do + (act:_) <- getExplicitFieldsActions doc selectorName x1 y1 x2 y2 + executeCodeAction act + +mkResolveTest :: TestName -> FilePath -> T.Text -> UInt -> UInt -> UInt -> UInt -> TestTree +mkResolveTest title fp selectorName x1 y1 x2 y2 = + goldenWithHaskellAndCaps def codeActionResolveCaps plugin title testDataDir fp "expected" "hs" $ \doc -> do + (act:_) <- getAndResolveExplicitFieldsActions doc selectorName x1 y1 x2 y2 + executeCodeAction act + + +getExplicitFieldsActions + :: TextDocumentIdentifier + -> T.Text + -> UInt -> UInt -> UInt -> UInt + -> Session [CodeAction] +getExplicitFieldsActions doc selectorName x1 y1 x2 y2 = + findExplicitFieldsAction selectorName <$> getCodeActions doc range + where + range = Range (Position x1 y1) (Position x2 y2) + +getAndResolveExplicitFieldsActions + :: TextDocumentIdentifier + -> T.Text + -> UInt -> UInt -> UInt -> UInt + -> Session [CodeAction] +getAndResolveExplicitFieldsActions doc selectorName x1 y1 x2 y2 = do + findExplicitFieldsAction selectorName <$> getAndResolveCodeActions doc range + where + range = Range (Position x1 y1) (Position x2 y2) + +findExplicitFieldsAction :: T.Text -> [a |? CodeAction] -> [CodeAction] +findExplicitFieldsAction selectorName = filter (isExplicitFieldsCodeAction selectorName) . rights . 
map toEither + +isExplicitFieldsCodeAction :: T.Text -> CodeAction -> Bool +isExplicitFieldsCodeAction selectorName CodeAction {_title} = + ("Convert `" <> selectorName <> "` to record dot syntax") `T.isPrefixOf` _title + +testDataDir :: FilePath +testDataDir = "plugins" "hls-overloaded-record-dot-plugin" "test" "testdata" diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/Multiline.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Multiline.expected.hs new file mode 100644 index 0000000000..f046bb0f35 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Multiline.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = man.name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/Multiline.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Multiline.hs new file mode 100644 index 0000000000..f9a6400ac6 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Multiline.hs @@ -0,0 +1,12 @@ +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = name + man diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineCase.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineCase.expected.hs new file mode 100644 index 0000000000..5508aacb92 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineCase.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = (case True of + True -> man + False -> man).name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineCase.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineCase.hs new file mode 100644 index 0000000000..4659858f89 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineCase.hs @@ -0,0 +1,13 @@ +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = name $ case True of + True -> man + False -> man diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineExpanded.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineExpanded.expected.hs new file mode 100644 index 0000000000..0c1b9b4de8 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineExpanded.expected.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE OverloadedRecordDot #-} + +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 10 Privet Dr.", owner = man} + +home2 :: Building +home2 = Building {address = "No. 6 Beach Ave.", owner = man} + +home3 :: Building +home3 = Building {address = "No. 
12 Central Blvd.", owner = man} + +n:: Int +n = 3 + +test :: String +test = (case n of + 0 -> owner home + 1 -> home2.owner + 2 -> owner home3 + _ -> man).name + diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineExpanded.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineExpanded.hs new file mode 100644 index 0000000000..e9fc606f2b --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/MultilineExpanded.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE OverloadedRecordDot #-} + +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 10 Privet Dr.", owner = man} + +home2 :: Building +home2 = Building {address = "No. 6 Beach Ave.", owner = man} + +home3 :: Building +home3 = Building {address = "No. 12 Central Blvd.", owner = man} + +n:: Int +n = 3 + +test :: String +test = (case n of + 0 -> owner home + 1 -> owner home2 + 2 -> owner home3 + _ -> man).name + diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDollar.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDollar.expected.hs new file mode 100644 index 0000000000..fa15181d2f --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDollar.expected.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 10 Privet Dr.", owner = man} + +test :: String +test = (owner home).name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDollar.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDollar.hs new file mode 100644 index 0000000000..cbd47da8bc --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDollar.hs @@ -0,0 +1,16 @@ +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 10 Privet Dr.", owner = man} + +test :: String +test = name $ owner home diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDot.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDot.expected.hs new file mode 100644 index 0000000000..2022eacc20 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDot.expected.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE OverloadedRecordDot #-} + +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 
10 Privet Dr.", owner = man} + +test :: String +test = home.owner.name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDot.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDot.hs new file mode 100644 index 0000000000..c25922fb6f --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedDot.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE OverloadedRecordDot #-} + +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 10 Privet Dr.", owner = man} + +test :: String +test = name home.owner diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedParens.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedParens.expected.hs new file mode 100644 index 0000000000..fa15181d2f --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedParens.expected.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 10 Privet Dr.", owner = man} + +test :: String +test = (owner home).name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedParens.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedParens.hs new file mode 100644 index 0000000000..578ffd9a6c --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NestedParens.hs @@ -0,0 +1,16 @@ +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +data Building = Building {address :: String, owner :: Happy} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +home :: Building +home = Building {address = "No. 
10 Privet Dr.", owner = man} + +test :: String +test = name (owner home) diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NoPragmaNeeded.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NoPragmaNeeded.expected.hs new file mode 100644 index 0000000000..f046bb0f35 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NoPragmaNeeded.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = man.name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/NoPragmaNeeded.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NoPragmaNeeded.hs new file mode 100644 index 0000000000..9f88a03775 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/NoPragmaNeeded.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = name man diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/Simple.expected.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Simple.expected.hs new file mode 100644 index 0000000000..f046bb0f35 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Simple.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedRecordDot #-} +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = man.name diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/Simple.hs b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Simple.hs new file mode 100644 index 0000000000..40fd5a71e1 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/Simple.hs @@ -0,0 +1,11 @@ +data Happy = Happy {name :: String, age :: Int, happy :: Bool} + +main :: IO () +main = do + putStrLn test + +man :: Happy +man = Happy {name = "Happy", age = 1, happy = True} + +test :: String +test = name man diff --git a/plugins/hls-overloaded-record-dot-plugin/test/testdata/hie.yaml b/plugins/hls-overloaded-record-dot-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..ee67c73150 --- /dev/null +++ b/plugins/hls-overloaded-record-dot-plugin/test/testdata/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: []}} diff --git a/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs new file mode 100644 index 0000000000..23bfd727cf --- /dev/null +++ b/plugins/hls-pragmas-plugin/src/Ide/Plugin/Pragmas.hs @@ -0,0 +1,335 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} + +-- | Provides code actions to add missing pragmas (whenever GHC suggests to) +module Ide.Plugin.Pragmas + ( suggestPragmaDescriptor + , completionDescriptor + , suggestDisableWarningDescriptor + -- For testing + , validPragmas + , AppearWhere(..) 
+ ) where + +import Control.Lens hiding (List) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import qualified Data.Aeson as JSON +import Data.Char (isAlphaNum) +import qualified Data.Foldable as Foldable +import Data.List.Extra (nubOrdOn) +import qualified Data.Map as M +import Data.Maybe (mapMaybe) +import qualified Data.Text as T +import Development.IDE hiding (line) +import Development.IDE.Core.Compile (sourceParser, + sourceTypecheck) +import Development.IDE.Core.PluginUtils +import Development.IDE.GHC.Compat +import Development.IDE.Plugin.Completions (ghcideCompletionsPluginPriority) +import Development.IDE.Plugin.Completions.Logic (getCompletionPrefixFromRope) +import Development.IDE.Plugin.Completions.Types (PosPrefixInfo (..)) +import qualified Development.IDE.Spans.Pragmas as Pragmas +import Ide.Plugin.Error +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Message as LSP +import qualified Language.LSP.Protocol.Types as LSP +import qualified Text.Fuzzy as Fuzzy + +-- --------------------------------------------------------------------- + +suggestPragmaDescriptor :: PluginId -> PluginDescriptor IdeState +suggestPragmaDescriptor plId = (defaultPluginDescriptor plId "Provides a code action to add missing LANGUAGE pragmas") + { pluginHandlers = mkPluginHandler LSP.SMethod_TextDocumentCodeAction suggestPragmaProvider + , pluginPriority = defaultPluginPriority + 1000 + } + +completionDescriptor :: PluginId -> PluginDescriptor IdeState +completionDescriptor plId = (defaultPluginDescriptor plId "Provides completion of LANGAUGE pragmas") + { pluginHandlers = mkPluginHandler LSP.SMethod_TextDocumentCompletion completion + , pluginPriority = ghcideCompletionsPluginPriority + 1 + } + +suggestDisableWarningDescriptor :: PluginId -> PluginDescriptor IdeState +suggestDisableWarningDescriptor plId = (defaultPluginDescriptor plId "Provides a code action to disable warnings") + { pluginHandlers = mkPluginHandler LSP.SMethod_TextDocumentCodeAction suggestDisableWarningProvider + -- #3636 Suggestions to disable warnings should appear last. + , pluginPriority = 0 + } + +-- --------------------------------------------------------------------- +-- | Title and pragma +type PragmaEdit = (T.Text, Pragma) + +data Pragma = LangExt T.Text | OptGHC T.Text + deriving (Show, Eq, Ord) + +suggestPragmaProvider :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +suggestPragmaProvider = mkCodeActionProvider suggest + +suggestDisableWarningProvider :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +suggestDisableWarningProvider = mkCodeActionProvider $ const suggestDisableWarning + +mkCodeActionProvider :: (Maybe DynFlags -> Diagnostic -> [PragmaEdit]) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +mkCodeActionProvider mkSuggest state _plId + (LSP.CodeActionParams _ _ LSP.TextDocumentIdentifier{ _uri = uri } _ (LSP.CodeActionContext diags _monly _)) = do + normalizedFilePath <- getNormalizedFilePathE uri + -- ghc session to get some dynflags even if module isn't parsed + (hscEnv -> hsc_dflags -> sessionDynFlags, _) <- + runActionE "Pragmas.GhcSession" state $ useWithStaleE GhcSession normalizedFilePath + fileContents <- liftIO $ runAction "Pragmas.GetFileContents" state $ getFileContents normalizedFilePath + parsedModule <- liftIO $ runAction "Pragmas.GetParsedModule" state $ getParsedModule normalizedFilePath + let parsedModuleDynFlags = ms_hspp_opts . 
pm_mod_summary <$> parsedModule + nextPragmaInfo = Pragmas.getNextPragmaInfo sessionDynFlags fileContents + pedits = nubOrdOn snd $ concatMap (mkSuggest parsedModuleDynFlags) diags + pure $ LSP.InL $ pragmaEditToAction uri nextPragmaInfo <$> pedits + + + +-- | Add a Pragma to the given URI at the top of the file. +-- It is assumed that the pragma name is a valid pragma, +-- thus, not validated. +pragmaEditToAction :: Uri -> Pragmas.NextPragmaInfo -> PragmaEdit -> (LSP.Command LSP.|? LSP.CodeAction) +pragmaEditToAction uri Pragmas.NextPragmaInfo{ nextPragmaLine, lineSplitTextEdits } (title, p) = + LSP.InR $ LSP.CodeAction title (Just LSP.CodeActionKind_QuickFix) (Just []) Nothing Nothing (Just edit) Nothing Nothing + where + render (OptGHC x) = "{-# OPTIONS_GHC -Wno-" <> x <> " #-}\n" + render (LangExt x) = "{-# LANGUAGE " <> x <> " #-}\n" + pragmaInsertPosition = Position (fromIntegral nextPragmaLine) 0 + pragmaInsertRange = Range pragmaInsertPosition pragmaInsertPosition + -- workaround the fact that for some reason lsp-test applies text + -- edits in reverse order than lsp (tried in both coc.nvim and vscode) + textEdits = + if | Just (Pragmas.LineSplitTextEdits insertTextEdit deleteTextEdit) <- lineSplitTextEdits + , let LSP.TextEdit{ _range, _newText } = insertTextEdit -> + [LSP.TextEdit _range (render p <> _newText), deleteTextEdit] + | otherwise -> [LSP.TextEdit pragmaInsertRange (render p)] + + edit = + LSP.WorkspaceEdit + (Just $ M.singleton uri textEdits) + Nothing + Nothing + +suggest :: Maybe DynFlags -> Diagnostic -> [PragmaEdit] +suggest dflags diag = + suggestAddPragma dflags diag + +-- --------------------------------------------------------------------- + +suggestDisableWarning :: Diagnostic -> [PragmaEdit] +suggestDisableWarning diagnostic + | Just (Just (JSON.Array attachedReasons)) <- diagnostic ^? attachedReason + = + [ ("Disable \"" <> w <> "\" warnings", OptGHC w) + | JSON.String attachedReason <- Foldable.toList attachedReasons + , Just w <- [T.stripPrefix "-W" attachedReason] + , w `notElem` warningBlacklist + ] + | otherwise = [] + +warningBlacklist :: [T.Text] +warningBlacklist = + -- Don't suggest disabling type errors as a solution to all type errors. + [ "deferred-type-errors" + -- Don't suggest disabling out of scope errors as a solution to all out of scope errors. + , "deferred-out-of-scope-variables" + ] + +-- --------------------------------------------------------------------- + +-- | Offer to add a missing Language Pragma to the top of a file. +-- Pragmas are defined by a curated list of known pragmas, see 'possiblePragmas'. +suggestAddPragma :: Maybe DynFlags -> Diagnostic -> [PragmaEdit] +suggestAddPragma mDynflags Diagnostic {_message, _source} + | _source == Just sourceTypecheck || _source == Just sourceParser = genPragma _message + where + genPragma target = + [("Add \"" <> r <> "\"", LangExt r) | r <- findPragma target, r `notElem` disabled] + disabled + | Just dynFlags <- mDynflags = + -- GHC does not export 'OnOff', so we have to view it as string + mapMaybe (T.stripPrefix "Off " . printOutputable) (extensions dynFlags) + | otherwise = + -- When the module failed to parse, we don't have access to its + -- dynFlags. In that case, simply don't disable any pragmas. + [] +suggestAddPragma _ _ = [] + +-- | Find all Pragmas are an infix of the search term. 
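-- A worked example of the flow above, using an illustrative (not verbatim GHC) message:
--   findPragma "Illegal tuple section: perhaps you intended to use TupleSections"
--     == ["TupleSections"]
-- Assuming TupleSections is not already enabled, suggestAddPragma then yields
-- ("Add \"TupleSections\"", LangExt "TupleSections"), which pragmaEditToAction renders
-- as the text edit "{-# LANGUAGE TupleSections #-}\n" inserted at the line computed by
-- Pragmas.getNextPragmaInfo.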
+findPragma :: T.Text -> [T.Text] +findPragma str = concatMap check possiblePragmas + where + check p = [p | T.isInfixOf p str] + + -- We exclude the Strict extension as it causes many false positives, see + -- the discussion at https://github.com/haskell/ghcide/pull/638 + -- + -- We don't include the No- variants, as GHC never suggests disabling an + -- extension in an error message. + possiblePragmas :: [T.Text] + possiblePragmas = + [ name + | FlagSpec{flagSpecName = T.pack -> name} <- xFlags + , "Strict" /= name + ] + +-- | All language pragmas, including the No- variants +allPragmas :: [T.Text] +allPragmas = + concat + [ [name, "No" <> name] + | FlagSpec{flagSpecName = T.pack -> name} <- xFlags + ] + <> + -- These pragmas are not part of xFlags as they are not reversable + -- by prepending "No". + [ -- Safe Haskell + "Unsafe" + , "Trustworthy" + , "Safe" + + -- Language Version Extensions + , "Haskell98" + , "Haskell2010" + , "GHC2021" + ] + +-- --------------------------------------------------------------------- +flags :: [T.Text] +flags = map T.pack $ flagsForCompletion False + +completion :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCompletion +completion ide _ complParams = do + let (LSP.TextDocumentIdentifier uri) = complParams ^. L.textDocument + position@(Position ln col) = complParams ^. L.position + contents <- liftIO $ runAction "Pragmas.GetUriContents" ide $ getUriContents $ toNormalizedUri uri + fmap LSP.InL $ case (contents, uriToFilePath' uri) of + (Just cnts, Just _path) -> + pure $ result $ getCompletionPrefixFromRope position cnts + where + result pfix + | "{-# language" `T.isPrefixOf` line + = map mkLanguagePragmaCompl $ + Fuzzy.simpleFilter word allPragmas + | "{-# options_ghc" `T.isPrefixOf` line + = let optionPrefix = getGhcOptionPrefix pfix + prefixLength = fromIntegral $ T.length optionPrefix + prefixRange = LSP.Range (Position ln (col - prefixLength)) position + in map (mkGhcOptionCompl prefixRange) $ Fuzzy.simpleFilter optionPrefix flags + | "{-#" `T.isPrefixOf` line + = [ mkPragmaCompl (a <> suffix) b c + | (a, b, c, w) <- validPragmas, w == NewLine + ] + | -- Do not suggest any pragmas under any of these conditions: + -- 1. Current line is an import + -- 2. There is a module name right before the current word. + -- Something like `Text.la` shouldn't suggest adding the + -- 'LANGUAGE' pragma. + -- 3. The user has not typed anything yet. + "import" `T.isPrefixOf` line || not (T.null module_) || T.null word + = [] + | otherwise + = [ mkPragmaCompl (prefix <> pragmaTemplate <> suffix) matcher detail + | (pragmaTemplate, matcher, detail, appearWhere) <- validPragmas + , case appearWhere of + -- Only suggest a pragma that needs its own line if the whole line + -- fuzzily matches the pragma + NewLine -> Fuzzy.test line matcher + -- Only suggest a pragma that appears in the middle of a line when + -- the current word is not the only thing in the line and the + -- current word fuzzily matches the pragma + CanInline -> line /= word && Fuzzy.test word matcher + ] + where + line = T.toLower $ fullLine pfix + module_ = prefixScope pfix + word = prefixText pfix + -- Not completely correct, may fail if more than one "{-#" exists. + -- We can ignore it since it rarely happens. 
+ prefix + | "{-# " `T.isInfixOf` line = "" + | "{-#" `T.isInfixOf` line = " " + | otherwise = "{-# " + suffix + | " #-}" `T.isSuffixOf` line = "" + | "#-}" `T.isSuffixOf` line = " " + | "-}" `T.isSuffixOf` line = " #" + | "}" `T.isSuffixOf` line = " #-" + | otherwise = " #-}" + _ -> return [] + +----------------------------------------------------------------------- + +-- | Pragma where exist +data AppearWhere = + NewLine + -- ^Must be on a new line + | CanInline + -- ^Can appear in the line + deriving (Show, Eq) + +validPragmas :: [(T.Text, T.Text, T.Text, AppearWhere)] +validPragmas = + [ ("LANGUAGE ${1:extension}" , "LANGUAGE" , "{-# LANGUAGE #-}" , NewLine) + , ("OPTIONS_GHC -${1:option}" , "OPTIONS_GHC" , "{-# OPTIONS_GHC #-}" , NewLine) + , ("INLINE ${1:function}" , "INLINE" , "{-# INLINE #-}" , NewLine) + , ("NOINLINE ${1:function}" , "NOINLINE" , "{-# NOINLINE #-}" , NewLine) + , ("INLINABLE ${1:function}" , "INLINABLE" , "{-# INLINABLE #-}" , NewLine) + , ("WARNING ${1:message}" , "WARNING" , "{-# WARNING #-}" , CanInline) + , ("DEPRECATED ${1:message}" , "DEPRECATED" , "{-# DEPRECATED #-}" , CanInline) + , ("ANN ${1:annotation}" , "ANN" , "{-# ANN #-}" , NewLine) + , ("RULES" , "RULES" , "{-# RULES #-}" , NewLine) + , ("SPECIALIZE ${1:function}" , "SPECIALIZE" , "{-# SPECIALIZE #-}" , NewLine) + , ("SPECIALIZE INLINE ${1:function}", "SPECIALIZE INLINE", "{-# SPECIALIZE INLINE #-}", NewLine) + , ("SPECIALISE ${1:function}" , "SPECIALISE" , "{-# SPECIALISE #-}" , NewLine) + , ("SPECIALISE INLINE ${1:function}", "SPECIALISE INLINE", "{-# SPECIALISE INLINE #-}", NewLine) + , ("MINIMAL ${1:functions}" , "MINIMAL" , "{-# MINIMAL #-}" , CanInline) + , ("UNPACK" , "UNPACK" , "{-# UNPACK #-}" , CanInline) + , ("NOUNPACK" , "NOUNPACK" , "{-# NOUNPACK #-}" , CanInline) + , ("COMPLETE ${1:function}" , "COMPLETE" , "{-# COMPLETE #-}" , NewLine) + , ("OVERLAPPING" , "OVERLAPPING" , "{-# OVERLAPPING #-}" , CanInline) + , ("OVERLAPPABLE" , "OVERLAPPABLE" , "{-# OVERLAPPABLE #-}" , CanInline) + , ("OVERLAPS" , "OVERLAPS" , "{-# OVERLAPS #-}" , CanInline) + , ("INCOHERENT" , "INCOHERENT" , "{-# INCOHERENT #-}" , CanInline) + ] + +mkPragmaCompl :: T.Text -> T.Text -> T.Text -> LSP.CompletionItem +mkPragmaCompl insertText label detail = + LSP.CompletionItem label Nothing (Just LSP.CompletionItemKind_Keyword) Nothing (Just detail) + Nothing Nothing Nothing Nothing Nothing (Just insertText) (Just LSP.InsertTextFormat_Snippet) + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + +mkLanguagePragmaCompl :: T.Text -> LSP.CompletionItem +mkLanguagePragmaCompl label = + LSP.CompletionItem label Nothing (Just LSP.CompletionItemKind_Keyword) Nothing Nothing + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + +mkGhcOptionCompl :: Range -> T.Text -> LSP.CompletionItem +mkGhcOptionCompl editRange completedFlag = + LSP.CompletionItem completedFlag Nothing (Just LSP.CompletionItemKind_Keyword) Nothing Nothing + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + Nothing (Just insertCompleteFlag) Nothing Nothing Nothing Nothing Nothing + where + insertCompleteFlag = LSP.InL $ LSP.TextEdit editRange completedFlag + +-- The prefix extraction logic of getCompletionPrefix +-- doesn't consider '-' part of prefix which breaks completion +-- of flags like "-ddump-xyz". 
For OPTIONS_GHC completion we need the whole thing +-- to be considered completion prefix, but `prefixText posPrefixInfo` would return"xyz" in this case +getGhcOptionPrefix :: PosPrefixInfo -> T.Text +getGhcOptionPrefix PosPrefixInfo {cursorPos = Position _ col, fullLine}= + T.takeWhileEnd isGhcOptionChar beforePos + where + beforePos = T.take (fromIntegral col) fullLine + + -- Is this character contained in some GHC flag? Based on: + -- >>> nub . sort . concat $ GHC.Driver.Session.flagsForCompletion False + -- "#-.01234589=ABCDEFGHIJKLMNOPQRSTUVWX_abcdefghijklmnopqrstuvwxyz" + isGhcOptionChar :: Char -> Bool + isGhcOptionChar c = isAlphaNum c || c `elem` ("#-.=_" :: String) diff --git a/plugins/hls-pragmas-plugin/test/Main.hs b/plugins/hls-pragmas-plugin/test/Main.hs new file mode 100644 index 0000000000..1e38e439ab --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/Main.hs @@ -0,0 +1,224 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Main + ( main + ) where + +import Control.Lens ((<&>), (^.)) +import Data.Aeson +import Data.Foldable +import qualified Data.Text as T +import Ide.Plugin.Pragmas +import qualified Language.LSP.Protocol.Lens as L +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +pragmasSuggestPlugin :: PluginTestDescriptor () +pragmasSuggestPlugin = mkPluginTestDescriptor' suggestPragmaDescriptor "pragmas" + +pragmasCompletionPlugin :: PluginTestDescriptor () +pragmasCompletionPlugin = mkPluginTestDescriptor' completionDescriptor "pragmas" + +pragmasDisableWarningPlugin :: PluginTestDescriptor () +pragmasDisableWarningPlugin = mkPluginTestDescriptor' suggestDisableWarningDescriptor "pragmas" + +tests :: TestTree +tests = + testGroup "pragmas" + [ codeActionTests + , codeActionTests' + , completionTests + , completionSnippetTests + , dontSuggestCompletionTests + ] + +codeActionTests :: TestTree +codeActionTests = + testGroup "code actions" + [ codeActionTestWithPragmasSuggest "Block comment then line comment doesn't split line" "BlockCommentThenLineComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Block comment then single-line block comment doesn't split line" "BlockCommentThenSingleLineBlockComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Block comment then multi-line block comment doesn't split line" "BlockCommentThenMultiLineBlockComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Block comment then line haddock splits line" "BlockCommentThenLineHaddock" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Block comment then single-line block haddock splits line" "BlockCommentThenSingleLineBlockHaddock" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Block comment then multi-line block haddock splits line" "BlockCommentThenMultiLineBlockHaddock" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then line comment doesn't split line" "PragmaThenLineComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then single-line block comment doesn't split line" "PragmaThenSingleLineBlockComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then multi-line block 
comment splits line" "PragmaThenMultiLineBlockComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then line haddock splits line" "PragmaThenLineHaddock" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then single-line block haddock splits line" "PragmaThenSingleLineBlockHaddock" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then multi-line block haddock splits line" "PragmaThenMultiLineBlockHaddock" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then single-line block haddock single-line block comment splits line" "PragmaThenSingleLineBlockHaddockSingleLineBlockComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Block comment then single-line block haddock single-line block comment splits line" "BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Pragma then line haddock then newline line comment splits line" "PragmaThenLineHaddockNewlineLineComment" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "does not add pragma after OPTIONS_GHC pragma located after a declaration" "OptionsGhcAfterDecl" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE with no other pragmas at start ignoring later INLINE pragma" "AddPragmaIgnoreInline" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE before Doc comments after interchanging pragmas" "BeforeDocInterchanging" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "Add language after altering OPTIONS_GHC and Language" "AddLanguagePragmaAfterInterchaningOptsGhcAndLangs" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "Add language after pragmas with non standard space between prefix and name" "AddPragmaWithNonStandardSpacingInPrecedingPragmas" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE after OptGHC at start ignoring later INLINE pragma" "AddPragmaAfterOptsGhcIgnoreInline" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE ignore later Ann pragma" "AddPragmaIgnoreLaterAnnPragma" [("Add \"BangPatterns\"", "Contains BangPatterns code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE after interchanging pragmas ignoring later Ann pragma" "AddLanguageAfterInterchaningIgnoringLaterAnn" [("Add \"BangPatterns\"", "Contains BangPatterns code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE after OptGHC preceded by another language pragma" "AddLanguageAfterLanguageThenOptsGhc" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma after shebang and last language pragma" "AfterShebangAndPragma" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "adds above module keyword on first line" "ModuleOnFirstLine" [("Add \"TupleSections\"", "Contains TupleSections code 
action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma after GHC_OPTIONS" "AfterGhcOptions" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma after shebang and GHC_OPTIONS" "AfterShebangAndOpts" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma after shebang, GHC_OPTIONS and language pragma" "AfterShebangAndOptionsAndPragma" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma after all others ignoring later INLINE pragma" "AfterShebangAndOptionsAndPragmasIgnoreInline" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma after all others ignoring multiple later INLINE pragma" "AfterAllWithMultipleInlines" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds LANGUAGE pragma correctly ignoring later INLINE pragma" "AddLanguagePragma" [("Add \"TupleSections\"", "Contains TupleSections code action")] + , codeActionTestWithPragmasSuggest "adds TypeApplications pragma" "TypeApplications" [("Add \"TypeApplications\"", "Contains TypeApplications code action")] + , codeActionTestWithPragmasSuggest "after shebang" "AfterShebang" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "append to existing pragmas" "AppendToExisting" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "before doc comments NamedFieldPuns" "BeforeDocComment" [("Add \"NamedFieldPuns\"", "Contains NamedFieldPuns code action")] + , codeActionTestWithPragmasSuggest "adds TypeSynonymInstances pragma" "NeedsPragmas" [("Add \"TypeSynonymInstances\"", "Contains TypeSynonymInstances code action"), ("Add \"FlexibleInstances\"", "Contains FlexibleInstances code action")] + , codeActionTestWithDisableWarning "before doc comments missing-signatures" "MissingSignatures" [("Disable \"missing-signatures\" warnings", "Contains missing-signatures code action")] + , codeActionTestWithDisableWarning "before doc comments unused-imports" "UnusedImports" [("Disable \"unused-imports\" warnings", "Contains unused-imports code action")] + ] + +codeActionTestWithPragmasSuggest :: String -> FilePath -> [(T.Text, String)] -> TestTree +codeActionTestWithPragmasSuggest = codeActionTestWith pragmasSuggestPlugin + +codeActionTestWithDisableWarning :: String -> FilePath -> [(T.Text, String)] -> TestTree +codeActionTestWithDisableWarning = codeActionTestWith pragmasDisableWarningPlugin + +codeActionTestWith :: PluginTestDescriptor () -> String -> FilePath -> [(T.Text, String)] -> TestTree +codeActionTestWith descriptor testComment fp actions = + goldenWithPragmas descriptor testComment fp $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- map fromAction <$> getAllCodeActions doc + mapM_ (\(action, contains) -> go action contains cas) actions + action <- case cas of + (a:_) -> pure a + [] -> liftIO $ assertFailure "Expected non-empty list of code actions" + executeCodeAction action + where + go action contains cas = liftIO $ action `elem` map (^. L.title) cas @? 
contains + +codeActionTests' :: TestTree +codeActionTests' = + testGroup "additional code actions" + [ goldenWithPragmas pragmasSuggestPlugin "no duplication" "NamedFieldPuns" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- map fromAction <$> getCodeActions doc (Range (Position 8 9) (Position 8 9)) + ca <- liftIO $ case cas of + [ca] -> pure ca + _ -> assertFailure $ "Expected one code action, but got: " <> show cas + liftIO $ (ca ^. L.title == "Add \"NamedFieldPuns\"") @? "NamedFieldPuns code action" + executeCodeAction ca + , goldenWithPragmas pragmasDisableWarningPlugin "doesn't suggest disabling type errors" "DeferredTypeErrors" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- map fromAction <$> getAllCodeActions doc + liftIO $ "Disable \"deferred-type-errors\" warnings" `notElem` map (^. L.title) cas @? "Doesn't contain deferred-type-errors code action" + liftIO $ length cas == 0 @? "Expected no code actions, but got: " <> show cas + , goldenWithPragmas pragmasDisableWarningPlugin "doesn't suggest disabling out of scope variables" "DeferredOutOfScopeVariables" $ \doc -> do + _ <- waitForDiagnosticsFrom doc + cas <- map fromAction <$> getAllCodeActions doc + liftIO $ "Disable \"deferred-out-of-scope-variables\" warnings" `notElem` map (^. L.title) cas @? "Doesn't contain deferred-out-of-scope-variables code action" + liftIO $ length cas == 0 @? "Expected no code actions, but got: " <> show cas + ] + +completionTests :: TestTree +completionTests = + testGroup "completions" + [ completionTest "completes pragmas" "Completion.hs" "" "LANGUAGE" (Just InsertTextFormat_Snippet) (Just "LANGUAGE ${1:extension} #-}") (Just "{-# LANGUAGE #-}") (0, 4, 0, 34, 0, 4) + , completionTest "completes pragmas with existing closing pragma bracket" "Completion.hs" "" "LANGUAGE" (Just InsertTextFormat_Snippet) (Just "LANGUAGE ${1:extension}") (Just "{-# LANGUAGE #-}") (0, 4, 0, 31, 0, 4) + , completionTest "completes pragmas with existing closing comment bracket" "Completion.hs" "" "LANGUAGE" (Just InsertTextFormat_Snippet) (Just "LANGUAGE ${1:extension} #") (Just "{-# LANGUAGE #-}") (0, 4, 0, 32, 0, 4) + , completionTest "completes pragmas with existing closing bracket" "Completion.hs" "" "LANGUAGE" (Just InsertTextFormat_Snippet) (Just "LANGUAGE ${1:extension} #-") (Just "{-# LANGUAGE #-}") (0, 4, 0, 33, 0, 4) + , completionTest "completes options pragma" "Completion.hs" "OPTIONS" "OPTIONS_GHC" (Just InsertTextFormat_Snippet) (Just "OPTIONS_GHC -${1:option} #-}") (Just "{-# OPTIONS_GHC #-}") (0, 4, 0, 34, 0, 4) + , completionTest "completes ghc options pragma values" "Completion.hs" "{-# OPTIONS_GHC -Wno-red #-}\n" "-Wno-redundant-constraints" Nothing Nothing Nothing (0, 0, 0, 0, 0, 24) + , completionTest "completes ghc options pragma values with multiple dashes" "Completion.hs" "{-# OPTIONS_GHC -fmax-worker-ar #-}\n" "-fmax-worker-args" Nothing Nothing Nothing (0, 0, 0, 0, 0, 31) + , completionTest "completes multiple ghc options within single pragma" "Completion.hs" "{-# OPTIONS_GHC -ddump-simpl -ddump-spl #-}\n" "-ddump-splices" Nothing Nothing Nothing (0, 0, 0, 0, 0, 39) + , completionTest "completes language extensions" "Completion.hs" "" "OverloadedStrings" Nothing Nothing Nothing (0, 24, 0, 31, 0, 24) + , completionTest "completes language extensions case insensitive" "Completion.hs" "lAnGuaGe Overloaded" "OverloadedStrings" Nothing Nothing Nothing (0, 4, 0, 34, 0, 24) + , completionTest "completes the Strict language extension" "Completion.hs" "Str" "Strict" Nothing Nothing Nothing 
(0, 13, 0, 31, 0, 16) + , completionTest "completes No- language extensions" "Completion.hs" "NoOverload" "NoOverloadedStrings" Nothing Nothing Nothing (0, 13, 0, 31, 0, 23) + , completionTest "completes GHC2021 extensions" "Completion.hs" "ghc" "GHC2021" Nothing Nothing Nothing (0, 13, 0, 31, 0, 16) + ] + +completionSnippetTests :: TestTree +completionSnippetTests = + testGroup "expand snippet to pragma" $ + validPragmas <&> + (\(insertText, label, detail, appearWhere) -> + let inputPrefix = + case appearWhere of + NewLine -> "" + CanInline -> "something " + input = inputPrefix <> (T.toLower $ T.init label) + in completionTest (T.unpack label) + "Completion.hs" input label (Just InsertTextFormat_Snippet) + (Just $ "{-# " <> insertText <> " #-}") (Just detail) + (0, 0, 0, 34, 0, fromIntegral $ T.length input)) + +dontSuggestCompletionTests :: TestTree +dontSuggestCompletionTests = + testGroup "do not suggest pragmas" $ + let replaceFuncBody newBody = Just $ mkEdit (8,6) (8,8) newBody + writeInEmptyLine txt = Just $ mkEdit (3,0) (3,0) txt + generalTests = [ provideNoCompletionsTest "in imports" "Completion.hs" (Just $ mkEdit (3,0) (3,0) "import WA") (Position 3 8) + , provideNoCompletionsTest "when no word has been typed" "Completion.hs" Nothing (Position 3 0) + , provideNoCompletionsTest "when expecting auto complete on modules" "Completion.hs" (Just $ mkEdit (8,6) (8,8) "Data.Maybe.WA") (Position 8 19) + ] + individualPragmaTests = validPragmas <&> \(_insertText,label,_detail,appearWhere) -> + let completionPrompt = T.toLower $ T.init label + promptLen = fromIntegral (T.length completionPrompt) + in case appearWhere of + CanInline -> + provideNoUndesiredCompletionsTest ("at new line: " <> T.unpack label) "Completion.hs" (Just label) (writeInEmptyLine completionPrompt) (Position 3 0) + NewLine -> + provideNoUndesiredCompletionsTest ("inline: " <> T.unpack label) "Completion.hs" (Just label) (replaceFuncBody completionPrompt) (Position 8 (6 + promptLen)) + in generalTests ++ individualPragmaTests + +mkEdit :: (UInt,UInt) -> (UInt,UInt) -> T.Text -> TextEdit +mkEdit (startLine, startCol) (endLine, endCol) newText = + TextEdit (Range (Position startLine startCol) (Position endLine endCol)) newText + +completionTest :: String -> FilePath -> T.Text -> T.Text -> Maybe InsertTextFormat -> Maybe T.Text -> Maybe T.Text -> (UInt, UInt, UInt, UInt, UInt, UInt) -> TestTree +completionTest testComment fileName replacementText expectedLabel expectedFormat expectedInsertText detail (delFromLine, delFromCol, delToLine, delToCol, completeAtLine, completeAtCol) = + testCase testComment $ runSessionWithServer def pragmasCompletionPlugin testDataDir $ do + doc <- openDoc fileName "haskell" + _ <- waitForDiagnostics + let te = TextEdit (Range (Position delFromLine delFromCol) (Position delToLine delToCol)) replacementText + _ <- applyEdit doc te + compls <- getCompletions doc (Position completeAtLine completeAtCol) + item <- getCompletionByLabel expectedLabel compls + liftIO $ do + item ^. L.label @?= expectedLabel + item ^. L.kind @?= Just CompletionItemKind_Keyword + item ^. L.insertTextFormat @?= expectedFormat + item ^. L.insertText @?= expectedInsertText + item ^. 
L.detail @?= detail + +provideNoCompletionsTest :: String -> FilePath -> Maybe TextEdit -> Position -> TestTree +provideNoCompletionsTest testComment fileName mTextEdit pos = + provideNoUndesiredCompletionsTest testComment fileName Nothing mTextEdit pos + +provideNoUndesiredCompletionsTest :: String -> FilePath -> Maybe T.Text -> Maybe TextEdit -> Position -> TestTree +provideNoUndesiredCompletionsTest testComment fileName mUndesiredLabel mTextEdit pos = + testCase testComment $ runSessionWithServer def pragmasCompletionPlugin testDataDir $ do + setConfigSection "haskell" disableGhcideCompletions + doc <- openDoc fileName "haskell" + _ <- waitForDiagnostics + mapM_ (applyEdit doc) mTextEdit + compls <- getCompletions doc pos + liftIO $ case mUndesiredLabel of + Nothing -> compls @?= [] + Just undesiredLabel -> do + case find (\c -> c ^. L.label == undesiredLabel) compls of + Just c -> assertFailure $ + "Did not expect a completion with label=" <> T.unpack undesiredLabel + <> ", got completion: " <> show c + Nothing -> pure () + +disableGhcideCompletions :: Value +disableGhcideCompletions = object [ "plugin" .= object [ "ghcide-completions" .= object ["globalOn" .= False]]] + +goldenWithPragmas :: PluginTestDescriptor () -> TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithPragmas descriptor title path = goldenWithHaskellDoc def descriptor title testDataDir path "expected" "hs" + +testDataDir :: FilePath +testDataDir = "plugins" </> "hls-pragmas-plugin" </> "test" </> "testdata" diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterInterchaningIgnoringLaterAnn.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterInterchaningIgnoringLaterAnn.expected.hs new file mode 100644 index 0000000000..43857d29e9 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterInterchaningIgnoringLaterAnn.expected.hs @@ -0,0 +1,18 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +{-# LANGUAGE BangPatterns #-} + +data Metaprogram = Metaprogram + { mp_name :: !Text + , mp_known_by_auto :: !Bool + , mp_show_code_action :: !Bool + , mp_program :: !(TacticsM ()) + } + deriving stock Generic +{-# ANN Metaprogram "hello" #-} + +instance NFData Metaprogram where + rnf (!(Metaprogram !_ !_ !_ !_)) = () diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterInterchaningIgnoringLaterAnn.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterInterchaningIgnoringLaterAnn.hs new file mode 100644 index 0000000000..aeebfe6da8 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterInterchaningIgnoringLaterAnn.hs @@ -0,0 +1,17 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} + +data Metaprogram = Metaprogram + { mp_name :: !Text + , mp_known_by_auto :: !Bool + , mp_show_code_action :: !Bool + , mp_program :: !(TacticsM ()) + } + deriving stock Generic +{-# ANN Metaprogram "hello" #-} + +instance NFData Metaprogram where + rnf (!(Metaprogram !_ !_ !_ !_)) = () diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterLanguageThenOptsGhc.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterLanguageThenOptsGhc.expected.hs new file mode 100644 index 0000000000..30b994895f --- /dev/null +++
b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterLanguageThenOptsGhc.expected.hs @@ -0,0 +1,21 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +{-# LANGUAGE NamedFieldPuns #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +test :: Int -> Integer +test x = x * 2 + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterLanguageThenOptsGhc.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterLanguageThenOptsGhc.hs new file mode 100644 index 0000000000..db7b48c1fe --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguageAfterLanguageThenOptsGhc.hs @@ -0,0 +1,20 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +test :: Int -> Integer +test x = x * 2 + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragma.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragma.expected.hs new file mode 100644 index 0000000000..74f146f15e --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragma.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TupleSections #-} + +module NeedsLanguagePragma where + +tupleSection = (1,) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragma.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragma.hs new file mode 100644 index 0000000000..2f29ee4e13 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragma.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} + +module NeedsLanguagePragma where + +tupleSection = (1,) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragmaAfterInterchaningOptsGhcAndLangs.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragmaAfterInterchaningOptsGhcAndLangs.expected.hs new file mode 100644 index 0000000000..c47973d46b --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragmaAfterInterchaningOptsGhcAndLangs.expected.hs @@ -0,0 +1,21 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 + +{-# INLINE subOne #-} +subOne :: Int -> Int +subOne x = x - 1 + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragmaAfterInterchaningOptsGhcAndLangs.hs b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragmaAfterInterchaningOptsGhcAndLangs.hs new file mode 100644 index 0000000000..f33ddea50a --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddLanguagePragmaAfterInterchaningOptsGhcAndLangs.hs @@ -0,0 
+1,20 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 + +{-# INLINE subOne #-} +subOne :: Int -> Int +subOne x = x - 1 + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddOptsGhcAfterLanguage.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddOptsGhcAfterLanguage.expected.hs new file mode 100644 index 0000000000..aec9879630 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddOptsGhcAfterLanguage.expected.hs @@ -0,0 +1,12 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +test :: Int -> Integer +test x = x * 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddOptsGhcAfterLanguage.hs b/plugins/hls-pragmas-plugin/test/testdata/AddOptsGhcAfterLanguage.hs new file mode 100644 index 0000000000..98b4f63969 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddOptsGhcAfterLanguage.hs @@ -0,0 +1,11 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +test :: Int -> Integer +test x = x * 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterOptsGhcIgnoreInline.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterOptsGhcIgnoreInline.expected.hs new file mode 100644 index 0000000000..b726b9e2e7 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterOptsGhcIgnoreInline.expected.hs @@ -0,0 +1,12 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE TupleSections #-} +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterOptsGhcIgnoreInline.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterOptsGhcIgnoreInline.hs new file mode 100644 index 0000000000..19b9b23df7 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterOptsGhcIgnoreInline.hs @@ -0,0 +1,11 @@ +{-# OPTIONS_GHC -Wall #-} +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLangAndOptsGhc.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLangAndOptsGhc.expected.hs new file mode 100644 index 0000000000..b29fc22fd8 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLangAndOptsGhc.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLangAndOptsGhc.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLangAndOptsGhc.hs new file mode 100644 index 0000000000..ddbd5a600c --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLangAndOptsGhc.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLanguage.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLanguage.expected.hs new file mode 100644 index 0000000000..4d6e3afa50 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLanguage.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE OverloadedStrings #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLanguage.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLanguage.hs new file mode 100644 index 0000000000..348868fe42 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaAfterShebangPrecededByLanguage.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} +#! /usr/bin/env nix-shell +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreInline.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreInline.expected.hs new file mode 100644 index 0000000000..c98ef3d206 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreInline.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TupleSections #-} +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreInline.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreInline.hs new file mode 100644 index 0000000000..cfe3fb872a --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreInline.hs @@ -0,0 +1,10 @@ +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreLaterAnnPragma.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreLaterAnnPragma.expected.hs new file mode 100644 index 0000000000..fc0d14f4e0 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreLaterAnnPragma.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE BangPatterns #-} +data Metaprogram = Metaprogram + { mp_name :: !Text + , mp_known_by_auto :: !Bool + , mp_show_code_action :: !Bool + , mp_program :: !(TacticsM ()) + } + deriving stock Generic +{-# ANN Metaprogram "hello" #-} + +instance NFData Metaprogram where + rnf (!(Metaprogram !_ !_ !_ !_)) = () diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreLaterAnnPragma.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreLaterAnnPragma.hs new file mode 100644 index 0000000000..1a0ca043da --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaIgnoreLaterAnnPragma.hs @@ -0,0 +1,11 @@ +data Metaprogram = Metaprogram + { mp_name :: !Text + , mp_known_by_auto :: !Bool + , mp_show_code_action :: !Bool + , mp_program :: !(TacticsM ()) + } + deriving stock Generic +{-# ANN Metaprogram "hello" #-} + +instance NFData Metaprogram where + rnf (!(Metaprogram !_ !_ !_ !_)) = () diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaWithNonStandardSpacingInPrecedingPragmas.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaWithNonStandardSpacingInPrecedingPragmas.expected.hs new file mode 100644 index 0000000000..943ce77dd5 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaWithNonStandardSpacingInPrecedingPragmas.expected.hs @@ -0,0 +1,6 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +{-# LANGUAGE TupleSections #-} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AddPragmaWithNonStandardSpacingInPrecedingPragmas.hs b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaWithNonStandardSpacingInPrecedingPragmas.hs new file mode 100644 index 0000000000..d04d545368 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AddPragmaWithNonStandardSpacingInPrecedingPragmas.hs @@ -0,0 +1,5 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} + 
+tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterAllWithMultipleInlines.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterAllWithMultipleInlines.expected.hs new file mode 100644 index 0000000000..4b9adb0269 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterAllWithMultipleInlines.expected.hs @@ -0,0 +1,22 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 + +{-# INLINE subOne #-} +subOne :: Int -> Int +subOne x = x - 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterAllWithMultipleInlines.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterAllWithMultipleInlines.hs new file mode 100644 index 0000000000..d05bc2088b --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterAllWithMultipleInlines.hs @@ -0,0 +1,21 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 + +{-# INLINE subOne #-} +subOne :: Int -> Int +subOne x = x - 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterGhcOptions.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterGhcOptions.expected.hs new file mode 100644 index 0000000000..c17952a575 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterGhcOptions.expected.hs @@ -0,0 +1,18 @@ +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 + +{-# INLINE subOne #-} +subOne :: Int -> Int +subOne x = x - 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterGhcOptions.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterGhcOptions.hs new file mode 100644 index 0000000000..cf1c45eaf3 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterGhcOptions.hs @@ -0,0 +1,17 @@ +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 + +{-# INLINE subOne #-} +subOne :: Int -> Int +subOne x = x - 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebang.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebang.expected.hs new file mode 100644 index 0000000000..674522f897 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebang.expected.hs @@ -0,0 +1,13 @@ +#! /usr/bin/env nix-shell +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# LANGUAGE NamedFieldPuns #-} + +module AfterShebang where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebang.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebang.hs new file mode 100644 index 0000000000..7abb60bc4b --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebang.hs @@ -0,0 +1,12 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +module AfterShebang where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragma.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragma.expected.hs new file mode 100644 index 0000000000..67f8957d83 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragma.expected.hs @@ -0,0 +1,13 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragma.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragma.hs new file mode 100644 index 0000000000..284bf8e015 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragma.hs @@ -0,0 +1,12 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE OverloadedStrings #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragmasIgnoreInline.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragmasIgnoreInline.expected.hs new file mode 100644 index 0000000000..c06d6640e8 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragmasIgnoreInline.expected.hs @@ -0,0 +1,18 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragmasIgnoreInline.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragmasIgnoreInline.hs new file mode 100644 index 0000000000..1fd3c6d1e4 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOptionsAndPragmasIgnoreInline.hs @@ -0,0 +1,17 @@ +#! /usr/bin/env nix-shell +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# INLINE addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOpts.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOpts.expected.hs new file mode 100644 index 0000000000..317750eb89 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOpts.expected.hs @@ -0,0 +1,12 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +{-# LANGUAGE TupleSections #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOpts.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOpts.hs new file mode 100644 index 0000000000..0c14945395 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndOpts.hs @@ -0,0 +1,11 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} + +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndPragma.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndPragma.expected.hs new file mode 100644 index 0000000000..6c7c802d23 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndPragma.expected.hs @@ -0,0 +1,16 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NamedFieldPuns #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndPragma.hs b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndPragma.hs new file mode 100644 index 0000000000..fcae5211d7 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AfterShebangAndPragma.hs @@ -0,0 +1,15 @@ +#! /usr/bin/env nix-shell +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# LANGUAGE OverloadedStrings #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AppendToExisting.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/AppendToExisting.expected.hs new file mode 100644 index 0000000000..46c37a9ffa --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AppendToExisting.expected.hs @@ -0,0 +1,12 @@ +-- | Doc before pragma +{-# OPTIONS_GHC -Wno-dodgy-imports #-} +{-# LANGUAGE NamedFieldPuns #-} +module AppendToExisting where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/AppendToExisting.hs b/plugins/hls-pragmas-plugin/test/testdata/AppendToExisting.hs new file mode 100644 index 0000000000..2beb29aab4 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/AppendToExisting.hs @@ -0,0 +1,11 @@ +-- | Doc before pragma +{-# OPTIONS_GHC -Wno-dodgy-imports #-} +module AppendToExisting where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/BeforeDocComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocComment.expected.hs new file mode 100644 index 0000000000..e5201b9892 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocComment.expected.hs @@ -0,0 +1,15 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# LANGUAGE NamedFieldPuns #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/BeforeDocComment.hs b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocComment.hs new file mode 100644 index 0000000000..aacabf2d3c --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocComment.hs @@ -0,0 +1,14 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/BeforeDocInterchanging.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocInterchanging.expected.hs new file mode 100644 index 0000000000..26ad7a44c1 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocInterchanging.expected.hs @@ -0,0 +1,18 @@ +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +{-# LANGUAGE NamedFieldPuns #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/BeforeDocInterchanging.hs b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocInterchanging.hs new file mode 100644 index 0000000000..60b7c69bd8 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BeforeDocInterchanging.hs @@ -0,0 +1,17 @@ +#! 
/usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-deferred-type-errors #-} +-- | Doc Comment +{- Block -} + +module BeforeDocComment where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineComment.expected.hs new file mode 100644 index 0000000000..63bfdcc6f1 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineComment.expected.hs @@ -0,0 +1,6 @@ +{- block comment -} -- line comment +{-# LANGUAGE TupleSections #-} + +module BlockCommentThenLineComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineComment.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineComment.hs new file mode 100644 index 0000000000..57f1a06c95 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineComment.hs @@ -0,0 +1,5 @@ +{- block comment -} -- line comment + +module BlockCommentThenLineComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineHaddock.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineHaddock.expected.hs new file mode 100644 index 0000000000..052321006e --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineHaddock.expected.hs @@ -0,0 +1,7 @@ +{- block comment -} +{-# LANGUAGE TupleSections #-} +-- | line haddock + +module BlockCommentThenLineHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineHaddock.hs new file mode 100644 index 0000000000..bcfa9069c6 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenLineHaddock.hs @@ -0,0 +1,5 @@ +{- block comment -} -- | line haddock + +module BlockCommentThenLineHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockComment.expected.hs new file mode 100644 index 0000000000..e1cebffbd7 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockComment.expected.hs @@ -0,0 +1,10 @@ +{- block comment -} {- multi +line +block +comment +-} +{-# LANGUAGE TupleSections #-} + +module BlockCommentThenMultiLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockComment.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockComment.hs new file mode 100644 index 0000000000..8eb3f0a92d --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockComment.hs @@ -0,0 +1,9 @@ +{- block comment -} {- multi +line +block +comment +-} + +module BlockCommentThenMultiLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockHaddock.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockHaddock.expected.hs new file mode 100644 index 0000000000..99b111b37b --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockHaddock.expected.hs @@ -0,0 +1,13 @@ +{- block comment -} +{-# LANGUAGE 
TupleSections #-} +{-| multi +line +block +haddock +-} + +module BlockCommentThenMultiLineBlockHaddock where +import Data.List (intercalate) +import System.IO (hFlush) + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockHaddock.hs new file mode 100644 index 0000000000..f8e118dd54 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenMultiLineBlockHaddock.hs @@ -0,0 +1,11 @@ +{- block comment -} {-| multi +line +block +haddock +-} + +module BlockCommentThenMultiLineBlockHaddock where +import Data.List (intercalate) +import System.IO (hFlush) + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockComment.expected.hs new file mode 100644 index 0000000000..2e9e5c8781 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockComment.expected.hs @@ -0,0 +1,6 @@ +{- block comment -} {- single line block comment -} +{-# LANGUAGE TupleSections #-} + +module BlockCommentThenSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockComment.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockComment.hs new file mode 100644 index 0000000000..f8f6c0158d --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockComment.hs @@ -0,0 +1,5 @@ +{- block comment -} {- single line block comment -} + +module BlockCommentThenSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddock.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddock.expected.hs new file mode 100644 index 0000000000..04e664101c --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddock.expected.hs @@ -0,0 +1,7 @@ +{- block comment -} +{-# LANGUAGE TupleSections #-} +{-| single line block haddock -} + +module BlockCommentThenSingleLineBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddock.hs new file mode 100644 index 0000000000..3bbb81ef04 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddock.hs @@ -0,0 +1,5 @@ +{- block comment -} {-| single line block haddock -} + +module BlockCommentThenSingleLineBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment.expected.hs new file mode 100644 index 0000000000..aa886340b9 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment.expected.hs @@ -0,0 +1,7 @@ +{- block comment -} +{-# LANGUAGE TupleSections #-} +{-| single line block haddock -} {- single line block comment -} + +module BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment.hs 
b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment.hs new file mode 100644 index 0000000000..b28de4d0dc --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment.hs @@ -0,0 +1,5 @@ +{- block comment -} {-| single line block haddock -} {- single line block comment -} + +module BlockCommentThenSingleLineBlockHaddockSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/Completion.hs b/plugins/hls-pragmas-plugin/test/testdata/Completion.hs new file mode 100644 index 0000000000..9427f3dc03 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/Completion.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} +import Data.Maybe +import qualified Data.List + +main :: IO () +main = putStrLn "hello" + +foo :: Either a b -> Either a b +foo = id diff --git a/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.expected.hs new file mode 100644 index 0000000000..38d17261dc --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.expected.hs @@ -0,0 +1,5 @@ +module DeferredOutOfScopeVariables where + +f :: () +f = let x = Doesn'tExist + in undefined diff --git a/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.hs b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.hs new file mode 100644 index 0000000000..38d17261dc --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/DeferredOutOfScopeVariables.hs @@ -0,0 +1,5 @@ +module DeferredOutOfScopeVariables where + +f :: () +f = let x = Doesn'tExist + in undefined diff --git a/plugins/hls-pragmas-plugin/test/testdata/DeferredTypeErrors.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/DeferredTypeErrors.expected.hs new file mode 100644 index 0000000000..a0e2fd6162 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/DeferredTypeErrors.expected.hs @@ -0,0 +1,4 @@ +module DeferredTypeErrors where + +foo :: Int +foo = () diff --git a/plugins/hls-pragmas-plugin/test/testdata/DeferredTypeErrors.hs b/plugins/hls-pragmas-plugin/test/testdata/DeferredTypeErrors.hs new file mode 100644 index 0000000000..a0e2fd6162 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/DeferredTypeErrors.hs @@ -0,0 +1,4 @@ +module DeferredTypeErrors where + +foo :: Int +foo = () diff --git a/plugins/hls-pragmas-plugin/test/testdata/MissingSignatures.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/MissingSignatures.expected.hs new file mode 100644 index 0000000000..4db5b18f68 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/MissingSignatures.expected.hs @@ -0,0 +1,3 @@ +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-missing-signatures #-} +main = putStrLn "hello" diff --git a/plugins/hls-pragmas-plugin/test/testdata/MissingSignatures.hs b/plugins/hls-pragmas-plugin/test/testdata/MissingSignatures.hs new file mode 100644 index 0000000000..9d2f668112 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/MissingSignatures.hs @@ -0,0 +1,2 @@ +{-# OPTIONS_GHC -Wall #-} +main = putStrLn "hello" diff --git a/plugins/hls-pragmas-plugin/test/testdata/ModuleOnFirstLine.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/ModuleOnFirstLine.expected.hs new file mode 100644 index 0000000000..522c7c78b7 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/ModuleOnFirstLine.expected.hs @@ -0,0 +1,4 @@ +{-# 
LANGUAGE TupleSections #-} +module Main where + +tupleSection = (1,) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/ModuleOnFirstLine.hs b/plugins/hls-pragmas-plugin/test/testdata/ModuleOnFirstLine.hs new file mode 100644 index 0000000000..1234c57c27 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/ModuleOnFirstLine.hs @@ -0,0 +1,3 @@ +module Main where + +tupleSection = (1,) <$> Just 2 diff --git a/plugins/hls-pragmas-plugin/test/testdata/NamedFieldPuns.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/NamedFieldPuns.expected.hs new file mode 100644 index 0000000000..4de5a51e2b --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/NamedFieldPuns.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE NamedFieldPuns #-} +module NamedFieldPuns where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/NamedFieldPuns.hs b/plugins/hls-pragmas-plugin/test/testdata/NamedFieldPuns.hs new file mode 100644 index 0000000000..6651685e70 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/NamedFieldPuns.hs @@ -0,0 +1,9 @@ +module NamedFieldPuns where + +data Record = Record + { a :: Int, + b :: Double, + c :: String + } + +f Record{a, b} = a diff --git a/plugins/hls-pragmas-plugin/test/testdata/NeedsPragmas.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/NeedsPragmas.expected.hs new file mode 100644 index 0000000000..668e381a9f --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/NeedsPragmas.expected.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE TypeSynonymInstances #-} +module NeedsPragmas where + +import GHC.Generics + +main = putStrLn "hello" + +type Foo = Int + +instance Show Foo where + show x = undefined + +instance Show (Int,String) where + show = undefined + +data FFF a = FFF Int String a + deriving (Generic,Functor,Traversable) diff --git a/test/testdata/addPragmas/NeedsPragmas.hs b/plugins/hls-pragmas-plugin/test/testdata/NeedsPragmas.hs similarity index 90% rename from test/testdata/addPragmas/NeedsPragmas.hs rename to plugins/hls-pragmas-plugin/test/testdata/NeedsPragmas.hs index e82ad67ec2..18a8853972 100644 --- a/test/testdata/addPragmas/NeedsPragmas.hs +++ b/plugins/hls-pragmas-plugin/test/testdata/NeedsPragmas.hs @@ -1,3 +1,4 @@ +module NeedsPragmas where import GHC.Generics diff --git a/plugins/hls-pragmas-plugin/test/testdata/OptionsGhcAfterDecl.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/OptionsGhcAfterDecl.expected.hs new file mode 100644 index 0000000000..21da00f779 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/OptionsGhcAfterDecl.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TupleSections #-} +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# OPTIONS_GHC addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/OptionsGhcAfterDecl.hs b/plugins/hls-pragmas-plugin/test/testdata/OptionsGhcAfterDecl.hs new file mode 100644 index 0000000000..c61dc893a3 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/OptionsGhcAfterDecl.hs @@ -0,0 +1,10 @@ +data Something = Something { + foo :: !String, + bar :: !Int +} + +tupleSection = (1, ) <$> Just 2 + +{-# OPTIONS_GHC addOne #-} +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaFollowedBySingleLineBlockHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaFollowedBySingleLineBlockHaddock.hs new file mode 100644 index 
0000000000..558bc744be --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaFollowedBySingleLineBlockHaddock.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} {-| haddock -} + +module PragmaFollowedByBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineComment.expected.hs new file mode 100644 index 0000000000..7c78855a5c --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineComment.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TypeApplications #-} -- line comment +{-# LANGUAGE TupleSections #-} + +module PragmaThenLineComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineComment.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineComment.hs new file mode 100644 index 0000000000..9120cc3e31 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineComment.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} -- line comment + +module PragmaThenLineComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddock.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddock.expected.hs new file mode 100644 index 0000000000..07fba12351 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddock.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TupleSections #-} +-- | line haddock + +module PragmaThenLineHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddock.hs new file mode 100644 index 0000000000..fa58dbd564 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddock.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} -- | line haddock + +module PragmaThenLineHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddockNewlineLineComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddockNewlineLineComment.expected.hs new file mode 100644 index 0000000000..e9a671727f --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddockNewlineLineComment.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TupleSections #-} +-- | line haddock +-- line comment + +module PragmaThenLineHaddockNewlineLineComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddockNewlineLineComment.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddockNewlineLineComment.hs new file mode 100644 index 0000000000..ea4f3d0ee9 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenLineHaddockNewlineLineComment.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TypeApplications #-} -- | line haddock +-- line comment + +module PragmaThenLineHaddockNewlineLineComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockComment.expected.hs new file mode 100644 index 0000000000..0003b1834b --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockComment.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TupleSections #-} +{- multi +line +block +comment +-} + +module PragmaThenSingleLineBlockComment where + +a = (1,) diff --git 
a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockComment.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockComment.hs new file mode 100644 index 0000000000..dd3605dd46 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockComment.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE TypeApplications #-} {- multi +line +block +comment +-} + +module PragmaThenSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockHaddock.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockHaddock.expected.hs new file mode 100644 index 0000000000..cac02d6617 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockHaddock.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TupleSections #-} +{-| multi +line +block +haddock +-} + +module PragmaThenMultiLineBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockHaddock.hs new file mode 100644 index 0000000000..b51f8af6d3 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenMultiLineBlockHaddock.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE TypeApplications #-} {-| multi +line +block +haddock +-} + +module PragmaThenMultiLineBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockComment.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockComment.expected.hs new file mode 100644 index 0000000000..d4dddb9134 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockComment.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TypeApplications #-} {- single line block comment -} +{-# LANGUAGE TupleSections #-} + +module PragmaThenSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockComment.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockComment.hs new file mode 100644 index 0000000000..0fe715bfb1 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockComment.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} {- single line block comment -} + +module PragmaThenSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddock.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddock.expected.hs new file mode 100644 index 0000000000..9553c23ef8 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddock.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TupleSections #-} +{-| single line block haddock -} + +module PragmaThenSingleLineBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddock.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddock.hs new file mode 100644 index 0000000000..5cf8336bf4 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddock.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} {-| single line block haddock -} + +module PragmaThenSingleLineBlockHaddock where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddockSingleLineBlockComment.expected.hs 
b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddockSingleLineBlockComment.expected.hs new file mode 100644 index 0000000000..459f1a83d5 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddockSingleLineBlockComment.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TupleSections #-} +{-| single line block haddock -} {- single line block comment -} + +module PragmaThenSingleLineBlockHaddockSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddockSingleLineBlockComment.hs b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddockSingleLineBlockComment.hs new file mode 100644 index 0000000000..5de9892518 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/PragmaThenSingleLineBlockHaddockSingleLineBlockComment.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TypeApplications #-} {-| single line block haddock -} {- single line block comment -} + +module PragmaThenSingleLineBlockHaddockSingleLineBlockComment where + +a = (1,) diff --git a/plugins/hls-pragmas-plugin/test/testdata/TypeApplications.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/TypeApplications.expected.hs new file mode 100644 index 0000000000..cdbba9c0cb --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/TypeApplications.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +module TypeApplications where + +foo :: forall a. a -> a +foo = id @a diff --git a/plugins/hls-pragmas-plugin/test/testdata/TypeApplications.hs b/plugins/hls-pragmas-plugin/test/testdata/TypeApplications.hs new file mode 100644 index 0000000000..55e56bdd9d --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/TypeApplications.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE ScopedTypeVariables #-} +module TypeApplications where + +foo :: forall a. a -> a +foo = id @a diff --git a/plugins/hls-pragmas-plugin/test/testdata/UnusedImports.expected.hs b/plugins/hls-pragmas-plugin/test/testdata/UnusedImports.expected.hs new file mode 100644 index 0000000000..a18d6959e9 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/UnusedImports.expected.hs @@ -0,0 +1,7 @@ +{-# OPTIONS_GHC -Wall #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} + + +module M where + +import Data.Functor diff --git a/plugins/hls-pragmas-plugin/test/testdata/UnusedImports.hs b/plugins/hls-pragmas-plugin/test/testdata/UnusedImports.hs new file mode 100644 index 0000000000..9d49c0c95d --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/UnusedImports.hs @@ -0,0 +1,6 @@ +{-# OPTIONS_GHC -Wall #-} + + +module M where + +import Data.Functor diff --git a/plugins/hls-pragmas-plugin/test/testdata/hie.yaml b/plugins/hls-pragmas-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..7b4d912951 --- /dev/null +++ b/plugins/hls-pragmas-plugin/test/testdata/hie.yaml @@ -0,0 +1,7 @@ +cradle: + direct: + arguments: + - "-XHaskell2010" + - "NeedsPragmas" + - "TypeApplications" + - "NamedFieldPuns" diff --git a/plugins/hls-qualify-imported-names-plugin/README.md b/plugins/hls-qualify-imported-names-plugin/README.md new file mode 100644 index 0000000000..caab371ec9 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/README.md @@ -0,0 +1,52 @@ +# Qualify Imported Names + +![Qualify Imported Names Demo](qualify-imported-names-demo.gif) + +## Summary + +Rewrite imported names to be qualified. 
+
+## Motivation
+
+You've imported a number of modules and have written a lot of code with unqualified names. You want to import a new module, but you know there will be a number of name clashes, so you want to switch your current uses of those names to qualified form.
+
+It would be nice if you could change
+```
+import Blah
+```
+to
+```
+import Blah as Bloo
+```
+then magically qualify all the previous names imported from `Blah` with `Bloo`. After doing that you could then change
+```
+import Blah as Bloo
+```
+to
+```
+import qualified Blah as Bloo
+```
+and import the new module, using names from it without worry.
+
+Well, now you can...
+
+## Usage
+
+1. Put the cursor over the import declaration you want to qualify names from.
+2. Initiate a Code Action.
+3. Select `Qualify imported names`.
+
+## Features
+- Names are qualified on a per-import-declaration basis.
+- Names are qualified by the imported module's alias if it has one, otherwise by the imported module's name.
+- If the import declaration has an explicit import list, the plugin qualifies only the names on that list.
+- If the import declaration has an explicit hiding list, the plugin qualifies the names from the imported module that are not on that list.
+
+## Future possibilities
+- It may be possible to use the LSP rename functionality to ask for a name so that we don't have to do the `as Alias` dance.
+
+## Change log
+### 1.0.0.1
+- GHC 9.2.1 compatibility
+### 1.0.0.0
+- Released...
diff --git a/plugins/hls-qualify-imported-names-plugin/qualify-imported-names-demo.gif b/plugins/hls-qualify-imported-names-plugin/qualify-imported-names-demo.gif
new file mode 100644
index 0000000000..197a1eee49
Binary files /dev/null and b/plugins/hls-qualify-imported-names-plugin/qualify-imported-names-demo.gif differ
diff --git a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs
new file mode 100644
index 0000000000..6917d0a7a9
--- /dev/null
+++ b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs
@@ -0,0 +1,243 @@
+{-# LANGUAGE CPP               #-}
+{-# LANGUAGE MultiWayIf        #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE PatternSynonyms   #-}
+{-# LANGUAGE RecordWildCards   #-}
+{-# LANGUAGE ViewPatterns      #-}
+
+module Ide.Plugin.QualifyImportedNames (descriptor) where
+
+import           Control.Lens                     ((^.))
+import           Control.Monad                    (foldM)
+import           Control.Monad.Trans.State.Strict (State)
+import qualified Control.Monad.Trans.State.Strict as State
+import           Data.DList                       (DList)
+import qualified Data.DList                       as DList
+import           Data.Foldable                    (find)
+import           Data.List                        (sortOn)
+import qualified Data.List                        as List
+import qualified Data.Map.Strict                  as Map
+import           Data.Maybe                       (fromMaybe, isJust, mapMaybe)
+import qualified Data.Set                         as Set
+import           Data.Text                        (Text)
+import qualified Data.Text                        as Text
+import qualified Data.Text.Lines                  as Text.Lines
+import           Data.Text.Utf16.Rope.Mixed       (Rope)
+import qualified Data.Text.Utf16.Rope.Mixed       as Rope
+import           Development.IDE                  (spanContainsRange)
+import           Development.IDE.Core.PluginUtils
+import           Development.IDE.Core.RuleTypes   (GetFileContents (GetFileContents),
+                                                   GetHieAst (GetHieAst),
+                                                   HieAstResult (HAR, refMap),
+                                                   TcModuleResult (TcModuleResult, tmrParsed, tmrTypechecked),
+                                                   TypeCheck (TypeCheck))
+import           Development.IDE.Core.Shake       (IdeState)
+import           Development.IDE.GHC.Compat       (GenLocated (..), GhcPs,
+                                                   GlobalRdrElt, GlobalRdrEnv,
+                                                   HsModule (hsmodImports),
+                                                   ImpDeclSpec (ImpDeclSpec, is_as, is_dloc,
is_qual), + ImportSpec (ImpSpec), + LImportDecl, ModuleName, + Name, NameEnv, ParsedModule, + SrcSpan, + TcGblEnv (tcg_rdr_env), + emptyUFM, globalRdrEnvElts, + gre_imp, gre_name, locA, + lookupNameEnv, + moduleNameString, + nameOccName, occNameString, + pattern GRE, + pattern ParsedModule, + plusUFM_C, pm_parsed_source, + srcSpanEndCol, + srcSpanEndLine, + srcSpanStartCol, + srcSpanStartLine, unitUFM) +import Development.IDE.Types.Location (Position (Position), + Range (Range), Uri) +import GHC.Iface.Ext.Types (ContextInfo (..), Identifier, + IdentifierDetails (..), Span) +import GHC.Iface.Ext.Utils (RefMap) +import Ide.Plugin.Error (PluginError (PluginRuleFailed), + getNormalizedFilePathE, + handleMaybe) +import Ide.Types (PluginDescriptor (pluginHandlers), + PluginId, + PluginMethodHandler, + defaultPluginDescriptor, + mkPluginHandler) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (Method (Method_TextDocumentCodeAction), + SMethod (SMethod_TextDocumentCodeAction)) +import Language.LSP.Protocol.Types (CodeAction (CodeAction, _command, _data_, _diagnostics, _disabled, _edit, _isPreferred, _kind, _title), + CodeActionKind (CodeActionKind_QuickFix), + CodeActionParams (CodeActionParams), + TextEdit (TextEdit), + WorkspaceEdit (WorkspaceEdit, _changeAnnotations, _changes, _documentChanges), + type (|?) (InL, InR)) + +#if !MIN_VERSION_base(4,20,0) +import Data.Foldable (foldl') +#endif + +thenCmp :: Ordering -> Ordering -> Ordering +{-# INLINE thenCmp #-} +thenCmp EQ ordering = ordering +thenCmp ordering _ = ordering + +descriptor :: PluginId -> PluginDescriptor IdeState +descriptor pluginId = (defaultPluginDescriptor pluginId "Provides a code action to qualify imported names") { + pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentCodeAction codeActionProvider + ] +} + +findLImportDeclAt :: Range -> ParsedModule -> Maybe (LImportDecl GhcPs) +findLImportDeclAt range parsedModule + | ParsedModule {..} <- parsedModule + , L _ hsModule <- pm_parsed_source + , locatedImportDecls <- hsmodImports hsModule = + find (\ (L (locA -> srcSpan) _) -> fromMaybe False $ srcSpan `spanContainsRange` range) locatedImportDecls + +makeCodeActions :: Uri -> [TextEdit] -> [a |? CodeAction] +makeCodeActions uri textEdits = [InR CodeAction {..} | not (null textEdits)] + where _title = "Qualify imported names" + _kind = Just CodeActionKind_QuickFix + _command = Nothing + _edit = Just WorkspaceEdit {..} + _changes = Just $ Map.singleton uri textEdits + _documentChanges = Nothing + _diagnostics = Nothing + _isPreferred = Nothing + _disabled = Nothing + _data_ = Nothing + _changeAnnotations = Nothing + +data ImportedBy = ImportedBy { + importedByAlias :: !ModuleName, + importedBySrcSpan :: !SrcSpan +} + +isRangeWithinImportedBy :: Range -> ImportedBy -> Bool +isRangeWithinImportedBy range ImportedBy{importedBySrcSpan} = fromMaybe False $ spanContainsRange importedBySrcSpan range + +globalRdrEnvToNameToImportedByMap :: GlobalRdrEnv -> NameEnv [ImportedBy] +globalRdrEnvToNameToImportedByMap = + fmap DList.toList . foldl' (plusUFM_C (<>)) emptyUFM . map globalRdrEltToNameToImportedByMap . 
globalRdrEnvElts + where + globalRdrEltToNameToImportedByMap :: GlobalRdrElt -> NameEnv (DList ImportedBy) + globalRdrEltToNameToImportedByMap GRE {..} = + unitUFM gre_name $ DList.fromList $ mapMaybe importSpecToImportedBy gre_imp + + importSpecToImportedBy :: ImportSpec -> Maybe ImportedBy + importSpecToImportedBy (ImpSpec ImpDeclSpec {..} _) + | is_qual = Nothing + | otherwise = Just (ImportedBy is_as is_dloc) + +data IdentifierSpan = IdentifierSpan { + identifierSpanLine :: !Int, + identifierSpanStartCol :: !Int, + identifierSpanEndCol :: !Int +} deriving (Show, Eq) + +instance Ord IdentifierSpan where + compare (IdentifierSpan line1 startCol1 endCol1) (IdentifierSpan line2 startCol2 endCol2) = + (line1 `compare` line2) `thenCmp` (startCol1 `compare` startCol2) `thenCmp` (endCol1 `compare` endCol2) + +realSrcSpanToIdentifierSpan :: Span -> Maybe IdentifierSpan +realSrcSpanToIdentifierSpan realSrcSpan + | let startLine = srcSpanStartLine realSrcSpan - 1 + , let endLine = srcSpanEndLine realSrcSpan - 1 + , startLine == endLine + , let startCol = srcSpanStartCol realSrcSpan - 1 + , let endCol = srcSpanEndCol realSrcSpan - 1 = + Just $ IdentifierSpan startLine startCol endCol + | otherwise = Nothing + +identifierSpanToRange :: IdentifierSpan -> Range +identifierSpanToRange (IdentifierSpan line startCol endCol) = + Range (Position (fromIntegral line) (fromIntegral startCol)) (Position (fromIntegral line) (fromIntegral endCol)) + +data UsedIdentifier = UsedIdentifier { + usedIdentifierName :: !Name, + usedIdentifierSpan :: !IdentifierSpan +} + +refMapToUsedIdentifiers :: RefMap a -> [UsedIdentifier] +refMapToUsedIdentifiers = DList.toList . Map.foldlWithKey' folder DList.empty + where + folder acc identifier spanIdentifierDetailsPairs = + DList.fromList (mapMaybe (uncurry (getUsedIdentifier identifier)) spanIdentifierDetailsPairs) <> acc + + getUsedIdentifier :: Identifier -> Span -> IdentifierDetails a -> Maybe UsedIdentifier + getUsedIdentifier identifier span IdentifierDetails {..} + | Just identifierSpan <- realSrcSpanToIdentifierSpan span + , Right name <- identifier + , Use `Set.member` identInfo = Just $ UsedIdentifier name identifierSpan + | otherwise = Nothing + +updateColOffset :: Int -> Int -> Int -> Int +updateColOffset row lineOffset colOffset + | row == lineOffset = colOffset + | otherwise = 0 + +usedIdentifiersToTextEdits :: Range -> NameEnv [ImportedBy] -> Rope -> [UsedIdentifier] -> [TextEdit] +usedIdentifiersToTextEdits range nameToImportedByMap source usedIdentifiers + | let sortedUsedIdentifiers = sortOn usedIdentifierSpan usedIdentifiers = + State.evalState + (makeStateComputation sortedUsedIdentifiers) + (Text.Lines.lines (Rope.toTextLines source), 0, 0) + where + folder :: [TextEdit] -> UsedIdentifier -> State ([Text], Int, Int) [TextEdit] + folder prevTextEdits UsedIdentifier{usedIdentifierName, usedIdentifierSpan} + | Just importedBys <- lookupNameEnv nameToImportedByMap usedIdentifierName + , Just ImportedBy{importedByAlias} <- find (isRangeWithinImportedBy range) importedBys + , let IdentifierSpan row startCol _ = usedIdentifierSpan + , let identifierRange = identifierSpanToRange usedIdentifierSpan + , let aliasText = Text.pack $ moduleNameString importedByAlias + , let identifierText = Text.pack $ occNameString $ nameOccName usedIdentifierName + , let qualifiedIdentifierText = aliasText <> "." 
<> identifierText = do + (sourceTextLines, lineOffset, updateColOffset row lineOffset -> colOffset) <- State.get + let lines = List.drop (row - lineOffset) sourceTextLines + let (replacementText, remainingLines) = + if | line : remainingLines <- lines + , let lineStartingAtIdentifier = Text.drop (startCol - colOffset) line + , Just (c, _) <- Text.uncons lineStartingAtIdentifier + , let isParenthesized = c == '(' + , let isBackticked = c == '`' + , let replacementText = + if | isParenthesized -> "(" <> qualifiedIdentifierText <> ")" + | isBackticked -> "`" <> qualifiedIdentifierText <> "`" + | otherwise -> qualifiedIdentifierText -> + (replacementText, lineStartingAtIdentifier : remainingLines) + | otherwise -> (qualifiedIdentifierText, lines) + let textEdit = TextEdit identifierRange replacementText + State.put (remainingLines, row, startCol) + pure $ textEdit : prevTextEdits + | otherwise = pure prevTextEdits + + makeStateComputation :: [UsedIdentifier] -> State ([Text], Int, Int) [TextEdit] + makeStateComputation usedIdentifiers = foldM folder [] usedIdentifiers + +-- The overall idea: +-- 1. GlobalRdrEnv from typechecking phase contains info on what imported a +-- name. +-- 2. refMap from GetHieAst contains location of names and how they are used. +-- 3. For each used name in refMap check whether the name comes from an import +-- at the origin of the code action. +codeActionProvider :: PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeActionProvider ideState _pluginId (CodeActionParams _ _ documentId range _) = do + normalizedFilePath <- getNormalizedFilePathE (documentId ^. L.uri) + TcModuleResult { tmrParsed, tmrTypechecked } <- runActionE "QualifyImportedNames.TypeCheck" ideState $ useE TypeCheck normalizedFilePath + if isJust (findLImportDeclAt range tmrParsed) + then do + HAR {..} <- runActionE "QualifyImportedNames.GetHieAst" ideState (useE GetHieAst normalizedFilePath) + (_, sourceM) <- runActionE "QualifyImportedNames.GetFileContents" ideState (useE GetFileContents normalizedFilePath) + source <- handleMaybe (PluginRuleFailed "GetFileContents") sourceM + let globalRdrEnv = tcg_rdr_env tmrTypechecked + nameToImportedByMap = globalRdrEnvToNameToImportedByMap globalRdrEnv + usedIdentifiers = refMapToUsedIdentifiers refMap + textEdits = usedIdentifiersToTextEdits range nameToImportedByMap source usedIdentifiers + pure $ InL (makeCodeActions (documentId ^. L.uri) textEdits) + else pure $ InL [] + diff --git a/plugins/hls-qualify-imported-names-plugin/test/Main.hs b/plugins/hls-qualify-imported-names-plugin/test/Main.hs new file mode 100644 index 0000000000..1d932be601 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/Main.hs @@ -0,0 +1,141 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Main (main) where + +import Data.Foldable (find) +import Data.Text (Text) +import qualified Ide.Plugin.QualifyImportedNames as QualifyImportedNames +import System.FilePath (()) +import Test.Hls (CodeAction (CodeAction, _title), + Command, MonadIO (liftIO), + PluginTestDescriptor, + Position (Position), + Range (Range), Session, + TestName, TestTree, + TextDocumentIdentifier, + assertBool, assertFailure, + def, defaultTestRunner, + executeCodeAction, + getCodeActions, + goldenWithHaskellDoc, + mkPluginTestDescriptor', + openDoc, runSessionWithServer, + testCase, testGroup, + type (|?) 
(InR)) + +import Prelude + +-- 1's based +data Point = Point { + line :: !Int, + column :: !Int +} + +makePoint :: Int -> Int -> Point +makePoint line column + | line >= 1 && column >= 1 = Point line column + | otherwise = error "Line or column is less than 1." + +isEmpty :: Foldable f => f a -> Bool +isEmpty = null + +makeCodeActionNotFoundAtString :: Point -> String +makeCodeActionNotFoundAtString Point {..} = + "CodeAction not found at line: " <> show line <> ", column: " <> show column + +makeCodeActionFoundAtString :: Point -> String +makeCodeActionFoundAtString Point {..} = + "CodeAction found at line: " <> show line <> ", column: " <> show column + +main :: IO () +main = defaultTestRunner $ testGroup "Qualify Imported Names" + [ + testCase "No CodeAction when not at import" $ + runSessionWithServer def pluginDescriptor testDataDir $ do + let point = makePoint 1 1 + document <- openDoc "NoImport.hs" "haskell" + actions <- getCodeActions document $ pointToRange point + liftIO $ assertBool (makeCodeActionFoundAtString point) (isEmpty actions) + , testCase "No CodeAction when import is qualified" $ + runSessionWithServer def pluginDescriptor testDataDir $ do + let point = makePoint 3 1 + document <- openDoc "QualifiedImport.hs" "haskell" + actions <- getCodeActions document $ pointToRange point + liftIO $ assertBool (makeCodeActionFoundAtString point) (isEmpty actions) + , codeActionGoldenTest + "CodeAction qualifies names with alias if imported module is aliased" + "AliasedImport" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction qualifies names with module name if imported module is not aliased" + "UnaliasedImport" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction qualifies only names in import's explicit non-hiding list" + "ExplicitImport" + (makePoint 4 1) + , codeActionGoldenTest + "CodeAction qualifies only names outside of import's explicit hiding list" + "ExplicitHidingImport" + (makePoint 4 1) + , codeActionGoldenTest + "CodeAction can qualify names not defined in modules they are imported from" + "Reexported" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction can qualify explicitly imported Prelude" + "ExplicitPrelude" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction qualifies only imported names" + "OnlyImportedNames" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction qualifies parenthesized operators properly" + "Parenthesized" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction qualifies backticked operators properly" + "Backticked" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction qualifies parenthesized and backticked operators on the same line properly" + "SameLine" + (makePoint 3 1) + , codeActionGoldenTest + "CodeAction doesn't qualify already qualified names" + "NoDoubleQualify" + (makePoint 3 1) + ] + +codeActionGoldenTest :: TestName -> FilePath -> Point -> TestTree +codeActionGoldenTest testCaseName goldenFilename point = + goldenWithQualifyImportedNames testCaseName goldenFilename $ \document -> do + actions <- getCodeActions document $ pointToRange point + case find ((== Just "Qualify imported names") . 
getCodeActionTitle) actions of + Just (InR codeAction) -> executeCodeAction codeAction + _ -> liftIO $ assertFailure $ makeCodeActionNotFoundAtString point + +testDataDir :: String +testDataDir = "plugins" "hls-qualify-imported-names-plugin" "test" "data" + +pluginDescriptor :: PluginTestDescriptor () +pluginDescriptor = mkPluginTestDescriptor' QualifyImportedNames.descriptor "qualifyImportedNames" + +getCodeActionTitle :: (Command |? CodeAction) -> Maybe Text +getCodeActionTitle commandOrCodeAction + | InR CodeAction {_title} <- commandOrCodeAction = Just _title + | otherwise = Nothing + +goldenWithQualifyImportedNames :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithQualifyImportedNames testName path = + goldenWithHaskellDoc def pluginDescriptor testName testDataDir path "expected" "hs" + +pointToRange :: Point -> Range +pointToRange Point {..} + | line <- fromIntegral $ subtract 1 line + , column <- fromIntegral $ subtract 1 column = + Range (Position line column) (Position line $ column + 1) + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/A.hs b/plugins/hls-qualify-imported-names-plugin/test/data/A.hs new file mode 100644 index 0000000000..d6ecd773cb --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/A.hs @@ -0,0 +1,12 @@ +module A (module B, a, b, op) where + +import B + +a :: Int -> Int +a = id + +b :: String -> String +b = id + +op :: Int -> Int -> Int +op = (+) diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/AliasedImport.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/AliasedImport.expected.hs new file mode 100644 index 0000000000..406ab21f81 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/AliasedImport.expected.hs @@ -0,0 +1,6 @@ +module AliasedImport where + +import A as B + +thing = B.a + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/AliasedImport.hs b/plugins/hls-qualify-imported-names-plugin/test/data/AliasedImport.hs new file mode 100644 index 0000000000..92febc3dbd --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/AliasedImport.hs @@ -0,0 +1,6 @@ +module AliasedImport where + +import A as B + +thing = a + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/B.hs b/plugins/hls-qualify-imported-names-plugin/test/data/B.hs new file mode 100644 index 0000000000..78795f4a2e --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/B.hs @@ -0,0 +1,7 @@ +module B where + +c :: Int +c = 3 + +d :: String +d = "d" diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/Backticked.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/Backticked.expected.hs new file mode 100644 index 0000000000..debea0794b --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/Backticked.expected.hs @@ -0,0 +1,10 @@ +module Backticked where + +import Prelude + +f a b = a `Prelude.elem` b + +g a b = + let h = f a b + in a `Prelude.elem` b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/Backticked.hs b/plugins/hls-qualify-imported-names-plugin/test/data/Backticked.hs new file mode 100644 index 0000000000..981b46e84d --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/Backticked.hs @@ -0,0 +1,10 @@ +module Backticked where + +import Prelude + +f a b = a `elem` b + +g a b = + let h = f a b + in a `elem` b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitHidingImport.expected.hs 
b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitHidingImport.expected.hs new file mode 100644 index 0000000000..013d8fe630 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitHidingImport.expected.hs @@ -0,0 +1,8 @@ +module ExplicitHidingImport where + +import A +import A hiding (b) + +thing1 = A.a +thing2 = b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitHidingImport.hs b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitHidingImport.hs new file mode 100644 index 0000000000..41c3421e86 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitHidingImport.hs @@ -0,0 +1,8 @@ +module ExplicitHidingImport where + +import A +import A hiding (b) + +thing1 = a +thing2 = b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitImport.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitImport.expected.hs new file mode 100644 index 0000000000..bb18252868 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitImport.expected.hs @@ -0,0 +1,8 @@ +module ExplicitImport where + +import A (a) +import A (b) + +thing1 = a +thing2 = A.b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitImport.hs b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitImport.hs new file mode 100644 index 0000000000..3e2a7dba1e --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitImport.hs @@ -0,0 +1,8 @@ +module ExplicitImport where + +import A (a) +import A (b) + +thing1 = a +thing2 = b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitPrelude.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitPrelude.expected.hs new file mode 100644 index 0000000000..64e67291aa --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitPrelude.expected.hs @@ -0,0 +1,10 @@ +module ExplicitPrelude where + +import Prelude + +f :: Prelude.String -> Prelude.Int -> Prelude.Maybe Prelude.Bool +f a b = Prelude.Just Prelude.False + +class Prelude.Functor f => MyClass f where + method :: f Prelude.Int + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitPrelude.hs b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitPrelude.hs new file mode 100644 index 0000000000..9fcdb9c5f9 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/ExplicitPrelude.hs @@ -0,0 +1,10 @@ +module ExplicitPrelude where + +import Prelude + +f :: String -> Int -> Maybe Bool +f a b = Just False + +class Functor f => MyClass f where + method :: f Int + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/NoDoubleQualify.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/NoDoubleQualify.expected.hs new file mode 100644 index 0000000000..bf09b95ba2 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/NoDoubleQualify.expected.hs @@ -0,0 +1,7 @@ +module NoDoubleQualify where + +import A as AAA + +thing = AAA.a +thing2 = (AAA.op) +thing3 = 1 `AAA.op` 2 diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/NoDoubleQualify.hs b/plugins/hls-qualify-imported-names-plugin/test/data/NoDoubleQualify.hs new file mode 100644 index 0000000000..bf09b95ba2 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/NoDoubleQualify.hs @@ -0,0 +1,7 @@ +module NoDoubleQualify where + +import A as AAA + +thing = AAA.a +thing2 = (AAA.op) +thing3 = 1 `AAA.op` 2 diff --git 
a/plugins/hls-qualify-imported-names-plugin/test/data/NoImport.hs b/plugins/hls-qualify-imported-names-plugin/test/data/NoImport.hs new file mode 100644 index 0000000000..9e7cac3885 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/NoImport.hs @@ -0,0 +1,4 @@ +module NoImport where + +f = 3 + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/OnlyImportedNames.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/OnlyImportedNames.expected.hs new file mode 100644 index 0000000000..f0d7cec81d --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/OnlyImportedNames.expected.hs @@ -0,0 +1,16 @@ +module OnlyImportedNames where + +import A + +thing1 a = a + +thing2 b = b + +thing3 = f1 A.a A.c + +thing4 = f2 A.b A.d + +f1 a = a + +f2 c b = let { d = "k"; e = A.a } in c d ++ c b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/OnlyImportedNames.hs b/plugins/hls-qualify-imported-names-plugin/test/data/OnlyImportedNames.hs new file mode 100644 index 0000000000..514a2f7eff --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/OnlyImportedNames.hs @@ -0,0 +1,16 @@ +module OnlyImportedNames where + +import A + +thing1 a = a + +thing2 b = b + +thing3 = f1 a c + +thing4 = f2 b d + +f1 a = a + +f2 c b = let { d = "k"; e = a } in c d ++ c b + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/Parenthesized.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/Parenthesized.expected.hs new file mode 100644 index 0000000000..969f2a1a4f --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/Parenthesized.expected.hs @@ -0,0 +1,6 @@ +module Parenthesized where + +import Prelude + +thing :: [Prelude.Int] -> [Prelude.Int] -> [Prelude.Int] +thing = (Prelude.<>) diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/Parenthesized.hs b/plugins/hls-qualify-imported-names-plugin/test/data/Parenthesized.hs new file mode 100644 index 0000000000..f94e35f9a2 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/Parenthesized.hs @@ -0,0 +1,6 @@ +module Parenthesized where + +import Prelude + +thing :: [Int] -> [Int] -> [Int] +thing = (<>) diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/QualifiedImport.hs b/plugins/hls-qualify-imported-names-plugin/test/data/QualifiedImport.hs new file mode 100644 index 0000000000..d79756cd7b --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/QualifiedImport.hs @@ -0,0 +1,4 @@ +module QualifiedImport where + +import qualified A + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/Reexported.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/Reexported.expected.hs new file mode 100644 index 0000000000..beb612266b --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/Reexported.expected.hs @@ -0,0 +1,6 @@ +module Reexported where + +import A + +thing = A.c + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/Reexported.hs b/plugins/hls-qualify-imported-names-plugin/test/data/Reexported.hs new file mode 100644 index 0000000000..55bd197c09 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/Reexported.hs @@ -0,0 +1,6 @@ +module Reexported where + +import A + +thing = c + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/SameLine.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/SameLine.expected.hs new file mode 100644 index 0000000000..c772679c22 --- /dev/null +++ 
b/plugins/hls-qualify-imported-names-plugin/test/data/SameLine.expected.hs @@ -0,0 +1,5 @@ +module SameLine where + +import A + +thing = ((A.a) . (A.a) . (A.a)) (1 `A.op` 2 `A.op` 3 `A.op` 4) diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/SameLine.hs b/plugins/hls-qualify-imported-names-plugin/test/data/SameLine.hs new file mode 100644 index 0000000000..4a69baf238 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/SameLine.hs @@ -0,0 +1,5 @@ +module SameLine where + +import A + +thing = ((a) . (a) . (a)) (1 `op` 2 `op` 3 `op` 4) diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/UnaliasedImport.expected.hs b/plugins/hls-qualify-imported-names-plugin/test/data/UnaliasedImport.expected.hs new file mode 100644 index 0000000000..f24e555ad8 --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/UnaliasedImport.expected.hs @@ -0,0 +1,6 @@ +module UnaliasedImport where + +import A + +thing = A.a + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/UnaliasedImport.hs b/plugins/hls-qualify-imported-names-plugin/test/data/UnaliasedImport.hs new file mode 100644 index 0000000000..c7fc8eec0a --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/UnaliasedImport.hs @@ -0,0 +1,6 @@ +module UnaliasedImport where + +import A + +thing = a + diff --git a/plugins/hls-qualify-imported-names-plugin/test/data/hie.yaml b/plugins/hls-qualify-imported-names-plugin/test/data/hie.yaml new file mode 100644 index 0000000000..a3f448957c --- /dev/null +++ b/plugins/hls-qualify-imported-names-plugin/test/data/hie.yaml @@ -0,0 +1,17 @@ +cradle: + direct: + arguments: + - A.hs + - B.hs + - QualifiedImport.hs + - NoImport.hs + - AliasedImport.hs + - UnaliasedImport.hs + - ExplicitImport.hs + - ExplicitHidingImport.hs + - Reexported.hs + - ExplicitPrelude.hs + - OnlyImportedNames.hs + - Parenthesized.hs + - Backticked.hs + diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs new file mode 100644 index 0000000000..7c337dcd00 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Compat/ExactPrint.hs @@ -0,0 +1,19 @@ +-- | This module contains compatibility constructs to write type signatures across +-- multiple ghc-exactprint versions, accepting that anything more ambitious is +-- pretty much impossible with the GHC 9.2 redesign of ghc-exactprint +module Development.IDE.GHC.Compat.ExactPrint + ( module ExactPrint + , printA + , transformA + ) where + +import Language.Haskell.GHC.ExactPrint as ExactPrint + +printA :: (ExactPrint ast) => ast -> String +printA ast = exactPrint ast + +transformA + :: Monad m => ast1 -> (ast1 -> TransformT m ast2) -> m ast2 +transformA ast f = do + (ast',_ ,_) <- runTransformFromT 0 (f ast) + return ast' diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs new file mode 100644 index 0000000000..638d14c51d --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/Dump.hs @@ -0,0 +1,379 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.GHC.Dump(showAstDataHtml) where +import qualified Data.ByteString as B +import Data.Data hiding (Fixity) +import Development.IDE.GHC.Compat hiding (LocatedA, + NameAnn) +import Development.IDE.GHC.Compat.ExactPrint (ExactPrint, exactPrint) +import Development.IDE.GHC.Compat.Util +import Generics.SYB (ext1Q, ext2Q, extQ) +import GHC.Hs hiding (AnnLet) 
+import GHC.Hs.Dump +import GHC.Plugins hiding (AnnLet) +import Prelude hiding ((<>)) + +-- | Show a GHC syntax tree in HTML. +showAstDataHtml :: (Data a, ExactPrint a) => a -> SDoc +showAstDataHtml a0 = html $ + header $$ + body (tag' [("id",text (show @String "myUL"))] "ul" $ vcat + [ + li (pre $ text (exactPrint a0)), + li (showAstDataHtml' a0), + li (nested "Raw" $ pre $ showAstData NoBlankSrcSpan NoBlankEpAnnotations a0) + ]) + where + tag = tag' [] + tag' attrs t cont = + angleBrackets (text t <+> hcat [text a<>char '=' <>v | (a,v) <- attrs]) + <> cont + <> angleBrackets (char '/' <> text t) + ul = tag' [("class", text (show @String "nested"))] "ul" + li = tag "li" + caret x = tag' [("class", text "caret")] "span" "" <+> x + nested foo cts + | otherwise = foo $$ (caret $ ul cts) + body cts = tag "body" $ cts $$ tag "script" (text js) + header = tag "head" $ tag "style" $ text css + html = tag "html" + pre = tag "pre" + showAstDataHtml' :: Data a => a -> SDoc + showAstDataHtml' = + generic + `ext1Q` list + `extQ` string `extQ` fastString `extQ` srcSpan `extQ` realSrcSpan +#if !MIN_VERSION_ghc(9,11,0) + `extQ` annotation +#endif + `extQ` annotationModule +#if !MIN_VERSION_ghc(9,11,0) + `extQ` annotationAddEpAnn +#endif + `extQ` annotationGrhsAnn + `extQ` annotationEpAnnHsCase + `extQ` annotationEpAnnHsLet + `extQ` annotationAnnList + `extQ` annotationEpAnnImportDecl + `extQ` annotationAnnParen + `extQ` annotationTrailingAnn + `extQ` annotationEpaLocation +#if !MIN_VERSION_ghc(9,11,0) + `extQ` addEpAnn +#endif + `extQ` lit `extQ` litr `extQ` litt + `extQ` sourceText + `extQ` deltaPos + `extQ` epaAnchor +#if !MIN_VERSION_ghc(9,9,0) + `extQ` anchorOp +#endif + `extQ` bytestring + `extQ` name `extQ` occName `extQ` moduleName `extQ` var + `extQ` dataCon + `extQ` bagName `extQ` bagRdrName `extQ` bagVar `extQ` nameSet + `extQ` fixity + `ext2Q` located + `extQ` srcSpanAnnA + `extQ` srcSpanAnnL + `extQ` srcSpanAnnP + `extQ` srcSpanAnnC + `extQ` srcSpanAnnN + + where generic :: Data a => a -> SDoc + generic t = nested (text $ showConstr (toConstr t)) + (vcat (gmapQ (li . showAstDataHtml') t)) + + string :: String -> SDoc + string = text . normalize_newlines . show + + fastString :: FastString -> SDoc + fastString s = braces $ + text "FastString:" + <+> text (normalize_newlines . show $ s) + + bytestring :: B.ByteString -> SDoc + bytestring = text . normalize_newlines . show + + list [] = brackets empty + list [x] = "[]" $$ showAstDataHtml' x + list xs = nested "[]" (vcat $ map (li . 
showAstDataHtml') xs) + + -- Eliminate word-size dependence + lit :: HsLit GhcPs -> SDoc + lit (HsWordPrim s x) = numericLit "HsWord{64}Prim" x s + lit (HsWord64Prim s x) = numericLit "HsWord{64}Prim" x s + lit (HsIntPrim s x) = numericLit "HsInt{64}Prim" x s + lit (HsInt64Prim s x) = numericLit "HsInt{64}Prim" x s + lit l = generic l + + litr :: HsLit GhcRn -> SDoc + litr (HsWordPrim s x) = numericLit "HsWord{64}Prim" x s + litr (HsWord64Prim s x) = numericLit "HsWord{64}Prim" x s + litr (HsIntPrim s x) = numericLit "HsInt{64}Prim" x s + litr (HsInt64Prim s x) = numericLit "HsInt{64}Prim" x s + litr l = generic l + + litt :: HsLit GhcTc -> SDoc + litt (HsWordPrim s x) = numericLit "HsWord{64}Prim" x s + litt (HsWord64Prim s x) = numericLit "HsWord{64}Prim" x s + litt (HsIntPrim s x) = numericLit "HsInt{64}Prim" x s + litt (HsInt64Prim s x) = numericLit "HsInt{64}Prim" x s + litt l = generic l + + numericLit :: String -> Integer -> SourceText -> SDoc + numericLit tag x s = braces $ hsep [ text tag + , generic x + , generic s ] + + sourceText :: SourceText -> SDoc + sourceText NoSourceText = text "NoSourceText" + +#if MIN_VERSION_ghc(9,7,0) + sourceText (SourceText src) = text "SourceText" <+> ftext src +#else + sourceText (SourceText src) = text "SourceText" <+> text src +#endif + + epaAnchor :: EpaLocation -> SDoc + +#if MIN_VERSION_ghc(9,9,0) + epaAnchor (EpaSpan s) = parens $ text "EpaSpan" <+> srcSpan s +#else + epaAnchor (EpaSpan r _) = text "EpaSpan" <+> realSrcSpan r +#endif + +#if MIN_VERSION_ghc(9,11,0) + epaAnchor (EpaDelta s d cs) = text "EpaDelta" <+> srcSpan s <+> deltaPos d <+> showAstDataHtml' cs +#else + epaAnchor (EpaDelta d cs) = text "EpaDelta" <+> deltaPos d <+> showAstDataHtml' cs +#endif + +#if !MIN_VERSION_ghc(9,9,0) + anchorOp :: AnchorOperation -> SDoc + anchorOp UnchangedAnchor = "UnchangedAnchor" + anchorOp (MovedAnchor dp) = "MovedAnchor " <> deltaPos dp +#endif + + deltaPos :: DeltaPos -> SDoc + deltaPos (SameLine c) = text "SameLine" <+> ppr c + deltaPos (DifferentLine l c) = text "DifferentLine" <+> ppr l <+> ppr c + + name :: Name -> SDoc + name nm = braces $ text "Name:" <+> ppr nm + + occName n = braces $ + text "OccName:" + <+> text (occNameString n) + + moduleName :: ModuleName -> SDoc + moduleName m = braces $ text "ModuleName:" <+> ppr m + + srcSpan :: SrcSpan -> SDoc + srcSpan ss = char ' ' <> + hang (ppr ss) 1 + -- TODO: show annotations here + (text "") + + realSrcSpan :: RealSrcSpan -> SDoc + realSrcSpan ss = braces $ char ' ' <> + hang (ppr ss) 1 + -- TODO: show annotations here + (text "") + +#if !MIN_VERSION_ghc(9,11,0) + addEpAnn :: AddEpAnn -> SDoc + addEpAnn (AddEpAnn a s) = text "AddEpAnn" <+> ppr a <+> epaAnchor s +#endif + + var :: Var -> SDoc + var v = braces $ text "Var:" <+> ppr v + + dataCon :: DataCon -> SDoc + dataCon c = braces $ text "DataCon:" <+> ppr c + + bagRdrName:: Bag (LocatedA (HsBind GhcPs)) -> SDoc + bagRdrName bg = braces $ + text "Bag(LocatedA (HsBind GhcPs)):" + $$ (list . bagToList $ bg) + + bagName :: Bag (LocatedA (HsBind GhcRn)) -> SDoc + bagName bg = braces $ + text "Bag(LocatedA (HsBind Name)):" + $$ (list . bagToList $ bg) + + bagVar :: Bag (LocatedA (HsBind GhcTc)) -> SDoc + bagVar bg = braces $ + text "Bag(LocatedA (HsBind Var)):" + $$ (list . bagToList $ bg) + + nameSet ns = braces $ + text "NameSet:" + $$ (list . 
nameSetElemsStable $ ns) + + fixity :: Fixity -> SDoc + fixity fx = braces $ + text "Fixity:" + <+> ppr fx + + located :: (Data a, Data b) => GenLocated a b -> SDoc + located (L ss a) + = nested "L" (li (showAstDataHtml' ss) $$ li (showAstDataHtml' a)) + + -- ------------------------- + +#if !MIN_VERSION_ghc(9,11,0) + annotation :: EpAnn [AddEpAnn] -> SDoc + annotation = annotation' (text "EpAnn [AddEpAnn]") +#endif + + annotationModule :: EpAnn AnnsModule -> SDoc + annotationModule = annotation' (text "EpAnn AnnsModule") + +#if !MIN_VERSION_ghc(9,11,0) + annotationAddEpAnn :: EpAnn AddEpAnn -> SDoc + annotationAddEpAnn = annotation' (text "EpAnn AddEpAnn") +#endif + + annotationGrhsAnn :: EpAnn GrhsAnn -> SDoc + annotationGrhsAnn = annotation' (text "EpAnn GrhsAnn") + + annotationEpAnnHsCase :: EpAnn EpAnnHsCase -> SDoc + annotationEpAnnHsCase = annotation' (text "EpAnn EpAnnHsCase") + + annotationEpAnnHsLet :: EpAnn NoEpAnns -> SDoc + annotationEpAnnHsLet = annotation' (text "EpAnn NoEpAnns") + +#if MIN_VERSION_ghc(9,11,0) + annotationAnnList :: EpAnn (AnnList ()) -> SDoc +#else + annotationAnnList :: EpAnn AnnList -> SDoc +#endif + annotationAnnList = annotation' (text "EpAnn AnnList") + + annotationEpAnnImportDecl :: EpAnn EpAnnImportDecl -> SDoc + annotationEpAnnImportDecl = annotation' (text "EpAnn EpAnnImportDecl") + + annotationAnnParen :: EpAnn AnnParen -> SDoc + annotationAnnParen = annotation' (text "EpAnn AnnParen") + + annotationTrailingAnn :: EpAnn TrailingAnn -> SDoc + annotationTrailingAnn = annotation' (text "EpAnn TrailingAnn") + + annotationEpaLocation :: EpAnn EpaLocation -> SDoc + annotationEpaLocation = annotation' (text "EpAnn EpaLocation") + + annotation' :: forall a. Data a => SDoc -> EpAnn a -> SDoc + annotation' _tag anns = nested (text $ showConstr (toConstr anns)) + (vcat (map li $ gmapQ showAstDataHtml' anns)) + + -- ------------------------- + +#if MIN_VERSION_ghc(9,9,0) + srcSpanAnnA :: EpAnn AnnListItem -> SDoc + srcSpanAnnA = locatedAnn'' (text "SrcSpanAnnA") + +#if MIN_VERSION_ghc(9,11,0) + srcSpanAnnL :: EpAnn (AnnList ()) -> SDoc +#else + srcSpanAnnL :: EpAnn AnnList -> SDoc +#endif + srcSpanAnnL = locatedAnn'' (text "SrcSpanAnnL") + + srcSpanAnnP :: EpAnn AnnPragma -> SDoc + srcSpanAnnP = locatedAnn'' (text "SrcSpanAnnP") + + srcSpanAnnC :: EpAnn AnnContext -> SDoc + srcSpanAnnC = locatedAnn'' (text "SrcSpanAnnC") + + srcSpanAnnN :: EpAnn NameAnn -> SDoc + srcSpanAnnN = locatedAnn'' (text "SrcSpanAnnN") + + locatedAnn'' :: forall a. Data a => SDoc -> EpAnn a -> SDoc + locatedAnn'' tag ss = parens $ + case cast ss of + Just (ann :: EpAnn a) -> + text (showConstr (toConstr ann)) + $$ vcat (gmapQ showAstDataHtml' ann) + Nothing -> text "locatedAnn:unmatched" <+> tag + <+> (parens $ text (showConstr (toConstr ss))) +#else + srcSpanAnnA :: SrcSpanAnn' (EpAnn AnnListItem) -> SDoc + srcSpanAnnA = locatedAnn'' (text "SrcSpanAnnA") + + srcSpanAnnL :: SrcSpanAnn' (EpAnn AnnList) -> SDoc + srcSpanAnnL = locatedAnn'' (text "SrcSpanAnnL") + + srcSpanAnnP :: SrcSpanAnn' (EpAnn AnnPragma) -> SDoc + srcSpanAnnP = locatedAnn'' (text "SrcSpanAnnP") + + srcSpanAnnC :: SrcSpanAnn' (EpAnn AnnContext) -> SDoc + srcSpanAnnC = locatedAnn'' (text "SrcSpanAnnC") + + srcSpanAnnN :: SrcSpanAnn' (EpAnn NameAnn) -> SDoc + srcSpanAnnN = locatedAnn'' (text "SrcSpanAnnN") + + locatedAnn'' :: forall a. 
Data a + => SDoc -> SrcSpanAnn' a -> SDoc + locatedAnn'' tag ss = + case cast ss of + Just ((SrcSpanAnn ann s) :: SrcSpanAnn' a) -> + nested "SrcSpanAnn" ( + li(showAstDataHtml' ann) + $$ li(srcSpan s)) + Nothing -> text "locatedAnn:unmatched" <+> tag + <+> text (showConstr (toConstr ss)) +#endif + + +normalize_newlines :: String -> String +normalize_newlines ('\\':'r':'\\':'n':xs) = '\\':'n':normalize_newlines xs +normalize_newlines (x:xs) = x:normalize_newlines xs +normalize_newlines [] = [] + +css :: String +css = unlines + [ "body {background-color: black; color: white ;}" + , "/* Remove default bullets */" + , "ul, #myUL {" + , " list-style-type: none;" + , "}" + , "/* Remove margins and padding from the parent ul */" + , "#myUL {" + , " margin: 0; " + , " padding: 0; " + , "} " + , "/* Style the caret/arrow */ " + , ".caret { " + , " cursor: pointer; " + , " user-select: none; /* Prevent text selection */" + , "} " + , "/* Create the caret/arrow with a unicode, and style it */" + , ".caret::before { " + , " content: \"\\25B6 \"; " + , " color: white; " + , " display: inline-block; " + , " margin-right: 6px; " + , "} " + , "/* Rotate the caret/arrow icon when clicked on (using JavaScript) */" + , ".caret-down::before { " + , " transform: rotate(90deg); " + , "} " + , "/* Hide the nested list */ " + , ".nested { " + , " display: none; " + , "} " + , "/* Show the nested list when the user clicks on the caret/arrow (with JavaScript) */" + , ".active { " + , " display: block;}" + ] + +js :: String +js = unlines + [ "var toggler = document.getElementsByClassName(\"caret\");" + , "var i;" + , "for (i = 0; i < toggler.length; i++) {" + , " toggler[i].addEventListener(\"click\", function() {" + , " this.parentElement.querySelector(\".nested\").classList.toggle(\"active\");" + , " this.classList.toggle(\"caret-down\");" + , " }); }" + ] diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs new file mode 100644 index 0000000000..666de9a6f2 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/GHC/ExactPrint.hs @@ -0,0 +1,835 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +-- | This module hosts various abstractions and utility functions to work with ghc-exactprint. 
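+--
+-- For example, 'transform' below converts a 'Graft' into a 'WorkspaceEdit' by
+-- exact-printing the 'ParsedSource' before and after the graft is applied and
+-- diffing the two renderings.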
+module Development.IDE.GHC.ExactPrint + ( Graft(..), + graftDecls, + graftDeclsWithM, + annotate, + annotateDecl, + hoistGraft, + graftWithM, + graftExprWithM, + genericGraftWithSmallestM, + genericGraftWithLargestM, + graftSmallestDeclsWithM, + transform, + transformM, + ExactPrint(..), + modifySmallestDeclWithM, + modifyMgMatchesT, + modifyMgMatchesT', + modifySigWithM, + genAnchor1, + setPrecedingLines, + addParens, + addParensToCtxt, + modifyAnns, + removeComma, + -- * Helper function + eqSrcSpan, + epl, + epAnn, + removeTrailingComma, + annotateParsedSource, + getAnnotatedParsedSourceRule, + GetAnnotatedParsedSource(..), + ASTElement (..), + ExceptStringT (..), + TransformT, + Log(..), + ) +where + +import Control.Applicative (Alternative) +import Control.Arrow ((***)) +import Control.DeepSeq +import Control.Monad +import qualified Control.Monad.Fail as Fail +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Class +import Control.Monad.Trans.Except +import Control.Monad.Zip +import Data.Bifunctor +import Data.Bool (bool) +import qualified Data.DList as DL +import Data.Either.Extra (mapLeft) +import Data.Functor.Classes +import Data.Functor.Contravariant +import Data.Monoid (All (All), getAll) +import qualified Data.Text as T +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake hiding (Log) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat hiding (parseImport, + parsePattern, + parseType) +import Development.IDE.GHC.Compat.ExactPrint +import Development.IDE.Graph (RuleResult, Rules) +import Development.IDE.Graph.Classes +import Generics.SYB +import Generics.SYB.GHC +import qualified GHC.Generics as GHC +import Ide.Logger (Pretty (pretty), + Recorder, + WithPriority, + cmapWithPrio) +import Ide.PluginUtils +import Language.Haskell.GHC.ExactPrint.Parsers +import Language.LSP.Protocol.Types + +import Control.Lens (_last, (&)) +import Control.Lens.Operators ((%~)) +import Data.List (partition) +import GHC (DeltaPos (..), + SrcSpanAnnN) + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if !MIN_VERSION_ghc(9,9,0) +import Data.Default (Default) +import GHC ( Anchor (..), + AnchorOperation, + EpAnn (..), + NameAdornment (NameParens), + NameAnn (..), + SrcSpanAnn' (SrcSpanAnn), + SrcSpanAnnA, + TrailingAnn (AddCommaAnn), + emptyComments, + realSrcSpan, + spanAsAnchor) +import GHC.Parser.Annotation (AnnContext (..), + EpaLocation (EpaDelta), + deltaPos) +import GHC.Types.SrcLoc (generatedSrcSpan) +#endif +#if MIN_VERSION_ghc(9,11,0) +import GHC.Types.SrcLoc (UnhelpfulSpanReason(..)) +#endif + +#if MIN_VERSION_ghc(9,9,0) +import GHC ( +#if !MIN_VERSION_ghc(9,11,0) + Anchor, +#endif + AnnContext (..), + EpAnn (..), + EpaLocation, + EpaLocation' (..), +#if MIN_VERSION_ghc(9,11,0) + EpToken (..), +#endif + NameAdornment (..), + NameAnn (..), + SrcSpanAnnA, + TrailingAnn (..), + deltaPos, + emptyComments, + spanAsAnchor) +#endif +setPrecedingLines :: +#if !MIN_VERSION_ghc(9,9,0) + Default t => +#endif + LocatedAn t a -> Int -> Int -> LocatedAn t a +setPrecedingLines ast n c = setEntryDP ast (deltaPos n c) +------------------------------------------------------------------------------ + +data Log = LogShake Shake.Log deriving Show + +instance Pretty Log where + pretty = \case + LogShake shakeLog -> pretty shakeLog + +data GetAnnotatedParsedSource = GetAnnotatedParsedSource + deriving (Eq, Show, GHC.Generic) + +instance Hashable GetAnnotatedParsedSource +instance NFData GetAnnotatedParsedSource +type 
instance RuleResult GetAnnotatedParsedSource = ParsedSource + +instance Show (HsModule GhcPs) where + show _ = "" + +-- | Get the latest version of the annotated parse source with comments. +getAnnotatedParsedSourceRule :: Recorder (WithPriority Log) -> Rules () +getAnnotatedParsedSourceRule recorder = define (cmapWithPrio LogShake recorder) $ \GetAnnotatedParsedSource nfp -> do + pm <- use GetParsedModuleWithComments nfp + return ([], fmap annotateParsedSource pm) + +annotateParsedSource :: ParsedModule -> ParsedSource +annotateParsedSource (ParsedModule _ ps _) = +#if MIN_VERSION_ghc(9,9,0) + ps +#else + (makeDeltaAst ps) +#endif + +#if MIN_VERSION_ghc(9,11,0) +type Anchor = EpaLocation +#endif + +------------------------------------------------------------------------------ + +{- | A transformation for grafting source trees together. Use the semigroup + instance to combine 'Graft's, and run them via 'transform'. +-} +newtype Graft m a = Graft + { runGraft :: DynFlags -> a -> TransformT m a + } + +hoistGraft :: (forall x. m x -> n x) -> Graft m a -> Graft n a +hoistGraft h (Graft f) = Graft (fmap (hoistTransform h) . f) + +newtype ExceptStringT m a = ExceptStringT {runExceptString :: ExceptT String m a} + deriving newtype + ( MonadTrans + , Monad + , Functor + , Applicative + , Alternative + , Foldable + , Contravariant + , MonadIO + , Eq1 + , Ord1 + , Show1 + , Read1 + , MonadZip + , MonadPlus + , Eq + , Ord + , Show + , Read + ) + +instance Monad m => Fail.MonadFail (ExceptStringT m) where + fail = ExceptStringT . ExceptT . pure . Left + +instance Monad m => Semigroup (Graft m a) where + Graft a <> Graft b = Graft $ \dflags -> a dflags >=> b dflags + +instance Monad m => Monoid (Graft m a) where + mempty = Graft $ const pure + +------------------------------------------------------------------------------ + +-- | Convert a 'Graft' into a 'WorkspaceEdit'. +transform :: + DynFlags -> + ClientCapabilities -> + VersionedTextDocumentIdentifier -> + Graft (Either String) ParsedSource -> + ParsedSource -> + Either String WorkspaceEdit +transform dflags ccs verTxtDocId f a = do + let src = printA a + a' <- transformA a $ runGraft f dflags + let res = printA a' + pure $ diffText ccs (verTxtDocId, T.pack src) (T.pack res) IncludeDeletions + +------------------------------------------------------------------------------ + +-- | Convert a 'Graft' into a 'WorkspaceEdit'. +transformM :: + Monad m => + DynFlags -> + ClientCapabilities -> + VersionedTextDocumentIdentifier -> + Graft (ExceptStringT m) ParsedSource -> + ParsedSource -> + m (Either String WorkspaceEdit) +transformM dflags ccs verTextDocId f a = runExceptT $ + runExceptString $ do + let src = printA a + a' <- transformA a $ runGraft f dflags + let res = printA a' + pure $ diffText ccs (verTextDocId, T.pack src) (T.pack res) IncludeDeletions + + +-- | Returns whether or not this node requires its immediate children to have +-- be parenthesized and have a leading space. +-- +-- A more natural type for this function would be to return @(Bool, Bool)@, but +-- we use 'All' instead for its monoid instance. 
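+--
+-- For example, per the equations below, children of 'HsApp' get @mempty@,
+-- i.e. @(All True, All True)@ (parenthesize and prepend a space), while
+-- children of 'HsCase' get @(All False, All True)@ (no parentheses, but a
+-- leading space).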
+needsParensSpace :: + HsExpr GhcPs -> + -- | (Needs parens, needs space) + (All, All) +needsParensSpace HsLam{} = (All False, All False) +#if !MIN_VERSION_ghc(9,9,0) +needsParensSpace HsLamCase{} = (All False, All True) +#endif +needsParensSpace HsApp{} = mempty +needsParensSpace HsAppType{} = mempty +needsParensSpace OpApp{} = mempty +needsParensSpace HsPar{} = (All False, All False) +needsParensSpace SectionL{} = (All False, All False) +needsParensSpace SectionR{} = (All False, All False) +needsParensSpace ExplicitTuple{} = (All False, All False) +needsParensSpace ExplicitSum{} = (All False, All False) +needsParensSpace HsCase{} = (All False, All True) +needsParensSpace HsIf{} = (All False, All False) +needsParensSpace HsMultiIf{} = (All False, All False) +needsParensSpace HsLet{} = (All False, All True) +needsParensSpace HsDo{} = (All False, All False) +needsParensSpace ExplicitList{} = (All False, All False) +needsParensSpace RecordCon{} = (All False, All True) +needsParensSpace RecordUpd{} = mempty +needsParensSpace _ = mempty + + +------------------------------------------------------------------------------ + +{- | Construct a 'Graft', replacing the node at the given 'SrcSpan' with the + given @Located ast@. The node at that position must already be a @Located + ast@, or this is a no-op. +-} +graft' :: + forall ast a l. + (Data a, ASTElement l ast) => + -- | Do we need to insert a space before this grafting? In do blocks, the + -- answer is no, or we will break layout. But in function applications, + -- the answer is yes, or the function call won't get its argument. Yikes! + -- + -- More often the answer is yes, so when in doubt, use that. + Bool -> + SrcSpan -> + LocatedAn l ast -> + Graft (Either String) a +graft' needs_space dst val = Graft $ \dflags a -> do + val' <- annotate dflags needs_space val + pure $ + everywhere' + ( mkT $ + \case + (L src _ :: LocatedAn l ast) + | locA src `eqSrcSpan` dst -> val' + l -> l + ) + a + + +-- | Like 'graft', but specialized to 'LHsExpr', and intelligently inserts +-- parentheses if they're necessary. +graftExpr :: + forall a. + (Data a) => + SrcSpan -> + LHsExpr GhcPs -> + Graft (Either String) a +graftExpr dst val = Graft $ \dflags a -> do + let (needs_space, mk_parens) = getNeedsSpaceAndParenthesize dst a + + runGraft + (graft' needs_space dst $ mk_parens val) + dflags + a + +getNeedsSpaceAndParenthesize :: + (ASTElement l ast, Data a) => + SrcSpan -> + a -> + (Bool, LocatedAn l ast -> LocatedAn l ast) +getNeedsSpaceAndParenthesize dst a = + -- Traverse the tree, looking for our replacement node. But keep track of + -- the context (parent HsExpr constructor) we're in while we do it. This + -- lets us determine whether or not we need parentheses. + let (needs_parens, needs_space) = + everythingWithContext (Nothing, Nothing) (<>) + ( mkQ (mempty, ) $ \x s -> case x of + (L src _ :: LHsExpr GhcPs) | locA src `eqSrcSpan` dst -> + (s, s) + L _ x' -> (mempty, Just *** Just $ needsParensSpace x') + ) a + in ( maybe True getAll needs_space + , bool id maybeParensAST $ maybe False getAll needs_parens + ) + + +------------------------------------------------------------------------------ + +graftExprWithM :: + forall m a. 
+ (Fail.MonadFail m, Data a) => + SrcSpan -> + (LHsExpr GhcPs -> TransformT m (Maybe (LHsExpr GhcPs))) -> + Graft m a +graftExprWithM dst trans = Graft $ \dflags a -> do + let (needs_space, mk_parens) = getNeedsSpaceAndParenthesize dst a + + everywhereM' + ( mkM $ + \case + val@(L src _ :: LHsExpr GhcPs) + | locA src `eqSrcSpan` dst -> do + mval <- trans val + case mval of + Just val' -> do + val'' <- + hoistTransform (either Fail.fail pure) + (annotate @AnnListItem @(HsExpr GhcPs) dflags needs_space (mk_parens val')) + pure val'' + Nothing -> pure val + l -> pure l + ) + a + +graftWithM :: + forall ast m a l. + (Fail.MonadFail m, Data a, ASTElement l ast) => + SrcSpan -> + (LocatedAn l ast -> TransformT m (Maybe (LocatedAn l ast))) -> + Graft m a +graftWithM dst trans = Graft $ \dflags a -> do + everywhereM' + ( mkM $ + \case + val@(L src _ :: LocatedAn l ast) + | locA src `eqSrcSpan` dst -> do + mval <- trans val + case mval of + Just val' -> do + val'' <- + hoistTransform (either Fail.fail pure) $ + annotate dflags False $ maybeParensAST val' + pure val'' + Nothing -> pure val + l -> pure l + ) + a + +-- | Run the given transformation only on the smallest node in the tree that +-- contains the 'SrcSpan'. +genericGraftWithSmallestM :: + forall m a ast. + (Monad m, Data a, Typeable ast) => + -- | The type of nodes we'd like to consider when finding the smallest. + Proxy (Located ast) -> + SrcSpan -> + (DynFlags -> ast -> GenericM (TransformT m)) -> + Graft m a +genericGraftWithSmallestM proxy dst trans = Graft $ \dflags -> + smallestM (genericIsSubspan proxy dst) (trans dflags) + +-- | Run the given transformation only on the largest node in the tree that +-- contains the 'SrcSpan'. +genericGraftWithLargestM :: + forall m a ast. + (Monad m, Data a, Typeable ast) => + -- | The type of nodes we'd like to consider when finding the largest. + Proxy (Located ast) -> + SrcSpan -> + (DynFlags -> ast -> GenericM (TransformT m)) -> + Graft m a +genericGraftWithLargestM proxy dst trans = Graft $ \dflags -> + largestM (genericIsSubspan proxy dst) (trans dflags) + + +graftDecls :: + forall a. + (HasDecls a) => + SrcSpan -> + [LHsDecl GhcPs] -> + Graft (Either String) a +graftDecls dst decs0 = Graft $ \dflags a -> do + decs <- forM decs0 $ \decl -> do + annotateDecl dflags decl + let go [] = DL.empty + go (L src e : rest) + | locA src `eqSrcSpan` dst = DL.fromList decs <> DL.fromList rest + | otherwise = DL.singleton (L src e) <> go rest + modifyDeclsT (pure . DL.toList . go) a + + +-- | Replace the smallest declaration whose SrcSpan satisfies the given condition with a new +-- list of declarations. +-- +-- For example, if you would like to move a where-clause-defined variable to the same +-- level as its parent HsDecl, you could use this function. +-- +-- When matching declaration is found in the sub-declarations of `a`, `Just r` is also returned with the new `a`. +-- If no declaration matched, then `Nothing` is returned. +modifySmallestDeclWithM :: + forall a m r. 
+ (HasDecls a, Monad m) => + (SrcSpan -> m Bool) -> + (LHsDecl GhcPs -> TransformT m ([LHsDecl GhcPs], r)) -> + a -> + TransformT m (a, Maybe r) +modifySmallestDeclWithM validSpan f a = do + let modifyMatchingDecl [] = pure (DL.empty, Nothing) + modifyMatchingDecl (ldecl@(L src _) : rest) = + TransformT (lift $ validSpan $ locA src) >>= \case + True -> do + (decs', r) <- f ldecl + pure (DL.fromList decs' <> DL.fromList rest, Just r) + False -> first (DL.singleton ldecl <>) <$> modifyMatchingDecl rest + modifyDeclsT' (fmap (first DL.toList) . modifyMatchingDecl) a + +#if MIN_VERSION_ghc(9,11,0) +generatedAnchor :: DeltaPos -> Anchor +generatedAnchor dp = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) dp [] +#elif MIN_VERSION_ghc(9,9,0) +generatedAnchor :: DeltaPos -> Anchor +generatedAnchor dp = EpaDelta dp [] +#else +generatedAnchor :: AnchorOperation -> Anchor +generatedAnchor anchorOp = GHC.Anchor (GHC.realSrcSpan generatedSrcSpan) anchorOp +#endif + +setAnchor :: Anchor -> SrcSpanAnnN -> SrcSpanAnnN +#if MIN_VERSION_ghc(9,9,0) +setAnchor anc (EpAnn _ nameAnn comments) = + EpAnn anc nameAnn comments +#else +setAnchor anc (SrcSpanAnn (EpAnn _ nameAnn comments) span) = + SrcSpanAnn (EpAnn anc nameAnn comments) span +setAnchor _ spanAnnN = spanAnnN +#endif + +removeTrailingAnns :: SrcSpanAnnN -> SrcSpanAnnN +#if MIN_VERSION_ghc(9,9,0) +removeTrailingAnns (EpAnn anc nameAnn comments) = + let nameAnnSansTrailings = nameAnn {nann_trailing = []} + in EpAnn anc nameAnnSansTrailings comments +#else +removeTrailingAnns (SrcSpanAnn (EpAnn anc nameAnn comments) span) = + let nameAnnSansTrailings = nameAnn {nann_trailing = []} + in SrcSpanAnn (EpAnn anc nameAnnSansTrailings comments) span +removeTrailingAnns spanAnnN = spanAnnN +#endif + +-- | Modify the type signature for the given IdP. This function handles splitting a multi-sig +-- SigD into multiple SigD if the type signature is changed. +-- +-- For example, update the type signature for `foo` from `Int` to `Bool`: +-- +-- - foo :: Int +-- + foo :: Bool +-- +-- - foo, bar :: Int +-- + bar :: Int +-- + foo :: Bool +-- +-- - foo, bar, baz :: Int +-- + bar, baz :: Int +-- + foo :: Bool +modifySigWithM :: + forall a m. + (HasDecls a, Monad m, ExactPrint a) => + IdP GhcPs -> + (LHsSigType GhcPs -> LHsSigType GhcPs) -> + a -> + TransformT m a +modifySigWithM queryId f a = do + let modifyMatchingSigD :: [LHsDecl GhcPs] -> TransformT m (DL.DList (LHsDecl GhcPs)) + modifyMatchingSigD [] = pure DL.empty + modifyMatchingSigD (ldecl@(L annSigD (SigD xsig (TypeSig xTypeSig ids (HsWC xHsWc lHsSig)))) : rest) + | queryId `elem` (unLoc <$> ids) = do + let newSig = f lHsSig + -- If this signature update caused no change, then we don't need to split up multi-signatures + if newSig `geq` lHsSig + then pure $ DL.singleton ldecl <> DL.fromList rest + else case partition ((== queryId) . 
unLoc) ids of + ([L annMatchedId matchedId], otherIds) -> + let matchedId' = L (setAnchor genAnchor0 $ removeTrailingAnns annMatchedId) matchedId + matchedIdSig = + let sig' = SigD xsig (TypeSig xTypeSig [matchedId'] (HsWC xHsWc newSig)) + epAnn = bool (noAnnSrcSpanDP +#if !MIN_VERSION_ghc(9,9,0) + generatedSrcSpan +#endif + (DifferentLine 1 0)) + annSigD (null otherIds) + in L epAnn sig' + otherSig = case otherIds of + [] -> [] +#if MIN_VERSION_ghc(9,9,0) + (L epAnn id1:ids) -> +#else + (L (SrcSpanAnn epAnn span) id1:ids) -> +#endif + [ + let epAnn' = case epAnn of + EpAnn _ nameAnn commentsId1 -> EpAnn genAnchor0 nameAnn commentsId1 +#if MIN_VERSION_ghc(9,9,0) + ids' = L epAnn' id1:ids +#else + EpAnnNotUsed -> EpAnn genAnchor0 mempty emptyComments + ids' = L (SrcSpanAnn epAnn' span) id1:ids +#endif + ids'' = ids' & _last %~ first removeTrailingAnns + in L annSigD (SigD xsig (TypeSig xTypeSig ids'' (HsWC xHsWc lHsSig))) + ] + in pure $ DL.fromList otherSig <> DL.singleton matchedIdSig <> DL.fromList rest + _ -> error "multiple ids matched" + modifyMatchingSigD (ldecl : rest) = (DL.singleton ldecl <>) <$> modifyMatchingSigD rest + modifyDeclsT (fmap DL.toList . modifyMatchingSigD) $ makeDeltaAst a + +genAnchor0 :: Anchor +genAnchor0 = generatedAnchor m0 + +genAnchor1 :: Anchor +genAnchor1 = generatedAnchor m1 + +#if MIN_VERSION_ghc(9,9,0) +m0, m1 :: DeltaPos +m0 = SameLine 0 +m1 = SameLine 1 +#endif + + +-- | Apply a transformation to the decls contained in @t@ +modifyDeclsT' :: (HasDecls t, HasTransform m) + => ([LHsDecl GhcPs] -> m ([LHsDecl GhcPs], r)) + -> t -> m (t, r) +modifyDeclsT' action t = do +#if MIN_VERSION_ghc_exactprint(1,10,0) + decls <- pure $ hsDecls t +#else + decls <- liftT $ hsDecls t +#endif + (decls', r) <- action decls +#if MIN_VERSION_ghc_exactprint(1,10,0) + t' <- pure $ replaceDecls t decls' +#else + t' <- liftT $ replaceDecls t decls' +#endif + pure (t', r) + +-- | Modify each LMatch in a MatchGroup +modifyMgMatchesT :: + Monad m => + MatchGroup GhcPs (LHsExpr GhcPs) -> + (LMatch GhcPs (LHsExpr GhcPs) -> TransformT m (LMatch GhcPs (LHsExpr GhcPs))) -> + TransformT m (MatchGroup GhcPs (LHsExpr GhcPs)) +modifyMgMatchesT mg f = fst <$> modifyMgMatchesT' mg (fmap (, ()) . f) () ((.) pure . const) + +-- | Modify the each LMatch in a MatchGroup +modifyMgMatchesT' :: + Monad m => + MatchGroup GhcPs (LHsExpr GhcPs) -> + (LMatch GhcPs (LHsExpr GhcPs) -> TransformT m (LMatch GhcPs (LHsExpr GhcPs), r)) -> + r -> + (r -> r -> m r) -> + TransformT m (MatchGroup GhcPs (LHsExpr GhcPs), r) +modifyMgMatchesT' (MG xMg (L locMatches matches)) f def combineResults = do + (unzip -> (matches', rs)) <- mapM f matches + r' <- TransformT $ lift $ foldM combineResults def rs + pure (MG xMg (L locMatches matches'), r') + +graftSmallestDeclsWithM :: + forall a. + (HasDecls a) => + SrcSpan -> + (LHsDecl GhcPs -> TransformT (Either String) (Maybe [LHsDecl GhcPs])) -> + Graft (Either String) a +graftSmallestDeclsWithM dst toDecls = Graft $ \dflags a -> do + let go [] = pure DL.empty + go (e@(L src _) : rest) + | dst `isSubspanOf` locA src = toDecls e >>= \case + Just decs0 -> do + decs <- forM decs0 $ \decl -> + annotateDecl dflags decl + pure $ DL.fromList decs <> DL.fromList rest + Nothing -> (DL.singleton e <>) <$> go rest + | otherwise = (DL.singleton e <>) <$> go rest + modifyDeclsT (fmap DL.toList . go) a + +graftDeclsWithM :: + forall a m. 
+ (HasDecls a, Fail.MonadFail m) => + SrcSpan -> + (LHsDecl GhcPs -> TransformT m (Maybe [LHsDecl GhcPs])) -> + Graft m a +graftDeclsWithM dst toDecls = Graft $ \dflags a -> do + let go [] = pure DL.empty + go (e@(L src _) : rest) + | locA src `eqSrcSpan` dst = toDecls e >>= \case + Just decs0 -> do + decs <- forM decs0 $ \decl -> + hoistTransform (either Fail.fail pure) $ + annotateDecl dflags decl + pure $ DL.fromList decs <> DL.fromList rest + Nothing -> (DL.singleton e <>) <$> go rest + | otherwise = (DL.singleton e <>) <$> go rest + modifyDeclsT (fmap DL.toList . go) a + + +-- In 9.2+, we need `Default l` to do `setPrecedingLines` on annotated elements. +-- In older versions, we pass around annotations explicitly, so the instance isn't needed. +class + ( Data ast + , Typeable l + , Outputable l + , Outputable ast +#if !MIN_VERSION_ghc(9,9,0) + , Default l +#endif + ) => ASTElement l ast | ast -> l where + parseAST :: Parser (LocatedAn l ast) + maybeParensAST :: LocatedAn l ast -> LocatedAn l ast + {- | Construct a 'Graft', replacing the node at the given 'SrcSpan' with + the given @Located ast@. The node at that position must already be + a @Located ast@, or this is a no-op. + -} + graft :: + forall a. + (Data a) => + SrcSpan -> + LocatedAn l ast -> + Graft (Either String) a + graft dst = graft' True dst . maybeParensAST + +instance p ~ GhcPs => ASTElement AnnListItem (HsExpr p) where + parseAST = parseExpr + maybeParensAST = parenthesize + graft = graftExpr + +instance p ~ GhcPs => ASTElement AnnListItem (Pat p) where + parseAST = parsePattern + maybeParensAST = parenthesizePat appPrec + +instance p ~ GhcPs => ASTElement AnnListItem (HsType p) where + parseAST = parseType + maybeParensAST = parenthesizeHsType appPrec + +instance p ~ GhcPs => ASTElement AnnListItem (HsDecl p) where + parseAST = parseDecl + maybeParensAST = id + +instance p ~ GhcPs => ASTElement AnnListItem (ImportDecl p) where + parseAST = parseImport + maybeParensAST = id + +instance ASTElement NameAnn RdrName where + parseAST df fp = parseWith df fp parseIdentifier + maybeParensAST = id + +------------------------------------------------------------------------------ + + +------------------------------------------------------------------------------ + + +-- | Given an 'LHSExpr', compute its exactprint annotations. +-- Note that this function will throw away any existing annotations (and format) +annotate :: ASTElement l ast + => DynFlags -> Bool -> LocatedAn l ast -> TransformT (Either String) (LocatedAn l ast) +annotate dflags needs_space ast = do + uniq <- show <$> uniqueSrcSpanT + let rendered = render dflags ast + expr' <- TransformT $ lift $ mapLeft (showSDoc dflags . ppr) $ parseAST dflags uniq rendered + pure $ setPrecedingLines expr' 0 (bool 0 1 needs_space) + +-- | Given an 'LHsDecl', compute its exactprint annotations. +annotateDecl :: DynFlags -> LHsDecl GhcPs -> TransformT (Either String) (LHsDecl GhcPs) +annotateDecl dflags ast = do + uniq <- show <$> uniqueSrcSpanT + let rendered = render dflags ast + expr' <- TransformT $ lift $ mapLeft (showSDoc dflags . ppr) $ parseDecl dflags uniq rendered + pure $ setPrecedingLines expr' 1 0 + +------------------------------------------------------------------------------ + +-- | Print out something 'Outputable'. +render :: Outputable a => DynFlags -> a -> String +render dflags = showSDoc dflags . ppr + +------------------------------------------------------------------------------ + +-- | Put parentheses around an expression if required. 
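+--
+-- Illustrative note (an assumption, not taken from this change): a bare
+-- variable is returned unchanged, while expressions that do not bind more
+-- tightly than function application, such as @f x@ or @a + b@, are wrapped in
+-- an 'HsPar' node, so grafting them into an argument position cannot change
+-- how the result parses.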
+parenthesize :: LHsExpr GhcPs -> LHsExpr GhcPs +parenthesize = parenthesizeHsExpr appPrec + +------------------------------------------------------------------------------ + +-- | Equality on SrcSpan's. +-- Ignores the (Maybe BufSpan) field of SrcSpan's. +eqSrcSpan :: SrcSpan -> SrcSpan -> Bool +eqSrcSpan l r = leftmost_smallest l r == EQ + +addParensToCtxt :: Maybe EpaLocation -> AnnContext -> AnnContext +addParensToCtxt close_dp = addOpen . addClose + where +#if MIN_VERSION_ghc(9,11,0) + addOpen it@AnnContext{ac_open = []} = it{ac_open = [EpTok (epl 0)]} +#else + addOpen it@AnnContext{ac_open = []} = it{ac_open = [epl 0]} +#endif + addOpen other = other + addClose it +#if MIN_VERSION_ghc(9,11,0) + | Just c <- close_dp = it{ac_close = [EpTok c]} + | AnnContext{ac_close = []} <- it = it{ac_close = [EpTok (epl 0)]} +#else + | Just c <- close_dp = it{ac_close = [c]} + | AnnContext{ac_close = []} <- it = it{ac_close = [epl 0]} +#endif + | otherwise = it + +epl :: Int -> EpaLocation +#if MIN_VERSION_ghc(9,11,0) +epl n = EpaDelta (UnhelpfulSpan UnhelpfulNoLocationInfo) (SameLine n) [] +#else +epl n = EpaDelta (SameLine n) [] +#endif + +epAnn :: SrcSpan -> ann -> EpAnn ann +epAnn srcSpan anns = EpAnn (spanAsAnchor srcSpan) anns emptyComments + +modifyAnns :: LocatedAn a ast -> (a -> a) -> LocatedAn a ast +#if MIN_VERSION_ghc(9,9,0) +modifyAnns x f = first (fmap f) x +#else +modifyAnns x f = first ((fmap.fmap) f) x +#endif + +removeComma :: SrcSpanAnnA -> SrcSpanAnnA +#if MIN_VERSION_ghc(9,9,0) +removeComma (EpAnn anc (AnnListItem as) cs) + = EpAnn anc (AnnListItem (filter (not . isCommaAnn) as)) cs + where + isCommaAnn AddCommaAnn{} = True + isCommaAnn _ = False +#else +removeComma it@(SrcSpanAnn EpAnnNotUsed _) = it +removeComma (SrcSpanAnn (EpAnn anc (AnnListItem as) cs) l) + = SrcSpanAnn (EpAnn anc (AnnListItem (filter (not . isCommaAnn) as)) cs) l + where + isCommaAnn AddCommaAnn{} = True + isCommaAnn _ = False +#endif + +addParens :: Bool -> GHC.NameAnn -> GHC.NameAnn +#if MIN_VERSION_ghc(9,11,0) +addParens True it@NameAnn{} = + it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) } +addParens True it@NameAnnCommas{} = + it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) } +addParens True it@NameAnnOnly{} = + it{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)) } +addParens True it@NameAnnTrailing{} = + NameAnn{nann_adornment = NameParens (EpTok (epl 0)) (EpTok (epl 0)), nann_name = epl 0, nann_trailing = nann_trailing it} +#else +addParens True it@NameAnn{} = + it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 } +addParens True it@NameAnnCommas{} = + it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 } +addParens True it@NameAnnOnly{} = + it{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0 } +addParens True NameAnnTrailing{..} = + NameAnn{nann_adornment = NameParens, nann_open=epl 0, nann_close=epl 0, nann_name = epl 0, ..} +#endif +addParens _ it = it + +removeTrailingComma :: GenLocated SrcSpanAnnA ast -> GenLocated SrcSpanAnnA ast +removeTrailingComma = flip modifyAnns $ \(AnnListItem l) -> AnnListItem $ filter (not . 
isCommaAnn) l + +isCommaAnn :: TrailingAnn -> Bool +isCommaAnn AddCommaAnn{} = True +isCommaAnn _ = False diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs new file mode 100644 index 0000000000..1fba6b67e5 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs @@ -0,0 +1,2070 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs #-} + +module Development.IDE.Plugin.CodeAction + ( + mkExactprintPluginDescriptor, + iePluginDescriptor, + typeSigsPluginDescriptor, + bindingsPluginDescriptor, + fillHolePluginDescriptor, + extendImportPluginDescriptor, + -- * For testing + matchRegExMultipleImports, + extractNotInScopeName, + NotInScope(..) + ) where + +import Control.Applicative ((<|>)) +import Control.Arrow (second, + (&&&), + (>>>)) +import Control.Concurrent.STM.Stats (atomically) +import Control.Monad.Extra +import Control.Monad.IO.Class +import Control.Monad.Trans.Except (ExceptT (ExceptT)) +import Control.Monad.Trans.Maybe +import Data.Char +import qualified Data.DList as DL +import Data.Function +import Data.Functor +import qualified Data.HashMap.Strict as Map +import qualified Data.HashSet as Set +import Data.List.Extra +import Data.List.NonEmpty (NonEmpty ((:|))) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as M +import Data.Maybe +import Data.Ord (comparing) +import qualified Data.Set as S +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getUriContents) +import Development.IDE.Core.Rules +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service +import Development.IDE.Core.Shake hiding (Log) +import Development.IDE.GHC.Compat hiding + (ImplicitPrelude) +#if !MIN_VERSION_ghc(9,11,0) +import Development.IDE.GHC.Compat.Util +#endif +import Development.IDE.GHC.Error +import Development.IDE.GHC.ExactPrint +import qualified Development.IDE.GHC.ExactPrint as E +import Development.IDE.GHC.Util (printOutputable, + printRdrName) +import Development.IDE.Plugin.CodeAction.Args +import Development.IDE.Plugin.CodeAction.ExactPrint +import Development.IDE.Plugin.CodeAction.PositionIndexed +import Development.IDE.Plugin.CodeAction.Util +import Development.IDE.Plugin.Completions.Types +import qualified Development.IDE.Plugin.Plugins.AddArgument +import Development.IDE.Plugin.Plugins.Diagnostic +import Development.IDE.Plugin.Plugins.FillHole (suggestFillHole) +import Development.IDE.Plugin.Plugins.FillTypeWildcard (suggestFillTypeWildcard) +import Development.IDE.Plugin.Plugins.ImportUtils +import Development.IDE.Plugin.TypeLenses (suggestSignature) +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Exports +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import GHC (DeltaPos (..), + EpAnn (..), + LEpaComment) +import GHC.Iface.Ext.Types (ContextInfo (..), + IdentifierDetails (..)) +import qualified GHC.LanguageExtensions as Lang +import Ide.Logger hiding + (group) +import Ide.PluginUtils (extendToFullLines, + subRange) +import Ide.Types +import Language.LSP.Protocol.Message (Method (..), + SMethod (..)) +import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (..), + CodeAction (..), + CodeActionKind (CodeActionKind_QuickFix), + CodeActionParams 
(CodeActionParams), + Command, + MessageType (..), + Null (Null), + ShowMessageParams (..), + TextDocumentIdentifier (TextDocumentIdentifier), + TextEdit (TextEdit, _range), + UInt, + WorkspaceEdit (WorkspaceEdit, _changeAnnotations, _changes, _documentChanges), + type (|?) (InL, InR), + uriToFilePath) +import qualified Text.Fuzzy.Parallel as TFP +import Text.Regex.TDFA ((=~), (=~~)) + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if !MIN_VERSION_ghc(9,9,0) +import Development.IDE.GHC.Compat.ExactPrint (makeDeltaAst) +import GHC (AddEpAnn (AddEpAnn), + Anchor (anchor_op), + AnchorOperation (..), + AnnsModule (am_main), + EpaLocation (..)) +#endif + +#if MIN_VERSION_ghc(9,9,0) && !MIN_VERSION_ghc(9,11,0) +import GHC (AddEpAnn (AddEpAnn), + AnnsModule (am_main), + EpaLocation, + EpaLocation' (..), + HasLoc (..)) +#endif + +#if MIN_VERSION_ghc(9,11,0) +import GHC (AnnsModule (am_where), + EpToken (..), + EpaLocation, + EpaLocation' (..), + HasLoc (..)) +#endif + + +------------------------------------------------------------------------------------------------- + +-- | Generate code actions. +codeAction :: PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeAction state _ (CodeActionParams _ _ (TextDocumentIdentifier uri) range _) = do + contents <- liftIO $ runAction "hls-refactor-plugin.codeAction.getUriContents" state $ getUriContents $ toNormalizedUri uri + liftIO $ do + let mbFile = toNormalizedFilePath' <$> uriToFilePath uri + allDiags <- atomically $ fmap fdLspDiagnostic . filter (\d -> mbFile == Just (fdFilePath d)) <$> getDiagnostics state + (join -> parsedModule) <- runAction "GhcideCodeActions.getParsedModule" state $ getParsedModule `traverse` mbFile + let + textContents = fmap Rope.toText contents + actions = caRemoveRedundantImports parsedModule textContents allDiags range uri + <> caRemoveInvalidExports parsedModule textContents allDiags range uri + pure $ InL actions + +------------------------------------------------------------------------------------------------- + +iePluginDescriptor :: Recorder (WithPriority E.Log) -> PluginId -> PluginDescriptor IdeState +iePluginDescriptor recorder plId = + let old = + mkGhcideCAsPlugin [ + wrap suggestExportUnusedTopBinding + , wrap suggestModuleTypo + , wrap suggestFixConstructorImport + , wrap suggestExtendImport + , wrap suggestImportDisambiguation + , wrap suggestNewOrExtendImportForClassMethod + , wrap suggestHideShadow + , wrap suggestNewImport + , wrap suggestAddRecordFieldImport + ] + plId + "Provides various quick fixes" + in mkExactprintPluginDescriptor recorder $ old {pluginHandlers = pluginHandlers old <> mkPluginHandler SMethod_TextDocumentCodeAction codeAction } + +typeSigsPluginDescriptor :: Recorder (WithPriority E.Log) -> PluginId -> PluginDescriptor IdeState +typeSigsPluginDescriptor recorder plId = mkExactprintPluginDescriptor recorder $ + mkGhcideCAsPlugin [ + wrap $ suggestSignature True + , wrap suggestFillTypeWildcard + , wrap suggestAddTypeAnnotationToSatisfyConstraints + , wrap removeRedundantConstraints + , wrap suggestConstraint + ] + plId + "Provides various quick fixes for type signatures" + +bindingsPluginDescriptor :: Recorder (WithPriority E.Log) -> PluginId -> PluginDescriptor IdeState +bindingsPluginDescriptor recorder plId = mkExactprintPluginDescriptor recorder $ + mkGhcideCAsPlugin [ + wrap suggestReplaceIdentifier + , wrap suggestImplicitParameter + , wrap suggestNewDefinition + , wrap Development.IDE.Plugin.Plugins.AddArgument.plugin + , wrap 
suggestDeleteUnusedBinding + ] + plId + "Provides various quick fixes for bindings" + +fillHolePluginDescriptor :: Recorder (WithPriority E.Log) -> PluginId -> PluginDescriptor IdeState +fillHolePluginDescriptor recorder plId = mkExactprintPluginDescriptor recorder (mkGhcideCAPlugin (wrap suggestFillHole) plId "Provides a code action to fill a hole") + +extendImportPluginDescriptor :: Recorder (WithPriority E.Log) -> PluginId -> PluginDescriptor IdeState +extendImportPluginDescriptor recorder plId = mkExactprintPluginDescriptor recorder $ (defaultPluginDescriptor plId "Provides a command to extend the import list") + { pluginCommands = [extendImportCommand] } + + +-- | Add the ability for a plugin to call GetAnnotatedParsedSource +mkExactprintPluginDescriptor :: Recorder (WithPriority E.Log) -> PluginDescriptor a -> PluginDescriptor a +mkExactprintPluginDescriptor recorder desc = desc { pluginRules = pluginRules desc >> getAnnotatedParsedSourceRule recorder } + +------------------------------------------------------------------------------------------------- + + +extendImportCommand :: PluginCommand IdeState +extendImportCommand = + PluginCommand (CommandId extendImportCommandId) "additional edits for a completion" extendImportHandler + +extendImportHandler :: CommandFunction IdeState ExtendImport +extendImportHandler ideState _ edit@ExtendImport {..} = ExceptT $ do + res <- liftIO $ runMaybeT $ extendImportHandler' ideState edit + whenJust res $ \(nfp, wedit@WorkspaceEdit {_changes}) -> do + whenJust (listToMaybe =<< listToMaybe . M.elems =<< _changes) $ \TextEdit {_range} -> do + let srcSpan = rangeToSrcSpan nfp _range + pluginSendNotification SMethod_WindowShowMessage $ + ShowMessageParams MessageType_Info $ + "Import " + <> maybe ("‘" <> newThing) (\x -> "‘" <> x <> " (" <> newThing <> ")") thingParent + <> "’ from " + <> importName + <> " (at " + <> printOutputable srcSpan + <> ")" + void $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wedit) (\_ -> pure ()) + return $ Right $ InR Null + +extendImportHandler' :: IdeState -> ExtendImport -> MaybeT IO (NormalizedFilePath, WorkspaceEdit) +extendImportHandler' ideState ExtendImport {..} + | Just fp <- uriToFilePath doc, + nfp <- toNormalizedFilePath' fp = + do + (ModSummaryResult {..}, ps, contents) <- MaybeT $ liftIO $ + runAction "extend import" ideState $ + runMaybeT $ do + -- We want accurate edits, so do not use stale data here + msr <- MaybeT $ use GetModSummaryWithoutTimestamps nfp + ps <- MaybeT $ use GetAnnotatedParsedSource nfp + (_, contents) <- MaybeT $ use GetFileContents nfp + return (msr, ps, contents) + let df = ms_hspp_opts msrModSummary + wantedModule = mkModuleName (T.unpack importName) + wantedQual = mkModuleName . 
T.unpack <$> importQual + existingImport = find (isWantedModule wantedModule wantedQual) msrImports + case existingImport of + Just imp -> do + fmap (nfp,) $ liftEither $ + rewriteToWEdit df doc $ + extendImport (T.unpack <$> thingParent) (T.unpack newThing) +#if MIN_VERSION_ghc(9,9,0) + imp +#else + (makeDeltaAst imp) +#endif + + Nothing -> do + let qns = (,) <$> importQual <*> Just (qualifiedImportStyle df) + n = newImport importName sym qns False + sym = if isNothing importQual then Just it else Nothing + it = case thingParent of + Nothing -> newThing + Just p -> p <> "(" <> newThing <> ")" + t <- liftMaybe $ snd <$> newImportToEdit n ps (Rope.toText (fromMaybe mempty contents)) + return (nfp, WorkspaceEdit {_changes=Just (M.singleton doc [t]), _documentChanges=Nothing, _changeAnnotations=Nothing}) + | otherwise = + mzero + +isWantedModule :: ModuleName -> Maybe ModuleName -> GenLocated l (ImportDecl GhcPs) -> Bool +isWantedModule wantedModule Nothing (L _ it@ImportDecl{ ideclName + , ideclImportList = Just (Exactly, _) + }) = + not (isQualifiedImport it) && unLoc ideclName == wantedModule +isWantedModule wantedModule (Just qual) (L _ ImportDecl{ ideclAs, ideclName + , ideclImportList = Just (Exactly, _) + }) = + unLoc ideclName == wantedModule && (wantedModule == qual || (unLoc <$> ideclAs) == Just qual) +isWantedModule _ _ _ = False + + +liftMaybe :: Monad m => Maybe a -> MaybeT m a +liftMaybe a = MaybeT $ pure a + +liftEither :: Monad m => Either e a -> MaybeT m a +liftEither (Left _) = mzero +liftEither (Right x) = return x + +------------------------------------------------------------------------------------------------- + +findSigOfDecl :: p ~ GhcPass p0 => (IdP p -> Bool) -> [LHsDecl p] -> Maybe (Sig p) +findSigOfDecl pred decls = + listToMaybe + [ sig + | L _ (SigD _ sig@(TypeSig _ idsSig _)) <- decls, + any (pred . unLoc) idsSig + ] + +findSigOfDeclRanged :: forall p p0 . p ~ GhcPass p0 => Range -> [LHsDecl p] -> Maybe (Sig p) +findSigOfDeclRanged range decls = do + dec <- findDeclContainingLoc (_start range) decls + case dec of + L _ (SigD _ sig@TypeSig {}) -> Just sig + L _ (ValD _ (bind :: HsBind p)) -> findSigOfBind range bind + _ -> Nothing + +findSigOfBind :: forall p p0. 
p ~ GhcPass p0 => Range -> HsBind p -> Maybe (Sig p) +findSigOfBind range bind = + case bind of + FunBind {} -> findSigOfLMatch (unLoc $ mg_alts (fun_matches bind)) + _ -> Nothing + where + findSigOfLMatch :: [LMatch p (LHsExpr p)] -> Maybe (Sig p) + findSigOfLMatch ls = do + match <- findDeclContainingLoc (_start range) ls + let grhs = m_grhss $ unLoc match + msum + [findSigOfBinds range (grhssLocalBinds grhs) -- where clause + , do + grhs <- findDeclContainingLoc (_start range) (grhssGRHSs grhs) + case unLoc grhs of + GRHS _ _ bd -> findSigOfExpr (unLoc bd) + ] + + findSigOfExpr :: HsExpr p -> Maybe (Sig p) + findSigOfExpr = go + where +#if !MIN_VERSION_ghc(9,9,0) + go (HsLet _ _ binds _ _) = findSigOfBinds range binds +#else + go (HsLet _ binds _) = findSigOfBinds range binds +#endif + go (HsDo _ _ stmts) = do + stmtlr <- unLoc <$> findDeclContainingLoc (_start range) (unLoc stmts) + case stmtlr of + LetStmt _ lhsLocalBindsLR -> findSigOfBinds range lhsLocalBindsLR + _ -> Nothing + go _ = Nothing + +findSigOfBinds :: p ~ GhcPass p0 => Range -> HsLocalBinds p -> Maybe (Sig p) +findSigOfBinds range = go + where + go (HsValBinds _ (ValBinds _ binds lsigs)) = + case unLoc <$> findDeclContainingLoc (_start range) lsigs of + Just sig' -> Just sig' + Nothing -> do +#if MIN_VERSION_ghc(9,11,0) + lHsBindLR <- findDeclContainingLoc (_start range) binds +#else + lHsBindLR <- findDeclContainingLoc (_start range) (bagToList binds) +#endif + findSigOfBind range (unLoc lHsBindLR) + go _ = Nothing + +findInstanceHead :: (Outputable (HsType p), p ~ GhcPass p0) => DynFlags -> String -> [LHsDecl p] -> Maybe (LHsType p) +findInstanceHead df instanceHead decls = + listToMaybe + [ hsib_body + | L _ (InstD _ (ClsInstD _ ClsInstDecl {cid_poly_ty = (unLoc -> HsSig {sig_body = hsib_body})})) <- decls, + showSDoc df (ppr hsib_body) == instanceHead + ] + +#if MIN_VERSION_ghc(9,9,0) +findDeclContainingLoc :: (Foldable t, HasLoc l) => Position -> t (GenLocated l e) -> Maybe (GenLocated l e) +#else +findDeclContainingLoc :: Foldable t => Position -> t (GenLocated (SrcSpanAnn' a) e) -> Maybe (GenLocated (SrcSpanAnn' a) e) +#endif +findDeclContainingLoc loc = find (\(L l _) -> loc `isInsideSrcSpan` locA l) + +-- Single: +-- This binding for ‘mod’ shadows the existing binding +-- imported from ‘Prelude’ at haskell-language-server/ghcide/src/Development/IDE/Plugin/CodeAction.hs:10:8-40 +-- (and originally defined in ‘GHC.Real’)typecheck(-Wname-shadowing) +-- Multi: +--This binding for ‘pack’ shadows the existing bindings +-- imported from ‘Data.ByteString’ at B.hs:6:1-22 +-- imported from ‘Data.ByteString.Lazy’ at B.hs:8:1-27 +-- imported from ‘Data.Text’ at B.hs:7:1-16 +suggestHideShadow :: ParsedSource -> T.Text -> Maybe TcModuleResult -> Maybe HieAstResult -> Diagnostic -> [(T.Text, [Either TextEdit Rewrite])] +suggestHideShadow ps fileContents mTcM mHar Diagnostic {_message, _range} + | Just [identifier, modName, s] <- + matchRegexUnifySpaces + _message + "This binding for ‘([^`]+)’ shadows the existing binding imported from ‘([^`]+)’ at ([^ ]*)" = + suggests identifier modName s + | Just [identifier] <- + matchRegexUnifySpaces + _message + "This binding for ‘([^`]+)’ shadows the existing bindings", + Just matched <- allMatchRegexUnifySpaces _message "imported from ‘([^’]+)’ at ([^ ]*)", + mods <- [(modName, s) | [_, modName, s] <- matched], + result <- nubOrdBy (compare `on` fst) $ mods >>= uncurry (suggests identifier), + hideAll <- ("Hide " <> identifier <> " from all occurrence imports", concatMap snd result) = + 
result <> [hideAll] + | otherwise = [] + where + L _ HsModule {hsmodImports} = ps + + suggests identifier modName s + | Just tcM <- mTcM, + Just har <- mHar, + [s'] <- [x | (x, "") <- readSrcSpan $ T.unpack s], + isUnusedImportedId tcM har (T.unpack identifier) (T.unpack modName) (RealSrcSpan s' Nothing), + mDecl <- findImportDeclByModuleName hsmodImports $ T.unpack modName, + title <- "Hide " <> identifier <> " from " <> modName = + if modName == "Prelude" && null mDecl + then maybeToList $ (\(_, te) -> (title, [Left te])) <$> newImportToEdit (hideImplicitPreludeSymbol identifier) ps fileContents + else maybeToList $ (title,) . pure . pure . hideSymbol (T.unpack identifier) <$> mDecl + | otherwise = [] + +findImportDeclByModuleName :: [LImportDecl GhcPs] -> String -> Maybe (LImportDecl GhcPs) +findImportDeclByModuleName decls modName = flip find decls $ \case + (L _ ImportDecl {..}) -> modName == moduleNameString (unLoc ideclName) + +isTheSameLine :: SrcSpan -> SrcSpan -> Bool +isTheSameLine s1 s2 + | Just sl1 <- getStartLine s1, + Just sl2 <- getStartLine s2 = + sl1 == sl2 + | otherwise = False + where + getStartLine x = srcLocLine . realSrcSpanStart <$> realSpan x + +isUnusedImportedId :: TcModuleResult -> HieAstResult -> String -> String -> SrcSpan -> Bool +isUnusedImportedId + TcModuleResult {tmrTypechecked = TcGblEnv {tcg_imports = ImportAvails {imp_mods}}} + HAR {refMap} + identifier + modName + importSpan + | occ <- mkVarOcc identifier, +#if MIN_VERSION_ghc(9,11,0) + impModsVals <- importedByUser . concat $ M.elems imp_mods, +#else + impModsVals <- importedByUser . concat $ moduleEnvElts imp_mods, +#endif + Just rdrEnv <- + listToMaybe + [ imv_all_exports + | ImportedModsVal {..} <- impModsVals, + imv_name == mkModuleName modName, + isTheSameLine imv_span importSpan + ], + [GRE {gre_name = name}] <- lookupGlobalRdrEnv rdrEnv occ, + importedIdentifier <- Right name, + refs <- M.lookup importedIdentifier refMap = + maybe True (not . any (\(_, IdentifierDetails {..}) -> identInfo == S.singleton Use)) refs + | otherwise = False + +suggestRemoveRedundantImport :: ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestRemoveRedundantImport ParsedModule{pm_parsed_source = L _ HsModule{hsmodImports}} contents Diagnostic{_range=_range,..} +-- The qualified import of ‘many’ from module ‘Control.Applicative’ is redundant + | Just [_, bindings] <- matchRegexUnifySpaces _message "The( qualified)? import of ‘([^’]*)’ from module [^ ]* is redundant" + , Just (L _ impDecl) <- find (\(L (locA -> l) _) -> _start _range `isInsideSrcSpan` l && _end _range `isInsideSrcSpan` l ) hsmodImports + , Just c <- contents + , ranges <- map (rangesForBindingImport impDecl . T.unpack) (T.splitOn ", " bindings >>= trySplitIntoOriginalAndRecordField) + , ranges' <- extendAllToIncludeCommaIfPossible False (indexedByPosition $ T.unpack c) (concat ranges) + , not (null ranges') + = [( "Remove " <> bindings <> " from import" , [ TextEdit r "" | r <- ranges' ] )] + +-- File.hs:16:1: warning: +-- The import of `Data.List' is redundant +-- except perhaps to import instances from `Data.List' +-- To import instances alone, use: import Data.List() + | _message =~ ("The( qualified)? 
import of [^ ]* is redundant" :: String) + = [("Remove import", [TextEdit (extendToWholeLineIfPossible contents _range) ""])] + | otherwise = [] + where + -- In case of an unused record field import, the binding from the message will not match any import directly + -- In this case, we try if we can additionally extract a record field name + -- Example: The import of ‘B(b2)’ from module ‘ModuleB’ is redundant + trySplitIntoOriginalAndRecordField :: T.Text -> [T.Text] + trySplitIntoOriginalAndRecordField binding = + case matchRegexUnifySpaces binding "([^ ]+)\\(([^)]+)\\)" of + Just [_, fields] -> [binding, fields] + _ -> [binding] + +diagInRange :: Diagnostic -> Range -> Bool +diagInRange Diagnostic {_range = dr} r = dr `subRange` extendedRange + where + -- Ensures the range captures full lines. Makes it easier to trigger the correct + -- "remove redundant" code actions from anywhere on the offending line. + extendedRange = extendToFullLines r + +-- Note [Removing imports is preferred] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- It's good to prefer the remove imports code action because an unused import +-- is likely to be removed and less likely the warning will be disabled. +-- Therefore actions to remove a single or all redundant imports should be +-- preferred, so that the client can prioritize them higher. +caRemoveRedundantImports :: Maybe ParsedModule -> Maybe T.Text -> [Diagnostic] -> Range -> Uri -> [Command |? CodeAction] +caRemoveRedundantImports m contents allDiags contextRange uri + | Just pm <- m, + r <- join $ map (\d -> repeat d `zip` suggestRemoveRedundantImport pm contents d) allDiags, + allEdits <- [ e | (_, (_, edits)) <- r, e <- edits], + caRemoveAll <- removeAll allEdits, + ctxEdits <- [ x | x@(d, _) <- r, d `diagInRange` contextRange], + not $ null ctxEdits, + caRemoveCtx <- map (\(d, (title, tedit)) -> removeSingle title tedit d) ctxEdits + = caRemoveCtx ++ [caRemoveAll] + | otherwise = [] + where + removeSingle title tedit diagnostic = mkCA title (Just CodeActionKind_QuickFix) Nothing [diagnostic] WorkspaceEdit{..} where + _changes = Just $ M.singleton uri tedit + _documentChanges = Nothing + _changeAnnotations = Nothing + removeAll tedit = InR $ CodeAction{..} where + _changes = Just $ M.singleton uri tedit + _title = "Remove all redundant imports" + _kind = Just CodeActionKind_QuickFix + _diagnostics = Nothing + _documentChanges = Nothing + _edit = Just WorkspaceEdit{..} + -- See Note [Removing imports is preferred] + _isPreferred = Just True + _command = Nothing + _disabled = Nothing + _data_ = Nothing + _changeAnnotations = Nothing + +caRemoveInvalidExports :: Maybe ParsedModule -> Maybe T.Text -> [Diagnostic] -> Range -> Uri -> [Command |? 
CodeAction] +caRemoveInvalidExports m contents allDiags contextRange uri + | Just pm <- m, + Just txt <- contents, + txt' <- indexedByPosition $ T.unpack txt, + r <- mapMaybe (groupDiag pm) allDiags, + r' <- map (\(t,d,rs) -> (t,d,extend txt' rs)) r, + caRemoveCtx <- mapMaybe removeSingle r', + allRanges <- nubOrd $ [ range | (_,_,ranges) <- r, range <- ranges], + allRanges' <- extend txt' allRanges, + Just caRemoveAll <- removeAll allRanges', + ctxEdits <- [ x | x@(_, d, _) <- r, d `diagInRange` contextRange], + not $ null ctxEdits + = caRemoveCtx ++ [caRemoveAll] + | otherwise = [] + where + extend txt ranges = extendAllToIncludeCommaIfPossible True txt ranges + + groupDiag pm dig + | Just (title, ranges) <- suggestRemoveRedundantExport pm dig + = Just (title, dig, ranges) + | otherwise = Nothing + + removeSingle (_, _, []) = Nothing + removeSingle (title, diagnostic, ranges) = Just $ InR $ CodeAction{..} where + tedit = concatMap (\r -> [TextEdit r ""]) $ nubOrd ranges + _changes = Just $ M.singleton uri tedit + _title = title + _kind = Just CodeActionKind_QuickFix + _diagnostics = Just [diagnostic] + _documentChanges = Nothing + _edit = Just WorkspaceEdit{..} + _command = Nothing + -- See Note [Removing imports is preferred] + _isPreferred = Just True + _disabled = Nothing + _data_ = Nothing + _changeAnnotations = Nothing + removeAll [] = Nothing + removeAll ranges = Just $ InR $ CodeAction{..} where + tedit = concatMap (\r -> [TextEdit r ""]) ranges + _changes = Just $ M.singleton uri tedit + _title = "Remove all redundant exports" + _kind = Just CodeActionKind_QuickFix + _diagnostics = Nothing + _documentChanges = Nothing + _edit = Just WorkspaceEdit{..} + _command = Nothing + -- See Note [Removing imports is preferred] + _isPreferred = Just True + _disabled = Nothing + _data_ = Nothing + _changeAnnotations = Nothing + +suggestRemoveRedundantExport :: ParsedModule -> Diagnostic -> Maybe (T.Text, [Range]) +suggestRemoveRedundantExport ParsedModule{pm_parsed_source = L _ HsModule{..}} Diagnostic{..} + | msg <- unifySpaces _message + , Just export <- hsmodExports + , Just exportRange <- getLocatedRange export + , exports <- unLoc export + , Just (removeFromExport, !ranges) <- fmap (getRanges exports . notInScope) (extractNotInScopeName msg) + <|> (,[_range]) <$> matchExportItem msg + <|> (,[_range]) <$> matchDupExport msg + , subRange _range exportRange + = Just ("Remove ‘" <> removeFromExport <> "’ from export", ranges) + where + matchExportItem msg = regexSingleMatch msg "The export item ‘([^’]+)’" + matchDupExport msg = regexSingleMatch msg "Duplicate ‘([^’]+)’ in export list" + getRanges exports txt = case smallerRangesForBindingExport exports (T.unpack txt) of + [] -> (txt, [_range]) + ranges -> (txt, ranges) +suggestRemoveRedundantExport _ _ = Nothing + +suggestDeleteUnusedBinding :: ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestDeleteUnusedBinding + ParsedModule{pm_parsed_source = L _ HsModule{hsmodDecls}} + contents + Diagnostic{_range=_range,..} +-- Foo.hs:4:1: warning: [-Wunused-binds] Defined but not used: ‘f’ + | Just [name] <- matchRegexUnifySpaces _message ".*Defined but not used: ‘([^ ]+)’" + , Just indexedContent <- indexedByPosition . T.unpack <$> contents + = let edits = flip TextEdit "" <$> relatedRanges indexedContent (T.unpack name) + in ([("Delete ‘" <> name <> "’", edits) | not (null edits)]) + | otherwise = [] + where + relatedRanges indexedContent name = + concatMap (findRelatedSpans indexedContent name . 
reLoc) hsmodDecls + toRange = realSrcSpanToRange + extendForSpaces = extendToIncludePreviousNewlineIfPossible + + findRelatedSpans :: PositionIndexedString -> String -> Located (HsDecl GhcPs) -> [Range] + findRelatedSpans + indexedContent + name + (L (RealSrcSpan l _) (ValD _ (extractNameAndMatchesFromFunBind -> Just (lname, matches)))) = + case lname of + (L nLoc _name) | isTheBinding nLoc -> + let findSig (L (RealSrcSpan l _) (SigD _ sig)) = findRelatedSigSpan indexedContent name l sig + findSig _ = [] + in + extendForSpaces indexedContent (toRange l) : + concatMap (findSig . reLoc) hsmodDecls + _ -> concatMap (findRelatedSpanForMatch indexedContent name) matches + findRelatedSpans _ _ _ = [] + + extractNameAndMatchesFromFunBind + :: HsBind GhcPs + -> Maybe (Located (IdP GhcPs), [LMatch GhcPs (LHsExpr GhcPs)]) + extractNameAndMatchesFromFunBind + FunBind + { fun_id=lname + , fun_matches=MG {mg_alts=L _ matches} + } = Just (reLoc lname, matches) + extractNameAndMatchesFromFunBind _ = Nothing + + findRelatedSigSpan :: PositionIndexedString -> String -> RealSrcSpan -> Sig GhcPs -> [Range] + findRelatedSigSpan indexedContent name l sig = + let maybeSpan = findRelatedSigSpan1 name sig + in case maybeSpan of + Just (_span, True) -> pure $ extendForSpaces indexedContent $ toRange l -- a :: Int + Just (RealSrcSpan span _, False) -> pure $ toRange span -- a, b :: Int, a is unused + _ -> [] + + -- Second of the tuple means there is only one match + findRelatedSigSpan1 :: String -> Sig GhcPs -> Maybe (SrcSpan, Bool) + findRelatedSigSpan1 name (TypeSig _ lnames _) = + let maybeIdx = findIndex (\(L _ id) -> isSameName id name) lnames + in case maybeIdx of + Nothing -> Nothing + Just _ | [lname] <- lnames -> Just (getLoc lname, True) + Just idx -> + let targetLname = getLoc $ lnames !! idx + startLoc = srcSpanStart targetLname + endLoc = srcSpanEnd targetLname + startLoc' = if idx == 0 + then startLoc + else srcSpanEnd . getLoc $ lnames !! (idx - 1) + endLoc' = if idx == 0 && idx < length lnames - 1 + then srcSpanStart . getLoc $ lnames !! (idx + 1) + else endLoc + in Just (mkSrcSpan startLoc' endLoc', False) + findRelatedSigSpan1 _ _ = Nothing + + -- for where clause + findRelatedSpanForMatch + :: PositionIndexedString + -> String + -> LMatch GhcPs (LHsExpr GhcPs) + -> [Range] + findRelatedSpanForMatch + indexedContent + name + (L _ Match{m_grhss=GRHSs{grhssLocalBinds}}) = do + let emptyBag bag = +#if MIN_VERSION_ghc(9,11,0) + null bag +#else + isEmptyBag bag +#endif + go bag lsigs = + if emptyBag bag + then [] + else concatMap (findRelatedSpanForHsBind indexedContent name lsigs) bag + case grhssLocalBinds of + (HsValBinds _ (ValBinds _ bag lsigs)) -> go bag lsigs + _ -> [] + + findRelatedSpanForHsBind + :: PositionIndexedString + -> String + -> [LSig GhcPs] + -> LHsBind GhcPs + -> [Range] + findRelatedSpanForHsBind + indexedContent + name + lsigs + (L (locA -> (RealSrcSpan l _)) (extractNameAndMatchesFromFunBind -> Just (lname, matches))) = + if isTheBinding (getLoc lname) + then + let findSig (L (RealSrcSpan l _) sig) = findRelatedSigSpan indexedContent name l sig + findSig _ = [] + in extendForSpaces indexedContent (toRange l) : concatMap (findSig . 
reLoc) lsigs + else concatMap (findRelatedSpanForMatch indexedContent name) matches + findRelatedSpanForHsBind _ _ _ _ = [] + + isTheBinding :: SrcSpan -> Bool + isTheBinding span = srcSpanToRange span == Just _range + + isSameName :: IdP GhcPs -> String -> Bool + isSameName x name = T.unpack (printOutputable x) == name + +data ExportsAs = ExportName | ExportPattern | ExportFamily | ExportAll + deriving (Eq) + +getLocatedRange :: HasSrcSpan a => a -> Maybe Range +getLocatedRange = srcSpanToRange . getLoc + +suggestExportUnusedTopBinding :: Maybe T.Text -> ParsedModule -> Diagnostic -> Maybe (T.Text, TextEdit) +suggestExportUnusedTopBinding srcOpt ParsedModule{pm_parsed_source = L _ HsModule{..}} Diagnostic{..} +-- Foo.hs:4:1: warning: [-Wunused-top-binds] Defined but not used: ‘f’ +-- Foo.hs:5:1: warning: [-Wunused-top-binds] Defined but not used: type constructor or class ‘F’ +-- Foo.hs:6:1: warning: [-Wunused-top-binds] Defined but not used: data constructor ‘Bar’ + | Just source <- srcOpt + , Just [_, name] <- + matchRegexUnifySpaces + _message + ".*Defined but not used: (type constructor or class |data constructor )?‘([^ ]+)’" + , Just (exportType, _) <- + find (matchWithDiagnostic _range . snd) + . mapMaybe (\(L l b) -> if isTopLevel (locA l) then exportsAs b else Nothing) + $ hsmodDecls + , Just exports <- fmap (fmap reLoc) . reLoc <$> hsmodExports + , Just exportsEndPos <- _end <$> getLocatedRange exports + , let name' = printExport exportType name + sep = exportSep source $ map getLocatedRange <$> exports + exportName = case sep of + Nothing -> (if needsComma source exports then ", " else "") <> name' + Just s -> s <> name' + exportsEndPos' = exportsEndPos { _character = pred $ _character exportsEndPos } + insertPos = fromMaybe exportsEndPos' $ case (sep, unLoc exports) of + (Just _, exports'@(_:_)) -> fmap _end . getLocatedRange $ last exports' + _ -> Nothing + = Just ("Export ‘" <> name <> "’", TextEdit (Range insertPos insertPos) exportName) + | otherwise = Nothing + where + exportSep :: T.Text -> Located [Maybe Range] -> Maybe T.Text + exportSep src (L (RealSrcSpan _ _) xs@(_ : tl@(_ : _))) = + case mapMaybe (\(e, s) -> (,) <$> e <*> s) $ zip (fmap _end <$> xs) (fmap _start <$> tl) of + [] -> Nothing + bounds -> Just smallestSep + where + smallestSep + = snd + $ minimumBy (comparing fst) + $ map (T.length &&& id) + $ nubOrd + $ map (\(prevEnd, nextStart) -> textInRange (Range prevEnd nextStart) src) bounds + exportSep _ _ = Nothing + + -- We get the last export and the closing bracket and check for comma in that range. + needsComma :: T.Text -> Located [Located (IE GhcPs)] -> Bool + needsComma _ (L _ []) = False + needsComma source (L (RealSrcSpan l _) exports) = + let closeParen = _end $ realSrcSpanToRange l + lastExport = fmap _end . getLocatedRange $ last exports + in + case lastExport of + Just lastExport -> + not $ T.any (== ',') $ textInRange (Range lastExport closeParen) source + _ -> False + needsComma _ _ = False + + opLetter :: T.Text + opLetter = ":!#$%&*+./<=>?@\\^|-~" + + parenthesizeIfNeeds :: Bool -> T.Text -> T.Text + parenthesizeIfNeeds needsTypeKeyword x + | T.any (c ==) opLetter = (if needsTypeKeyword then "type " else "") <> "(" <> x <> ")" + | otherwise = x + where + c = T.head x + + matchWithDiagnostic :: Range -> Located (IdP GhcPs) -> Bool + matchWithDiagnostic Range{_start=l,_end=r} x = + let loc = fmap _start . 
getLocatedRange $ x + in loc >= Just l && loc <= Just r + + printExport :: ExportsAs -> T.Text -> T.Text + printExport ExportName x = parenthesizeIfNeeds False x + printExport ExportPattern x = "pattern " <> parenthesizeIfNeeds False x + printExport ExportFamily x = parenthesizeIfNeeds True x + printExport ExportAll x = parenthesizeIfNeeds True x <> "(..)" + + isTopLevel :: SrcSpan -> Bool + isTopLevel span = fmap (_character . _start) (srcSpanToRange span) == Just 0 + + exportsAs :: HsDecl GhcPs -> Maybe (ExportsAs, Located (IdP GhcPs)) + exportsAs (ValD _ FunBind {fun_id}) = Just (ExportName, reLoc fun_id) + exportsAs (ValD _ (PatSynBind _ PSB {psb_id})) = Just (ExportPattern, reLoc psb_id) + exportsAs (TyClD _ SynDecl{tcdLName}) = Just (ExportName, reLoc tcdLName) + exportsAs (TyClD _ DataDecl{tcdLName}) = Just (ExportAll, reLoc tcdLName) + exportsAs (TyClD _ ClassDecl{tcdLName}) = Just (ExportAll, reLoc tcdLName) + exportsAs (TyClD _ FamDecl{tcdFam}) = Just (ExportFamily, reLoc $ fdLName tcdFam) + exportsAs _ = Nothing + +suggestAddTypeAnnotationToSatisfyConstraints :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestAddTypeAnnotationToSatisfyConstraints sourceOpt Diagnostic{_range=_range,..} +-- File.hs:52:41: warning: +-- * Defaulting the following constraint to type ‘Integer’ +-- Num p0 arising from the literal ‘1’ +-- * In the expression: 1 +-- In an equation for ‘f’: f = 1 +-- File.hs:52:41: warning: +-- * Defaulting the following constraints to type ‘[Char]’ +-- (Show a0) +-- arising from a use of ‘traceShow’ +-- at A.hs:228:7-25 +-- (IsString a0) +-- arising from the literal ‘"debug"’ +-- at A.hs:228:17-23 +-- * In the expression: traceShow "debug" a +-- In an equation for ‘f’: f a = traceShow "debug" a +-- File.hs:52:41: warning: +-- * Defaulting the following constraints to type ‘[Char]’ +-- (Show a0) +-- arising from a use of ‘traceShow’ +-- at A.hs:255:28-43 +-- (IsString a0) +-- arising from the literal ‘"test"’ +-- at /Users/serhiip/workspace/ghcide/src/Development/IDE/Plugin/CodeAction.hs:255:38-43 +-- * In the fourth argument of ‘seq’, namely ‘(traceShow "test")’ +-- In the expression: seq "test" seq "test" (traceShow "test") +-- In an equation for ‘f’: +-- f = seq "test" seq "test" (traceShow "test") +-- + | Just [ty, lit] <- matchRegexUnifySpaces _message (pat False False True False) + <|> matchRegexUnifySpaces _message (pat False False False True) + <|> matchRegexUnifySpaces _message (pat False False False False) + + = codeEdit _range ty lit (makeAnnotatedLit ty lit) + | Just source <- sourceOpt + , Just [ty, lit, srcspan] <- matchRegexUnifySpaces _message (pat True True False False) + , range <- case [ x | (x,"") <- readSrcSpan (T.unpack srcspan)] of + [s] -> let x = realSrcSpanToRange s + in x{_end = (_end x){_character = succ (_character (_end x))}} + _ -> error "bug in srcspan parser" + = let lit' = makeAnnotatedLit ty lit; + tir = textInRange range source + in codeEdit range ty lit (T.replace lit lit' tir) + | otherwise = [] + where + makeAnnotatedLit ty lit = "(" <> lit <> " :: " <> ty <> ")" + pat multiple at inArg inExpr = T.concat [ ".*Defaulting the type variable " + , ".*to type ‘([^ ]+)’ " + , "in the following constraint" + , if multiple then "s" else " " + , ".*arising from the literal ‘(.+)’" + , if inArg then ".+In the.+argument" else "" + , if at then ".+at ([^ ]*)" else "" + , if inExpr then ".+In the expression" else "" + , ".+In the expression" + ] + codeEdit range ty lit replacement = + let title = "Add type annotation ‘" <> ty <> "’ 
to ‘" <> lit <> "’" + edits = [TextEdit range replacement] + in [( title, edits )] + +-- | GHC strips out backticks in case of infix functions as well as single quote +-- in case of quoted name when using TemplateHaskellQuotes. Which is not desired. +-- +-- For example: +-- 1. +-- +-- @ +-- File.hs:52:41: error: +-- * Variable not in scope: +-- suggestAcion :: Maybe T.Text -> Range -> Range +-- * Perhaps you meant ‘suggestAction’ (line 83) +-- File.hs:94:37: error: +-- Not in scope: ‘T.isPrfixOf’ +-- Perhaps you meant one of these: +-- ‘T.isPrefixOf’ (imported from Data.Text), +-- ‘T.isInfixOf’ (imported from Data.Text), +-- ‘T.isSuffixOf’ (imported from Data.Text) +-- Module ‘Data.Text’ does not export ‘isPrfixOf’. +-- @ +-- +-- * action: \`suggestAcion\` will be renamed to \`suggestAction\` keeping back ticks around the function +-- +-- 2. +-- +-- @ +-- import Language.Haskell.TH (Name) +-- foo :: Name +-- foo = 'bread +-- +-- File.hs:8:7: error: +-- Not in scope: ‘bread’ +-- * Perhaps you meant one of these: +-- ‘break’ (imported from Prelude), ‘read’ (imported from Prelude) +-- * In the Template Haskell quotation 'bread +-- @ +-- +-- * action: 'bread will be renamed to 'break keeping single quote on beginning of name +suggestReplaceIdentifier :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestReplaceIdentifier contents Diagnostic{_range=_range,..} + | renameSuggestions@(_:_) <- extractRenamableTerms _message + = [ ("Replace with ‘" <> name <> "’", [mkRenameEdit contents _range name]) | name <- renameSuggestions ] + | otherwise = [] + +suggestNewDefinition :: IdeOptions -> ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestNewDefinition ideOptions parsedModule contents Diagnostic {_message, _range} + | Just (name, typ) <- matchVariableNotInScope message = + newDefinitionAction ideOptions parsedModule _range name typ + | Just (name, typ) <- matchFoundHole message, + [(label, newDefinitionEdits)] <- newDefinitionAction ideOptions parsedModule _range name (Just typ) = + [(label, mkRenameEdit contents _range name : newDefinitionEdits)] + | otherwise = [] + where + message = unifySpaces _message + +newDefinitionAction :: IdeOptions -> ParsedModule -> Range -> T.Text -> Maybe T.Text -> [(T.Text, [TextEdit])] +newDefinitionAction IdeOptions {..} parsedModule Range {_start} name typ + | Range _ lastLineP : _ <- + [ realSrcSpanToRange sp + | (L (locA -> l@(RealSrcSpan sp _)) _) <- hsmodDecls, + _start `isInsideSrcSpan` l + ], + nextLineP <- Position {_line = _line lastLineP + 1, _character = 0} = + [ ( "Define " <> sig, + [TextEdit (Range nextLineP nextLineP) (T.unlines ["", sig, name <> " = _"])] + ) + ] + | otherwise = [] + where + colon = if optNewColonConvention then " : " else " :: " + sig = name <> colon <> T.dropWhileEnd isSpace (fromMaybe "_" typ) + ParsedModule {pm_parsed_source = L _ HsModule {hsmodDecls}} = parsedModule + +{- Handles two variants with different formatting + +1. Could not find module ‘Data.Cha’ + Perhaps you meant Data.Char (from base-4.12.0.0) + +2. 
Could not find module ‘Data.I’ + Perhaps you meant + Data.Ix (from base-4.14.3.0) + Data.Eq (from base-4.14.3.0) + Data.Int (from base-4.14.3.0) +-} +suggestModuleTypo :: Diagnostic -> [(T.Text, TextEdit)] +suggestModuleTypo Diagnostic{_range=_range,..} + | "Could not find module" `T.isInfixOf` _message = + case T.splitOn "Perhaps you meant" _message of + [_, stuff] -> + [ ("Replace with " <> modul, TextEdit _range modul) + | modul <- mapMaybe extractModule (T.lines stuff) + ] + _ -> [] + | otherwise = [] + where + extractModule line = case T.words line of + [modul, "(from", _] -> Just modul + _ -> Nothing + +suggestExtendImport :: ExportsMap -> ParsedSource -> Diagnostic -> [(T.Text, CodeActionKind, Rewrite)] +suggestExtendImport exportsMap (L _ HsModule {hsmodImports}) Diagnostic{_range=_range,..} + | Just [binding, mod, srcspan] <- + matchRegexUnifySpaces _message +#if MIN_VERSION_ghc(9,7,0) + "Add ‘([^’]*)’ to the import list in the import of ‘([^’]*)’ *\\(at (.*)\\)\\." +#else + "Perhaps you want to add ‘([^’]*)’ to the import list in the import of ‘([^’]*)’ *\\((.*)\\)\\." +#endif + = suggestions hsmodImports binding mod srcspan + | Just (binding, mod_srcspan) <- + matchRegExMultipleImports _message + = mod_srcspan >>= uncurry (suggestions hsmodImports binding) + | otherwise = [] + where + canUseDatacon = case extractNotInScopeName _message of + Just NotInScopeTypeConstructorOrClass{} -> False + _ -> True + + suggestions decls binding mod srcspan + | range <- case [ x | (x,"") <- readSrcSpan (T.unpack srcspan)] of + [s] -> let x = realSrcSpanToRange s + in x{_end = (_end x){_character = succ (_character (_end x))}} + _ -> error "bug in srcspan parser", + Just decl <- findImportDeclByRange decls range, + Just ident <- lookupExportMap binding mod + = [ ( "Add " <> renderImportStyle importStyle <> " to the import list of " <> mod + , quickFixImportKind' "extend" importStyle + , uncurry extendImport (unImportStyle importStyle) decl + ) + | importStyle <- NE.toList $ importStyles ident + ] + | otherwise = [] + lookupExportMap binding mod + | let em = getExportsMap exportsMap +#if MIN_VERSION_ghc(9,7,0) + match = mconcat $ lookupOccEnv_AllNameSpaces em (mkVarOrDataOcc binding) +#else + match1 = lookupOccEnv em (mkVarOrDataOcc binding) + match2 = lookupOccEnv em (mkTypeOcc binding) + , Just match <- match1 <> match2 +#endif + -- Only for the situation that data constructor name is same as type constructor name, + -- let ident with parent be in front of the one without. + , sortedMatch <- sortBy (\ident1 ident2 -> parent ident2 `compare` parent ident1) (Set.toList match) + , idents <- filter (\ident -> moduleNameText ident == mod && (canUseDatacon || not (isDatacon ident))) sortedMatch + , (ident:_) <- idents -- Ensure fallback while `idents` is empty + = Just ident + + -- fallback to using GHC suggestion even though it is not always correct + | otherwise + = Just IdentInfo + { name = mkVarOrDataOcc binding + , parent = Nothing + , identModuleName = mkModuleNameFS $ mkFastStringByteString $ T.encodeUtf8 mod} + +data HidingMode + = HideOthers [ModuleTarget] + | ToQualified + Bool + -- ^ Parenthesised? 
+ ModuleName + +data ModuleTarget + = ExistingImp (NonEmpty (LImportDecl GhcPs)) + | ImplicitPrelude [LImportDecl GhcPs] + +targetImports :: ModuleTarget -> [LImportDecl GhcPs] +targetImports (ExistingImp ne) = NE.toList ne +targetImports (ImplicitPrelude xs) = xs + +oneAndOthers :: [a] -> [(a, [a])] +oneAndOthers = go + where + go [] = [] + go (x : xs) = (x, xs) : map (second (x :)) (go xs) + +isPreludeImplicit :: DynFlags -> Bool +isPreludeImplicit = xopt Lang.ImplicitPrelude + +-- | Suggests disambiguation for ambiguous symbols. +suggestImportDisambiguation :: + DynFlags -> + Maybe T.Text -> + ParsedSource -> + T.Text -> + Diagnostic -> + [(T.Text, [Either TextEdit Rewrite])] +suggestImportDisambiguation df (Just txt) ps fileContents diag@Diagnostic {..} + | Just [ambiguous] <- + matchRegexUnifySpaces + _message + "Ambiguous occurrence ‘([^’]+)’" + , Just modules <- + map last + <$> allMatchRegexUnifySpaces _message "imported from ‘([^’]+)’" + , local <- matchRegexUnifySpaces _message "defined at .+:[0-9]+:[0-9]+" = + suggestions ambiguous modules (isJust local) + | otherwise = [] + where + L _ HsModule {hsmodImports} = ps + + locDic = + fmap (NE.fromList . DL.toList) $ + Map.fromListWith (<>) $ + map + ( \i@(L _ idecl) -> + ( T.pack $ moduleNameString $ unLoc $ ideclName idecl + , DL.singleton i + ) + ) + hsmodImports + toModuleTarget "Prelude" + | isPreludeImplicit df + = Just $ ImplicitPrelude $ + maybe [] NE.toList (Map.lookup "Prelude" locDic) + toModuleTarget mName = ExistingImp <$> Map.lookup mName locDic + parensed = + "(" `T.isPrefixOf` T.strip (textInRange _range txt) + -- > removeAllDuplicates [1, 1, 2, 3, 2] = [3] + removeAllDuplicates = map NE.head . filter ((==1) . length) . NE.group . sort + hasDuplicate xs = length xs /= length (S.fromList xs) + suggestions symbol mods local + | hasDuplicate mods = case mapM toModuleTarget (removeAllDuplicates mods) of + Just targets -> suggestionsImpl symbol (map (, []) targets) local + Nothing -> [] + | otherwise = case mapM toModuleTarget mods of + Just targets -> suggestionsImpl symbol (oneAndOthers targets) local + Nothing -> [] + suggestionsImpl symbol targetsWithRestImports local = + sortOn fst + [ ( renderUniquify mode modNameText symbol False + , disambiguateSymbol ps fileContents diag symbol mode + ) + | (modTarget, restImports) <- targetsWithRestImports + , let modName = targetModuleName modTarget + modNameText = T.pack $ moduleNameString modName + , mode <- + [ ToQualified parensed qual + | ExistingImp imps <- [modTarget] + {- HLINT ignore suggestImportDisambiguation "Use nubOrd" -} + -- TODO: The use of nub here is slow and maybe wrong for UnhelpfulLocation + -- nubOrd can't be used since SrcSpan is intentionally no Ord + , L _ qual <- nub $ mapMaybe (ideclAs . unLoc) + $ NE.toList imps + ] + ++ [ToQualified parensed modName + | any (occursUnqualified symbol . unLoc) + (targetImports modTarget) + || case modTarget of + ImplicitPrelude{} -> True + _ -> False + ] + ++ [HideOthers restImports | not (null restImports)] + ] ++ case targetsWithRestImports of + (m,ms):_ | local -> + let mode = HideOthers (m:ms) + in [( renderUniquify mode T.empty symbol True + , disambiguateSymbol ps fileContents diag symbol mode + )] + _ -> [] + + renderUniquify HideOthers {} modName symbol local = + "Use " <> (if local then "local definition" else modName) <> " for " <> symbol <> ", hiding other imports" + renderUniquify (ToQualified _ qual) _ symbol _ = + "Replace with qualified: " + <> T.pack (moduleNameString qual) + <> "." 
+ <> symbol +suggestImportDisambiguation _ _ _ _ _ = [] + +occursUnqualified :: T.Text -> ImportDecl GhcPs -> Bool +occursUnqualified symbol ImportDecl{..} + | isNothing ideclAs = Just False /= + -- I don't find this particularly comprehensible, + -- but HLint suggested me to do so... + (ideclImportList <&> \(isHiding, L _ ents) -> + let occurs = any ((symbol `symbolOccursIn`) . unLoc) ents + in (isHiding == EverythingBut) && not occurs || (isHiding == Exactly) && occurs + ) +occursUnqualified _ _ = False + +symbolOccursIn :: T.Text -> IE GhcPs -> Bool +symbolOccursIn symb = any ((== symb). printOutputable) . ieNames + +targetModuleName :: ModuleTarget -> ModuleName +targetModuleName ImplicitPrelude{} = mkModuleName "Prelude" +targetModuleName (ExistingImp (L _ ImportDecl{..} :| _)) = + unLoc ideclName + +disambiguateSymbol :: + ParsedSource -> + T.Text -> + Diagnostic -> + T.Text -> + HidingMode -> + [Either TextEdit Rewrite] +disambiguateSymbol ps fileContents Diagnostic {..} (T.unpack -> symbol) = \case + (HideOthers hiddens0) -> + [ Right $ hideSymbol symbol idecl + | ExistingImp idecls <- hiddens0 + , idecl <- NE.toList idecls + ] + ++ mconcat + [ if null imps + then maybeToList $ Left . snd <$> newImportToEdit (hideImplicitPreludeSymbol $ T.pack symbol) ps fileContents + else Right . hideSymbol symbol <$> imps + | ImplicitPrelude imps <- hiddens0 + ] + (ToQualified parensed qualMod) -> + let occSym = mkVarOcc symbol + rdr = Qual qualMod occSym + in Right <$> [ if parensed + then Rewrite (rangeToSrcSpan "" _range) $ \df -> + liftParseAST @(HsExpr GhcPs) df $ + T.unpack $ printOutputable $ + HsVar @GhcPs noExtField $ + reLocA $ L (mkGeneralSrcSpan "") rdr + else Rewrite (rangeToSrcSpan "" _range) $ \df -> + liftParseAST @RdrName df $ + T.unpack $ printOutputable $ L (mkGeneralSrcSpan "") rdr + ] + +findImportDeclByRange :: [LImportDecl GhcPs] -> Range -> Maybe (LImportDecl GhcPs) +findImportDeclByRange xs range = find (\(L (locA -> l) _)-> srcSpanToRange l == Just range) xs + +suggestFixConstructorImport :: Diagnostic -> [(T.Text, TextEdit)] +suggestFixConstructorImport Diagnostic{_range=_range,..} + -- ‘Success’ is a data constructor of ‘Result’ + -- To import it use + -- import Data.Aeson.Types( Result( Success ) ) + -- or + -- import Data.Aeson.Types( Result(..) ) (lsp-ui) + -- + -- On 9.8+ + -- + -- In the import of ‘ModuleA’: + -- an item called ‘Constructor’ + -- is exported, but it is a data constructor of + -- ‘A’. 
+ | Just [constructor, typ] <- + matchRegexUnifySpaces _message +#if MIN_VERSION_ghc(9,7,0) + "an item called ‘([^’]*)’ is exported, but it is a data constructor of ‘([^’]*)’" +#else + "‘([^’]*)’ is a data constructor of ‘([^’]*)’ To import it use" +#endif + = let fixedImport = typ <> "(" <> constructor <> ")" + in [("Fix import of " <> fixedImport, TextEdit _range fixedImport)] + | otherwise = [] + +suggestAddRecordFieldImport :: ExportsMap -> DynFlags -> ParsedSource -> T.Text -> Diagnostic -> [(T.Text, CodeActionKind, TextEdit)] +suggestAddRecordFieldImport exportsMap df ps fileContents Diagnostic {..} + | Just fieldName <- findMissingField _message + , Just (range, indent) <- newImportInsertRange ps fileContents + = let qis = qualifiedImportStyle df + suggestions = nubSortBy simpleCompareImportSuggestion (constructNewImportSuggestions exportsMap (Nothing, NotInScopeThing fieldName) Nothing qis) + in map (\(ImportSuggestion _ kind (unNewImport -> imp)) -> (imp, kind, TextEdit range (imp <> "\n" <> T.replicate indent " "))) suggestions + | otherwise = [] + where + findMissingField :: T.Text -> Maybe T.Text + findMissingField t = + let + hasfieldRegex = "((.+\\.)?HasField) \"(.+)\" ([^ ]+) ([^ ]+)" + regex = "(No instance for|Could not deduce):? (\\(" <> hasfieldRegex <> "\\)|‘" <> hasfieldRegex <> "’|" <> hasfieldRegex <> ")" + match = filter (/="") <$> matchRegexUnifySpaces t regex + in case match of + Just [_, _, _, _, fieldName, _, _] -> Just fieldName + _ -> Nothing + +-- | Suggests a constraint for a declaration for which a constraint is missing. +suggestConstraint :: DynFlags -> ParsedSource -> Diagnostic -> [(T.Text, Rewrite)] +suggestConstraint df ps diag@Diagnostic {..} + | Just missingConstraint <- findMissingConstraint _message + = let +#if MIN_VERSION_ghc(9,9,0) + parsedSource = ps +#else + parsedSource = makeDeltaAst ps +#endif + codeAction = if _message =~ ("the type signature for:" :: String) + then suggestFunctionConstraint df parsedSource + else suggestInstanceConstraint df parsedSource + in codeAction diag missingConstraint + | otherwise = [] + where + findMissingConstraint :: T.Text -> Maybe T.Text + findMissingConstraint t = + let -- The regex below can be tested at: + -- https://regex101.com/r/dfSivJ/1 + regex = "(No instance for|Could not deduce):? (\\((.+)\\)|‘(.+)’|.+) arising from" -- a use of / a do statement + + match = matchRegexUnifySpaces t regex + + -- For a string like: + -- "Could not deduce: ?a::() arising from" + -- The `matchRegexUnifySpaces` function returns two empty match + -- groups at the end of the list. It's not clear why this is the + -- case, so we select the last non-empty match group. + getCorrectGroup = last . filter (/="") + + in getCorrectGroup <$> match + +-- | Suggests a constraint for an instance declaration for which a constraint is missing. +suggestInstanceConstraint :: DynFlags -> ParsedSource -> Diagnostic -> T.Text -> [(T.Text, Rewrite)] + +suggestInstanceConstraint df (L _ HsModule {hsmodDecls}) Diagnostic {..} missingConstraint + | Just instHead <- instanceHead + = [(actionTitle missingConstraint , appendConstraint (T.unpack missingConstraint) instHead)] + | otherwise = [] + where + instanceHead + -- Suggests a constraint for an instance declaration with no existing constraints. 
+ -- • No instance for (Eq a) arising from a use of ‘==’ + -- Possible fix: add (Eq a) to the context of the instance declaration + -- • In the expression: x == y + -- In an equation for ‘==’: (Wrap x) == (Wrap y) = x == y + -- In the instance declaration for ‘Eq (Wrap a)’ + | Just [instanceDeclaration] <- matchRegexUnifySpaces _message "In the instance declaration for ‘([^`]*)’" + , Just instHead <- findInstanceHead df (T.unpack instanceDeclaration) hsmodDecls + = Just instHead + -- Suggests a constraint for an instance declaration with one or more existing constraints. + -- • Could not deduce (Eq b) arising from a use of ‘==’ + -- from the context: Eq a + -- bound by the instance declaration at /path/to/Main.hs:7:10-32 + -- Possible fix: add (Eq b) to the context of the instance declaration + -- • In the second argument of ‘(&&)’, namely ‘x' == y'’ + -- In the expression: x == y && x' == y' + -- In an equation for ‘==’: + -- (Pair x x') == (Pair y y') = x == y && x' == y' + | Just [instanceLineStr, constraintFirstCharStr] + <- matchRegexUnifySpaces _message "bound by the instance declaration at .+:([0-9]+):([0-9]+)" + , Just (L _ (InstD _ (ClsInstD _ ClsInstDecl {cid_poly_ty = (unLoc -> HsSig{sig_body = hsib_body})}))) + <- findDeclContainingLoc (Position (readPositionNumber instanceLineStr) (readPositionNumber constraintFirstCharStr)) hsmodDecls + = Just hsib_body + | otherwise + = Nothing + + readPositionNumber :: T.Text -> UInt + readPositionNumber = T.unpack >>> read @Integer >>> fromIntegral + + actionTitle :: T.Text -> T.Text + actionTitle constraint = "Add `" <> constraint + <> "` to the context of the instance declaration" + +suggestImplicitParameter :: + ParsedSource -> + Diagnostic -> + [(T.Text, Rewrite)] +suggestImplicitParameter (L _ HsModule {hsmodDecls}) Diagnostic {_message, _range} + | Just [implicitT] <- matchRegexUnifySpaces _message "Unbound implicit parameter \\(([^:]+::.+)\\) arising", + Just (L _ (ValD _ FunBind {fun_id = L _ funId})) <- findDeclContainingLoc (_start _range) hsmodDecls, + Just (TypeSig _ _ HsWC {hswc_body = (unLoc -> HsSig {sig_body = hsib_body})}) + <- findSigOfDecl (== funId) hsmodDecls + = + [( "Add " <> implicitT <> " to the context of " <> T.pack (printRdrName funId) + , appendConstraint (T.unpack implicitT) hsib_body)] + | otherwise = [] + +findTypeSignatureName :: T.Text -> Maybe T.Text +findTypeSignatureName t = matchRegexUnifySpaces t "([^ ]+) :: " >>= listToMaybe + +-- | Suggests a constraint for a type signature with any number of existing constraints. +suggestFunctionConstraint :: DynFlags -> ParsedSource -> Diagnostic -> T.Text -> [(T.Text, Rewrite)] + +suggestFunctionConstraint df (L _ HsModule {hsmodDecls}) Diagnostic {..} missingConstraint +-- • No instance for (Eq a) arising from a use of ‘==’ +-- Possible fix: +-- add (Eq a) to the context of +-- the type signature for: +-- eq :: forall a. a -> a -> Bool +-- • In the expression: x == y +-- In an equation for ‘eq’: eq x y = x == y + +-- • Could not deduce (Eq b) arising from a use of ‘==’ +-- from the context: Eq a +-- bound by the type signature for: +-- eq :: forall a b. Eq a => Pair a b -> Pair a b -> Bool +-- at Main.hs:5:1-42 +-- Possible fix: +-- add (Eq b) to the context of +-- the type signature for: +-- eq :: forall a b. 
Eq a => Pair a b -> Pair a b -> Bool
+-- • In the second argument of ‘(&&)’, namely ‘y == y'’
+--   In the expression: x == x' && y == y'
+--   In an equation for ‘eq’:
+--       eq (Pair x y) (Pair x' y') = x == x' && y == y'
+  | Just typeSignatureName <- findTypeSignatureName _message
+  , Just (TypeSig _ _ HsWC{hswc_body = (unLoc -> HsSig {sig_body = sig})})
+    <- findSigOfDecl ((T.unpack typeSignatureName ==) . showSDoc df . ppr) hsmodDecls
+  , title <- actionTitle missingConstraint typeSignatureName
+  = [(title, appendConstraint (T.unpack missingConstraint) sig)]
+  | otherwise
+  = []
+    where
+      actionTitle :: T.Text -> T.Text -> T.Text
+      actionTitle constraint typeSignatureName = "Add `" <> constraint
+        <> "` to the context of the type signature for `" <> typeSignatureName <> "`"
+
+-- | Suggests the removal of a redundant constraint for a type signature.
+removeRedundantConstraints :: DynFlags -> ParsedSource -> Diagnostic -> [(T.Text, Rewrite)]
+#if MIN_VERSION_ghc(9,9,0)
+removeRedundantConstraints df (L _ HsModule {hsmodDecls}) Diagnostic{..}
+#else
+removeRedundantConstraints df (makeDeltaAst -> L _ HsModule {hsmodDecls}) Diagnostic{..}
+#endif
+-- • Redundant constraint: Eq a
+-- • In the type signature for:
+--      foo :: forall a. Eq a => a -> a
+-- • Redundant constraints: (Monoid a, Show a)
+-- • In the type signature for:
+--      foo :: forall a. (Num a, Monoid a, Eq a, Show a) => a -> Bool
+  -- Account for both "Redundant constraint" and "Redundant constraints".
+  | "Redundant constraint" `T.isInfixOf` _message
+  , Just typeSignatureName <- findTypeSignatureName _message
+  , Just (TypeSig _ _ HsWC{hswc_body = (unLoc -> HsSig {sig_body = sig})})
+    <- fmap(traceAst "redundantConstraint") $ findSigOfDeclRanged _range hsmodDecls
+  , Just redundantConstraintList <- findRedundantConstraints _message
+  , rewrite <- removeConstraint (toRemove df redundantConstraintList) sig
+  = [(actionTitle redundantConstraintList typeSignatureName, rewrite)]
+  | otherwise = []
+  where
+    toRemove df list a = T.pack (showSDoc df (ppr a)) `elem` list
+
+    parseConstraints :: T.Text -> [T.Text]
+    parseConstraints t = t
+      & (T.strip >>> stripConstraintsParens >>> T.splitOn ",")
+      <&> T.strip
+
+    stripConstraintsParens :: T.Text -> T.Text
+    stripConstraintsParens constraints =
+      if "(" `T.isPrefixOf` constraints
+        then constraints & T.drop 1 & T.dropEnd 1 & T.strip
+        else constraints
+
+{-
+9.2: "message": "/private/var/folders/4m/d38fhm3936x_gy_9883zbq8h0000gn/T/extra-dir-53173393699/Testing.hs:4:1: warning:
+    • Redundant constraints: (Eq a, Show a)
+    • In the type signature for:
+         foo :: forall a. (Eq a, Show a) => a -> Bool",
+
+9.0: "message": "• Redundant constraints: (Eq a, Show a)
+                 • In the type signature for:
+                      foo :: forall a. (Eq a, Show a) => a -> Bool",
+-}
+    findRedundantConstraints :: T.Text -> Maybe [T.Text]
+    findRedundantConstraints t = t
+      & T.lines
+      -- In <9.2 it's the first line, in 9.2 it's the second line
+      & take 2
+      & mapMaybe ((`matchRegexUnifySpaces` "Redundant constraints?: (.+)") . 
T.strip) + & listToMaybe + >>= listToMaybe + <&> parseConstraints + + formatConstraints :: [T.Text] -> T.Text + formatConstraints [] = "" + formatConstraints [constraint] = constraint + formatConstraints constraintList = constraintList + & T.intercalate ", " + & \cs -> "(" <> cs <> ")" + + actionTitle :: [T.Text] -> T.Text -> T.Text + actionTitle constraintList typeSignatureName = + "Remove redundant constraint" <> (if length constraintList == 1 then "" else "s") <> " `" + <> formatConstraints constraintList + <> "` from the context of the type signature for `" <> typeSignatureName <> "`" + +------------------------------------------------------------------------------------------------- + +suggestNewOrExtendImportForClassMethod :: ExportsMap -> ParsedSource -> T.Text -> Diagnostic -> [(T.Text, CodeActionKind, [Either TextEdit Rewrite])] +suggestNewOrExtendImportForClassMethod packageExportsMap ps fileContents Diagnostic {_message} + | Just [methodName, className] <- + matchRegexUnifySpaces + _message + "‘([^’]*)’ is not a \\(visible\\) method of class ‘([^’]*)’", + idents <- + maybe [] (Set.toList . Set.filter (\x -> fmap occNameText (parent x) == Just className)) $ + lookupOccEnv (getExportsMap packageExportsMap) (mkVarOrDataOcc methodName) = + mconcat $ suggest <$> idents + | otherwise = [] + where + suggest identInfo + | importStyle <- NE.toList $ importStyles identInfo, + mImportDecl <- findImportDeclByModuleName (hsmodImports . unLoc $ ps) (T.unpack moduleText) = + case mImportDecl of + -- extend + Just decl -> + [ ( "Add " <> renderImportStyle style <> " to the import list of " <> moduleText, + quickFixImportKind' "extend" style, + [Right $ uncurry extendImport (unImportStyle style) decl] + ) + | style <- importStyle + ] + -- new + _ + | Just (range, indent) <- newImportInsertRange ps fileContents + -> + (\(kind, unNewImport -> x) -> (x, kind, [Left $ TextEdit range (x <> "\n" <> T.replicate indent " ")])) <$> + [ (quickFixImportKind' "new" style, newUnqualImport moduleText rendered False) + | style <- importStyle, + let rendered = renderImportStyle style + ] + <> [(quickFixImportKind "new.all", newImportAll moduleText)] + | otherwise -> [] + where moduleText = moduleNameText identInfo + +suggestNewImport :: DynFlags -> ExportsMap -> ParsedSource -> T.Text -> Diagnostic -> [(T.Text, CodeActionKind, TextEdit)] +suggestNewImport df packageExportsMap ps fileContents Diagnostic{..} + | msg <- unifySpaces _message + , Just thingMissing <- extractNotInScopeName msg + , qual <- extractQualifiedModuleName msg + , qual' <- + extractDoesNotExportModuleName msg + >>= (findImportDeclByModuleName hsmodImports . T.unpack) + >>= ideclAs . unLoc + <&> T.pack . moduleNameString . 
unLoc + , Just (range, indent) <- newImportInsertRange ps fileContents + , extendImportSuggestions <- matchRegexUnifySpaces msg +#if MIN_VERSION_ghc(9,7,0) + "Add ‘[^’]*’ to the import list in the import of ‘([^’]*)’" +#else + "Perhaps you want to add ‘[^’]*’ to the import list in the import of ‘([^’]*)’" +#endif + = let qis = qualifiedImportStyle df + suggestions = nubSortBy simpleCompareImportSuggestion + (constructNewImportSuggestions packageExportsMap (qual <|> qual', thingMissing) extendImportSuggestions qis) in + map (\(ImportSuggestion _ kind (unNewImport -> imp)) -> (imp, kind, TextEdit range (imp <> "\n" <> T.replicate indent " "))) suggestions + where + L _ HsModule {..} = ps +suggestNewImport _ _ _ _ _ = [] + +-- | A Backward compatible implementation of `lookupOccEnv_AllNameSpaces` for +-- GHC <=9.6 +-- +-- It looks for a symbol name in all known namespaces, including types, +-- variables, and fieldnames. +-- +-- Note that on GHC >= 9.8, the record selectors are not in the `mkVarOrDataOcc` +-- anymore, but are in a custom namespace, see +-- https://gitlab.haskell.org/ghc/ghc/-/wikis/migration/9.8#new-namespace-for-record-fields, +-- hence we need to use this "AllNamespaces" implementation, otherwise we'll +-- miss them. +lookupOccEnvAllNamespaces :: ExportsMap -> T.Text -> [IdentInfo] +#if MIN_VERSION_ghc(9,7,0) +lookupOccEnvAllNamespaces exportsMap name = Set.toList $ mconcat (lookupOccEnv_AllNameSpaces (getExportsMap exportsMap) (mkTypeOcc name)) +#else +lookupOccEnvAllNamespaces exportsMap name = maybe [] Set.toList $ + lookupOccEnv (getExportsMap exportsMap) (mkVarOrDataOcc name) + <> lookupOccEnv (getExportsMap exportsMap) (mkTypeOcc name) -- look up the modified unknown name in the export map +#endif + + +constructNewImportSuggestions + :: ExportsMap -> (Maybe T.Text, NotInScope) -> Maybe [T.Text] -> QualifiedImportStyle -> [ImportSuggestion] +constructNewImportSuggestions exportsMap (qual, thingMissing) notTheseModules qis = nubOrdBy simpleCompareImportSuggestion + [ suggestion + | Just name <- [T.stripPrefix (maybe "" (<> ".") qual) $ notInScope thingMissing] -- strip away qualified module names from the unknown name + + , identInfo <- lookupOccEnvAllNamespaces exportsMap name -- look up the modified unknown name in the export map + , canUseIdent thingMissing identInfo -- check if the identifier information retrieved can be used + , moduleNameText identInfo `notElem` fromMaybe [] notTheseModules -- check if the module of the identifier is allowed + , suggestion <- renderNewImport identInfo -- creates a list of import suggestions for the retrieved identifier information + ] + where + renderNewImport :: IdentInfo -> [ImportSuggestion] + renderNewImport identInfo + | Just q <- qual + = [ImportSuggestion importanceScore (quickFixImportKind "new.qualified") (newQualImport m q qis)] + | otherwise + = [ImportSuggestion importanceScore (quickFixImportKind' "new" importStyle) (newUnqualImport m (renderImportStyle importStyle) False) + | importStyle <- NE.toList $ importStyles identInfo] ++ + [ImportSuggestion importanceScore (quickFixImportKind "new.all") (newImportAll m)] + where + -- The importance score takes 2 metrics into account. The first being the similarity using + -- the Text.Fuzzy.Parallel.match function. The second is a factor of the relation between + -- the modules prefix import suggestion and the unknown identifier names. 
+        importanceScore
+          | Just q <- qual
+          = let
+              similarityScore = fromIntegral $ unpackMatchScore (TFP.match (T.toLower q) (T.toLower m)) :: Double
+              (maxLength, minLength) = case (T.length q, T.length m) of
+                (la, lb)
+                  | la >= lb -> (fromIntegral la, fromIntegral lb)
+                  | otherwise -> (fromIntegral lb, fromIntegral la)
+              lengthPenaltyFactor = 100 * minLength / maxLength
+            in max 0 (floor (similarityScore * lengthPenaltyFactor))
+          | otherwise
+          = 0
+          where
+            unpackMatchScore pScore
+              | Just score <- pScore = score
+              | otherwise = 0
+        m = moduleNameText identInfo
+
+data ImportSuggestion = ImportSuggestion !Int !CodeActionKind !NewImport
+  deriving ( Eq )
+
+-- | Implements a lexicographic order for import suggestions that ignores the code action kind.
+-- First it compares the importance score in DESCENDING order.
+-- If the scores are equal it compares the import names in alphabetical order.
+--
+-- TODO: this should be a proper Ord instance, but CodeActionKind does not implement Ord,
+-- so an instance defined this way would be unlawful.
+simpleCompareImportSuggestion :: ImportSuggestion -> ImportSuggestion -> Ordering
+simpleCompareImportSuggestion (ImportSuggestion s1 _ i1) (ImportSuggestion s2 _ i2)
+  = compare s2 s1 <> compare i1 i2
+
+newtype NewImport = NewImport {unNewImport :: T.Text}
+  deriving (Show, Eq, Ord)
+
+newImportToEdit :: NewImport -> ParsedSource -> T.Text -> Maybe (T.Text, TextEdit)
+newImportToEdit (unNewImport -> imp) ps fileContents
+  | Just (range, indent) <- newImportInsertRange ps fileContents
+  = Just (imp, TextEdit range (imp <> "\n" <> T.replicate indent " "))
+  | otherwise = Nothing
+
+-- | Finds the next valid position for inserting a new import declaration
+-- * If the file already has existing imports, the new import is inserted under the last of these;
+--   it is assumed that the existing last import declaration is in a valid position
+-- * If the file does not have existing imports, but has a (module ... where) declaration,
+--   the new import will be inserted directly under this declaration (accounting for explicit exports)
+-- * If the file has neither existing imports nor a module declaration,
+--   the import will be inserted at line zero if there are no pragmas,
+--   otherwise it is inserted one line after the last file-header pragma
+newImportInsertRange :: ParsedSource -> T.Text -> Maybe (Range, Int)
+newImportInsertRange ps fileContents
+  | Just ((l, c), col) <- case hsmodImports of
+      -- When there are no existing imports, we only care about the line number, so we set column and indent to zero.
+      [] -> (\line -> ((line, 0), 0)) <$> findPositionNoImports ps fileContents
+      _ -> findPositionFromImports hsmodImports last
+  , let insertPos = Position (fromIntegral l) (fromIntegral c)
+    = Just (Range insertPos insertPos, col)
+  | otherwise = Nothing
+  where
+    L _ HsModule {..} = ps
+
+-- | Find the position for a new import when there isn't an existing one.
+-- * If there is a module declaration, a new import should be inserted under the module declaration (including exports list)
+-- * Otherwise, a new import should be inserted after any file-header pragma.
+findPositionNoImports :: ParsedSource -> T.Text -> Maybe Int
+findPositionNoImports ps fileContents =
+  maybe (Just (findNextPragmaPosition fileContents)) (findPositionAfterModuleName ps) hsmodName
+  where
+    L _ HsModule {..} = ps
+
+-- | find line number right after module ... 
where +findPositionAfterModuleName :: ParsedSource + -> LocatedA ModuleName + -> Maybe Int +findPositionAfterModuleName ps _hsmodName' = do + -- Note that 'where' keyword and comments are not part of the AST. They belongs to + -- the exact-print information. To locate it, we need to find the previous AST node, + -- calculate the gap between it and 'where', then add them up to produce the absolute + -- position of 'where'. + + lineOffset <- whereKeywordLineOffset -- Calculate the gap before 'where' keyword. +#if MIN_VERSION_ghc(9,9,0) + pure lineOffset +#else + -- The last AST node before 'where' keyword. Might be module name or export list. + let prevSrcSpan = maybe (getLoc _hsmodName') getLoc hsmodExports + case prevSrcSpan of + UnhelpfulSpan _ -> Nothing + (RealSrcSpan prevSrcSpan' _) -> + -- add them up produce the absolute location of 'where' keyword + Just $ srcLocLine (realSrcSpanEnd prevSrcSpan') + lineOffset +#endif + where + L _ HsModule {..} = ps + + -- The relative position of 'where' keyword (in lines, relative to the previous AST node). + -- The exact-print API changed a lot in ghc-9.2, so we need to handle it separately for different compiler versions. + whereKeywordLineOffset :: Maybe Int + whereKeywordLineOffset = case hsmodAnn hsmodExt of + EpAnn _ annsModule _ -> do + -- Find the first 'where' +#if MIN_VERSION_ghc(9,11,0) + whereLocation <- filterWhere $ am_where annsModule +#else + whereLocation <- listToMaybe . mapMaybe filterWhere $ am_main annsModule +#endif + epaLocationToLine whereLocation +#if !MIN_VERSION_ghc(9,9,0) + EpAnnNotUsed -> Nothing +#endif +#if MIN_VERSION_ghc(9,11,0) + filterWhere (EpTok loc) = Just loc + filterWhere _ = Nothing +#else + filterWhere (AddEpAnn AnnWhere loc) = Just loc + filterWhere _ = Nothing +#endif + + epaLocationToLine :: EpaLocation -> Maybe Int +#if MIN_VERSION_ghc(9,9,0) + epaLocationToLine (EpaSpan sp) + = fmap (srcLocLine . realSrcSpanEnd) $ srcSpanToRealSrcSpan sp +#else + epaLocationToLine (EpaSpan sp _) + = Just . srcLocLine . realSrcSpanEnd $ sp +#endif +#if MIN_VERSION_ghc(9,11,0) + epaLocationToLine (EpaDelta _ (SameLine _) priorComments) = Just $ sumCommentsOffset priorComments + -- 'priorComments' contains the comments right before the current EpaLocation + -- Summing line offset of priorComments is necessary, as 'line' is the gap between the last comment and + -- the current AST node + epaLocationToLine (EpaDelta _ (DifferentLine line _) priorComments) = Just (line + sumCommentsOffset priorComments) +#else + epaLocationToLine (EpaDelta (SameLine _) priorComments) = Just $ sumCommentsOffset priorComments + -- 'priorComments' contains the comments right before the current EpaLocation + -- Summing line offset of priorComments is necessary, as 'line' is the gap between the last comment and + -- the current AST node + epaLocationToLine (EpaDelta (DifferentLine line _) priorComments) = Just (line + sumCommentsOffset priorComments) +#endif + sumCommentsOffset :: [LEpaComment] -> Int +#if MIN_VERSION_ghc(9,9,0) + sumCommentsOffset = sum . fmap (\(L anchor _) -> anchorOpLine anchor) +#else + sumCommentsOffset = sum . 
fmap (\(L anchor _) -> anchorOpLine (anchor_op anchor)) +#endif + +#if MIN_VERSION_ghc(9,11,0) + anchorOpLine :: EpaLocation' a -> Int + anchorOpLine EpaSpan{} = 0 + anchorOpLine (EpaDelta _ (SameLine _) _) = 0 + anchorOpLine (EpaDelta _ (DifferentLine line _) _) = line +#elif MIN_VERSION_ghc(9,9,0) + anchorOpLine :: EpaLocation' a -> Int + anchorOpLine EpaSpan{} = 0 + anchorOpLine (EpaDelta (SameLine _) _) = 0 + anchorOpLine (EpaDelta (DifferentLine line _) _) = line +#else + anchorOpLine :: AnchorOperation -> Int + anchorOpLine UnchangedAnchor = 0 + anchorOpLine (MovedAnchor (SameLine _)) = 0 + anchorOpLine (MovedAnchor (DifferentLine line _)) = line +#endif + +findPositionFromImports :: HasSrcSpan a => t -> (t -> a) -> Maybe ((Int, Int), Int) +findPositionFromImports hsField f = case getLoc (f hsField) of + RealSrcSpan s _ -> + let col = calcCol s + in Just ((srcLocLine (realSrcSpanEnd s), col), col) + _ -> Nothing + where calcCol s = srcLocCol (realSrcSpanStart s) - 1 + +-- | Find the position one after the last file-header pragma +-- Defaults to zero if there are no pragmas in file +findNextPragmaPosition :: T.Text -> Int +findNextPragmaPosition contents = lineNumber + where + lineNumber = afterLangPragma . afterOptsGhc $ afterShebang + afterLangPragma = afterPragma "LANGUAGE" contents' + afterOptsGhc = afterPragma "OPTIONS_GHC" contents' + afterShebang = lastLineWithPrefix (T.isPrefixOf "#!") contents' 0 + contents' = T.lines contents + +afterPragma :: T.Text -> [T.Text] -> Int -> Int +afterPragma name contents lineNum = lastLineWithPrefix (checkPragma name) contents lineNum + +lastLineWithPrefix :: (T.Text -> Bool) -> [T.Text] -> Int -> Int +lastLineWithPrefix p contents lineNum = max lineNum next + where + next = maybe lineNum succ $ listToMaybe . 
reverse $ findIndices p contents + +checkPragma :: T.Text -> T.Text -> Bool +checkPragma name = check + where + check l = isPragma l && getName l == name + getName l = T.take (T.length name) $ T.dropWhile isSpace $ T.drop 3 l + isPragma = T.isPrefixOf "{-#" + +-- | Construct an import declaration with at most one symbol +newImport + :: T.Text -- ^ module name + -> Maybe T.Text -- ^ the symbol + -> Maybe (T.Text, QualifiedImportStyle) -- ^ qualified name and style + -> Bool -- ^ the symbol is to be imported or hidden + -> NewImport +newImport modName mSymbol mQualNameStyle hiding = NewImport impStmt + where + symImp + | Just symbol <- mSymbol + , symOcc <- mkVarOcc $ T.unpack symbol = + " (" <> printOutputable (parenSymOcc symOcc $ ppr symOcc) <> ")" + | otherwise = "" + impStmt = + "import " + <> qualifiedModName (snd <$> mQualNameStyle) + <> (if hiding then " hiding" else "") + <> symImp + <> maybe "" (\qual -> if modName == qual then "" else " as " <> qual) mQual + mQual = fst <$> mQualNameStyle + qualifiedModName Nothing = modName + qualifiedModName (Just QualifiedImportPrefix) = "qualified " <> modName + qualifiedModName (Just QualifiedImportPostfix) = modName <> " qualified" + + +newQualImport :: T.Text -> T.Text -> QualifiedImportStyle -> NewImport +newQualImport modName qual qis = newImport modName Nothing (Just (qual, qis)) False + +newUnqualImport :: T.Text -> T.Text -> Bool -> NewImport +newUnqualImport modName symbol = newImport modName (Just symbol) Nothing + +newImportAll :: T.Text -> NewImport +newImportAll modName = newImport modName Nothing Nothing False + +hideImplicitPreludeSymbol :: T.Text -> NewImport +hideImplicitPreludeSymbol symbol = newUnqualImport "Prelude" symbol True + +canUseIdent :: NotInScope -> IdentInfo -> Bool +canUseIdent NotInScopeDataConstructor{} = isDatacon +canUseIdent NotInScopeTypeConstructorOrClass{} = not . isDatacon +canUseIdent _ = const True + +data NotInScope + = NotInScopeDataConstructor T.Text + | NotInScopeTypeConstructorOrClass T.Text + | NotInScopeThing T.Text + deriving (Show, Eq) + +notInScope :: NotInScope -> T.Text +notInScope (NotInScopeDataConstructor t) = t +notInScope (NotInScopeTypeConstructorOrClass t) = t +notInScope (NotInScopeThing t) = t + +extractNotInScopeName :: T.Text -> Maybe NotInScope +extractNotInScopeName x + | Just [name] <- matchRegexUnifySpaces x "Data constructor not in scope: ([^ ]+)" + = Just $ NotInScopeDataConstructor name + | Just [name] <- matchRegexUnifySpaces x "Not in scope: data constructor [^‘]*‘([^’]*)’" + = Just $ NotInScopeDataConstructor name + | Just [name] <- matchRegexUnifySpaces x "ot in scope: type constructor or class [^‘]*‘([^’]*)’" + = Just $ NotInScopeTypeConstructorOrClass name + | Just [name] <- matchRegexUnifySpaces x "The data constructors of ‘([^ ]+)’ are not all in scope" + = Just $ NotInScopeDataConstructor name + | Just [name] <- matchRegexUnifySpaces x "of newtype ‘([^’]*)’ is not in scope" + = Just $ NotInScopeThing name + -- Match for HasField "foo" Bar String in the context where, e.g. x.foo is + -- used, and x :: Bar. + -- + -- This usually mean that the field is not in scope and the correct fix is to + -- import (Bar(foo)) or (Bar(..)). + -- + -- However, it is more reliable to match for the type name instead of the field + -- name, and most of the time you'll want to import the complete type with all + -- their fields instead of the specific field. 
+  --
+  -- The regex is convoluted because it accounts for:
+  --
+  -- - Qualified (or not) `HasField`
+  -- - The type `Bar` is always qualified. If it is unqualified, it means that the
+  --   parent module is already imported, and in this context GHC's error message
+  --   already provides a usable hint. However, this regex accounts for both the
+  --   qualified and the unqualified form; it does not cost much and should be more
+  --   robust if the hint changes in the future
+  -- - The next regex accounts for polymorphic types, which appear as `HasField
+  --   "foo" (Bar Int)...`, note the parentheses
+  | Just [_module, name] <- matchRegexUnifySpaces x "No instance for [‘(].*HasField \"[^\"]+\" ([^ (.]+\\.)*([^ (.]+).*[’)]"
+  = Just $ NotInScopeThing name
+  | Just [_module, name] <- matchRegexUnifySpaces x "No instance for [‘(].*HasField \"[^\"]+\" \\(([^ .]+\\.)*([^ .]+)[^)]*\\).*[’)]"
+  = Just $ NotInScopeThing name
+  -- The order of the "Not in scope" matchers is important: for example, some of the
+  -- matchers may catch the "record" value instead of the value matched later.
+  | Just [name] <- matchRegexUnifySpaces x "Not in scope: record field ‘([^’]*)’"
+  = Just $ NotInScopeThing name
+  | Just [name] <- matchRegexUnifySpaces x "ot in scope: \\(([^‘ ]+)\\)"
+  = Just $ NotInScopeThing name
+  | Just [name] <- matchRegexUnifySpaces x "ot in scope: ([^‘ ]+)"
+  = Just $ NotInScopeThing name
+  | Just [name] <- matchRegexUnifySpaces x "ot in scope:[^‘]*‘([^’]*)’"
+  = Just $ NotInScopeThing name
+  | otherwise
+  = Nothing
+
+extractQualifiedModuleName :: T.Text -> Maybe T.Text
+extractQualifiedModuleName x
+  | Just [m] <- matchRegexUnifySpaces x "module named [^‘]*‘([^’]*)’"
+  = Just m
+  | otherwise
+  = Nothing
+
+-- | If a module has been imported qualified, and we want to use the same qualifier for other modules
+-- which haven't been imported, 'extractQualifiedModuleName' won't work. Thus we need to extract the qualifier
+-- from the imported one.
+--
+-- For example, we write f = T.putStrLn, where putStrLn comes from Data.Text.IO, with the following import(s):
+-- 1.
+-- import qualified Data.Text as T
+--
+-- Module ‘Data.Text’ does not export ‘putStrLn’.
+--
+-- 2.
+-- import qualified Data.Text as T
+-- import qualified Data.Functor as T
+--
+-- Neither ‘Data.Functor’ nor ‘Data.Text’ exports ‘putStrLn’.
+--
+-- 3.
+-- import qualified Data.Text as T
+-- import qualified Data.Functor as T
+-- import qualified Data.Function as T
+--
+-- Neither ‘Data.Function’,
+--         ‘Data.Functor’ nor ‘Data.Text’ exports ‘putStrLn’. 
+extractDoesNotExportModuleName :: T.Text -> Maybe T.Text +extractDoesNotExportModuleName x + | Just [m] <- case ghcVersion of + GHC912 -> matchRegexUnifySpaces x "The module ‘([^’]*)’ does not export" + <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export" + _ -> matchRegexUnifySpaces x "the module ‘([^’]*)’ does not export" + <|> matchRegexUnifySpaces x "nor ‘([^’]*)’ export" + = Just m + | otherwise + = Nothing +------------------------------------------------------------------------------------------------- + + +mkRenameEdit :: Maybe T.Text -> Range -> T.Text -> TextEdit +mkRenameEdit contents range name + | maybeIsInfixFunction == Just True = TextEdit range ("`" <> name <> "`") + | maybeIsTemplateFunction == Just True = TextEdit range ("'" <> name) + | otherwise = TextEdit range name + where + maybeIsInfixFunction = do + curr <- textInRange range <$> contents + pure $ "`" `T.isPrefixOf` curr && "`" `T.isSuffixOf` curr + maybeIsTemplateFunction = do + curr <- textInRange range <$> contents + pure $ "'" `T.isPrefixOf` curr + +extractRenamableTerms :: T.Text -> [T.Text] +extractRenamableTerms msg + -- Account for both "Variable not in scope" and "Not in scope" + | "ot in scope:" `T.isInfixOf` msg = extractSuggestions msg + | otherwise = [] + where + extractSuggestions = map getEnclosed + . concatMap singleSuggestions + . filter isKnownSymbol + . T.lines + singleSuggestions = T.splitOn "), " -- Each suggestion is comma delimited + isKnownSymbol t = " (imported from" `T.isInfixOf` t || " (line " `T.isInfixOf` t + getEnclosed = T.dropWhile (== '‘') + . T.dropWhileEnd (== '’') + . T.dropAround (\c -> c /= '‘' && c /= '’') + +-- | If a range takes up a whole line (it begins at the start of the line and there's only whitespace +-- between the end of the range and the next newline), extend the range to take up the whole line. +extendToWholeLineIfPossible :: Maybe T.Text -> Range -> Range +extendToWholeLineIfPossible contents range@Range{..} = + let newlineAfter = maybe False (T.isPrefixOf "\n" . T.dropWhile (\x -> isSpace x && x /= '\n') . snd . splitTextAtPosition _end) contents + extend = newlineAfter && _character _start == 0 -- takes up an entire line, so remove the whole line + in if extend then Range _start (Position (_line _end + 1) 0) else range + +splitTextAtPosition :: Position -> T.Text -> (T.Text, T.Text) +splitTextAtPosition (Position (fromIntegral -> row) (fromIntegral -> col)) x + | (preRow, mid:postRow) <- splitAt row $ T.splitOn "\n" x + , (preCol, postCol) <- T.splitAt col mid + = (T.intercalate "\n" $ preRow ++ [preCol], T.intercalate "\n" $ postCol : postRow) + | otherwise = (x, T.empty) + +-- | Returns [start .. 
end[ +textInRange :: Range -> T.Text -> T.Text +textInRange (Range (Position (fromIntegral -> startRow) (fromIntegral -> startCol)) (Position (fromIntegral -> endRow) (fromIntegral -> endCol))) text = + case compare startRow endRow of + LT -> + let (linesInRangeBeforeEndLine, endLineAndFurtherLines) = splitAt (endRow - startRow) linesBeginningWithStartLine + (textInRangeInFirstLine, linesBetween) = case linesInRangeBeforeEndLine of + [] -> ("", []) + firstLine:linesInBetween -> (T.drop startCol firstLine, linesInBetween) + maybeTextInRangeInEndLine = T.take endCol <$> listToMaybe endLineAndFurtherLines + in T.intercalate "\n" (textInRangeInFirstLine : linesBetween ++ maybeToList maybeTextInRangeInEndLine) + EQ -> + let line = fromMaybe "" (listToMaybe linesBeginningWithStartLine) + in T.take (endCol - startCol) (T.drop startCol line) + GT -> "" + where + linesBeginningWithStartLine = drop startRow (T.splitOn "\n" text) + +-- | Returns the ranges for a binding in an import declaration +rangesForBindingImport :: ImportDecl GhcPs -> String -> [Range] +rangesForBindingImport ImportDecl{ + ideclImportList = Just (Exactly, L _ lies) + } b = + concatMap (mapMaybe srcSpanToRange . rangesForBinding' b') lies + where + b' = wrapOperatorInParens b +rangesForBindingImport _ _ = [] + +wrapOperatorInParens :: String -> String +wrapOperatorInParens x = + case uncons x of + -- see #2483 and #2859 + -- common lens functions use the _ prefix, and should not be wrapped in parens + Just ('_', _t) -> x + Just (h, _t) -> if isAlpha h then x else "(" <> x <> ")" + Nothing -> mempty + +smallerRangesForBindingExport :: [LIE GhcPs] -> String -> [Range] +smallerRangesForBindingExport lies b = + concatMap (mapMaybe srcSpanToRange . ranges') lies + where + unqualify = snd . breakOnEnd "." + b' = wrapOperatorInParens $ unqualify b + ranges' + ( L + _ + ( IEThingWith + _ + thing + _ + inners +#if MIN_VERSION_ghc(9,9,0) + _ +#endif + ) + ) + | T.unpack (printOutputable thing) == b' = [] + | otherwise = + [ locA l' | L l' x <- inners, T.unpack (printOutputable x) == b'] + ranges' _ = [] + +rangesForBinding' :: String -> LIE GhcPs -> [SrcSpan] +#if MIN_VERSION_ghc(9,9,0) +rangesForBinding' b (L (locA -> l) (IEVar _ nm _)) +#else +rangesForBinding' b (L (locA -> l) (IEVar _ nm)) +#endif + | L _ (IEPattern _ (L _ b')) <- nm + , T.unpack (printOutputable b') == b + = [l] +rangesForBinding' b (L (locA -> l) x@IEVar{}) + | T.unpack (printOutputable x) == b = [l] +rangesForBinding' b (L (locA -> l) x@IEThingAbs{}) | T.unpack (printOutputable x) == b = [l] +#if MIN_VERSION_ghc(9,9,0) +rangesForBinding' b (L (locA -> l) (IEThingAll _ x _)) +#else +rangesForBinding' b (L (locA -> l) (IEThingAll _ x)) +#endif + | T.unpack (printOutputable x) == b = [l] +#if MIN_VERSION_ghc(9,9,0) +rangesForBinding' b (L (locA -> l) (IEThingWith _ thing _ inners _)) +#else +rangesForBinding' b (L (locA -> l) (IEThingWith _ thing _ inners)) +#endif + | T.unpack (printOutputable thing) == b = [l] + | otherwise = + [ locA l' | L l' x <- inners, T.unpack (printOutputable x) == b] +rangesForBinding' _ _ = [] + +-- | 'allMatchRegex' combined with 'unifySpaces' +allMatchRegexUnifySpaces :: T.Text -> T.Text -> Maybe [[T.Text]] +allMatchRegexUnifySpaces message = + allMatchRegex (unifySpaces message) + +-- | Returns Just (all matches) for the first capture, or Nothing. 
+allMatchRegex :: T.Text -> T.Text -> Maybe [[T.Text]] +allMatchRegex message regex = message =~~ regex + + +-- functions to help parse multiple import suggestions + +-- | Returns the first match if found +regexSingleMatch :: T.Text -> T.Text -> Maybe T.Text +regexSingleMatch msg regex = case matchRegexUnifySpaces msg regex of + Just (h:_) -> Just h + _ -> Nothing + +-- | Process a list of (module_name, filename:src_span) values +-- +-- Eg. [(Data.Map, app/ModuleB.hs:2:1-18), (Data.HashMap.Strict, app/ModuleB.hs:3:1-29)] +regExImports :: T.Text -> Maybe [(T.Text, T.Text)] +regExImports msg + | Just mods' <- allMatchRegex msg "‘([^’]*)’" + , Just srcspans' <- allMatchRegex msg + -- This regex has to be able to deal both with single-line srcpans like "(/path/to/File.hs:2:1-18)" + -- as well as multi-line srcspans like "(/path/to/File.hs:(3,1)-(5,2))" +#if MIN_VERSION_ghc(9,7,0) + "\\(at ([^:]+:[^ ]+)\\)" +#else + "\\(([^:]+:[^ ]+)\\)" +#endif + , mods <- [mod | [_,mod] <- mods'] + , srcspans <- [srcspan | [_,srcspan] <- srcspans'] + -- check we have matching pairs like (Data.Map, (app/src.hs:1:2-18)) + , let result = if length mods == length srcspans then + Just (zip mods srcspans) else Nothing + = result + | otherwise = Nothing + +matchRegExMultipleImports :: T.Text -> Maybe (T.Text, [(T.Text, T.Text)]) +matchRegExMultipleImports message = do +#if MIN_VERSION_ghc(9,7,0) + let pat = T.pack "Add ‘([^’]*)’ to one of these import lists: *(‘.*\\))$" +#else + let pat = T.pack "Perhaps you want to add ‘([^’]*)’ to one of these import lists: *(‘.*\\))$" +#endif + (binding, imports) <- case matchRegexUnifySpaces message pat of + Just [x, xs] -> Just (x, xs) + _ -> Nothing + imps <- regExImports imports + return (binding, imps) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs new file mode 100644 index 0000000000..a4132dd787 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Args.hs @@ -0,0 +1,286 @@ +module Development.IDE.Plugin.CodeAction.Args + ( CodeActionTitle, + CodeActionPreferred, + GhcideCodeActionResult, + GhcideCodeAction, + mkGhcideCAPlugin, + mkGhcideCAsPlugin, + ToTextEdit (..), + ToCodeAction (..), + wrap, + mkCA, + ) +where + +import Control.Concurrent.STM.Stats (readTVarIO) +import Control.Monad.Except (ExceptT (..), + runExceptT) +import Control.Monad.Reader +import Control.Monad.Trans.Maybe +import Data.Either (fromRight, + partitionEithers) +import Data.Functor ((<&>)) +import Data.IORef.Extra +import qualified Data.Map as Map +import Data.Maybe (fromMaybe, + maybeToList) +import qualified Data.Text as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE hiding + (pluginHandlers) +import Development.IDE.Core.PluginUtils (activeDiagnosticsInRange) +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.ExactPrint +import Development.IDE.Plugin.CodeAction.ExactPrint (Rewrite, + rewriteToEdit) +import Development.IDE.Plugin.TypeLenses (GetGlobalBindingTypeSigs (GetGlobalBindingTypeSigs), + GlobalBindingTypeSigsResult) +import Development.IDE.Spans.LocalBindings (Bindings) +import Development.IDE.Types.Exports (ExportsMap) +import Development.IDE.Types.Options (IdeOptions) +import Ide.Plugin.Error (PluginError) +import Ide.Types +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +type CodeActionTitle = T.Text + +type CodeActionPreferred = Bool + 
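
A minimal sketch of how these pieces are meant to be combined, assuming the provider functions defined earlier in this patch (for example suggestModuleTypo and suggestReplaceIdentifier); the name examplePlugin is hypothetical. A provider stays an ordinary function, wrap lifts it into a GhcideCodeAction by resolving its arguments through the ToCodeAction instances below, and the wrapped actions are registered with mkGhcideCAsPlugin:

    -- Hypothetical registration sketch; 'wrap' supplies each function's
    -- arguments (Diagnostic, Maybe T.Text, ...) on demand.
    examplePlugin :: PluginId -> T.Text -> PluginDescriptor IdeState
    examplePlugin = mkGhcideCAsPlugin
      [ wrap suggestModuleTypo        -- Diagnostic -> [(T.Text, TextEdit)]
      , wrap suggestReplaceIdentifier -- Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])]
      ]
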
+type GhcideCodeActionResult = [(CodeActionTitle, Maybe CodeActionKind, Maybe CodeActionPreferred, [TextEdit])] + +type GhcideCodeAction = ExceptT PluginError (ReaderT CodeActionArgs IO) GhcideCodeActionResult + +------------------------------------------------------------------------------------------------- + +runGhcideCodeAction :: IdeState -> MessageParams Method_TextDocumentCodeAction -> GhcideCodeAction -> HandlerM Config GhcideCodeActionResult +runGhcideCodeAction state (CodeActionParams _ _ (TextDocumentIdentifier uri) _range _) codeAction + | Just nfp <- toNormalizedFilePath' <$> uriToFilePath uri = do + let runRule key = runAction ("GhcideCodeActions." <> show key) state $ runMaybeT $ MaybeT (pure (Just nfp)) >>= MaybeT . use key + caaGhcSession <- onceIO $ runRule GhcSession + caaExportsMap <- + onceIO $ + caaGhcSession >>= \case + Just env -> do + pkgExports <- envPackageExports env + localExports <- readTVarIO (exportsMap $ shakeExtras state) + pure $ localExports <> pkgExports + _ -> pure mempty + caaIdeOptions <- onceIO $ runAction "GhcideCodeActions.getIdeOptions" state getIdeOptions + caaParsedModule <- onceIO $ runRule GetParsedModuleWithComments + caaContents <- + onceIO $ + runRule GetFileContents <&> \case + Just (_, mbContents) -> fmap Rope.toText mbContents + Nothing -> Nothing + caaDf <- onceIO $ fmap (ms_hspp_opts . pm_mod_summary) <$> caaParsedModule + caaAnnSource <- onceIO $ runRule GetAnnotatedParsedSource + caaTmr <- onceIO $ runRule TypeCheck + caaHar <- onceIO $ runRule GetHieAst + caaBindings <- onceIO $ runRule GetBindings + caaGblSigs <- onceIO $ runRule GetGlobalBindingTypeSigs + diags <- concat . maybeToList <$> activeDiagnosticsInRange (shakeExtras state) nfp _range + results <- liftIO $ + sequence + [ + runReaderT (runExceptT codeAction) CodeActionArgs {..} + | caaDiagnostic <- diags + ] + let (_errs, successes) = partitionEithers results + pure $ concat successes + | otherwise = pure [] + + +mkCA :: T.Text -> Maybe CodeActionKind -> Maybe Bool -> [Diagnostic] -> WorkspaceEdit -> (Command |? CodeAction) +mkCA title kind isPreferred diags edit = + InR $ CodeAction title kind (Just diags) isPreferred Nothing (Just edit) Nothing Nothing + +mkGhcideCAPlugin :: GhcideCodeAction -> PluginId -> T.Text -> PluginDescriptor IdeState +mkGhcideCAPlugin codeAction plId desc = + (defaultPluginDescriptor plId desc) + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction $ + \state _ params@(CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext {_diagnostics = diags}) -> do + results <- lift $ runGhcideCodeAction state params codeAction + pure $ + InL + [ mkCA title kind isPreferred diags edit + | (title, kind, isPreferred, tedit) <- results, + let edit = WorkspaceEdit (Just $ Map.singleton uri tedit) Nothing Nothing + ] + } + +mkGhcideCAsPlugin :: [GhcideCodeAction] -> PluginId -> T.Text -> PluginDescriptor IdeState +mkGhcideCAsPlugin codeActions = mkGhcideCAPlugin $ mconcat codeActions + +------------------------------------------------------------------------------------------------- + +class ToTextEdit a where + toTextEdit :: CodeActionArgs -> a -> IO [TextEdit] + +instance ToTextEdit TextEdit where + toTextEdit _ = pure . 
pure + +instance ToTextEdit Rewrite where + toTextEdit CodeActionArgs {..} rw = fmap (fromMaybe []) $ + runMaybeT $ do + df <- MaybeT caaDf + let r = rewriteToEdit df rw + pure $ fromRight [] r + +instance ToTextEdit a => ToTextEdit [a] where + toTextEdit caa = foldMap (toTextEdit caa) + +instance ToTextEdit a => ToTextEdit (Maybe a) where + toTextEdit caa = maybe (pure []) (toTextEdit caa) + +instance (ToTextEdit a, ToTextEdit b) => ToTextEdit (Either a b) where + toTextEdit caa = either (toTextEdit caa) (toTextEdit caa) + +------------------------------------------------------------------------------------------------- + +data CodeActionArgs = CodeActionArgs + { caaExportsMap :: IO ExportsMap, + caaGhcSession :: IO (Maybe HscEnvEq), + caaIdeOptions :: IO IdeOptions, + caaParsedModule :: IO (Maybe ParsedModule), + caaContents :: IO (Maybe T.Text), + caaDf :: IO (Maybe DynFlags), + caaAnnSource :: IO (Maybe ParsedSource), + caaTmr :: IO (Maybe TcModuleResult), + caaHar :: IO (Maybe HieAstResult), + caaBindings :: IO (Maybe Bindings), + caaGblSigs :: IO (Maybe GlobalBindingTypeSigsResult), + caaDiagnostic :: FileDiagnostic + } + +-- | There's no concurrency in each provider, +-- so we don't need to be thread-safe here +onceIO :: MonadIO m => IO a -> m (IO a) +onceIO io = do + var <- liftIO $ newIORef Nothing + pure $ + readIORef var >>= \case + Just x -> pure x + _ -> io >>= \x -> writeIORef' var (Just x) >> pure x + +------------------------------------------------------------------------------------------------- + +wrap :: (ToCodeAction a) => a -> GhcideCodeAction +wrap = toCodeAction + +class ToCodeAction a where + toCodeAction :: a -> GhcideCodeAction + +instance ToCodeAction GhcideCodeAction where + toCodeAction = id + +instance Semigroup GhcideCodeAction where + a <> b = toCodeAction [a, b] + +instance Monoid GhcideCodeAction where + mempty = pure [] + +instance ToCodeAction a => ToCodeAction [a] where + toCodeAction = fmap concat . mapM toCodeAction + +instance ToCodeAction a => ToCodeAction (Maybe a) where + toCodeAction = maybe (pure []) toCodeAction + +instance ToCodeAction a => ToCodeAction (Either PluginError a) where + toCodeAction = either (\err -> ExceptT $ ReaderT $ \_ -> pure $ Left err) toCodeAction + +instance ToTextEdit a => ToCodeAction (CodeActionTitle, a) where + toCodeAction (title, te) = ExceptT $ ReaderT $ \caa -> Right . pure . (title,Just CodeActionKind_QuickFix,Nothing,) <$> toTextEdit caa te + +instance ToTextEdit a => ToCodeAction (CodeActionTitle, CodeActionKind, a) where + toCodeAction (title, kind, te) = ExceptT $ ReaderT $ \caa -> Right . pure . (title,Just kind,Nothing,) <$> toTextEdit caa te + +instance ToTextEdit a => ToCodeAction (CodeActionTitle, CodeActionPreferred, a) where + toCodeAction (title, isPreferred, te) = ExceptT $ ReaderT $ \caa -> Right . pure . (title,Just CodeActionKind_QuickFix,Just isPreferred,) <$> toTextEdit caa te + +instance ToTextEdit a => ToCodeAction (CodeActionTitle, CodeActionKind, CodeActionPreferred, a) where + toCodeAction (title, kind, isPreferred, te) = ExceptT $ ReaderT $ \caa -> Right . pure . (title,Just kind,Just isPreferred,) <$> toTextEdit caa te + +------------------------------------------------------------------------------------------------- + +toCodeAction1 :: (ToCodeAction r) => (CodeActionArgs -> IO (Maybe a)) -> (Maybe a -> r) -> GhcideCodeAction +toCodeAction1 get f = ExceptT . ReaderT $ \caa -> do + caaMay <- get caa + flip runReaderT caa . runExceptT . toCodeAction . 
f $ caaMay + +toCodeAction2 :: (ToCodeAction r) => (CodeActionArgs -> IO (Maybe a)) -> (a -> r) -> GhcideCodeAction +toCodeAction2 get f = ExceptT . ReaderT $ \caa -> + get caa >>= \case + Just x -> flip runReaderT caa . runExceptT . toCodeAction . f $ x + _ -> pure $ Right [] + +toCodeAction3 :: (ToCodeAction r) => (CodeActionArgs -> IO a) -> (a -> r) -> GhcideCodeAction +toCodeAction3 get f = ExceptT . ReaderT $ \caa -> get caa >>= flip runReaderT caa . runExceptT . toCodeAction . f + +-- | this instance returns a delta AST, useful for exactprint transforms +instance ToCodeAction r => ToCodeAction (ParsedSource -> r) where + toCodeAction = toCodeAction2 caaAnnSource + +instance ToCodeAction r => ToCodeAction (ExportsMap -> r) where + toCodeAction = toCodeAction3 caaExportsMap + +instance ToCodeAction r => ToCodeAction (IdeOptions -> r) where + toCodeAction = toCodeAction3 caaIdeOptions + +instance ToCodeAction r => ToCodeAction (Diagnostic -> r) where + toCodeAction f = ExceptT . ReaderT $ \caa@CodeActionArgs {caaDiagnostic = x} -> flip runReaderT caa . runExceptT . toCodeAction $ f (fdLspDiagnostic x) + +instance ToCodeAction r => ToCodeAction (FileDiagnostic -> r) where + toCodeAction f = ExceptT . ReaderT $ \caa@CodeActionArgs {caaDiagnostic = x} -> flip runReaderT caa . runExceptT . toCodeAction $ f x + +instance ToCodeAction r => ToCodeAction (Maybe ParsedModule -> r) where + toCodeAction = toCodeAction1 caaParsedModule + +instance ToCodeAction r => ToCodeAction (ParsedModule -> r) where + toCodeAction = toCodeAction2 caaParsedModule + +instance ToCodeAction r => ToCodeAction (Maybe T.Text -> r) where + toCodeAction = toCodeAction1 caaContents + +instance ToCodeAction r => ToCodeAction (T.Text -> r) where + toCodeAction = toCodeAction2 caaContents + +instance ToCodeAction r => ToCodeAction (Maybe DynFlags -> r) where + toCodeAction = toCodeAction1 caaDf + +instance ToCodeAction r => ToCodeAction (DynFlags -> r) where + toCodeAction = toCodeAction2 caaDf + +instance ToCodeAction r => ToCodeAction (Maybe ParsedSource -> r) where + toCodeAction = toCodeAction1 caaAnnSource + +instance ToCodeAction r => ToCodeAction (Maybe TcModuleResult -> r) where + toCodeAction = toCodeAction1 caaTmr + +instance ToCodeAction r => ToCodeAction (TcModuleResult -> r) where + toCodeAction = toCodeAction2 caaTmr + +instance ToCodeAction r => ToCodeAction (Maybe HieAstResult -> r) where + toCodeAction = toCodeAction1 caaHar + +instance ToCodeAction r => ToCodeAction (HieAstResult -> r) where + toCodeAction = toCodeAction2 caaHar + +instance ToCodeAction r => ToCodeAction (Maybe Bindings -> r) where + toCodeAction = toCodeAction1 caaBindings + +instance ToCodeAction r => ToCodeAction (Bindings -> r) where + toCodeAction = toCodeAction2 caaBindings + +instance ToCodeAction r => ToCodeAction (Maybe GlobalBindingTypeSigsResult -> r) where + toCodeAction = toCodeAction1 caaGblSigs + +instance ToCodeAction r => ToCodeAction (GlobalBindingTypeSigsResult -> r) where + toCodeAction = toCodeAction2 caaGblSigs + +instance ToCodeAction r => ToCodeAction (Maybe HscEnvEq -> r) where + toCodeAction = toCodeAction1 caaGhcSession + +instance ToCodeAction r => ToCodeAction (Maybe HscEnv -> r) where + toCodeAction = toCodeAction1 ((fmap.fmap.fmap) hscEnv caaGhcSession) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs new file mode 100644 index 0000000000..bffd2a611c --- /dev/null +++ 
b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs @@ -0,0 +1,620 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE TypeFamilies #-} +module Development.IDE.Plugin.CodeAction.ExactPrint ( + Rewrite (..), + rewriteToEdit, + rewriteToWEdit, + + -- * Utilities + appendConstraint, + removeConstraint, + extendImport, + hideSymbol, + liftParseAST, + + wildCardSymbol +) where + +import Control.Monad +import Control.Monad.Trans +import Data.Char (isAlphaNum) +import Data.Data (Data) +import Data.Generics (listify) +import qualified Data.Text as T +import Development.IDE.GHC.Compat hiding (Annotation) +import Development.IDE.GHC.Error +import Development.IDE.GHC.ExactPrint +import Development.IDE.GHC.Util +import Development.IDE.Spans.Common +import GHC.Exts (IsList (fromList)) +import GHC.Stack (HasCallStack) +import Language.Haskell.GHC.ExactPrint +import Language.LSP.Protocol.Types + +import Control.Lens (_head, _last, over) +import Data.Bifunctor (first) +import Data.Maybe (fromMaybe, mapMaybe) +import Development.IDE.Plugin.CodeAction.Util +import GHC (AnnContext (..), + AnnList (..), + DeltaPos (SameLine), + EpAnn (..), + IsUnicodeSyntax (NormalSyntax), + NameAdornment (NameParens), + TrailingAnn (AddCommaAnn), + emptyComments, reAnnL) + + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if MIN_VERSION_ghc(9,11,0) +import GHC (EpToken (..) + , AnnListBrackets (..) + , EpUniToken (..)) +#else +import GHC (AddEpAnn (..), + AnnParen (..)) +#endif +#if !MIN_VERSION_ghc(9,9,0) +import Data.Default (Default (..)) +import GHC (addAnns, ann) +#endif + +#if MIN_VERSION_ghc(9,9,0) +import GHC (NoAnn (..)) +import GHC (EpAnnComments (..)) +#endif + +------------------------------------------------------------------------------ + +-- | Construct a 'Rewrite', replacing the node at the given 'SrcSpan' with the +-- given 'ast'. +data Rewrite where + Rewrite :: + (ExactPrint (GenLocated (Anno ast) ast), ResetEntryDP (Anno ast), Outputable (GenLocated (Anno ast) ast), Data (GenLocated (Anno ast) ast)) => + -- | The 'SrcSpan' that we want to rewrite + SrcSpan -> + -- | The ast that we want to graft + (DynFlags -> TransformT (Either String) (GenLocated (Anno ast) ast)) -> + Rewrite + +------------------------------------------------------------------------------ +class ResetEntryDP ann where + resetEntryDP :: GenLocated ann ast -> GenLocated ann ast +#if MIN_VERSION_ghc(9,9,0) +instance {-# OVERLAPPING #-} NoAnn an => ResetEntryDP (EpAnn an) where + resetEntryDP (L srcAnn x) = setEntryDP (L srcAnn{anns=noAnn} x) (SameLine 0) +#else +instance {-# OVERLAPPING #-} Default an => ResetEntryDP (SrcAnn an) where + resetEntryDP (L srcAnn x) = setEntryDP (L srcAnn{ann=EpAnnNotUsed} x) (SameLine 0) +#endif +instance {-# OVERLAPPABLE #-} ResetEntryDP fallback where + resetEntryDP = id + +-- | Convert a 'Rewrite' into a list of '[TextEdit]'. 
+rewriteToEdit :: HasCallStack => + DynFlags -> + Rewrite -> + Either String [TextEdit] +rewriteToEdit dflags + (Rewrite dst f) = do + (ast, _ , _) <- runTransformT $ do + ast <- f dflags + pure $ traceAst "REWRITE_result" $ resetEntryDP ast + let edits = case srcSpanToRange dst of + Just range -> [ TextEdit range $ T.pack $ exactPrint ast ] + Nothing -> [] + pure edits + +-- | Convert a 'Rewrite' into a 'WorkspaceEdit' +rewriteToWEdit :: DynFlags + -> Uri + -> Rewrite + -> Either String WorkspaceEdit +rewriteToWEdit dflags uri + r = do + edits <- rewriteToEdit dflags + r + return $ + WorkspaceEdit + { _changes = Just (fromList [(uri, edits)]) + , _documentChanges = Nothing + , _changeAnnotations = Nothing + } + +------------------------------------------------------------------------------ + + +dropHsParTy :: LHsType (GhcPass pass) -> LHsType (GhcPass pass) +dropHsParTy (L _ (HsParTy _ ty)) = ty +dropHsParTy other = other + +removeConstraint :: + -- | Predicate: Which context to drop. + (LHsType GhcPs -> Bool) -> + LHsType GhcPs -> + Rewrite +removeConstraint toRemove = go . traceAst "REMOVE_CONSTRAINT_input" + where + go :: LHsType GhcPs -> Rewrite +#if MIN_VERSION_ghc(9,9,0) + go lHsType@(makeDeltaAst -> L l it@HsQualTy{hst_ctxt = L l' ctxt, hst_body}) = Rewrite (locA lHsType) $ \_ -> do +#else + go (L l it@HsQualTy{hst_ctxt = L l' ctxt, hst_body}) = Rewrite (locA l) $ \_ -> do +#endif + let ctxt' = filter (not . toRemove) ctxt + removeStuff = (toRemove <$> headMaybe ctxt) == Just True + let hst_body' = if removeStuff then resetEntryDP hst_body else hst_body + return $ case ctxt' of + [] -> hst_body' + _ -> do + let ctxt'' = over _last (first removeComma) ctxt' + L l $ it{ hst_ctxt = L l' ctxt'' + , hst_body = hst_body' + } + go (L _ (HsParTy _ ty)) = go ty + go (L _ HsForAllTy{hst_body}) = go hst_body + go (L l other) = Rewrite (locA l) $ \_ -> return $ L l other + +-- | Append a constraint at the end of a type context. +-- If no context is present, a new one will be created. +appendConstraint :: + -- | The new constraint to append + String -> + -- | The type signature where the constraint is to be inserted, also assuming annotated + LHsType GhcPs -> + Rewrite +appendConstraint constraintT = go . 
traceAst "appendConstraint" + where + go (L l it@HsQualTy{hst_ctxt = L l' ctxt}) = Rewrite (locA l) $ \df -> do + constraint <- liftParseAST df constraintT + constraint <- pure $ setEntryDP constraint (SameLine 1) +#if MIN_VERSION_ghc(9,9,0) + let l'' = moveCommentsToTheEnd $ fmap (addParensToCtxt close_dp) l' +#else + let l'' = (fmap.fmap) (addParensToCtxt close_dp) l' +#endif + -- For singleton constraints, the close Paren DP is attached to an HsPar wrapping the constraint + -- we have to reposition it manually into the AnnContext + close_dp = case ctxt of +#if MIN_VERSION_ghc(9,11,0) + [L _ (HsParTy (_, (EpTok ap_close)) _)] -> Just ap_close +#elif MIN_VERSION_ghc(9,9,0) + [L _ (HsParTy AnnParen{ap_close} _)] -> Just ap_close +#else + [L _ (HsParTy EpAnn{anns=AnnParen{ap_close}} _)] -> Just ap_close +#endif + _ -> Nothing + ctxt' = over _last (first addComma) $ map dropHsParTy ctxt + return $ L l $ it{hst_ctxt = L l'' $ ctxt' ++ [constraint]} + go (L _ HsForAllTy{hst_body}) = go hst_body + go (L _ (HsParTy _ ty)) = go ty + go ast@(L l _) = Rewrite (locA l) $ \df -> do + -- there isn't a context, so we must create one + constraint <- liftParseAST df constraintT + lContext <- uniqueSrcSpanT + lTop <- uniqueSrcSpanT + let context = reAnnL annCtxt emptyComments $ L lContext [resetEntryDP constraint] +#if MIN_VERSION_ghc(9,11,0) + annCtxt = AnnContext (Just (EpUniTok (epl 1) NormalSyntax)) [EpTok (epl 0) | needsParens] [EpTok (epl 0) | needsParens] +#else + annCtxt = AnnContext (Just (NormalSyntax, epl 1)) [epl 0 | needsParens] [epl 0 | needsParens] +#endif + needsParens = hsTypeNeedsParens sigPrec $ unLoc constraint + ast <- pure $ setEntryDP (makeDeltaAst ast) (SameLine 1) + + return $ reLocA $ L lTop $ HsQualTy noExtField context ast + +#if MIN_VERSION_ghc(9,9,0) +-- | This moves comment annotation toward the end of the block +-- This is useful when extending a block, so the comment correctly appears +-- after. +-- +-- See https://github.com/haskell/haskell-language-server/issues/4648 for +-- discussion. +-- +-- For example, the following element, @(Foo) => -- hello@, when introducing an +-- additionnal constraint, `Bar`, instead of getting `@(Foo, Bar) => -- hello@, +-- we get @(Foo, -- hello Bar) =>@ +-- +-- This is a bit painful that the pretty printer is not able to realize that it +-- introduces the token `=>` inside the comment and instead does something with +-- meaning, but that's another story. +moveCommentsToTheEnd :: EpAnn ann -> EpAnn ann +moveCommentsToTheEnd (EpAnn entry anns (EpaComments priors)) = EpAnn entry anns (EpaCommentsBalanced { priorComments = [], followingComments = priors}) +moveCommentsToTheEnd (EpAnn entry anns (EpaCommentsBalanced priors following)) = EpAnn entry anns (EpaCommentsBalanced { priorComments = [], followingComments = priors <> following}) +#endif + +liftParseAST + :: forall ast l. 
(ASTElement l ast, ExactPrint (LocatedAn l ast)) + => DynFlags -> String -> TransformT (Either String) (LocatedAn l ast) +liftParseAST df s = case parseAST df "" s of + Right x -> pure (makeDeltaAst x) + Left _ -> TransformT $ lift $ Left $ "No parse: " <> s + + +headMaybe :: [a] -> Maybe a +headMaybe [] = Nothing +headMaybe (a : _) = Just a + +lastMaybe :: [a] -> Maybe a +lastMaybe [] = Nothing +lastMaybe other = Just $ last other + +------------------------------------------------------------------------------ +extendImport :: Maybe String -> String -> LImportDecl GhcPs -> Rewrite +extendImport mparent identifier lDecl@(L l _) = + Rewrite (locA l) $ \df -> do + case mparent of + -- This will also work for `ImportAllConstructors` + -- Parsed source in GHC 9.4 uses absolute position annotation (RealSrcSpan), + -- while rewriting relies on relative positions. ghc-exactprint has the utility + -- makeDeltaAst for relativization. + Just parent -> extendImportViaParent df parent identifier (makeDeltaAst lDecl) + _ -> extendImportTopLevel identifier (makeDeltaAst lDecl) + +-- | Add an identifier or a data type to import list. Expects a Delta AST +-- +-- extendImportTopLevel "foo" AST: +-- +-- import A --> Error +-- import A (foo) --> Error +-- import A (bar) --> import A (bar, foo) +extendImportTopLevel :: + -- | rendered + String -> + LImportDecl GhcPs -> + TransformT (Either String) (LImportDecl GhcPs) +extendImportTopLevel thing (L l it@ImportDecl{..}) + | Just (hide, L l' lies) <- ideclImportList + = do + src <- uniqueSrcSpanT + top <- uniqueSrcSpanT + let rdr = reLocA $ L src $ mkRdrUnqual $ mkVarOcc thing + let alreadyImported = + printOutputable (occName (unLoc rdr)) + `elem` map (printOutputable @OccName) (listify (const True) lies) + when alreadyImported $ + TransformT $ lift (Left $ thing <> " already imported") + + let lie = reLocA $ L src $ IEName + noExtField + rdr + x = reLocA $ L top $ IEVar +#if MIN_VERSION_ghc(9,8,0) + Nothing -- no deprecated +#else + noExtField +#endif + lie +#if MIN_VERSION_ghc(9,9,0) + Nothing +#endif + + if x `elem` lies + then TransformT $ lift (Left $ thing <> " already imported") + else do + let lies' = addCommaInImportList lies x + return $ L l it{ideclImportList = Just (hide, L l' lies')} +extendImportTopLevel _ _ = TransformT $ lift $ Left "Unable to extend the import list" + +wildCardSymbol :: String +wildCardSymbol = ".." + +-- | Add an identifier with its parent to import list +-- +-- extendImportViaParent "Bar" "Cons" AST: +-- +-- import A --> Error +-- import A (Bar(..)) --> Error +-- import A (Bar(Cons)) --> Error +-- import A () --> import A (Bar(Cons)) +-- import A (Foo, Bar) --> import A (Foo, Bar(Cons)) +-- import A (Foo, Bar()) --> import A (Foo, Bar(Cons)) +-- +-- extendImportViaParent "Bar" ".." AST: +-- import A () --> import A (Bar(..)) +-- import A (Foo, Bar) -> import A (Foo, Bar(..)) +-- import A (Foo, Bar()) -> import A (Foo, Bar(..)) +extendImportViaParent :: + DynFlags -> + -- | parent (already parenthesized if needs) + String -> + -- | rendered child + String -> + LImportDecl GhcPs -> + TransformT (Either String) (LImportDecl GhcPs) +extendImportViaParent df parent child (L l it@ImportDecl{..}) + | Just (hide, L l' lies) <- ideclImportList = go hide l' [] lies + where +#if MIN_VERSION_ghc(9,9,0) + go _hide _l' _pre ((L _ll' (IEThingAll _ (L _ ie) _)) : _xs) +#else + go _hide _l' _pre ((L _ll' (IEThingAll _ (L _ ie))) : _xs) +#endif + | parent == unIEWrappedName ie = TransformT $ lift . 
Left $ child <> " already included in " <> parent <> " imports" +#if MIN_VERSION_ghc(9,9,0) + go hide l' pre ((L ll' (IEThingAbs _ absIE@(L _ ie) docs)) : xs) +#else + go hide l' pre ((L ll' (IEThingAbs _ absIE@(L _ ie))) : xs) +#endif + -- ThingAbs ie => ThingWith ie child + | parent == unIEWrappedName ie = do + srcChild <- uniqueSrcSpanT + let childRdr = reLocA $ L srcChild $ mkRdrUnqual $ mkVarOcc child + childLIE = reLocA $ L srcChild $ IEName + noExtField + childRdr + x :: LIE GhcPs = L ll' $ IEThingWith +#if MIN_VERSION_ghc(9,11,0) + (Nothing, (EpTok d1, NoEpTok, NoEpTok, EpTok noAnn)) +#elif MIN_VERSION_ghc(9,9,0) + (Nothing, [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP noAnn]) +#elif MIN_VERSION_ghc(9,7,0) + (Nothing, addAnns mempty [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP def] emptyComments) +#else + (addAnns mempty [AddEpAnn AnnOpenP d1, AddEpAnn AnnCloseP def] emptyComments) +#endif + absIE NoIEWildcard [childLIE] +#if MIN_VERSION_ghc(9,9,0) + docs +#endif + + return $ L l it{ideclImportList = Just (hide, L l' $ reverse pre ++ [x] ++ xs)} + +#if MIN_VERSION_ghc(9,9,0) + go hide l' pre ((L l'' (IEThingWith l''' twIE@(L _ ie) _ lies' docs)) : xs) +#else + go hide l' pre ((L l'' (IEThingWith l''' twIE@(L _ ie) _ lies')) : xs) +#endif + -- ThingWith ie lies' => ThingWith ie (lies' ++ [child]) + | parent == unIEWrappedName ie + , child == wildCardSymbol = do + let it' = it{ideclImportList = Just (hide, lies)} + thing = IEThingWith newl twIE (IEWildcard 2) [] +#if MIN_VERSION_ghc(9,9,0) + docs +#endif +#if MIN_VERSION_ghc(9,7,0) && !MIN_VERSION_ghc(9,9,0) + newl = fmap (\ann -> ann ++ [AddEpAnn AnnDotdot d0]) <$> l''' +#elif MIN_VERSION_ghc(9,11,0) + newl = (\(open, _, comma, close) -> (open, EpTok d0, comma, close)) <$> l''' +#else + newl = (\ann -> ann ++ [AddEpAnn AnnDotdot d0]) <$> l''' +#endif + lies = L l' $ reverse pre ++ [L l'' thing] ++ xs + return $ L l it' + | parent == unIEWrappedName ie = do + let hasSibling = not $ null lies' + srcChild <- uniqueSrcSpanT + let childRdr = reLocA $ L srcChild $ mkRdrUnqual $ mkVarOcc child + childRdr <- pure $ setEntryDP childRdr $ SameLine $ if hasSibling then 1 else 0 + let alreadyImported = + printOutputable (occName (unLoc childRdr)) + `elem` map (printOutputable @OccName) (listify (const True) lies') + when alreadyImported $ + TransformT $ lift (Left $ child <> " already included in " <> parent <> " imports") + + let childLIE = reLocA $ L srcChild $ IEName + noExtField + childRdr + let it' = it{ideclImportList = Just (hide, lies)} + lies = L l' $ reverse pre ++ + [L l'' (IEThingWith l''' twIE NoIEWildcard (over _last fixLast lies' ++ [childLIE]) +#if MIN_VERSION_ghc(9,9,0) + docs +#endif + )] ++ xs + fixLast = if hasSibling then first addComma else id + return $ L l it' + go hide l' pre (x : xs) = go hide l' (x : pre) xs + go hide l' pre [] = do + -- [] => ThingWith parent [child] + l'' <- uniqueSrcSpanT + srcParent <- uniqueSrcSpanT + srcChild <- uniqueSrcSpanT + parentRdr <- liftParseAST df parent + let childRdr = reLocA $ L srcChild $ mkRdrUnqual $ mkVarOcc child + isParentOperator = hasParen parent +#if MIN_VERSION_ghc(9,11,0) + let parentLIE = reLocA $ L srcParent $ if isParentOperator then IEType (EpTok (epl 0)) parentRdr' +#else + let parentLIE = reLocA $ L srcParent $ if isParentOperator then IEType (epl 0) parentRdr' +#endif + else IEName + noExtField + parentRdr' + parentRdr' = modifyAnns parentRdr $ \case +#if MIN_VERSION_ghc(9,11,0) + it@NameAnn{nann_adornment = NameParens _ _} -> it{nann_adornment=NameParens (EpTok (epl 1)) 
(EpTok (epl 0))} +#else + it@NameAnn{nann_adornment = NameParens} -> it{nann_open = epl 1, nann_close = epl 0} +#endif + other -> other + childLIE = reLocA $ L srcChild $ IEName + noExtField + childRdr +#if MIN_VERSION_ghc(9,11,0) + listAnn = (Nothing, (EpTok (epl 1), NoEpTok, NoEpTok, EpTok (epl 0))) +#elif MIN_VERSION_ghc(9,9,0) + listAnn = (Nothing, [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)]) +#elif MIN_VERSION_ghc(9,7,0) + listAnn = (Nothing, epAnn srcParent [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)]) +#else + listAnn = epAnn srcParent [AddEpAnn AnnOpenP (epl 1), AddEpAnn AnnCloseP (epl 0)] +#endif + x :: LIE GhcPs = reLocA $ L l'' $ IEThingWith listAnn parentLIE NoIEWildcard [childLIE] +#if MIN_VERSION_ghc(9,9,0) + Nothing -- TODO preserve docs? +#endif + + lies' = addCommaInImportList (reverse pre) x + return $ L l it{ideclImportList = Just (hide, L l' lies')} +extendImportViaParent _ _ _ _ = TransformT $ lift $ Left "Unable to extend the import list via parent" + +-- Add an item in an import list, taking care of adding comma if needed. +addCommaInImportList :: + -- | Initial list + [LocatedAn AnnListItem a] + -- | Additional item + -> LocatedAn AnnListItem a + -> [LocatedAn AnnListItem a] +addCommaInImportList lies x = + fixLast lies ++ [newItem] + where + isTrailingAnnComma :: TrailingAnn -> Bool + isTrailingAnnComma (AddCommaAnn _) = True + isTrailingAnnComma _ = False + + -- check if there is an existing trailing comma + existingTrailingComma = fromMaybe False $ do + L lastItemSrcAnn _ <- lastMaybe lies +#if MIN_VERSION_ghc(9,9,0) + lastItemAnn <- case lastItemSrcAnn of + EpAnn _ lastItemAnn _ -> pure lastItemAnn +#else + lastItemAnn <- case ann lastItemSrcAnn of + EpAnn _ lastItemAnn _ -> pure lastItemAnn + _ -> Nothing +#endif + pure $ any isTrailingAnnComma (lann_trailing lastItemAnn) + + hasSibling = not $ null lies + + -- Setup the new item. It should have a preceding whitespace if it has siblings, and a trailing comma if the + -- preceding item already has one. 
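-- As a rough sketch (illustrative only; the function operates on the
-- annotated AST, not on text), adding an item @bar@ behaves like this:
--
-- > import A (foo)   ==>  import A (foo, bar)
-- > import A (foo,)  ==>  import A (foo, bar,)
-- > import A ()      ==>  import A (bar)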
+ newItem = first (if existingTrailingComma then addComma else id) $ + setEntryDP x (SameLine $ if hasSibling then 1 else 0) + + -- Add the comma (if needed) + fixLast :: [LocatedAn AnnListItem a] -> [LocatedAn AnnListItem a] + fixLast = over _last (first (if existingTrailingComma then id else addComma)) + +unIEWrappedName :: IEWrappedName GhcPs -> String +unIEWrappedName (occName -> occ) = T.unpack $ printOutputable $ parenSymOcc occ (ppr occ) + +hasParen :: String -> Bool +hasParen ('(' : _) = True +hasParen _ = False + + +------------------------------------------------------------------------------ + +-- | Hide a symbol from import declaration +hideSymbol :: + String -> LImportDecl GhcPs -> Rewrite +hideSymbol symbol lidecl@(L loc ImportDecl{..}) = + case ideclImportList of + Nothing -> Rewrite (locA loc) $ extendHiding symbol lidecl Nothing + Just (EverythingBut, hides) -> Rewrite (locA loc) $ extendHiding symbol lidecl (Just hides) + Just (Exactly, imports) -> Rewrite (locA loc) $ deleteFromImport symbol lidecl $ setEntryDP (makeDeltaAst imports) (SameLine 1) + +extendHiding :: + String -> + LImportDecl GhcPs -> + Maybe (XRec GhcPs [LIE GhcPs]) -> + DynFlags -> + TransformT (Either String) (LImportDecl GhcPs) +extendHiding symbol (L l idecls) mlies df = do + L l' lies <- case mlies of + Nothing -> do +#if MIN_VERSION_ghc(9,11,0) + let ann :: EpAnn (AnnList (EpToken "hiding", [EpToken ","])) + ann = noAnnSrcSpanDP0 +#elif MIN_VERSION_ghc(9,9,0) + let ann = noAnnSrcSpanDP0 +#else + src <- uniqueSrcSpanT + let ann = noAnnSrcSpanDP0 src +#endif +#if MIN_VERSION_ghc(9,9,0) + ann' = flip fmap ann $ \x -> x +#else + ann' = flip (fmap.fmap) ann $ \x -> x +#endif +#if MIN_VERSION_ghc(9,11,0) + {al_rest = (EpTok (epl 1), [NoEpTok]) + ,al_brackets=ListParens (EpTok (epl 1)) (EpTok (epl 0)) +#else + {al_rest = [AddEpAnn AnnHiding (epl 1)] + ,al_open = Just $ AddEpAnn AnnOpenP (epl 1) + ,al_close = Just $ AddEpAnn AnnCloseP (epl 0) +#endif + } + return $ L ann' [] + Just pr -> pure pr + let hasSibling = not $ null lies + src <- uniqueSrcSpanT + top <- uniqueSrcSpanT + rdr <- liftParseAST df symbol + rdr <- pure $ modifyAnns rdr $ addParens (isOperator $ unLoc rdr) + let lie = reLocA $ L src $ IEName + noExtField + rdr + x = reLocA $ L top $ IEVar +#if MIN_VERSION_ghc(9,7,0) + Nothing +#else + noExtField +#endif + lie +#if MIN_VERSION_ghc(9,9,0) + Nothing +#endif + x <- pure $ if hasSibling then first addComma x else x + lies <- pure $ over _head (`setEntryDP` SameLine 1) lies + return $ L l idecls{ideclImportList = Just (EverythingBut, L l' $ x : lies)} + where + isOperator = not . all isAlphaNum . occNameString . 
rdrNameOcc + +deleteFromImport :: + String -> + LImportDecl GhcPs -> + XRec GhcPs [LIE GhcPs] -> + DynFlags -> + TransformT (Either String) (LImportDecl GhcPs) +deleteFromImport (T.pack -> symbol) (L l idecl) (L lieLoc lies) _ = do + let edited = L lieLoc deletedLies + lidecl' = + L l $ + idecl + { ideclImportList = Just (Exactly, edited) } + pure lidecl' + where + deletedLies = + over _last removeTrailingComma $ + mapMaybe killLie lies + killLie :: LIE GhcPs -> Maybe (LIE GhcPs) +#if MIN_VERSION_ghc(9,9,0) + killLie v@(L _ (IEVar _ (L _ (unqualIEWrapName -> nam)) _)) +#else + killLie v@(L _ (IEVar _ (L _ (unqualIEWrapName -> nam)))) +#endif + | nam == symbol = Nothing + | otherwise = Just v +#if MIN_VERSION_ghc(9,9,0) + killLie v@(L _ (IEThingAbs _ (L _ (unqualIEWrapName -> nam)) _)) +#else + killLie v@(L _ (IEThingAbs _ (L _ (unqualIEWrapName -> nam)))) +#endif + | nam == symbol = Nothing + | otherwise = Just v +#if MIN_VERSION_ghc(9,9,0) + killLie (L lieL (IEThingWith xt ty@(L _ (unqualIEWrapName -> nam)) wild cons docs)) +#else + killLie (L lieL (IEThingWith xt ty@(L _ (unqualIEWrapName -> nam)) wild cons)) +#endif + | nam == symbol = Nothing + | otherwise = + Just $ + L lieL $ + IEThingWith + xt + ty + wild + (filter ((/= symbol) . unqualIEWrapName . unLoc) cons) +#if MIN_VERSION_ghc(9,9,0) + docs +#endif + killLie v = Just v diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/PositionIndexed.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/PositionIndexed.hs new file mode 100644 index 0000000000..f367b393a0 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/PositionIndexed.hs @@ -0,0 +1,141 @@ +-- | Position indexed streams of characters +module Development.IDE.Plugin.CodeAction.PositionIndexed + ( PositionIndexed + , PositionIndexedString + , indexedByPosition + , indexedByPositionStartingFrom + , extendAllToIncludeCommaIfPossible + , extendToIncludePreviousNewlineIfPossible + , mergeRanges + ) +where + +import Data.Char +import Data.List +import Language.LSP.Protocol.Types (Position (Position), + Range (Range, _end, _start)) + +type PositionIndexed a = [(Position, a)] + +type PositionIndexedString = PositionIndexed Char + +-- | Add position indexing to a String. +-- +-- > indexedByPositionStartingFrom (0,0) "hey\n ho" ≡ +-- > [ ((0,0),'h') +-- > , ((0,1),'e') +-- > , ((0,2),'y') +-- > , ((0,3),'\n') +-- > , ((1,0),' ') +-- > , ((1,1),'h') +-- > , ((1,2),'o') +-- > ] +indexedByPositionStartingFrom :: Position -> String -> PositionIndexedString +indexedByPositionStartingFrom initialPos = unfoldr f . (initialPos, ) where + f (_, []) = Nothing + f (p@(Position l _), '\n' : rest) = + Just ((p, '\n'), (Position (l + 1) 0, rest)) + f (p@(Position l c), x : rest) = Just ((p, x), (Position l (c + 1), rest)) + +-- | Add position indexing to a String. +-- +-- > indexedByPosition = indexedByPositionStartingFrom (Position 0 0) +indexedByPosition :: String -> PositionIndexedString +indexedByPosition = indexedByPositionStartingFrom (Position 0 0) + +-- | Returns a tuple (before, contents, after) if the range is present. +-- The range is present only if both its start and end positions are present +unconsRange + :: Range + -> PositionIndexed a + -> Maybe (PositionIndexed a, PositionIndexed a, PositionIndexed a) +unconsRange Range {..} indexedString + | (before, rest@(_ : _)) <- span ((/= _start) . fst) indexedString + , (mid, after@(_ : _)) <- span ((/= _end) . 
fst) rest + = Just (before, mid, after) + | otherwise + = Nothing + +-- | Strips out all the positions included in the range. +-- Returns 'Nothing' if the start or end of the range are not included in the input. +stripRange :: Range -> PositionIndexed a -> Maybe (PositionIndexed a) +stripRange r s = case unconsRange r s of + Just (b, _, a) -> Just (b ++ a) + Nothing -> Nothing + +-- | Returns the smallest possible set of disjoint ranges that is equivalent to the input. +-- Assumes input ranges are sorted on the start positions. +mergeRanges :: [Range] -> [Range] +mergeRanges (r : r' : rest) + | + -- r' is contained in r + _end r > _end r' = mergeRanges (r : rest) + | + -- r and r' are overlapping + _end r > _start r' = mergeRanges (r { _end = _end r' } : rest) + + | otherwise = r : mergeRanges (r' : rest) +mergeRanges other = other + +-- | Returns a sorted list of ranges with extended selections including preceding or trailing commas +-- +-- @ +-- a, |b|, c ===> a|, b|, c +-- a, b, |c| ===> a, b|, c| +-- a, |b|, |c| ===> a|, b||, c| +-- @ +-- +-- If 'acceptNoComma' is enabled, additional ranges are returned +-- +-- @ +-- |a| ===> |a| +-- |a|, |b| ===> |a,| |b| +-- @ +extendAllToIncludeCommaIfPossible :: Bool -> PositionIndexedString -> [Range] -> [Range] +extendAllToIncludeCommaIfPossible acceptNoComma indexedString = + mergeRanges . go indexedString . sortOn _start + where + go _ [] = [] + go input (r : rr) + | r' : _ <- extendToIncludeCommaIfPossible acceptNoComma input r + , Just input' <- stripRange r' input + = r' : go input' rr + | otherwise + = go input rr + +extendToIncludeCommaIfPossible :: Bool -> PositionIndexedString -> Range -> [Range] +extendToIncludeCommaIfPossible acceptNoComma indexedString range + | Just (before, _, after) <- unconsRange range indexedString + , after' <- dropWhile (isSpace . snd) after + , before' <- dropWhile (isSpace . snd) (reverse before) + = + -- a, |b|, c ===> a|, b|, c + [ range { _start = start' } | (start', ',') : _ <- [before'] ] + ++ + -- a, |b|, c ===> a, |b, |c + [ range { _end = end' } + | (_, ',') : rest <- [after'] + , (end', _) : _ <- pure $ dropWhile (isSpace . snd) rest + ] + ++ + ([range | acceptNoComma]) + | otherwise + = [range] + +extendToIncludePreviousNewlineIfPossible :: PositionIndexedString -> Range -> Range +extendToIncludePreviousNewlineIfPossible indexedString range + | Just (before, _, _) <- unconsRange range indexedString + , maybeFirstSpacePos <- lastSpacePos $ reverse before + = case maybeFirstSpacePos of + Nothing -> range + Just pos -> range { _start = pos } + | otherwise = range + where + lastSpacePos :: PositionIndexedString -> Maybe Position + lastSpacePos [] = Nothing + lastSpacePos ((pos, c):xs) = + if not $ isSpace c + then Nothing -- didn't find any space + else case xs of + (y:ys) | isSpace $ snd y -> lastSpacePos (y:ys) + _ -> Just pos diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs new file mode 100644 index 0000000000..69f3332dc0 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs @@ -0,0 +1,20 @@ +module Development.IDE.Plugin.CodeAction.RuleTypes + (PackageExports(..) + ,IdentInfo(..) 
+ ) where + +import Control.DeepSeq (NFData) +import Data.Hashable (Hashable) +import Development.IDE.Graph (RuleResult) +import Development.IDE.Types.Exports +import Development.IDE.Types.HscEnvEq (HscEnvEq) +import GHC.Generics (Generic) + +-- Rule type for caching Package Exports +type instance RuleResult PackageExports = ExportsMap + +newtype PackageExports = PackageExports HscEnvEq + deriving (Eq, Show, Generic) + +instance Hashable PackageExports +instance NFData PackageExports diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs new file mode 100644 index 0000000000..2a7719fdbe --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs @@ -0,0 +1,47 @@ +module Development.IDE.Plugin.CodeAction.Util where + +import Data.Data (Data) +import Data.Time.Clock.POSIX (POSIXTime, + getCurrentTime, + utcTimeToPOSIXSeconds) +import qualified Data.Unique as U +import Debug.Trace +import Development.IDE.GHC.Compat.ExactPrint as GHC +import Development.IDE.GHC.Dump (showAstDataHtml) +import GHC.Stack +import GHC.Utils.Outputable +import System.Directory.Extra (createDirectoryIfMissing) +import System.Environment.Blank (getEnvDefault) +import System.IO.Unsafe +import Text.Printf +-------------------------------------------------------------------------------- +-- Tracing exactprint terms + +-- Should in `Development.IDE.GHC.Orphans`, +-- leave it here to prevent cyclic module dependency + +{-# NOINLINE timestamp #-} +timestamp :: POSIXTime +timestamp = utcTimeToPOSIXSeconds $ unsafePerformIO getCurrentTime + +debugAST :: Bool +debugAST = unsafePerformIO (getEnvDefault "GHCIDE_DEBUG_AST" "0") == "1" + +-- | Prints an 'Outputable' value to stderr and to an HTML file for further inspection +traceAst :: (Data a, ExactPrint a, HasCallStack) => String -> a -> a +traceAst lbl x + | debugAST = trace doTrace x + | otherwise = x + where + renderDump = renderWithContext defaultSDocContext{sdocStyle = defaultDumpStyle, sdocPprDebug = True} + htmlDump = showAstDataHtml x + doTrace = unsafePerformIO $ do + u <- U.newUnique + let htmlDumpFileName = printf "/tmp/hls/%s-%s-%d.html" (show timestamp) lbl (U.hashUnique u) + createDirectoryIfMissing True "/tmp/hls" + writeFile htmlDumpFileName $ renderDump htmlDump + return $ unlines + [prettyCallStack callStack ++ ":" + , exactPrint x + , "file://" ++ htmlDumpFileName] + diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs new file mode 100644 index 0000000000..aec82cb17f --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/AddArgument.hs @@ -0,0 +1,210 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Plugin.Plugins.AddArgument (plugin) where + +import Control.Monad (join) +import Control.Monad.Trans.Class (lift) +import Data.Bifunctor (Bifunctor (..)) +import Data.Either.Extra (maybeToEither) +import qualified Data.Text as T +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (spanContainsRange) +import Development.IDE.GHC.ExactPrint (modifyMgMatchesT', + modifySigWithM, + modifySmallestDeclWithM) +import Development.IDE.Plugin.Plugins.Diagnostic +import GHC.Parser.Annotation (SrcSpanAnnA, + SrcSpanAnnN, noAnn) +import Ide.Plugin.Error (PluginError (PluginInternalError)) +import Ide.PluginUtils (makeDiffTextEdit) +import Language.Haskell.GHC.ExactPrint 
(TransformT (..), + exactPrint, + noAnnSrcSpanDP1, + runTransformT) +import Language.LSP.Protocol.Types + +-- See Note [Guidelines For Using CPP In GHCIDE Import Statements] + +#if MIN_VERSION_ghc(9,6,0) && !MIN_VERSION_ghc(9,9,0) +import Development.IDE.GHC.ExactPrint (epl) +import GHC.Parser.Annotation (TokenLocation (..)) +#endif + +#if !MIN_VERSION_ghc(9,9,0) +import Development.IDE.GHC.Compat.ExactPrint (makeDeltaAst) +import Development.IDE.GHC.ExactPrint (genAnchor1) +import GHC.Parser.Annotation (EpAnn (..), + SrcSpanAnn' (..), + emptyComments) +import GHC.Types.SrcLoc (generatedSrcSpan) +#endif + +#if MIN_VERSION_ghc(9,9,0) +import GHC (DeltaPos (..), + EpUniToken (..), + IsUnicodeSyntax (NormalSyntax)) +import Language.Haskell.GHC.ExactPrint (d1, setEntryDP) +#endif + +#if MIN_VERSION_ghc(9,11,0) +import GHC.Parser.Annotation (EpToken (..)) +#endif + +-- When GHC tells us that a variable is not bound, it will tell us either: +-- - there is an unbound variable with a given type +-- - there is an unbound variable (GHC provides no type suggestion) +-- +-- When we receive either of these errors, we produce a text edit that will add a new argument (as a new pattern in the +-- last position of each LHS of the top-level bindings for this HsDecl). +-- +-- NOTE When adding a new argument to a declaration, the corresponding argument's type in declaration's signature might +-- not be the last type in the signature, such as: +-- foo :: a -> b -> c -> d +-- foo a b = \c -> ... +-- In this case a new argument would have to add its type between b and c in the signature. +plugin :: ParsedModule -> Diagnostic -> Either PluginError [(T.Text, [TextEdit])] +plugin parsedModule Diagnostic {_message, _range} + | Just (name, typ) <- matchVariableNotInScope message = addArgumentAction parsedModule _range name typ + | Just (name, typ) <- matchFoundHoleIncludeUnderscore message = addArgumentAction parsedModule _range name (Just typ) + | otherwise = pure [] + where + message = unifySpaces _message + +-- Given a name for the new binding, add a new pattern to the match in the last position, +-- returning how many patterns there were in this match prior to the transformation: +-- addArgToMatch "foo" `bar arg1 arg2 = ...` +-- => (`bar arg1 arg2 foo = ...`, 2) +addArgToMatch :: T.Text -> GenLocated l (Match GhcPs (LocatedA (HsExpr GhcPs))) -> (GenLocated l (Match GhcPs (LocatedA (HsExpr GhcPs))), Int) + +-- NOTE: The code duplication within CPP clauses avoids a parse error with +-- `stylish-haskell`. 
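-- The returned 'Int' is the number of patterns the match had *before* the new
-- one was appended; the caller later uses it as the position at which a typed
-- hole is inserted into the corresponding type signature. For instance
-- (illustrative only):
--
-- > addArgToMatch "n" `f a b = a`  ==>  (`f a b n = a`, 2)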
+#if MIN_VERSION_ghc(9,11,0) +addArgToMatch name (L locMatch (Match xMatch ctxMatch (L l pats) rhs)) = + let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name + newPat = L noAnnSrcSpanDP1 $ VarPat NoExtField $ L noAnn unqualName + -- The intention is to move `= ...` (right-hand side with equals) to the right so there's 1 space between + -- the newly added pattern and the rest + indentRhs :: GRHSs GhcPs (LocatedA (HsExpr GhcPs)) -> GRHSs GhcPs (LocatedA (HsExpr GhcPs)) + indentRhs rhs@GRHSs{grhssGRHSs} = rhs {grhssGRHSs = fmap (`setEntryDP` (SameLine 1)) grhssGRHSs } + in (L locMatch (Match xMatch ctxMatch (L l (pats <> [newPat])) (indentRhs rhs)), Prelude.length pats) +#elif MIN_VERSION_ghc(9,9,0) +addArgToMatch name (L locMatch (Match xMatch ctxMatch pats rhs)) = + let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name + newPat = L noAnnSrcSpanDP1 $ VarPat NoExtField $ L noAnn unqualName + -- The intention is to move `= ...` (right-hand side with equals) to the right so there's 1 space between + -- the newly added pattern and the rest + indentRhs :: GRHSs GhcPs (LocatedA (HsExpr GhcPs)) -> GRHSs GhcPs (LocatedA (HsExpr GhcPs)) + indentRhs rhs@GRHSs{grhssGRHSs} = rhs {grhssGRHSs = fmap (`setEntryDP` (SameLine 1)) grhssGRHSs } + in (L locMatch (Match xMatch ctxMatch (pats <> [newPat]) (indentRhs rhs)), Prelude.length pats) +#else +addArgToMatch name (L locMatch (Match xMatch ctxMatch pats rhs)) = + let unqualName = mkRdrUnqual $ mkVarOcc $ T.unpack name + newPat = L (noAnnSrcSpanDP1 generatedSrcSpan) $ VarPat NoExtField (noLocA unqualName) + indentRhs = id + in (L locMatch (Match xMatch ctxMatch (pats <> [newPat]) (indentRhs rhs)), Prelude.length pats) +#endif + +-- Attempt to insert a binding pattern into each match for the given LHsDecl; succeeds only if the function is a FunBind. +-- Also return: +-- - the declaration's name +-- - the number of bound patterns in the declaration's matches prior to the transformation +-- +-- For example: +-- insertArg "new_pat" `foo bar baz = 1` +-- => (`foo bar baz new_pat = 1`, Just ("foo", 2)) +appendFinalPatToMatches :: T.Text -> LHsDecl GhcPs -> TransformT (Either PluginError) (LHsDecl GhcPs, Maybe (GenLocated SrcSpanAnnN RdrName, Int)) +appendFinalPatToMatches name = \case + (L locDecl (ValD xVal fun@FunBind{fun_matches=mg,fun_id = idFunBind})) -> do + (mg', numPatsMay) <- modifyMgMatchesT' mg (pure . second Just . addArgToMatch name) Nothing combineMatchNumPats + numPats <- TransformT $ lift $ maybeToEither (PluginInternalError "Unexpected empty match group in HsDecl") numPatsMay + let decl' = L locDecl (ValD xVal fun{fun_matches=mg'}) + pure (decl', Just (idFunBind, numPats)) + decl -> pure (decl, Nothing) + where + combineMatchNumPats Nothing other = pure other + combineMatchNumPats other Nothing = pure other + combineMatchNumPats (Just l) (Just r) + | l == r = pure (Just l) + | otherwise = Left $ PluginInternalError "Unexpected different numbers of patterns in HsDecl MatchGroup" + +-- The add argument works as follows: +-- 1. Attempt to add the given name as the last pattern of the declaration that contains `range`. +-- 2. If such a declaration exists, use that declaration's name to modify the signature of said declaration, if it +-- has a type signature. 
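-- As an end-to-end sketch (illustrative only), for the diagnostic
-- "Variable not in scope: new_arg" inside @foo@:
--
-- > foo :: Int -> Int                  foo :: Int -> _ -> Int
-- > foo x = x + new_arg       ==>      foo x new_arg = x + new_arg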
+-- +-- NOTE For the following situation, the type signature is not updated (it's unclear what should happen): +-- type FunctionTySyn = () -> Int +-- foo :: FunctionTySyn +-- foo () = new_def +-- +-- TODO instead of inserting a typed hole; use GHC's suggested type from the error +addArgumentAction :: ParsedModule -> Range -> T.Text -> Maybe T.Text -> Either PluginError [(T.Text, [TextEdit])] +addArgumentAction (ParsedModule _ moduleSrc _) range name _typ = do + (newSource, _, _) <- runTransformT $ do + (moduleSrc', join -> matchedDeclNameMay) <- addNameAsLastArgOfMatchingDecl +#if MIN_VERSION_ghc(9,9,0) + moduleSrc +#else + (makeDeltaAst moduleSrc) +#endif + case matchedDeclNameMay of + Just (matchedDeclName, numPats) -> modifySigWithM (unLoc matchedDeclName) (addTyHoleToTySigArg numPats) moduleSrc' + Nothing -> pure moduleSrc' + let diff = makeDiffTextEdit (T.pack $ exactPrint moduleSrc) (T.pack $ exactPrint newSource) + pure [("Add argument ‘" <> name <> "’ to function", diff)] + where + addNameAsLastArgOfMatchingDecl = modifySmallestDeclWithM spanContainsRangeOrErr addNameAsLastArg + addNameAsLastArg = fmap (first (:[])) . appendFinalPatToMatches name + + spanContainsRangeOrErr = maybeToEither (PluginInternalError "SrcSpan was not valid range") . (`spanContainsRange` range) + +-- Transform an LHsType into a list of arguments and return type, to make transformations easier. +hsTypeToFunTypeAsList :: LHsType GhcPs -> ([(SrcSpanAnnA, XFunTy GhcPs, HsArrow GhcPs, LHsType GhcPs)], LHsType GhcPs) +hsTypeToFunTypeAsList = \case + L spanAnnA (HsFunTy xFunTy arrow lhs rhs) -> + let (rhsArgs, rhsRes) = hsTypeToFunTypeAsList rhs + in ((spanAnnA, xFunTy, arrow, lhs):rhsArgs, rhsRes) + ty -> ([], ty) + +-- The inverse of `hsTypeToFunTypeAsList` +hsTypeFromFunTypeAsList :: ([(SrcSpanAnnA, XFunTy GhcPs, HsArrow GhcPs, LHsType GhcPs)], LHsType GhcPs) -> LHsType GhcPs +hsTypeFromFunTypeAsList (args, res) = + foldr (\(spanAnnA, xFunTy, arrow, argTy) res -> L spanAnnA $ HsFunTy xFunTy arrow argTy res) res args + +-- Add a typed hole to a type signature in the given argument position: +-- 0 `foo :: ()` => foo :: _ -> () +-- 2 `foo :: FunctionTySyn` => foo :: FunctionTySyn +-- 1 `foo :: () -> () -> Int` => foo :: () -> _ -> () -> Int +addTyHoleToTySigArg :: Int -> LHsSigType GhcPs -> LHsSigType GhcPs +addTyHoleToTySigArg loc (L annHsSig (HsSig xHsSig tyVarBndrs lsigTy)) = + let (args, res) = hsTypeToFunTypeAsList lsigTy +#if MIN_VERSION_ghc(9,9,0) + wildCardAnn = noAnnSrcSpanDP1 + newArg = + ( noAnn + , noExtField + , HsUnrestrictedArrow (EpUniTok d1 NormalSyntax) +#if MIN_VERSION_ghc(9,11,0) + , L wildCardAnn $ HsWildCardTy NoEpTok +#else + , L wildCardAnn $ HsWildCardTy noExtField +#endif + ) +#else + wildCardAnn = SrcSpanAnn (EpAnn genAnchor1 (AnnListItem []) emptyComments) generatedSrcSpan + arrowAnn = TokenLoc (epl 1) + newArg = + ( SrcSpanAnn mempty generatedSrcSpan + , noAnn + , HsUnrestrictedArrow (L arrowAnn HsNormalTok) + , L wildCardAnn $ HsWildCardTy noExtField + ) +#endif + -- NOTE if the location that the argument wants to be placed at is not one more than the number of arguments + -- in the signature, then we return the original type signature. 
+ -- This situation most likely occurs due to a function type synonym in the signature + insertArg n _ | n < 0 = error "Not possible" + insertArg 0 as = newArg:as + insertArg _ [] = [] + insertArg n (a:as) = a : insertArg (n - 1) as + lsigTy' = hsTypeFromFunTypeAsList (insertArg loc args, res) + in L annHsSig (HsSig xHsSig tyVarBndrs lsigTy') diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs new file mode 100644 index 0000000000..7facc8f54c --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/Diagnostic.hs @@ -0,0 +1,59 @@ +module Development.IDE.Plugin.Plugins.Diagnostic ( + matchVariableNotInScope, + matchRegexUnifySpaces, + unifySpaces, + matchFoundHole, + matchFoundHoleIncludeUnderscore, + ) + where + +import Data.Bifunctor (Bifunctor (..)) +import qualified Data.Text as T +import Text.Regex.TDFA ((=~~)) + +unifySpaces :: T.Text -> T.Text +unifySpaces = T.unwords . T.words + +-- | Returns Just (the submatches) for the first capture, or Nothing. +matchRegex :: T.Text -> T.Text -> Maybe [T.Text] +matchRegex message regex = case message =~~ regex of + Just (_ :: T.Text, _ :: T.Text, _ :: T.Text, bindings) -> Just bindings + Nothing -> Nothing + +-- | 'matchRegex' combined with 'unifySpaces' +-- +-- >>> matchRegexUnifySpaces "hello I'm a cow" "he(ll)o" +-- Just ["ll"] +matchRegexUnifySpaces :: T.Text -> T.Text -> Maybe [T.Text] +matchRegexUnifySpaces message = matchRegex (unifySpaces message) + +matchFoundHole :: T.Text -> Maybe (T.Text, T.Text) +matchFoundHole message + | Just [name, typ] <- matchRegexUnifySpaces message "Found hole: _([^ ]+) :: ([^*•]+) Or perhaps" = + Just (name, typ) + | otherwise = Nothing + +matchFoundHoleIncludeUnderscore :: T.Text -> Maybe (T.Text, T.Text) +matchFoundHoleIncludeUnderscore message = first ("_" <>) <$> matchFoundHole message + +matchVariableNotInScope :: T.Text -> Maybe (T.Text, Maybe T.Text) +matchVariableNotInScope message + -- * Variable not in scope: + -- suggestAcion :: Maybe T.Text -> Range -> Range + -- * Variable not in scope: + -- suggestAcion + | Just (name, typ) <- matchVariableNotInScopeTyped message = Just (name, Just typ) + | Just name <- matchVariableNotInScopeUntyped message = Just (name, Nothing) + | otherwise = Nothing + where + matchVariableNotInScopeTyped message + | Just [name, typ0] <- matchRegexUnifySpaces message "Variable not in scope: ([^ ]+) :: ([^*•]+)" + , -- When some name in scope is similar to not-in-scope variable, the type is followed by + -- "Suggested fix: Perhaps use ..." 
+ typ:_ <- T.splitOn " Suggested fix:" typ0 = + Just (name, typ) + | otherwise = Nothing + matchVariableNotInScopeUntyped message + | Just [name] <- matchRegexUnifySpaces message "Variable not in scope: ([^ ]+)" = + Just name + | otherwise = Nothing diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs new file mode 100644 index 0000000000..eb6172c7fa --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillHole.hs @@ -0,0 +1,105 @@ +module Development.IDE.Plugin.Plugins.FillHole + ( suggestFillHole + ) where + +import Control.Monad (guard) +import Data.Char +import qualified Data.Text as T +import Development.IDE.Plugin.Plugins.Diagnostic +import Language.LSP.Protocol.Types (Diagnostic (..), + TextEdit (TextEdit)) +import Text.Regex.TDFA (MatchResult (..), + (=~)) + +suggestFillHole :: Diagnostic -> [(T.Text, TextEdit)] +suggestFillHole Diagnostic{_range=_range,..} + | Just holeName <- extractHoleName _message + , (holeFits, refFits) <- processHoleSuggestions (T.lines _message) = + let isInfixHole = _message =~ addBackticks holeName :: Bool in + map (proposeHoleFit holeName False isInfixHole) holeFits + ++ map (proposeHoleFit holeName True isInfixHole) refFits + | otherwise = [] + where + extractHoleName = fmap (headOrThrow "impossible") . flip matchRegexUnifySpaces "Found hole: ([^ ]*)" + addBackticks text = "`" <> text <> "`" + addParens text = "(" <> text <> ")" + proposeHoleFit holeName parenthise isInfixHole name = + case T.uncons name of + Nothing -> error "impossible: empty name provided by ghc" + Just (firstChr, _) -> + let isInfixOperator = firstChr == '(' + name' = getOperatorNotation isInfixHole isInfixOperator name in + ( "Replace " <> holeName <> " with " <> name + , TextEdit _range (if parenthise then addParens name' else name') + ) + getOperatorNotation True False name = addBackticks name + getOperatorNotation True True name = T.drop 1 (T.dropEnd 1 name) + getOperatorNotation _isInfixHole _isInfixOperator name = name + headOrThrow msg = \case + [] -> error msg + (x:_) -> x + +processHoleSuggestions :: [T.Text] -> ([T.Text], [T.Text]) +processHoleSuggestions mm = (holeSuggestions, refSuggestions) +{- + • Found hole: _ :: LSP.Handlers + + Valid hole fits include def + Valid refinement hole fits include + fromMaybe (_ :: LSP.Handlers) (_ :: Maybe LSP.Handlers) + fromJust (_ :: Maybe LSP.Handlers) + haskell-lsp-types-0.22.0.0:Language.LSP.Types.Window.$sel:_value:ProgressParams (_ :: ProgressParams + LSP.Handlers) + T.foldl (_ :: LSP.Handlers -> Char -> LSP.Handlers) + (_ :: LSP.Handlers) + (_ :: T.Text) + T.foldl' (_ :: LSP.Handlers -> Char -> LSP.Handlers) + (_ :: LSP.Handlers) + (_ :: T.Text) +-} + where + t = id @T.Text + holeSuggestions = do + -- get the text indented under Valid hole fits + validHolesSection <- + getIndentedGroupsBy (=~ t " *Valid (hole fits|substitutions) include") mm + -- the Valid hole fits line can contain a hole fit + holeFitLine <- + mapHead + (mrAfter . 
(=~ t " *Valid (hole fits|substitutions) include")) + validHolesSection + let holeFit = T.strip $ T.takeWhile (/= ':') holeFitLine + guard $ not $ holeFit =~ t "Some hole fits suppressed" + guard $ not $ T.null holeFit + return holeFit + refSuggestions = do -- @[] + -- get the text indented under Valid refinement hole fits + refinementSection <- + getIndentedGroupsBy (=~ t " *Valid refinement hole fits include") mm + case refinementSection of + [] -> error "GHC provided invalid hole fit options" + (_:refinementSection) -> do + -- get the text for each hole fit + holeFitLines <- getIndentedGroups refinementSection + let holeFit = T.strip $ T.unwords holeFitLines + guard $ not $ holeFit =~ t "Some refinement hole fits suppressed" + return holeFit + + mapHead f (a:aa) = f a : aa + mapHead _ [] = [] + +-- > getIndentedGroups [" H1", " l1", " l2", " H2", " l3"] = [[" H1,", " l1", " l2"], [" H2", " l3"]] +getIndentedGroups :: [T.Text] -> [[T.Text]] +getIndentedGroups [] = [] +getIndentedGroups ll@(l:_) = getIndentedGroupsBy ((== indentation l) . indentation) ll +-- | +-- > getIndentedGroupsBy (" H" `isPrefixOf`) [" H1", " l1", " l2", " H2", " l3"] = [[" H1", " l1", " l2"], [" H2", " l3"]] +getIndentedGroupsBy :: (T.Text -> Bool) -> [T.Text] -> [[T.Text]] +getIndentedGroupsBy pred inp = case dropWhile (not.pred) inp of + (l:ll) -> case span (\l' -> indentation l < indentation l') ll of + (indented, rest) -> (l:indented) : getIndentedGroupsBy pred rest + _ -> [] + +indentation :: T.Text -> Int +indentation = T.length . T.takeWhile isSpace + diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs new file mode 100644 index 0000000000..0f06fff2f7 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs @@ -0,0 +1,107 @@ +module Development.IDE.Plugin.Plugins.FillTypeWildcard + ( suggestFillTypeWildcard + ) where + +import Control.Lens +import Data.Maybe (isJust) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic (..), + fdStructuredMessageL, + printOutputable) +import Development.IDE.GHC.Compat hiding (vcat) +import Development.IDE.GHC.Compat.Error +import Development.IDE.Types.Diagnostics (_SomeStructuredMessage) +import GHC.Tc.Errors.Types (ErrInfo (..)) +import Language.LSP.Protocol.Types (Diagnostic (..), + TextEdit (TextEdit)) + +suggestFillTypeWildcard :: FileDiagnostic -> [(T.Text, TextEdit)] +suggestFillTypeWildcard diag@FileDiagnostic{fdLspDiagnostic = Diagnostic {..}} +-- Foo.hs:3:8: error: +-- * Found type wildcard `_' standing for `p -> p1 -> p' + | isWildcardDiagnostic diag + , typeSignature <- extractWildCardTypeSignature diag = + [("Use type signature: ‘" <> typeSignature <> "’", TextEdit _range typeSignature)] + | otherwise = [] + +isWildcardDiagnostic :: FileDiagnostic -> Bool +isWildcardDiagnostic = + maybe False (isJust . (^? _TypeHole) . hole_sort) . diagReportHoleError + +-- | Extract the 'Hole' out of a 'FileDiagnostic' +diagReportHoleError :: FileDiagnostic -> Maybe Hole +diagReportHoleError diag = do + solverReport <- + diag + ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessage + . _TcRnSolverReport + . _1 + (hole, _) <- solverReport ^? reportContentL . _ReportHoleError + + Just hole + +-- | Extract the type and surround it in parentheses except in obviously safe cases. 
+-- +-- Inferring when parentheses are actually needed around the type signature would +-- require understanding both the precedence of the context of the hole and of +-- the signature itself. Inserting them (almost) unconditionally is ugly but safe. +extractWildCardTypeSignature :: FileDiagnostic -> T.Text +extractWildCardTypeSignature diag = + case hole_ty <$> diagReportHoleError diag of + Just ty + | isTopLevel || not (isApp ty) || enclosed ty -> printOutputable ty + | otherwise -> "(" <> printOutputable ty <> ")" + Nothing -> error "GHC provided invalid type" + where + isTopLevel :: Bool + isTopLevel = + maybe False errorMessageRefersToToplevelHole (diagErrInfoContext diag) + + isApp :: Type -> Bool + isApp (AppTy _ _) = True + isApp (TyConApp _ (_ : _)) = True + isApp (FunTy{}) = True + isApp _ = False + + enclosed :: Type -> Bool + enclosed (TyConApp con _) + | con == listTyCon || isTupleTyCon con = True + enclosed _ = False + +-- | Extract the 'ErrInfo' context out of a 'FileDiagnostic' and render it to +-- 'Text' +diagErrInfoContext :: FileDiagnostic -> Maybe T.Text +diagErrInfoContext diag = do + (_, detailedMsg) <- + diag + ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessageWithCtx + . _TcRnMessageWithInfo + let TcRnMessageDetailed err _ = detailedMsg + ErrInfo errInfoCtx _ = err + + Just (printOutputable errInfoCtx) + +-- | Detect whether user wrote something like @foo :: _@ or @foo :: Maybe _@. +-- The former is considered toplevel case for which the function returns 'True', +-- the latter is not toplevel and the returned value is 'False'. +-- +-- When type hole is at toplevel then the ErrInfo context starts with +-- "In the type signature" which ends with " :: _" like in the +-- following snippet: +-- +-- Just "In the type signature: decl :: _" +-- +-- When type hole is not at toplevel there’s a stack of where +-- the hole was located ending with "In the type signature": +-- +-- Just "In the first argument of ‘HsDecl’\nIn the type signature: decl :: HsDecl _" +errorMessageRefersToToplevelHole :: T.Text -> Bool +errorMessageRefersToToplevelHole msg = + "In the type signature:" `T.isPrefixOf` msg + && " :: _" `T.isSuffixOf` T.takeWhile (/= '\n') msg diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/ImportUtils.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/ImportUtils.hs new file mode 100644 index 0000000000..53fc61d918 --- /dev/null +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/ImportUtils.hs @@ -0,0 +1,98 @@ +module Development.IDE.Plugin.Plugins.ImportUtils + ( ImportStyle(..), + quickFixImportKind', + quickFixImportKind, + renderImportStyle, + unImportStyle, + importStyles, + QualifiedImportStyle(..), + qualifiedImportStyle + ) where + +import Data.List.NonEmpty (NonEmpty ((:|))) +import qualified Data.Text as T +import Development.IDE.GHC.Compat +import Development.IDE.Plugin.CodeAction.ExactPrint (wildCardSymbol) +import Development.IDE.Types.Exports +import Language.LSP.Protocol.Types (CodeActionKind (..)) + +-- | Possible import styles for an 'IdentInfo'. +-- +-- The first 'Text' parameter corresponds to the 'rendered' field of the +-- 'IdentInfo'. +data ImportStyle + = ImportTopLevel T.Text + -- ^ Import a top-level export from a module, e.g., a function, a type, a + -- class. + -- + -- > import M (?) + -- + -- Some exports that have a parent, like a type-class method or an + -- associated type/data family, can still be imported as a top-level + -- import. 
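-- For a class method @m@ of a class @C@ exported by @M@, both styles work
-- (illustrative only):
--
-- > import M (m)     -- ImportTopLevel "m"
-- > import M (C(m))  -- ImportViaParent "m" "C"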
+ -- + -- Note that this is not the case for constructors, they must always be + -- imported as part of their parent data type. + + | ImportViaParent T.Text T.Text + -- ^ Import an export (first parameter) through its parent (second + -- parameter). + -- + -- import M (P(?)) + -- + -- @P@ and @?@ can be a data type and a constructor, a class and a method, + -- a class and an associated type/data family, etc. + + | ImportAllConstructors T.Text + -- ^ Import all constructors for a specific data type. + -- + -- import M (P(..)) + -- + -- @P@ can be a data type or a class. + deriving Show + +importStyles :: IdentInfo -> NonEmpty ImportStyle +importStyles i@(IdentInfo {parent}) + | Just p <- pr + -- Constructors always have to be imported via their parent data type, but + -- methods and associated type/data families can also be imported as + -- top-level exports. + = ImportViaParent rend p + :| [ImportTopLevel rend | not (isDatacon i)] + <> [ImportAllConstructors p] + | otherwise + = ImportTopLevel rend :| [] + where rend = rendered i + pr = occNameText <$> parent + +-- | Used for adding new imports +renderImportStyle :: ImportStyle -> T.Text +renderImportStyle (ImportTopLevel x) = x +renderImportStyle (ImportViaParent x p@(T.uncons -> Just ('(', _))) = "type " <> p <> "(" <> x <> ")" +renderImportStyle (ImportViaParent x p) = p <> "(" <> x <> ")" +renderImportStyle (ImportAllConstructors p) = p <> "(..)" + +-- | Used for extending import lists +unImportStyle :: ImportStyle -> (Maybe String, String) +unImportStyle (ImportTopLevel x) = (Nothing, T.unpack x) +unImportStyle (ImportViaParent x y) = (Just $ T.unpack y, T.unpack x) +unImportStyle (ImportAllConstructors x) = (Just $ T.unpack x, wildCardSymbol) + + +quickFixImportKind' :: T.Text -> ImportStyle -> CodeActionKind +quickFixImportKind' x (ImportTopLevel _) = CodeActionKind_Custom $ "quickfix.import." <> x <> ".list.topLevel" +quickFixImportKind' x (ImportViaParent _ _) = CodeActionKind_Custom $ "quickfix.import." <> x <> ".list.withParent" +quickFixImportKind' x (ImportAllConstructors _) = CodeActionKind_Custom $ "quickfix.import." <> x <> ".list.allConstructors" + +quickFixImportKind :: T.Text -> CodeActionKind +quickFixImportKind x = CodeActionKind_Custom $ "quickfix.import." 
<> x + +-- | Possible import styles for qualified imports +data QualifiedImportStyle = QualifiedImportPostfix | QualifiedImportPrefix + deriving Show + +qualifiedImportStyle :: DynFlags -> QualifiedImportStyle +qualifiedImportStyle df | hasImportQualifedPostEnabled && hasPrePositiveQualifiedWarning = QualifiedImportPostfix + | otherwise = QualifiedImportPrefix + where hasImportQualifedPostEnabled = xopt ImportQualifiedPost df + hasPrePositiveQualifiedWarning = wopt Opt_WarnPrepositiveQualifiedModule df diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs new file mode 100644 index 0000000000..0fb8b61f83 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/Main.hs @@ -0,0 +1,4070 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeOperators #-} +{-# OPTIONS_GHC -Wno-redundant-constraints #-} -- don't warn about usage HasCallStack + +module Main + ( main + ) where + +import Control.Applicative.Combinators +import Control.Lens ((^.)) +import Control.Monad +import Data.Default +import Data.Foldable +import Data.List.Extra +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.GHC.Util +import Development.IDE.Plugin.Completions.Types (extendImportCommandId) +import Development.IDE.Test +import Development.IDE.Types.Location +import Development.Shake (getDirectoryFilesIO) +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types hiding + (SemanticTokensEdit (_start), + mkRange) +import Language.LSP.Test +import System.Directory +import System.FilePath +import qualified System.IO.Extra +import System.IO.Extra hiding (withTempDir) +import System.Time.Extra +import Test.Tasty +import Test.Tasty.HUnit +import Text.Regex.TDFA ((=~)) + + +import Development.IDE.Plugin.CodeAction (matchRegExMultipleImports) +import Test.Hls + +import qualified Development.IDE.GHC.ExactPrint +import Development.IDE.Plugin.CodeAction (NotInScope (..)) +import qualified Development.IDE.Plugin.CodeAction as Refactor +import qualified Test.AddArgument + +main :: IO () +main = defaultTestRunner tests + +refactorPlugin :: PluginTestDescriptor Development.IDE.GHC.ExactPrint.Log +refactorPlugin = do + mkPluginTestDescriptor Refactor.iePluginDescriptor "ghcide-code-actions-imports-exports" + <> mkPluginTestDescriptor Refactor.typeSigsPluginDescriptor "ghcide-code-actions-type-signatures" + <> mkPluginTestDescriptor Refactor.bindingsPluginDescriptor "ghcide-code-actions-bindings" + <> mkPluginTestDescriptor Refactor.fillHolePluginDescriptor "ghcide-code-actions-fill-holes" + <> mkPluginTestDescriptor Refactor.extendImportPluginDescriptor "ghcide-completions-1" + + +tests :: TestTree +tests = + testGroup "refactor" + [ initializeTests + , codeActionTests + , codeActionHelperFunctionTests + , completionTests + , extractNotInScopeNameTests + ] + +initializeTests :: TestTree +initializeTests = withResource acquire release tests + where + tests :: IO (TResponseMessage Method_Initialize) -> TestTree + tests getInitializeResponse = testGroup "initialize response capabilities" + [ chk " code action" _codeActionProvider (Just (InR (CodeActionOptions {_workDoneProgress = Just False, _codeActionKinds = Nothing, _resolveProvider = Just False}))) + , che " execute 
command" _executeCommandProvider [extendImportCommandId] + ] + where + chk :: (Eq a, Show a) => TestName -> (ServerCapabilities -> a) -> a -> TestTree + chk title getActual expected = + testCase title $ getInitializeResponse >>= \ir -> expected @=? (getActual . innerCaps) ir + + che :: TestName -> (ServerCapabilities -> Maybe ExecuteCommandOptions) -> [T.Text] -> TestTree + che title getActual expected = testCase title $ do + ir <- getInitializeResponse + ExecuteCommandOptions {_commands = commands} <- case getActual $ innerCaps ir of + Just eco -> pure eco + Nothing -> assertFailure "Was expecting Just ExecuteCommandOptions, got Nothing" + -- Check if expected exists in commands. Note that commands can arrive in different order. + mapM_ (\e -> any (\o -> T.isSuffixOf e o) commands @? show (expected, show commands)) expected + + acquire :: IO (TResponseMessage Method_Initialize) + acquire = run initializeResponse + + release :: TResponseMessage Method_Initialize -> IO () + release = mempty + + innerCaps :: TResponseMessage Method_Initialize -> ServerCapabilities + innerCaps (TResponseMessage _ _ (Right (InitializeResult c _))) = c + innerCaps (TResponseMessage _ _ (Left _)) = error "Initialization error" + + +completionTests :: TestTree +completionTests = + testGroup "auto import snippets" + [ completionCommandTest + "show imports not in list - simple" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (msum)", "f = joi"] + (Position 3 6) + "join" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (msum, join)", "f = joi"] + , completionCommandTest + "show imports not in list - multi-line" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (\n msum)", "f = joi"] + (Position 4 6) + "join" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (\n msum, join)", "f = joi"] + , completionCommandTest + "show imports not in list - names with _" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M (msum)", "f = M.mapM_"] + (Position 3 11) + "mapM_" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M (msum, mapM_)", "f = M.mapM_"] + , completionCommandTest + "show imports not in list - initial empty list" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M ()", "f = M.joi"] + (Position 3 10) + "join" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M (join)", "f = M.joi"] + , testGroup "qualified imports" + [ completionCommandTest + "single" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad ()", "f = Control.Monad.joi"] + (Position 3 22) + "join" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (join)", "f = Control.Monad.joi"] + , completionCommandTest + "as" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M ()", "f = M.joi"] + (Position 3 10) + "join" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M (join)", "f = M.joi"] + , completionCommandTest + "multiple" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M ()", "import Control.Monad as N ()", "f = N.joi"] + (Position 4 10) + "join" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad as M ()", "import Control.Monad as N (join)", "f = N.joi"] + -- Regression test for 
https://github.com/haskell/haskell-language-server/issues/2824 + , completionNoCommandTest + "explicit qualified" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import qualified Control.Monad as M (j)"] + (Position 2 38) + "join" + , completionNoCommandTest + "explicit qualified post" + ["{-# LANGUAGE NoImplicitPrelude, ImportQualifiedPost #-}", + "module A where", "import Control.Monad qualified as M (j)"] + (Position 2 38) + "join" + , completionNoCommandTest + "multiline import" + [ "{-# LANGUAGE NoImplicitPrelude #-}" + , "module A where", "import Control.Monad", " (fore)"] + (Position 3 9) + "forever" + ] + , testGroup "Data constructor" + [ completionCommandTest + "not imported" + ["module A where", "import Text.Printf ()", "ZeroPad"] + (Position 2 4) + "ZeroPad" + ["module A where", "import Text.Printf (FormatAdjustment (ZeroPad))", "ZeroPad"] + , completionCommandTest + "parent imported abs" + ["module A where", "import Text.Printf (FormatAdjustment)", "ZeroPad"] + (Position 2 4) + "ZeroPad" + ["module A where", "import Text.Printf (FormatAdjustment (ZeroPad))", "ZeroPad"] + , completionNoCommandTest + "parent imported all" + ["module A where", "import Text.Printf (FormatAdjustment (..))", "ZeroPad"] + (Position 2 4) + "ZeroPad" + , completionNoCommandTest + "already imported" + ["module A where", "import Text.Printf (FormatAdjustment (ZeroPad))", "ZeroPad"] + (Position 2 4) + "ZeroPad" + , completionNoCommandTest + "function from Prelude" + ["module A where", "import Data.Maybe ()", "Nothing"] + (Position 2 4) + "Nothing" + , completionCommandTest + "type operator parent" + ["module A where", "import Data.Type.Equality ()", "f = Ref"] + (Position 2 8) + "Refl" + ["module A where", "import Data.Type.Equality (type (:~:) (Refl))", "f = Ref"] + ] + , testGroup "Record completion" + [ completionCommandTest + "not imported" + ["module A where", "import Text.Printf ()", "FormatParse"] + (Position 2 10) + "FormatParse" + ["module A where", "import Text.Printf (FormatParse)", "FormatParse"] + , completionCommandTest + "parent imported" + ["module A where", "import Text.Printf (FormatParse)", "FormatParse"] + (Position 2 10) + "FormatParse" + ["module A where", "import Text.Printf (FormatParse (FormatParse))", "FormatParse"] + , completionNoCommandTest + "already imported" + ["module A where", "import Text.Printf (FormatParse (FormatParse))", "FormatParse"] + (Position 2 10) + "FormatParse" + ] + , testGroup "Package completion" + [ completionCommandTest + "import Data.Sequence" + ["module A where", "foo :: Seq"] + (Position 1 9) + "Seq" + ["module A where", "import Data.Sequence (Seq)", "foo :: Seq"] + + , completionCommandTest + "qualified import" + ["module A where", "foo :: Seq.Seq"] + (Position 1 13) + "Seq" + ["module A where", "import qualified Data.Sequence as Seq", "foo :: Seq.Seq"] + ] + ] + +completionCommandTest :: TestName -> [T.Text] -> Position -> T.Text -> [T.Text] -> TestTree +completionCommandTest name src pos wanted expected = testSession name $ do + docId <- createDoc "A.hs" "haskell" (T.unlines src) + _ <- waitForDiagnostics + compls <- skipManyTill anyMessage (getCompletions docId pos) + let wantedC = mapMaybe (\case + CompletionItem {_insertText = Just x, _command = Just cmd} + | wanted `T.isPrefixOf` x -> Just cmd + _ -> Nothing + ) compls + case wantedC of + [] -> + liftIO $ assertFailure $ "Cannot find completion " <> show wanted <> " in: " <> show [_label | CompletionItem {_label} <- compls] + command:_ -> do + executeCommand 
command + if src /= expected + then do + modifiedCode <- skipManyTill anyMessage (getDocumentEdit docId) + liftIO $ modifiedCode @?= T.unlines expected + else do + expectMessages SMethod_WorkspaceApplyEdit 1 $ \edit -> + liftIO $ assertFailure $ "Expected no edit but got: " <> show edit + +completionNoCommandTest :: TestName -> [T.Text] -> Position -> T.Text -> TestTree +completionNoCommandTest name src pos wanted = testSession name $ do + docId <- createDoc "A.hs" "haskell" (T.unlines src) + _ <- waitForDiagnostics + compls <- getCompletions docId pos + let isPrefixOfInsertOrLabel ci = any (wanted `T.isPrefixOf`) [fromMaybe "" (ci ^. L.insertText), ci ^. L.label] + case find isPrefixOfInsertOrLabel compls of + Nothing -> + liftIO $ assertFailure $ "Cannot find expected completion in: " <> show [_label | CompletionItem {_label} <- compls] + Just CompletionItem{..} -> liftIO . assertBool ("Expected no command but got: " <> show _command) $ null _command + + +codeActionTests :: TestTree +codeActionTests = testGroup "code actions" + [ suggestImportDisambiguationTests + , insertImportTests + , extendImportTests + , renameActionTests + , typeWildCardActionTests + , removeImportTests + , suggestImportClassMethodTests + , suggestImportTests + , suggestAddRecordFieldImportTests + , suggestAddCoerceMissingConstructorImportTests + , suggestAddGenericMissingConstructorImportTests + , suggestHideShadowTests + , fixConstructorImportTests + , fixModuleImportTypoTests + , importRenameActionTests + , fillTypedHoleTests + , addSigActionTests + , insertNewDefinitionTests + , deleteUnusedDefinitionTests + , addInstanceConstraintTests + , addFunctionConstraintTests + , removeRedundantConstraintsTests + , addTypeAnnotationsToLiteralsTest + , exportUnusedTests + , addImplicitParamsConstraintTests + , removeExportTests + , Test.AddArgument.tests + , suggestAddRecordFieldUpdateImportTests + ] + +insertImportTests :: TestTree +insertImportTests = testGroup "insert import" + [ checkImport + "module where keyword lower in file no exports" + "WhereKeywordLowerInFileNoExports.hs" + "WhereKeywordLowerInFileNoExports.expected.hs" + "import Data.Int" + , checkImport + "module where keyword lower in file with exports" + "WhereDeclLowerInFile.hs" + "WhereDeclLowerInFile.expected.hs" + "import Data.Int" + , checkImport + "module where keyword lower in file with comments before it" + "WhereDeclLowerInFileWithCommentsBeforeIt.hs" + "WhereDeclLowerInFileWithCommentsBeforeIt.expected.hs" + "import Data.Int" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when shebang is not + -- placed at top of file" + , checkImport + "Shebang not at top with spaces" + "ShebangNotAtTopWithSpaces.hs" + "ShebangNotAtTopWithSpaces.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when shebang is not + -- placed at top of file" + , checkImport + "Shebang not at top no space" + "ShebangNotAtTopNoSpace.hs" + "ShebangNotAtTopNoSpace.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC pragma is + -- not placed at top of file + , checkImport + "OPTIONS_GHC pragma not at top with spaces" + "OptionsNotAtTopWithSpaces.hs" + "OptionsNotAtTopWithSpaces.expected.hs" + "import Data.Monoid" + -- TODO: findNextPragmaPosition' function doesn't account for case when shebang is not placed + -- at top of file + , checkImport + "Shebang not at top of file" + "ShebangNotAtTop.hs" + "ShebangNotAtTop.expected.hs" + "import 
Data.Monoid" + -- TODO: findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC is not + -- placed at top of file + , checkImport + "OPTIONS_GHC pragma not at top of file" + "OptionsPragmaNotAtTop.hs" + "OptionsPragmaNotAtTop.expected.hs" + "import Data.Monoid" + -- TODO: findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC pragma is + -- not placed at top of file + , checkImport + "pragma not at top with comment at top" + "PragmaNotAtTopWithCommentsAtTop.hs" + "PragmaNotAtTopWithCommentsAtTop.expected.hs" + "import Data.Monoid" + -- TODO: findNextPragmaPosition' function doesn't account for case when OPTIONS_GHC pragma is + -- not placed at top of file + , checkImport + "pragma not at top multiple comments" + "PragmaNotAtTopMultipleComments.hs" + "PragmaNotAtTopMultipleComments.expected.hs" + "import Data.Monoid" + -- TODO: 'findNextPragmaPosition' function doesn't account for case of multiline pragmas + , checkImport + "after multiline language pragmas" + "MultiLinePragma.hs" + "MultiLinePragma.expected.hs" + "import Data.Monoid" + , checkImport + "pragmas not at top with module declaration" + "PragmaNotAtTopWithModuleDecl.hs" + "PragmaNotAtTopWithModuleDecl.expected.hs" + "import Data.Monoid" + , checkImport + "pragmas not at top with imports" + "PragmaNotAtTopWithImports.hs" + "PragmaNotAtTopWithImports.expected.hs" + "import Data.Monoid" + , checkImport + "above comment at top of module" + "CommentAtTop.hs" + "CommentAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "above multiple comments below" + "CommentAtTopMultipleComments.hs" + "CommentAtTopMultipleComments.expected.hs" + "import Data.Monoid" + , checkImport + "above curly brace comment" + "CommentCurlyBraceAtTop.hs" + "CommentCurlyBraceAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "above multi-line comment" + "MultiLineCommentAtTop.hs" + "MultiLineCommentAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "above comment with no module explicit exports" + "NoExplicitExportCommentAtTop.hs" + "NoExplicitExportCommentAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "above two-dash comment with no pipe" + "TwoDashOnlyComment.hs" + "TwoDashOnlyComment.expected.hs" + "import Data.Monoid" + , checkImport + "above comment with no (module .. where) decl" + "NoModuleDeclarationCommentAtTop.hs" + "NoModuleDeclarationCommentAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "comment not at top with no (module .. 
where) decl" + "NoModuleDeclaration.hs" + "NoModuleDeclaration.expected.hs" + "import Data.Monoid" + , checkImport + "comment not at top (data dec is)" + "DataAtTop.hs" + "DataAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "comment not at top (newtype is)" + "NewTypeAtTop.hs" + "NewTypeAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "with no explicit module exports" + "NoExplicitExports.hs" + "NoExplicitExports.expected.hs" + "import Data.Monoid" + , checkImport + "add to correctly placed existing import" + "ImportAtTop.hs" + "ImportAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "add to multiple correctly placed existing imports" + "MultipleImportsAtTop.hs" + "MultipleImportsAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "with language pragma at top of module" + "LangPragmaModuleAtTop.hs" + "LangPragmaModuleAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "with language pragma and explicit module exports" + "LangPragmaModuleWithComment.hs" + "LangPragmaModuleWithComment.expected.hs" + "import Data.Monoid" + , checkImport + "with language pragma at top and no module declaration" + "LanguagePragmaAtTop.hs" + "LanguagePragmaAtTop.expected.hs" + "import Data.Monoid" + , checkImport + "with multiple lang pragmas and no module declaration" + "MultipleLanguagePragmasNoModuleDeclaration.hs" + "MultipleLanguagePragmasNoModuleDeclaration.expected.hs" + "import Data.Monoid" + , checkImport + "with pragmas and shebangs" + "LanguagePragmasThenShebangs.hs" + "LanguagePragmasThenShebangs.expected.hs" + "import Data.Monoid" + , checkImport + "with pragmas and shebangs but no comment at top" + "PragmasAndShebangsNoComment.hs" + "PragmasAndShebangsNoComment.expected.hs" + "import Data.Monoid" + , checkImport + "module decl no exports under pragmas and shebangs" + "PragmasShebangsAndModuleDecl.hs" + "PragmasShebangsAndModuleDecl.expected.hs" + "import Data.Monoid" + , checkImport + "module decl with explicit import under pragmas and shebangs" + "PragmasShebangsModuleExplicitExports.hs" + "PragmasShebangsModuleExplicitExports.expected.hs" + "import Data.Monoid" + , checkImport + "module decl and multiple imports" + "ModuleDeclAndImports.hs" + "ModuleDeclAndImports.expected.hs" + "import Data.Monoid" + , importQualifiedTests + ] + +importQualifiedTests :: TestTree +importQualifiedTests = testGroup "import qualified prefix suggestions" + [ checkImport' + "qualified import works with 3.8 code action kinds" + "ImportQualified.hs" + "ImportQualified.expected.hs" + "import qualified Control.Monad as Control" + ["import Control.Monad (when)"] + , checkImport' + "qualified import in postfix position works with 3.8 code action kinds" + "ImportPostQualified.hs" + "ImportPostQualified.expected.hs" + "import Control.Monad qualified as Control" + ["import qualified Control.Monad as Control", "import Control.Monad (when)"] + ] + +checkImport :: TestName -> FilePath -> FilePath -> T.Text -> TestTree +checkImport testName originalPath expectedPath action = + checkImport' testName originalPath expectedPath action [] + +checkImport' :: TestName -> FilePath -> FilePath -> T.Text -> [T.Text] -> TestTree +checkImport' testName originalPath expectedPath action excludedActions = + testSessionWithExtraFiles "import-placement" testName $ \dir -> + check (dir originalPath) (dir expectedPath) action + where + check :: FilePath -> FilePath -> T.Text -> Session () + check originalPath expectedPath action = do + oSrc <- liftIO $ readFileUtf8 originalPath + shouldBeDocContents 
<- liftIO $ readFileUtf8 expectedPath + originalDoc <- createDoc originalPath "haskell" oSrc + _ <- waitForDiagnostics + actionsOrCommands <- getAllCodeActions originalDoc + for_ excludedActions (\a -> assertNoActionWithTitle a actionsOrCommands) + chosenAction <- pickActionWithTitle action actionsOrCommands + executeCodeAction chosenAction + originalDocAfterAction <- documentContents originalDoc + liftIO $ T.replace "\r\n" "\n" shouldBeDocContents @=? T.replace "\r\n" "\n" originalDocAfterAction + +renameActionTests :: TestTree +renameActionTests = testGroup "rename actions" + [ check "change to local variable name" + [ "module Testing where" + , "foo :: Int -> Int" + , "foo argName = argNme" + ] + ("Replace with ‘argName’", R 2 14 2 20) + [ "module Testing where" + , "foo :: Int -> Int" + , "foo argName = argName" + ] + , check "change to name of imported function" + [ "module Testing where" + , "import Data.Maybe (maybeToList)" + , "foo :: Maybe a -> [a]" + , "foo = maybToList" + ] + ("Replace with ‘maybeToList’", R 3 6 3 16) + [ "module Testing where" + , "import Data.Maybe (maybeToList)" + , "foo :: Maybe a -> [a]" + , "foo = maybeToList" + ] + , check "change infix function" + [ "module Testing where" + , "monus :: Int -> Int" + , "monus x y = max 0 (x - y)" + , "foo x y = x `monnus` y" + ] + ("Replace with ‘monus’", R 3 12 3 20) + [ "module Testing where" + , "monus :: Int -> Int" + , "monus x y = max 0 (x - y)" + , "foo x y = x `monus` y" + ] + , check "change template function" + [ "{-# LANGUAGE TemplateHaskellQuotes #-}" + , "module Testing where" + , "import Language.Haskell.TH (Name)" + , "foo :: Name" + , "foo = 'bread" + ] + ("Replace with ‘break’", R 4 6 4 12) + [ "{-# LANGUAGE TemplateHaskellQuotes #-}" + , "module Testing where" + , "import Language.Haskell.TH (Name)" + , "foo :: Name" + , "foo = 'break" + ] + , testSession "suggest multiple local variable names" $ do + doc <- createDoc "Testing.hs" "haskell" $ T.unlines + [ "module Testing where" + , "foo :: Char -> Char -> Char -> Char" + , "foo argument1 argument2 argument3 = argumentX" + ] + _ <- waitForDiagnostics + actions <- getCodeActions doc (R 2 36 2 45) + traverse_ (assertActionWithTitle actions) + [ "Replace with ‘argument1’" + , "Replace with ‘argument2’" + , "Replace with ‘argument3’" + ] + ] + where + check :: TestName -> [T.Text] -> (T.Text, Range) -> [T.Text] -> TestTree + check testName linesOrig (actionTitle, actionRange) linesExpected = + testSession testName $ do + let contentBefore = T.unlines linesOrig + doc <- createDoc "Testing.hs" "haskell" contentBefore + _ <- waitForDiagnostics + action <- pickActionWithTitle actionTitle =<< getCodeActions doc actionRange + executeCodeAction action + contentAfter <- documentContents doc + let expectedContent = T.unlines linesExpected + liftIO $ expectedContent @=? 
contentAfter + +typeWildCardActionTests :: TestTree +typeWildCardActionTests = testGroup "type wildcard actions" + [ testUseTypeSignature "global signature" + [ "func :: _" + , "func x = x" + ] + [ if ghcVersion >= GHC910 then "func :: t -> t" else "func :: p -> p" + , "func x = x" + ] + , testUseTypeSignature "local signature" + [ "func :: Int -> Int" + , "func x =" + , " let y :: _" + , " y = x * 2" + , " in y" + ] + [ "func :: Int -> Int" + , "func x =" + , " let y :: Int" + , " y = x * 2" + , " in y" + ] + , testUseTypeSignature "multi-line message 1" + [ "func :: _" + , "func x y = x + y" + ] + [ if ghcVersion >= GHC910 then + "func :: t -> t -> t" + else if ghcVersion >= GHC98 then + "func :: a -> a -> a" -- since 9.8 GHC no longer does type defaulting (see https://gitlab.haskell.org/ghc/ghc/-/issues/24522) + else + "func :: Integer -> Integer -> Integer" + , "func x y = x + y" + ] + , testUseTypeSignature "type in parentheses" + [ "func :: a -> _" + , "func x = (x, const x)" + ] + [ "func :: a -> (a, b -> a)" + , "func x = (x, const x)" + ] + , testUseTypeSignature "type in brackets" + [ "func :: _ -> Maybe a" + , "func xs = head xs" + ] + [ "func :: [Maybe a] -> Maybe a" + , "func xs = head xs" + ] + , testUseTypeSignature "unit type" + [ "func :: IO _" + , "func = putChar 'H'" + ] + [ "func :: IO ()" + , "func = putChar 'H'" + ] + , testUseTypeSignature "no spaces around '::'" + [ "func::_" + , "func x y = x + y" + ] + [ if ghcVersion >= GHC910 then + "func::t -> t -> t" + else if ghcVersion >= GHC98 then + "func::a -> a -> a" -- since 9.8 GHC no longer does type defaulting (see https://gitlab.haskell.org/ghc/ghc/-/issues/24522) + else + "func::Integer -> Integer -> Integer" + , "func x y = x + y" + ] + , testNoUseTypeSignature "ignores typed holes" + [ "func :: a -> a" + , "func x = _" + ] + , testGroup "add parens if hole is part of bigger type" + [ testUseTypeSignature "subtype 1" + [ "func :: _ -> Integer -> Integer" + , "func x y = x + y" + ] + [ "func :: Integer -> Integer -> Integer" + , "func x y = x + y" + ] + , testUseTypeSignature "subtype 2" + [ "func :: Integer -> _ -> Integer" + , "func x y = x + y" + ] + [ "func :: Integer -> Integer -> Integer" + , "func x y = x + y" + ] + , testUseTypeSignature "subtype 3" + [ "func :: Integer -> Integer -> _" + , "func x y = x + y" + ] + [ "func :: Integer -> Integer -> Integer" + , "func x y = x + y" + ] + , testUseTypeSignature "subtype 4" + [ "func :: Integer -> _" + , "func x y = x + y" + ] + [ "func :: Integer -> (Integer -> Integer)" + , "func x y = x + y" + ] + ] + ] + where + -- | Test session of given name, checking action "Use type signature..." + -- on a test file with given content and comparing to expected result. + testUseTypeSignature name textIn textOut = testSession name $ do + let expectedContentAfterAction = T.unlines $ fileStart : textOut + content = T.unlines $ fileStart : textIn + doc <- createDoc "Testing.hs" "haskell" content + + (Just addSignature) <- getUseTypeSigAction doc + executeCodeAction addSignature + contentAfterAction <- documentContents doc + liftIO $ expectedContentAfterAction @=? contentAfterAction + + testNoUseTypeSignature name textIn = testSession name $ do + let content = T.unlines $ fileStart : textIn + doc <- createDoc "Testing.hs" "haskell" content + codeAction <- getUseTypeSigAction doc + liftIO $ Nothing @=? 
codeAction + + fileStart = "module Testing where" + + getUseTypeSigAction docIn = do + _ <- waitForDiagnostics + actionsOrCommands <- getAllCodeActions docIn + + let addSignatures = + [ action | InR action@CodeAction { _title = actionTitle } <- actionsOrCommands + , "Use type signature" `T.isPrefixOf` actionTitle + ] + pure $ listToMaybe addSignatures + + +removeImportTests :: TestTree +removeImportTests = testGroup "remove import actions" + [ testSession "redundant" $ do + let contentA = T.unlines + [ "module ModuleA where" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + , "stuffB :: Integer" + , "stuffB = 123" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "stuffB :: Integer" + , "stuffB = 123" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "qualified redundant" $ do + let contentA = T.unlines + [ "module ModuleA where" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import qualified ModuleA" + , "stuffB :: Integer" + , "stuffB = 123" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "stuffB :: Integer" + , "stuffB = 123" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant binding" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "stuffA = False" + , "stuffB :: Integer" + , "stuffB = 123" + , "stuffC = ()" + , "_stuffD = '_'" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (stuffA, stuffB, _stuffD, stuffC, stuffA)" + , "main = print stuffB" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove _stuffD, stuffA, stuffC from import" + =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (stuffB)" + , "main = print stuffB" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + , testSession "redundant binding - unicode regression " $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data A = A" + , "ε :: Double" + , "ε = 0.5" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (A(..), ε)" + , "a = A" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove ε from import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (A(..))" + , "a = A" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant operator" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "a !! _b = a" + , "a _b = a" + , "stuffB :: Integer" + , "stuffB = 123" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import qualified ModuleA as A ((), stuffB, (!!))" + , "main = print A.stuffB" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove !!, from import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import qualified ModuleA as A (stuffB)" + , "main = print A.stuffB" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant all import" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data A = A" + , "stuffB :: Integer" + , "stuffB = 123" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (A(..), stuffB)" + , "main = print stuffB" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove A from import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (stuffB)" + , "main = print stuffB" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant constructor import" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data D = A | B" + , "data E = F" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (D(A,B), E(F))" + , "main = B" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove A, E, F from import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (D(B))" + , "main = B" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + , testSession "import containing the identifier Strict" $ do + let contentA = T.unlines + [ "module Strict where" + ] + _docA <- createDoc "Strict.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import Strict" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "remove all" $ do + let content = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleA where" + , "import Data.Function (fix, (&))" + , "import qualified Data.Functor.Const" + , "import Data.Functor.Identity" + , "import Data.Functor.Sum (Sum (InL, InR))" + , "import qualified Data.Kind as K (Constraint, Type)" + , "x = InL (Identity 123)" + , "y = fix id" + , "type T = K.Type" + ] + doc <- createDoc "ModuleC.hs" "haskell" content + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove all redundant imports" =<< getAllCodeActions doc + executeCodeAction action + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleA where" + , "import Data.Function (fix)" + , "import Data.Functor.Identity" + , "import Data.Functor.Sum (Sum (InL))" + , "import qualified Data.Kind as K (Type)" + , "x = InL (Identity 123)" + , "y = fix id" + , "type T = K.Type" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "remove unused operators whose name ends with '.'" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "(@.) = 0 -- Must have an operator whose name ends with '.'" + , "a = 1 -- .. but also something else" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (a, (@.))" + , "x = a -- Must use something from module A, but not (@.)" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove @. from import" =<< getCodeActions docB (R 2 0 2 5) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (a)" + , "x = a -- Must use something from module A, but not (@.)" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + , testSession "remove redundant record field import" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data A = A {" + , " a1 :: String," + , " a2 :: Int" + , "}" + , "newA = A \"foo\" 42" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + , " ( A (a1, a2)," + , " newA" + , " )" + , "x = a1 newA" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove A(a2) from import" =<< getCodeActions docB (R 2 0 5 3) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + , " ( A (a1)," + , " newA" + , " )" + , "x = a1 newA" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "remove multiple redundant record field imports" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data A = A {" + , " a1 :: String," + , " a2 :: Int," + , " a3 :: Int," + , " a4 :: Int" + , "}" + , "newA = A \"foo\" 2 3 4" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + , " ( A (a1, a2, a3, a4)," + , " newA" + , " )" + , "x = a2 newA" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle "Remove A(a1), A(a3), A(a4) from import" =<< getCodeActions docB (R 2 0 5 3) + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + , " ( A (a2)," + , " newA" + , " )" + , "x = a2 newA" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + ] + +extendImportTests :: TestTree +extendImportTests = testGroup "extend import actions" + [ testGroup "with checkAll" $ tests True + , testGroup "without checkAll" $ tests False + ] + where + tests overrideCheckProject = + [ testSession "extend all constructors for record field" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data A = B { a :: Int }" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (A(B))" + , "f = a" + ]) + (Range (Position 2 4) (Position 2 5)) + [ "Add A(..) to the import list of ModuleA" + , "Add A(a) to the import list of ModuleA" + , "Add a to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A(..))" + , "f = a" + ]) + , testSession "extend all constructors with sibling" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data Foo" + , "data Bar" + , "data A = B | C" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA ( Foo, A (C) , Bar ) " + , "f = B" + ]) + (Range (Position 2 4) (Position 2 5)) + [ "Add A(..) to the import list of ModuleA" + , "Add A(B) to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA ( Foo, A (..) 
, Bar ) " + , "f = B" + ]) + , testSession "extend all constructors with comment" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data Foo" + , "data Bar" + , "data A = B | C" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA ( Foo, A (C{-comment--}) , Bar ) " + , "f = B" + ]) + (Range (Position 2 4) (Position 2 5)) + [ "Add A(..) to the import list of ModuleA" + , "Add A(B) to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA ( Foo, A (..{-comment--}) , Bar ) " + , "f = B" + ]) + , testSession "extend all constructors for type operator" $ template + [] + ("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "import Data.Type.Equality ((:~:))" + , "x :: (:~:) [] []" + , "x = Refl" + ]) + (Range (Position 3 4) (Position 3 8)) + [ "Add (:~:)(..) to the import list of Data.Type.Equality" + , "Add type (:~:)(Refl) to the import list of Data.Type.Equality"] + (T.unlines + [ "module ModuleA where" + , "import Data.Type.Equality ((:~:) (..))" + , "x :: (:~:) [] []" + , "x = Refl" + ]) + , testSession "extend all constructors for class" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "class C a where" + , " m1 :: a -> a" + , " m2 :: a -> a" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (C(m1))" + , "b = m2" + ]) + (Range (Position 2 5) (Position 2 5)) + [ "Add C(..) to the import list of ModuleA" + , "Add C(m2) to the import list of ModuleA" + , "Add m2 to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (C(..))" + , "b = m2" + ]) + , testSession "extend single line import with value" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffB)" + , "main = print (stuffA, stuffB)" + ]) + (Range (Position 2 17) (Position 2 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffB, stuffA)" + , "main = print (stuffA, stuffB)" + ]) + , testSession "extend single line import with operator" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "(.*) :: Integer -> Integer -> Integer" + , "x .* y = x * y" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffB)" + , "main = print (stuffB .* stuffB)" + ]) + (Range (Position 2 22) (Position 2 24)) + ["Add (.*) to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffB, (.*))" + , "main = print (stuffB .* stuffB)" + ]) + , testSession "extend single line import with infix constructor" $ template + [] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import Data.List.NonEmpty (fromList)" + , "main = case (fromList []) of _ :| _ -> pure ()" + ]) + (Range (Position 2 31) (Position 2 33)) + [ "Add NonEmpty((:|)) to the import list of Data.List.NonEmpty" + , "Add NonEmpty(..) 
to the import list of Data.List.NonEmpty" + ] + (T.unlines + [ "module ModuleB where" + , "import Data.List.NonEmpty (fromList, NonEmpty ((:|)))" + , "main = case (fromList []) of _ :| _ -> pure ()" + ]) + , testSession "extend single line import with prefix constructor" $ template + [] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import Prelude hiding (Maybe(..))" + , "import Data.Maybe (catMaybes)" + , "x = Just 10" + ]) + (Range (Position 3 4) (Position 3 8)) + [ "Add Maybe(Just) to the import list of Data.Maybe" + , "Add Maybe(..) to the import list of Data.Maybe" + ] + (T.unlines + [ "module ModuleB where" + , "import Prelude hiding (Maybe(..))" + , "import Data.Maybe (catMaybes, Maybe (Just))" + , "x = Just 10" + ]) + , testSession "extend single line import with type" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "type A = Double" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA ()" + , "b :: A" + , "b = 0" + ]) + (Range (Position 2 5) (Position 2 5)) + ["Add A to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A)" + , "b :: A" + , "b = 0" + ]) + , testSession "extend single line import with constructor" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data A = Constructor" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (A)" + , "b :: A" + , "b = Constructor" + ]) + (Range (Position 3 5) (Position 3 5)) + [ "Add A(Constructor) to the import list of ModuleA" + , "Add A(..) to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A (Constructor))" + , "b :: A" + , "b = Constructor" + ]) + , testSession "extend single line import with constructor (with comments)" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data A = Constructor" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (A ({-Constructor-}))" + , "b :: A" + , "b = Constructor" + ]) + (Range (Position 3 5) (Position 3 5)) + [ "Add A(Constructor) to the import list of ModuleA" + , "Add A(..) to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A (Constructor{-Constructor-}))" + , "b :: A" + , "b = Constructor" + ]) + , testSession "extend single line import with mixed constructors" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data A = ConstructorFoo | ConstructorBar" + , "a = 1" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (A (ConstructorBar), a)" + , "b :: A" + , "b = ConstructorFoo" + ]) + (Range (Position 3 5) (Position 3 5)) + [ "Add A(ConstructorFoo) to the import list of ModuleA" + , "Add A(..) to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A (ConstructorBar, ConstructorFoo), a)" + , "b :: A" + , "b = ConstructorFoo" + ]) + , testSession "extend single line import in presence of extra parens" $ template + [] + ("Main.hs", T.unlines + [ "import Data.Monoid (First)" + , "f = (First Nothing) <> mempty" -- parens tripped up the regex extracting import suggestions + ]) + (Range (Position 1 6) (Position 1 7)) + [ "Add First(..) 
to the import list of Data.Monoid" + , "Add First(First) to the import list of Data.Monoid" + ] + (T.unlines + [ "import Data.Monoid (First (..))" + , "f = (First Nothing) <> mempty" + ]) + , testSession "extend single line qualified import with value" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import qualified ModuleA as A (stuffB)" + , "main = print (A.stuffA, A.stuffB)" + ]) + (Range (Position 2 17) (Position 2 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import qualified ModuleA as A (stuffB, stuffA)" + , "main = print (A.stuffA, A.stuffB)" + ]) + , testSession "extend multi line import with value" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (stuffB" + , " )" + , "main = print (stuffA, stuffB)" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (stuffB, stuffA" + , " )" + , "main = print (stuffA, stuffB)" + ]) + , testSession "extend multi line import with trailing comma" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (stuffB," + , " )" + , "main = print (stuffA, stuffB)" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (stuffB, stuffA," + , " )" + , "main = print (stuffA, stuffB)" + ]) + , testSession "extend single line import with method within class" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "class C a where" + , " m1 :: a -> a" + , " m2 :: a -> a" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (C(m1))" + , "b = m2" + ]) + (Range (Position 2 5) (Position 2 5)) + [ "Add C(m2) to the import list of ModuleA" + , "Add m2 to the import list of ModuleA" + , "Add C(..) to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (C(m1, m2))" + , "b = m2" + ]) + , testSession "extend single line import with method without class" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "class C a where" + , " m1 :: a -> a" + , " m2 :: a -> a" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (C(m1))" + , "b = m2" + ]) + (Range (Position 2 5) (Position 2 5)) + [ "Add m2 to the import list of ModuleA" + , "Add C(m2) to the import list of ModuleA" + , "Add C(..) 
to the import list of ModuleA" + ] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (C(m1), m2)" + , "b = m2" + ]) + , testSession "extend import list with multiple choices" $ template + [("ModuleA.hs", T.unlines + -- this is just a dummy module to help the arguments needed for this test + [ "module ModuleA (bar) where" + , "bar = 10" + ]), + ("ModuleB.hs", T.unlines + -- this is just a dummy module to help the arguments needed for this test + [ "module ModuleB (bar) where" + , "bar = 10" + ])] + ("ModuleC.hs", T.unlines + [ "module ModuleC where" + , "import ModuleB ()" + , "import ModuleA ()" + , "foo = bar" + ]) + (Range (Position 3 6) (Position 3 9)) + ["Add bar to the import list of ModuleA", + "Add bar to the import list of ModuleB"] + (T.unlines + [ "module ModuleC where" + , "import ModuleB ()" + , "import ModuleA (bar)" + , "foo = bar" + ]) + , testSession "extend import list with constructor of type operator" $ template + [] + ("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "import Data.Type.Equality ((:~:))" + , "x :: (:~:) [] []" + , "x = Refl" + ]) + (Range (Position 3 4) (Position 3 8)) + [ "Add type (:~:)(Refl) to the import list of Data.Type.Equality" + , "Add (:~:)(..) to the import list of Data.Type.Equality"] + (T.unlines + [ "module ModuleA where" + , "import Data.Type.Equality ((:~:) (Refl))" + , "x :: (:~:) [] []" + , "x = Refl" + ]) + -- TODO: importing pattern synonyms is unsupported + , testSessionExpectFail "extend import list with pattern synonym" + (BrokenIdeal $ + template + [("ModuleA.hs", T.unlines + [ "{-# LANGUAGE PatternSynonyms #-}" + , "module ModuleA where" + , "pattern Some x = Just x" + ]) + ] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import A ()" + , "k (Some x) = x" + ] + ) + (Range (Position 2 3) (Position 2 7)) + ["Add pattern Some to the import list of A"] + (T.unlines + [ "module ModuleB where" + , "import A (pattern Some)" + , "k (Some x) = x" + ] + ) + ) + (BrokenCurrent $ + noCodeActionsTemplate + [("ModuleA.hs", T.unlines + [ "{-# LANGUAGE PatternSynonyms #-}" + , "module ModuleA where" + , "pattern Some x = Just x" + ]) + ] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import A ()" + , "k (Some x) = x" + ] + ) + (Range (Position 2 3) (Position 2 7)) + ) + , testSession "type constructor name same as data constructor name" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "newtype Foo = Foo Int" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA(Foo)" + , "f :: Foo" + , "f = Foo 1" + ]) + (Range (Position 3 4) (Position 3 6)) + ["Add Foo(Foo) to the import list of ModuleA", "Add Foo(..) 
to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA(Foo (Foo))" + , "f :: Foo" + , "f = Foo 1" + ]) + , testSession "type constructor name same as data constructor name, data constructor extraneous" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data Foo = Foo" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA()" + , "f :: Foo" + , "f = undefined" + ]) + (Range (Position 2 4) (Position 2 6)) + ["Add Foo to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA(Foo)" + , "f :: Foo" + , "f = undefined" + ]) + , testSession "data constructor with two multiline import lists that can be extended with it" $ template + [] + ("A.hs", T.unlines + [ "module A where" + , "import Prelude (" + , " )" + , "import Data.Maybe (" + , " )" + , "f = Nothing" + ]) + (Range (Position 5 5) (Position 5 6)) + [ "Add Maybe(..) to the import list of Data.Maybe" + , "Add Maybe(..) to the import list of Prelude" + , "Add Maybe(Nothing) to the import list of Data.Maybe" + , "Add Maybe(Nothing) to the import list of Prelude" + ] + (T.unlines + ["module A where" + , "import Prelude (" + , " )" + , "import Data.Maybe (Maybe (..)" + , " )" + , "f = Nothing" + ]) + ] + where + codeActionTitle CodeAction{_title=x} = x + + template setUpModules moduleUnderTest range expectedTitles expectedContentB = do + docB <- evalProject setUpModules moduleUnderTest + codeActions <- codeActions docB range + let actualTitles = codeActionTitle <$> codeActions + + -- Note that we are not testing the order of the actions, as the + -- order of the expected actions indicates which one we'll execute + -- in this test, i.e., the first one. + liftIO $ sort expectedTitles @=? sort actualTitles + + -- Execute the action with the same title as the first expected one. + -- Since we tested that both lists have the same elements (possibly + -- in a different order), this search cannot fail. + firstTitle:_ <- pure expectedTitles + Just action <- pure $ find ((firstTitle ==) . codeActionTitle) codeActions + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ expectedContentB @=? contentAfterAction + + noCodeActionsTemplate setUpModules moduleUnderTest range = do + docB <- evalProject setUpModules moduleUnderTest + codeActions' <- codeActions docB range + let actualTitles = codeActionTitle <$> codeActions' + liftIO $ [] @=? 
actualTitles + + evalProject setUpModules moduleUnderTest = do + configureCheckProject overrideCheckProject + + mapM_ (\(fileName, contents) -> createDoc fileName "haskell" contents) setUpModules + docB <- createDoc (fst moduleUnderTest) "haskell" (snd moduleUnderTest) + _ <- waitForDiagnostics + waitForProgressDone + + pure docB + + codeActions docB range = do + actionsOrCommands <- getCodeActions docB range + pure $ + [ ca | InR ca <- actionsOrCommands + , let title = codeActionTitle ca + , "Add" `T.isPrefixOf` title && not ("Add argument" `T.isPrefixOf` title) + ] + +fixModuleImportTypoTests :: TestTree +fixModuleImportTypoTests = testGroup "fix module import typo" + [ testSession "works when single module suggested" $ do + doc <- createDoc "A.hs" "haskell" "import Data.Cha" + _ <- waitForDiagnostics + action <- pickActionWithTitle "Replace with Data.Char" =<< getCodeActions doc (R 0 0 0 10) + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ contentAfterAction @?= "import Data.Char" + , testSession "works when multiple modules suggested" $ do + doc <- createDoc "A.hs" "haskell" "import Data.I" + _ <- waitForDiagnostics + actions <- getCodeActions doc (R 0 0 0 10) + traverse_ (assertActionWithTitle actions) + [ "Replace with Data.Eq" + , "Replace with Data.Int" + , "Replace with Data.Ix" + ] + replaceWithDataEq <- pickActionWithTitle "Replace with Data.Eq" actions + executeCodeAction replaceWithDataEq + contentAfterAction <- documentContents doc + liftIO $ contentAfterAction @?= "import Data.Eq" + ] + +suggestImportClassMethodTests :: TestTree +suggestImportClassMethodTests = + testGroup + "suggest import class methods" + [ testGroup + "new" + [ testSession "via parent" $ + template' + "import Data.Semigroup (Semigroup(stimes))" + (Range (Position 4 2) (Position 4 8)), + testSession "top level" $ + template' + "import Data.Semigroup (stimes)" + (Range (Position 4 2) (Position 4 8)), + testSession "all" $ + template' + "import Data.Semigroup" + (Range (Position 4 2) (Position 4 8)) + ], + testGroup + "extend" + [ testSession "via parent" $ + template + [ "module A where", + "", + "import Data.Semigroup ()" + ] + (Range (Position 6 2) (Position 6 8)) + "Add Semigroup(stimes) to the import list of Data.Semigroup" + [ "module A where", + "", + "import Data.Semigroup (Semigroup (stimes))" + ], + testSession "top level" $ + template + [ "module A where", + "", + "import Data.Semigroup ()" + ] + (Range (Position 6 2) (Position 6 8)) + "Add stimes to the import list of Data.Semigroup" + [ "module A where", + "", + "import Data.Semigroup (stimes)" + ] + ] + ] + where + decls = + [ "data X = X", + "instance Semigroup X where", + " (<>) _ _ = X", + " stimes _ _ = X" + ] + template beforeContent range executeTitle expectedContent = do + doc <- createDoc "A.hs" "haskell" $ T.unlines (beforeContent <> decls) + _ <- waitForDiagnostics + waitForProgressDone + action <- pickActionWithTitle executeTitle =<< getCodeActions doc range + executeCodeAction action + content <- documentContents doc + liftIO $ T.unlines (expectedContent <> decls) @=? 
content + template' executeTitle range = let c = ["module A where"] in template c range executeTitle $ c <> [executeTitle] + +suggestImportTests :: TestTree +suggestImportTests = testGroup "suggest import actions" + [ testGroup "Dont want suggestion" + [ -- extend import + -- We don't want to suggest a new import, but extend existing imports + test False ["Data.List.NonEmpty ()"] "f = nonEmpty" [] "import Data.List.NonEmpty (nonEmpty)" + -- data constructor + , test False [] "f = First" [] "import Data.Monoid (First)" + -- internal module + , test False [] "f :: Typeable a => a" ["f = undefined"] "import Data.Typeable.Internal (Typeable)" + -- package not in scope + , test False [] "f = quickCheck" [] "import Test.QuickCheck (quickCheck)" + -- don't omit the parent data type of a constructor + , test False [] "f ExitSuccess = ()" [] "import System.Exit (ExitSuccess)" + -- don't suggest data constructor when we only need the type + , test False [] "f :: Bar" [] "import Bar (Bar(Bar))" + -- don't suggest all data constructors for the data type + , test False [] "f :: Bar" [] "import Bar (Bar(..))" + ] + , testGroup "want suggestion" + [ wantWait [] "f = foo" [] "import Foo (foo)" + , wantWait [] "f = Bar" [] "import Bar (Bar(Bar))" + , wantWait [] "f :: Bar" [] "import Bar (Bar)" + , wantWait [] "f = Bar" [] "import Bar (Bar(..))" + , test True [] "f = nonEmpty" [] "import Data.List.NonEmpty (nonEmpty)" + , test True [] "f = (:|)" [] "import Data.List.NonEmpty (NonEmpty((:|)))" + , test True [] "f :: Natural" ["f = undefined"] "import Numeric.Natural (Natural)" + , test True [] "f :: Natural" ["f = undefined"] "import Numeric.Natural" + , test True [] "f :: NonEmpty ()" ["f = () :| []"] "import Data.List.NonEmpty (NonEmpty)" + , test True [] "f :: NonEmpty ()" ["f = () :| []"] "import Data.List.NonEmpty" + , test True [] "f = First" [] "import Data.Monoid (First(First))" + , test True [] "f = Endo" [] "import Data.Monoid (Endo(Endo))" + , test True [] "f = Version" [] "import Data.Version (Version(Version))" + , test True [] "f ExitSuccess = ()" [] "import System.Exit (ExitCode(ExitSuccess))" + , test True [] "f = AssertionFailed" [] "import Control.Exception (AssertionFailed(AssertionFailed))" + , test True ["Prelude"] "f = nonEmpty" [] "import Data.List.NonEmpty (nonEmpty)" + , test True [] "f :: Alternative f => f ()" ["f = undefined"] "import Control.Applicative (Alternative)" + , test True [] "f :: Alternative f => f ()" ["f = undefined"] "import Control.Applicative" + , test True [] "f = empty" [] "import Control.Applicative (Alternative(empty))" + , test True [] "f = empty" [] "import Control.Applicative (empty)" + , test True [] "f = empty" [] "import Control.Applicative" + , test True [] "f = (&)" [] "import Data.Function ((&))" + , test True [] "f = NE.nonEmpty" [] "import qualified Data.List.NonEmpty as NE" + , test True [] "f = (NE.:|)" [] "import qualified Data.List.NonEmpty as NE" + , test True [] "f = (Data.List.NonEmpty.:|)" [] "import qualified Data.List.NonEmpty" + , test True [] "f = (B..|.)" [] "import qualified Data.Bits as B" + , test True [] "f = (Data.Bits..|.)" [] "import qualified Data.Bits" + , test True [] "f :: Typeable a => a" ["f = undefined"] "import Data.Typeable (Typeable)" + , test True [] "f = pack" [] "import Data.Text (pack)" + , test True [] "f :: Text" ["f = undefined"] "import Data.Text (Text)" + , test True [] "f = [] & id" [] "import Data.Function ((&))" + , test True [] "f = (&) [] id" [] "import Data.Function ((&))" + , test True [] "f = (.|.)" [] 
"import Data.Bits (Bits((.|.)))" + , test True [] "f = (.|.)" [] "import Data.Bits ((.|.))" + , test True [] "f :: a ~~ b" [] "import Data.Type.Equality ((~~))" + , test True + ["qualified Data.Text as T" + ] "f = T.putStrLn" [] "import qualified Data.Text.IO as T" + , test True + [ "qualified Data.Text as T" + , "qualified Data.Function as T" + ] "f = T.putStrLn" [] "import qualified Data.Text.IO as T" + , test True + [ "qualified Data.Text as T" + , "qualified Data.Function as T" + , "qualified Data.Functor as T" + , "qualified Data.Data as T" + ] "f = T.putStrLn" [] "import qualified Data.Text.IO as T" + , test True [] "f = (.|.)" [] "import Data.Bits (Bits(..))" + , test True [] "f = empty" [] "import Control.Applicative (Alternative(..))" + ] + -- TODO: Importing pattern synonyms is unsupported + , test False [] "k (Some x) = x" [] "import B (pattern Some)" + ] + where + test = test' False + wantWait = test' True True + + test' waitForCheckProject wanted imps def other newImp = testSessionWithExtraFiles "hover" (T.unpack def) $ \dir -> do + configureCheckProject waitForCheckProject + let before = T.unlines $ "module A where" : ["import " <> x | x <- imps] ++ def : other + after = T.unlines $ "module A where" : ["import " <> x | x <- imps] ++ [newImp] ++ def : other + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, Bar, Foo, B]}}" + liftIO $ writeFileUTF8 (dir "hie.yaml") cradle + liftIO $ writeFileUTF8 (dir "B.hs") $ unlines ["{-# LANGUAGE PatternSynonyms #-}", "module B where", "pattern Some x = Just x"] + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + -- there isn't a good way to wait until the whole project is checked atm + when waitForCheckProject $ liftIO $ sleep 0.5 + let defLine = fromIntegral $ length imps + 1 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + if wanted + then do + action <- pickActionWithTitle newImp actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? 
contentAfterAction + else + liftIO $ [_title | InR CodeAction{_title} <- actions, _title == newImp ] @?= [] + +suggestAddRecordFieldImportTests :: TestTree +suggestAddRecordFieldImportTests = testGroup "suggest imports of record fields when using OverloadedRecordDot" + [ testGroup "The field is suggested when an instance resolution failure occurs" + ([ ignoreForGhcVersions [GHC96] "Extension not present <9.2, and the assist is derived from the help message in >=9.4" theTest + ] + ++ [ + theTestIndirect qualifiedGhcRecords polymorphicType + | + qualifiedGhcRecords <- [False, True] + , polymorphicType <- [False, True] + ]) + ] + where + theTest = testSessionWithExtraFiles "hover" def $ \dir -> do + configureCheckProject False + let before = T.unlines ["module A where", "import B (Foo)", "getFoo :: Foo -> Int", "getFoo x = x.foo"] + after = T.unlines ["module A where", "import B (Foo, foo)", "getFoo :: Foo -> Int", "getFoo x = x.foo"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B]}}" + liftIO $ writeFileUTF8 (dir "hie.yaml") cradle + liftIO $ writeFileUTF8 (dir "B.hs") $ unlines ["module B where", "data Foo = Foo { foo :: Int }"] + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 3 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "Add foo to the import list of B" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + + theTestIndirect qualifiedGhcRecords polymorphicType = testGroup + ((if qualifiedGhcRecords then "qualified-" else "unqualified-") + <> ("HasField " :: String) + <> + (if polymorphicType then "polymorphic-" else "monomorphic-") + <> "type ") + . (\x -> [x]) $ testSessionWithExtraFiles "hover" def $ \dir -> do + -- Hopefully enable project indexing? + configureCheckProject True + + let + before = T.unlines ["{-# LANGUAGE OverloadedRecordDot #-}", "module A where", if qualifiedGhcRecords then "" else "import GHC.Records", "import C (bar)", "spam = bar.foo"] + after = T.unlines ["{-# LANGUAGE OverloadedRecordDot #-}", "module A where", if qualifiedGhcRecords then "" else "import GHC.Records", "import C (bar)", "import B (Foo(..))", "spam = bar.foo"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B, C]}}" + liftIO $ writeFileUTF8 (dir "hie.yaml") cradle + liftIO $ writeFileUTF8 (dir "B.hs") $ unlines ["module B where", if polymorphicType then "data Foo x = Foo { foo :: x }" else "data Foo = Foo { foo :: Int }"] + liftIO $ writeFileUTF8 (dir "C.hs") $ unlines ["module C where", "import B", "bar = Foo 10" ] + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 4 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "import B (Foo(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? 
contentAfterAction + +suggestAddRecordFieldUpdateImportTests :: TestTree +suggestAddRecordFieldUpdateImportTests = testGroup "suggest imports of record fields in update" + [ testGroup "implicit import of type" [theTest ] ] + where + theTest = testSessionWithExtraFiles "hover" def $ \dir -> do + configureCheckProject True + + let + before = T.unlines ["module C where", "import B", "biz = bar { foo = 100 }"] + after = T.unlines ["module C where", "import B", "import A (Foo(..))", "biz = bar { foo = 100 }"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, B, C]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + liftIO $ writeFileUTF8 (dir </> "A.hs") $ unlines ["module A where", "data Foo = Foo { foo :: Int }"] + liftIO $ writeFileUTF8 (dir </> "B.hs") $ unlines ["module B where", "import A", "bar = Foo 10" ] + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + diags <- waitForDiagnostics + liftIO $ print diags + let defLine = 2 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + liftIO $ print actions + action <- pickActionWithTitle "import A (Foo(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + +extractNotInScopeNameTests :: TestTree +extractNotInScopeNameTests = + testGroup "extractNotInScopeName" [ + testGroup "record field" [ + testCase ">=ghc 910" $ Refactor.extractNotInScopeName "Not in scope: ‘foo’" @=? Just (NotInScopeThing "foo"), + testCase " do + configureCheckProject False + let before = T.unlines ["module A where", "import Data.Coerce (coerce)", "import Data.Semigroup (Sum)", "bar = coerce (10 :: Int) :: Sum Int"] + after = T.unlines ["module A where", "import Data.Coerce (coerce)", "import Data.Semigroup (Sum)", "import Data.Semigroup (Sum(..))", "bar = coerce (10 :: Int) :: Sum Int"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 3 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "import Data.Semigroup (Sum(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=?
contentAfterAction + +suggestAddGenericMissingConstructorImportTests :: TestTree +suggestAddGenericMissingConstructorImportTests = testGroup "suggest imports of type constructors when using generic deriving" + [ testGroup "The type constructors are suggested when not in scope" + [ theTest + ] + ] + where + theTest = testSessionWithExtraFiles "hover" def $ \dir -> do + configureCheckProject False + let + before = T.unlines ["module A where", "import GHC.Generics", "import Data.Semigroup (Sum)", "deriving instance Generic (Sum Int)"] + after = T.unlines ["module A where", "import GHC.Generics", "import Data.Semigroup (Sum)", "import Data.Semigroup (Sum(..))", "deriving instance Generic (Sum Int)"] + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A]}}" + liftIO $ writeFileUTF8 (dir </> "hie.yaml") cradle + doc <- createDoc "Test.hs" "haskell" before + waitForProgressDone + _ <- waitForDiagnostics + let defLine = 3 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + action <- pickActionWithTitle "import Data.Semigroup (Sum(..))" actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + + +suggestImportDisambiguationTests :: TestTree +suggestImportDisambiguationTests = testGroup "suggest import disambiguation actions" + [ testGroup "Hiding strategy works" + [ testGroup "fromList" + [ testCase "AVec" $ + compareHideFunctionTo [(8,9),(10,8)] + "Use AVec for fromList, hiding other imports" + "HideFunction.expected.fromList.A.hs" + , testCase "BVec" $ + compareHideFunctionTo [(8,9),(10,8)] + "Use BVec for fromList, hiding other imports" + "HideFunction.expected.fromList.B.hs" + ] + , testGroup "(++)" + [ testCase "EVec" $ + compareHideFunctionTo [(8,9),(10,8)] + "Use EVec for ++, hiding other imports" + "HideFunction.expected.append.E.hs" + , testCase "Hide functions without local" $ + compareTwo + "HideFunctionWithoutLocal.hs" [(8,8)] + "Use local definition for ++, hiding other imports" + "HideFunctionWithoutLocal.expected.hs" + , testCase "Prelude" $ + compareHideFunctionTo [(8,9),(10,8)] + "Use Prelude for ++, hiding other imports" + "HideFunction.expected.append.Prelude.hs" + , testCase "Prelude and local definition, infix" $ + compareTwo + "HidePreludeLocalInfix.hs" [(2,19)] + "Use local definition for ++, hiding other imports" + "HidePreludeLocalInfix.expected.hs" + , testCase "AVec, indented" $ + compareTwo "HidePreludeIndented.hs" [(3,8)] + "Use AVec for ++, hiding other imports" + "HidePreludeIndented.expected.hs" + ] + , testGroup "Vec (type)" + [ testCase "AVec" $ + compareTwo + "HideType.hs" [(8,15)] + "Use AVec for Vec, hiding other imports" + "HideType.expected.A.hs" + , testCase "EVec" $ + compareTwo + "HideType.hs" [(8,15)] + "Use EVec for Vec, hiding other imports" + "HideType.expected.E.hs" + ] + ] + , testGroup "Qualify strategy" + [ testCase "won't suggest full name for qualified module" $ + withHideFunction [(8,9),(10,8)] $ \_ _ actions -> do + liftIO $ + assertBool "EVec.fromList must not be suggested" $ + "Replace with qualified: EVec.fromList" `notElem` + [ actionTitle + | InR CodeAction { _title = actionTitle } <- actions + ] + liftIO $ + assertBool "EVec.++ must not be suggested" $ + "Replace with qualified: EVec.++" `notElem` + [ actionTitle + | InR CodeAction { _title = actionTitle } <- actions + ] + , testGroup "fromList" + [ testCase "EVec" $ + compareHideFunctionTo [(8,9),(10,8)] + "Replace with
qualified: E.fromList" + "HideFunction.expected.qualified.fromList.E.hs" + , testCase "Hide DuplicateRecordFields" $ + compareTwo + "HideQualifyDuplicateRecordFields.hs" [(9, 9)] + "Replace with qualified: AVec.fromList" + "HideQualifyDuplicateRecordFields.expected.hs" + , testCase "Duplicate record fields should not be imported" $ do + withTarget ("HideQualifyDuplicateRecordFields" <.> ".hs") [(9, 9)] $ + \_ _ actions -> do + liftIO $ + assertBool "Hidings should not be presented while DuplicateRecordFields exists" $ + all not [ actionTitle =~ T.pack "Use ([A-Za-z][A-Za-z0-9]*) for fromList, hiding other imports" + | InR CodeAction { _title = actionTitle } <- actions] + withTarget ("HideQualifyDuplicateRecordFieldsSelf" <.> ".hs") [(4, 4)] $ + \_ _ actions -> do + liftIO $ + assertBool "ambiguity from DuplicateRecordFields should not be imported" $ + null actions + ] + , testGroup "(++)" + [ testCase "Prelude, parensed" $ + compareHideFunctionTo [(8,9),(10,8)] + "Replace with qualified: Prelude.++" + "HideFunction.expected.qualified.append.Prelude.hs" + , testCase "Prelude, infix" $ + compareTwo + "HideQualifyInfix.hs" [(4,19)] + "Replace with qualified: Prelude.++" + "HideQualifyInfix.expected.hs" + , testCase "Prelude, left section" $ + compareTwo + "HideQualifySectionLeft.hs" [(4,15)] + "Replace with qualified: Prelude.++" + "HideQualifySectionLeft.expected.hs" + , testCase "Prelude, right section" $ + compareTwo + "HideQualifySectionRight.hs" [(4,18)] + "Replace with qualified: Prelude.++" + "HideQualifySectionRight.expected.hs" + ] + ] + ] + where + compareTwo original locs cmd expected = + withTarget original locs $ \dir doc actions -> do + expected <- liftIO $ + readFileUtf8 (dir </> expected) + action <- pickActionWithTitle cmd actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ T.replace "\r\n" "\n" expected @=?
contentAfterAction + compareHideFunctionTo = compareTwo "HideFunction.hs" + withTarget file locs k = runWithExtraFiles "hiding" $ \dir -> do + doc <- openDoc file "haskell" + void $ expectDiagnostics [(file, [(DiagnosticSeverity_Error, loc, "Ambiguous occurrence", Nothing) | loc <- locs])] -- Since GHC 9.8: GHC-87110 + actions <- getAllCodeActions doc + k dir doc actions + withHideFunction = withTarget ("HideFunction" <.> "hs") + +suggestHideShadowTests :: TestTree +suggestHideShadowTests = + testGroup + "suggest hide shadow" + [ testGroup + "single" + [ testOneCodeAction + "hide unused" + "Hide on from Data.Function" + (1, 2) + (1, 4) + [ "import Data.Function" + , "f on = on" + , "g on = on" + ] + [ "import Data.Function hiding (on)" + , "f on = on" + , "g on = on" + ] + , testOneCodeAction + "extend hiding unused" + "Hide on from Data.Function" + (1, 2) + (1, 4) + [ "import Data.Function hiding ((&))" + , "f on = on" + ] + [ "import Data.Function hiding (on, (&))" + , "f on = on" + ] + , testOneCodeAction + "delete unused" + "Hide on from Data.Function" + (1, 2) + (1, 4) + [ "import Data.Function ((&), on)" + , "f on = on" + ] + [ "import Data.Function ((&))" + , "f on = on" + ] + , testOneCodeAction + "hide operator" + "Hide & from Data.Function" + (1, 2) + (1, 5) + [ "import Data.Function" + , "f (&) = (&)" + ] + [ "import Data.Function hiding ((&))" + , "f (&) = (&)" + ] + , testOneCodeAction + "remove operator" + "Hide & from Data.Function" + (1, 2) + (1, 5) + [ "import Data.Function ((&), on)" + , "f (&) = (&)" + ] + [ "import Data.Function ( on)" + , "f (&) = (&)" + ] + , noCodeAction + "don't remove already used" + (2, 2) + (2, 4) + [ "import Data.Function" + , "g = on" + , "f on = on" + ] + ] + , testGroup + "multi" + [ testOneCodeAction + "hide from B" + "Hide ++ from B" + (2, 2) + (2, 6) + [ "import B" + , "import C" + , "f (++) = (++)" + ] + [ "import B hiding ((++))" + , "import C" + , "f (++) = (++)" + ] + , testOneCodeAction + "hide from C" + "Hide ++ from C" + (2, 2) + (2, 6) + [ "import B" + , "import C" + , "f (++) = (++)" + ] + [ "import B" + , "import C hiding ((++))" + , "f (++) = (++)" + ] + , testOneCodeAction + "hide from Prelude" + "Hide ++ from Prelude" + (2, 2) + (2, 6) + [ "import B" + , "import C" + , "f (++) = (++)" + ] + [ "import B" + , "import C" + , "import Prelude hiding ((++))" + , "f (++) = (++)" + ] + , testMultiCodeActions + "manual hide all" + [ "Hide ++ from Prelude" + , "Hide ++ from C" + , "Hide ++ from B" + ] + (2, 2) + (2, 6) + [ "import B" + , "import C" + , "f (++) = (++)" + ] + [ "import B hiding ((++))" + , "import C hiding ((++))" + , "import Prelude hiding ((++))" + , "f (++) = (++)" + ] + , testOneCodeAction + "auto hide all" + "Hide ++ from all occurrence imports" + (2, 2) + (2, 6) + [ "import B" + , "import C" + , "f (++) = (++)" + ] + [ "import B hiding ((++))" + , "import C hiding ((++))" + , "import Prelude hiding ((++))" + , "f (++) = (++)" + ] + ] + ] + where + testOneCodeAction testName actionName start end origin expected = + helper testName start end origin expected $ \cas -> do + action <- pickActionWithTitle actionName cas + executeCodeAction action + noCodeAction testName start end origin = + helper testName start end origin origin $ \cas -> do + liftIO $ cas @?= [] + testMultiCodeActions testName actionNames start end origin expected = + helper testName start end origin expected $ \cas -> do + let r = [ca | (InR ca) <- cas, ca ^. L.title `elem` actionNames] + liftIO $ + (length r == length actionNames) + @? 
"Expected " <> show actionNames <> ", but got " <> show cas <> " which is not its superset" + forM_ r executeCodeAction + helper testName (line1, col1) (line2, col2) origin expected k = testSession testName $ do + void $ createDoc "B.hs" "haskell" $ T.unlines docB + void $ createDoc "C.hs" "haskell" $ T.unlines docC + doc <- createDoc "A.hs" "haskell" $ T.unlines (header <> origin) + void waitForDiagnostics + waitForProgressDone + cas <- getCodeActions doc (Range (Position (fromIntegral $ line1 + length header) col1) (Position (fromIntegral $ line2 + length header) col2)) + void $ k [x | x@(InR ca) <- cas, "Hide" `T.isPrefixOf` (ca ^. L.title)] + contentAfter <- documentContents doc + liftIO $ contentAfter @?= T.unlines (header <> expected) + header = + [ "{-# OPTIONS_GHC -Wname-shadowing #-}" + , "module A where" + , "" + ] + -- for multi group + docB = + [ "module B where" + , "(++) = id" + ] + docC = + [ "module C where" + , "(++) = id" + ] + +insertNewDefinitionTests :: TestTree +insertNewDefinitionTests = testGroup "insert new definition actions" + [ testSession "insert new function definition" $ do + let txtB = + ["foo True = select [True]" + , "" + ,"foo False = False" + ] + txtB' = + ["" + ,"someOtherCode = ()" + ] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines $ txtB ++ txtB') + _ <- waitForDiagnostics + action <- pickActionWithTitle "Define select :: [Bool] -> Bool" + =<< getCodeActions docB (R 0 0 0 50) + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines (txtB ++ + [ "" + , "select :: [Bool] -> Bool" + , "select = _" + ] + ++ txtB') + , testSession "insert new function definition - with similar suggestion in scope" $ do + doc <- createDoc "Module.hs" "haskell" $ T.unlines + [ "import Control.Monad" -- brings `mplus` into scope, leading to additional suggestion + -- "Perhaps use \8216mplus\8217 (imported from Control.Monad)" + , "f :: Int -> Int" + , "f x = plus x x" + ] + _ <- waitForDiagnostics + action <- pickActionWithTitle "Define plus :: Int -> Int -> Int" + =<< getCodeActions doc (R 2 0 2 13) + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ contentAfterAction @?= T.unlines + [ "import Control.Monad" + , "f :: Int -> Int" + , "f x = plus x x" + , "" + , "plus :: Int -> Int -> Int" + , "plus = _" + ] + , testSession "define a hole" $ do + let txtB = + ["foo True = _select [True]" + , "" + ,"foo False = False" + ] + txtB' = + ["" + ,"someOtherCode = ()" + ] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines $ txtB ++ txtB') + _ <- waitForDiagnostics + action <- pickActionWithTitle "Define select :: [Bool] -> Bool" + =<< getCodeActions docB (R 0 0 0 50) + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines ( + ["foo True = select [True]" + , "" + ,"foo False = False" + , "" + , "select :: [Bool] -> Bool" + , "select = _" + ] + ++ txtB') + , testSession "insert new function definition - Haddock comments" $ do + let start = [ "foo :: Int -> Bool" + , "foo x = select (x + 1)" + , "" + , "-- | This is a haddock comment" + , "haddock :: Int -> Int" + , "haddock = undefined" + ] + let expected = ["foo :: Int -> Bool" + , "foo x = select (x + 1)" + , "" + , "select :: Int -> Bool" + , "select = _" + , "" + , "-- | This is a haddock comment" + , "haddock :: Int -> Int" + , "haddock = undefined" + ] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines start) + _ <- waitForDiagnostics + action <- pickActionWithTitle 
"Define select :: Int -> Bool" + =<< getCodeActions docB (R 1 8 1 14) + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines expected + , testSession "insert new function definition - normal comments" $ do + let start = ["foo :: Int -> Bool" + , "foo x = select (x + 1)" + , "" + , "-- This is a normal comment" + , "normal :: Int -> Int" + , "normal = undefined" + ] + let expected = ["foo :: Int -> Bool" + , "foo x = select (x + 1)" + , "" + , "select :: Int -> Bool" + , "select = _" + , "" + , "-- This is a normal comment" + , "normal :: Int -> Int" + , "normal = undefined"] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines start) + _ <- waitForDiagnostics + action <- pickActionWithTitle "Define select :: Int -> Bool" + =<< getCodeActions docB (R 1 8 1 14) + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines expected + , testSession "insert new function definition - untyped error" $ do + let txtB = + ["foo = select" + ] + txtB' = + ["" + ,"someOtherCode = ()" + ] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines $ txtB ++ txtB') + _ <- waitForDiagnostics + action <- pickActionWithTitle "Define select :: _" =<< getCodeActions docB (R 0 0 0 50) + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines (txtB ++ + [ "" + , "select :: _" + , "select = _" + ] + ++ txtB') + ] + + +deleteUnusedDefinitionTests :: TestTree +deleteUnusedDefinitionTests = testGroup "delete unused definition action" + [ testSession "delete unused top level binding" $ + testFor + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "f :: Int -> Int" + , "f 1 = let a = 1" + , " in a" + , "f 2 = 2" + , "" + , "some = ()" + ] + (4, 0) + "Delete ‘f’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "some = ()" + ] + , testSession "delete unused top level binding defined in infix form" $ + testFor + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "myPlus :: Int -> Int -> Int" + , "a `myPlus` b = a + b" + , "" + , "some = ()" + ] + (4, 2) + "Delete ‘myPlus’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "some = ()" + ] + , testSession "delete unused binding in where clause" $ + testFor + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (h, g) where" + , "" + , "h :: Int" + , "h = 3" + , "" + , "g :: Int" + , "g = 6" + , " where" + , " h :: Int" + , " h = 4" + , "" + ] + (10, 4) + "Delete ‘h’" + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (h, g) where" + , "" + , "h :: Int" + , "h = 3" + , "" + , "g :: Int" + , "g = 6" + , " where" + , "" + ] + , testSession "delete unused binding with multi-oneline signatures front" $ + testFor + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (b, c) where" + , "" + , "a, b, c :: Int" + , "a = 3" + , "b = 4" + , "c = 5" + ] + (4, 0) + "Delete ‘a’" + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (b, c) where" + , "" + , "b, c :: Int" + , "b = 4" + , "c = 5" + ] + , testSession "delete unused binding with multi-oneline signatures mid" $ + testFor + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, c) where" + , "" + , "a, b, c :: Int" + , "a = 3" + , "b = 4" + , "c = 5" + ] + (5, 0) + "Delete ‘b’" + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, c) where" + , "" + , "a, c :: Int" + , "a = 3" + , "c = 5" + ] + , testSession "delete 
unused binding with multi-oneline signatures end" $ + testFor + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, b) where" + , "" + , "a, b, c :: Int" + , "a = 3" + , "b = 4" + , "c = 5" + ] + (6, 0) + "Delete ‘c’" + [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, b) where" + , "" + , "a, b :: Int" + , "a = 3" + , "b = 4" + ] + ] + where + testFor sourceLines pos@(l,c) expectedTitle expectedLines = do + docId <- createDoc "A.hs" "haskell" $ T.unlines sourceLines + expectDiagnostics [ ("A.hs", [(DiagnosticSeverity_Warning, pos, "not used", Nothing)]) ] + action <- pickActionWithTitle expectedTitle =<< getCodeActions docId (R l c l c) + executeCodeAction action + contentAfterAction <- documentContents docId + liftIO $ contentAfterAction @?= T.unlines expectedLines + +addTypeAnnotationsToLiteralsTest :: TestTree +addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals to satisfy constraints" + [ testSession "add default type to satisfy one constraint" $ + testFor + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A (f) where" + , "" + , "f = 1" + ] + [ (DiagnosticSeverity_Warning, (3, 4), "Defaulting the type variable", Nothing) ] + "Add type annotation ‘Integer’ to ‘1’" + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A (f) where" + , "" + , "f = (1 :: Integer)" + ] + + , testSession "add default type to satisfy one constraint in nested expressions" $ + testFor + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A where" + , "" + , "f =" + , " let x = 3" + , " in x" + ] + [ (DiagnosticSeverity_Warning, (4, 12), "Defaulting the type variable", Nothing) ] + "Add type annotation ‘Integer’ to ‘3’" + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A where" + , "" + , "f =" + , " let x = (3 :: Integer)" + , " in x" + ] + , testSession "add default type to satisfy one constraint in more nested expressions" $ + testFor + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A where" + , "" + , "f =" + , " let x = let y = 5 in y" + , " in x" + ] + [ (DiagnosticSeverity_Warning, (4, 20), "Defaulting the type variable", Nothing) ] + "Add type annotation ‘Integer’ to ‘5’" + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A where" + , "" + , "f =" + , " let x = let y = (5 :: Integer) in y" + , " in x" + ] + , testSession "add default type to satisfy one constraint with duplicate literals" $ + testFor + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq \"debug\" traceShow \"debug\"" + ] + [ (DiagnosticSeverity_Warning, (6, 8), "Defaulting the type variable", Nothing) + , (DiagnosticSeverity_Warning, (6, 16), "Defaulting the type variable", Nothing) + ] + ("Add type annotation ‘" <> stringLit <> "’ to ‘\"debug\"’") + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq (\"debug\" :: "<> stringLit <> ") traceShow \"debug\"" + ] + , testSession "add default type to satisfy two constraints" $ + testFor + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f a = traceShow \"debug\" a" + ] + [ (DiagnosticSeverity_Warning, (6, 6), "Defaulting the type variable", Nothing) ] + ("Add type annotation ‘" <> stringLit <> "’ to ‘\"debug\"’") + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , 
"import Debug.Trace" + , "" + , "f a = traceShow (\"debug\" :: " <> stringLit <> ") a" + ] + , testSession "add default type to satisfy two constraints with duplicate literals" $ + testFor + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow \"debug\"))" + ] + [ (DiagnosticSeverity_Warning, (6, 54), "Defaulting the type variable", Nothing) ] + ("Add type annotation ‘"<> stringLit <>"’ to ‘\"debug\"’") + [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow (\"debug\" :: "<> stringLit <> ")))" + ] + ] + where + stringLit = if ghcVersion >= GHC912 then "[Char]" else "String" + testFor sourceLines diag expectedTitle expectedLines = do + docId <- createDoc "A.hs" "haskell" $ T.unlines sourceLines + expectDiagnostics [ ("A.hs", diag) ] + let cursors = map (\(_, snd, _, _) -> snd) diag + (ls, cs) = minimum cursors + (le, ce) = maximum cursors + + action <- pickActionWithTitle expectedTitle =<< getCodeActions docId (R ls cs le ce) + executeCodeAction action + contentAfterAction <- documentContents docId + liftIO $ contentAfterAction @?= T.unlines expectedLines + + +fixConstructorImportTests :: TestTree +fixConstructorImportTests = testGroup "fix import actions" + [ testSession "fix constructor import" $ template + (T.unlines + [ "module ModuleA where" + , "data A = Constructor" + ]) + (T.unlines + [ "module ModuleB where" + , "import ModuleA(Constructor)" + ]) + (Range (Position 1 15) (Position 1 26)) + "Fix import of A(Constructor)" + (T.unlines + [ "module ModuleB where" + , "import ModuleA(A(Constructor))" + ]) + ] + where + template contentA contentB range expectedAction expectedContentB = do + _docA <- createDoc "ModuleA.hs" "haskell" contentA + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + action <- pickActionWithTitle expectedAction =<< getCodeActions docB range + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ expectedContentB @=? contentAfterAction + +importRenameActionTests :: TestTree +importRenameActionTests = testGroup "import rename actions" $ + fmap check ["Map", "Maybe"] + where + check modname = checkCodeAction + ("Data.Mape -> Data." <> T.unpack modname) + ("Replace with Data." <> modname) + (T.unlines + [ "module Testing where" + , "import Data.Mape" + ]) + (T.unlines + [ "module Testing where" + , "import Data." 
<> modname + ]) + +fillTypedHoleTests :: TestTree +fillTypedHoleTests = let + + sourceCode :: T.Text -> T.Text -> T.Text -> T.Text + sourceCode a b c = T.unlines + [ "module Testing where" + , "" + , "globalConvert :: Int -> String" + , "globalConvert = undefined" + , "" + , "globalInt :: Int" + , "globalInt = 3" + , "" + , "bar :: Int -> Int -> String" + , "bar n parameterInt = " <> a <> " (n + " <> b <> " + " <> c <> ") where" + , " localConvert = (flip replicate) 'x'" + , "" + , "foo :: () -> Int -> String" + , "foo = undefined" + ] + + check :: T.Text -> T.Text -> T.Text -> T.Text -> T.Text -> T.Text -> T.Text -> TestTree + check actionTitle + oldA oldB oldC + newA newB newC = testSession (T.unpack actionTitle) $ do + let originalCode = sourceCode oldA oldB oldC + let expectedCode = sourceCode newA newB newC + doc <- createDoc "Testing.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 9 0) (Position 9 maxBound)) + chosenAction <- pickActionWithTitle actionTitle actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + in + testGroup "fill typed holes" + [ check "Replace _ with show" + "_" "n" "n" + "show" "n" "n" + + , check "Replace _ with globalConvert" + "_" "n" "n" + "globalConvert" "n" "n" + + , check "Replace _convertme with localConvert" + "_convertme" "n" "n" + "localConvert" "n" "n" + + , check "Replace _b with globalInt" + "_a" "_b" "_c" + "_a" "globalInt" "_c" + + , check "Replace _c with globalInt" + "_a" "_b" "_c" + "_a" "_b" "globalInt" + + , check "Replace _c with parameterInt" + "_a" "_b" "_c" + "_a" "_b" "parameterInt" + , check "Replace _ with foo _" + "_" "n" "n" + "(foo _)" "n" "n" + , testSession "Replace _toException with E.toException" $ do + let mkDoc x = T.unlines + [ "module Testing where" + , "import qualified Control.Exception as E" + , "ioToSome :: E.IOException -> E.SomeException" + , "ioToSome = " <> x ] + doc <- createDoc "Test.hs" "haskell" $ mkDoc "_toException" + _ <- waitForDiagnostics + actions <- getCodeActions doc (Range (Position 3 0) (Position 3 maxBound)) + chosen <- pickActionWithTitle "Replace _toException with E.toException" actions + executeCodeAction chosen + modifiedCode <- documentContents doc + liftIO $ mkDoc "E.toException" @=? modifiedCode + , testSession "filling infix type hole uses prefix notation" $ do + let mkDoc x = T.unlines + [ "module Testing where" + , "data A = A" + , "foo :: A -> A -> A" + , "foo A A = A" + , "test :: A -> A -> A" + , "test a1 a2 = a1 " <> x <> " a2" + ] + doc <- createDoc "Test.hs" "haskell" $ mkDoc "`_`" + _ <- waitForDiagnostics + actions <- getCodeActions doc (Range (Position 5 16) (Position 5 19)) + chosen <- pickActionWithTitle "Replace _ with foo" actions + executeCodeAction chosen + modifiedCode <- documentContents doc + liftIO $ mkDoc "`foo`" @=? modifiedCode + , testSession "postfix hole uses postfix notation of infix operator" $ do + let mkDoc x = T.unlines + [ "module Testing where" + , "test :: Int -> Maybe Int -> Maybe Int" + , "test a ma = " <> x <> " (a +) ma" + ] + doc <- createDoc "Test.hs" "haskell" $ mkDoc "_" + _ <- waitForDiagnostics + actions <- getCodeActions doc (Range (Position 2 13) (Position 2 14)) + chosen <- pickActionWithTitle "Replace _ with (<$>)" actions + executeCodeAction chosen + modifiedCode <- documentContents doc + liftIO $ mkDoc "(<$>)" @=? 
modifiedCode + , testSession "filling infix type hole uses infix operator" $ do + let mkDoc x = T.unlines + [ "module Testing where" + , "test :: Int -> Maybe Int -> Maybe Int" + , "test a ma = (a +) " <> x <> " ma" + ] + doc <- createDoc "Test.hs" "haskell" $ mkDoc "`_`" + _ <- waitForDiagnostics + actions <- getCodeActions doc (Range (Position 2 16) (Position 2 19)) + chosen <- pickActionWithTitle "Replace _ with (<$>)" actions + executeCodeAction chosen + modifiedCode <- documentContents doc + liftIO $ mkDoc "<$>" @=? modifiedCode + ] + +addInstanceConstraintTests :: TestTree +addInstanceConstraintTests = let + missingConstraintSourceCode :: Maybe T.Text -> T.Text + missingConstraintSourceCode mConstraint = + let constraint = maybe "" (<> " => ") mConstraint + in T.unlines + [ "module Testing where" + , "" + , "data Wrap a = Wrap a" + , "" + , "instance " <> constraint <> "Eq (Wrap a) where" + , " (Wrap x) == (Wrap y) = x == y" + ] + + incompleteConstraintSourceCode :: Maybe T.Text -> T.Text + incompleteConstraintSourceCode mConstraint = + let constraint = maybe "Eq a" (\c -> "(Eq a, " <> c <> ")") mConstraint + in T.unlines + [ "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "instance " <> constraint <> " => Eq (Pair a b) where" + , " (Pair x y) == (Pair x' y') = x == x' && y == y'" + ] + + incompleteConstraintSourceCode2 :: Maybe T.Text -> T.Text + incompleteConstraintSourceCode2 mConstraint = + let constraint = maybe "(Eq a, Eq b)" (\c -> "(Eq a, Eq b, " <> c <> ")") mConstraint + in T.unlines + [ "module Testing where" + , "" + , "data Three a b c = Three a b c" + , "" + , "instance " <> constraint <> " => Eq (Three a b c) where" + , " (Three x y z) == (Three x' y' z') = x == x' && y == y' && z == z'" + ] + + check :: T.Text -> T.Text -> T.Text -> TestTree + check actionTitle originalCode expectedCode = + checkCodeAction (T.unpack actionTitle) actionTitle originalCode expectedCode + + in testGroup "add instance constraint" + [ check + "Add `Eq a` to the context of the instance declaration" + (missingConstraintSourceCode Nothing) + (missingConstraintSourceCode $ Just "Eq a") + , check + "Add `Eq b` to the context of the instance declaration" + (incompleteConstraintSourceCode Nothing) + (incompleteConstraintSourceCode $ Just "Eq b") + , check + "Add `Eq c` to the context of the instance declaration" + (incompleteConstraintSourceCode2 Nothing) + (incompleteConstraintSourceCode2 $ Just "Eq c") + ] + +addFunctionConstraintTests :: TestTree +addFunctionConstraintTests = let + missingConstraintSourceCode :: T.Text -> T.Text + missingConstraintSourceCode constraint = + T.unlines + [ "module Testing where" + , "" + , "eq :: " <> constraint <> "a -> a -> Bool" + , "eq x y = x == y" + ] + + missingConstraintWithForAllSourceCode :: T.Text -> T.Text + missingConstraintWithForAllSourceCode constraint = + T.unlines + [ "{-# LANGUAGE ExplicitForAll #-}" + , "module Testing where" + , "" + , "eq :: forall a. 
" <> constraint <> "a -> a -> Bool" + , "eq x y = x == y" + ] + + incompleteConstraintWithForAllSourceCode :: T.Text -> T.Text + incompleteConstraintWithForAllSourceCode constraint = + T.unlines + [ "{-# LANGUAGE ExplicitForAll #-}" + , "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "eq :: " <> constraint <> " => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + incompleteConstraintSourceCode :: T.Text -> T.Text + incompleteConstraintSourceCode constraint = + T.unlines + [ "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "eq :: " <> constraint <> " => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + incompleteConstraintSourceCode2 :: T.Text -> T.Text + incompleteConstraintSourceCode2 constraint = + T.unlines + [ "module Testing where" + , "" + , "data Three a b c = Three a b c" + , "" + , "eq :: " <> constraint <> " => Three a b c -> Three a b c -> Bool" + , "eq (Three x y z) (Three x' y' z') = x == x' && y == y' && z == z'" + ] + + incompleteConstraintSourceCodeWithExtraCharsInContext :: T.Text -> T.Text + incompleteConstraintSourceCodeWithExtraCharsInContext constraint = + T.unlines + [ "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "eq :: ( " <> constraint <> " ) => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + incompleteConstraintSourceCodeWithNewlinesInTypeSignature :: T.Text -> T.Text + incompleteConstraintSourceCodeWithNewlinesInTypeSignature constraint = + T.unlines + [ "module Testing where" + , "data Pair a b = Pair a b" + , "eq " + , " :: (" <> constraint <> ")" + , " => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + -- See https://github.com/haskell/haskell-language-server/issues/4648 + -- When haddock comment appears after the =>, code action was introducing the + -- new constraint in the comment + incompleteConstraintSourceCodeWithCommentInTypeSignature :: T.Text -> T.Text + incompleteConstraintSourceCodeWithCommentInTypeSignature constraint = + T.unlines + + [ "module Testing where" + , "foo " + , " :: ("<> constraint <> ") =>" + , " -- This is a comment" + , " m ()" + , "foo = pure ()" + ] + + missingMonadConstraint constraint = T.unlines + [ "module Testing where" + , "f :: " <> constraint <> "m ()" + , "f = do " + , " return ()" + ] + + in testGroup "add function constraint" + [ checkCodeAction + "no preexisting constraint" + "Add `Eq a` to the context of the type signature for `eq`" + (missingConstraintSourceCode "") + (missingConstraintSourceCode "Eq a => ") + , checkCodeAction + "no preexisting constraint, with forall" + "Add `Eq a` to the context of the type signature for `eq`" + (missingConstraintWithForAllSourceCode "") + (missingConstraintWithForAllSourceCode "Eq a => ") + , checkCodeAction + "preexisting constraint, no parenthesis" + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintSourceCode "Eq a") + (incompleteConstraintSourceCode "(Eq a, Eq b)") + , checkCodeAction + "preexisting constraints in parenthesis" + "Add `Eq c` to the context of the type signature for `eq`" + (incompleteConstraintSourceCode2 "(Eq a, Eq b)") + (incompleteConstraintSourceCode2 "(Eq a, Eq b, Eq c)") + , checkCodeAction + "preexisting constraints with forall" + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintWithForAllSourceCode "Eq a") + 
(incompleteConstraintWithForAllSourceCode "(Eq a, Eq b)") + , checkCodeAction + "preexisting constraint, with extra spaces in context" + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintSourceCodeWithExtraCharsInContext "Eq a") + (incompleteConstraintSourceCodeWithExtraCharsInContext "Eq a, Eq b") + , checkCodeAction + "preexisting constraint, with newlines in type signature" + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "Eq a") + (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "Eq a, Eq b") + , checkCodeAction + "preexisting constraint, with haddock comment in type signature" + "Add `Applicative m` to the context of the type signature for `foo`" + (incompleteConstraintSourceCodeWithCommentInTypeSignature "") + (incompleteConstraintSourceCodeWithCommentInTypeSignature " Applicative m") + , checkCodeAction + "missing Monad constraint" + "Add `Monad m` to the context of the type signature for `f`" + (missingMonadConstraint "") + (missingMonadConstraint "Monad m => ") + ] + +checkCodeAction :: TestName -> T.Text -> T.Text -> T.Text -> TestTree +checkCodeAction testName actionTitle originalCode expectedCode = testSession testName $ do + doc <- createDoc "Testing.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getAllCodeActions doc + chosenAction <- pickActionWithTitle actionTitle actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + +addImplicitParamsConstraintTests :: TestTree +addImplicitParamsConstraintTests = + testGroup + "add missing implicit params constraints" + [ testGroup + "introduced" + [ let ex ctxtA = exampleCode "?a" ctxtA "" + in checkCodeAction "at top level" "Add ?a::() to the context of fBase" (ex "") (ex "?a::()"), + let ex ctxA = exampleCode "x where x = ?a" ctxA "" + in checkCodeAction "in nested def" "Add ?a::() to the context of fBase" (ex "") (ex "?a::()") + ], + testGroup + "inherited" + [ let ex = exampleCode "()" "?a::()" + in checkCodeAction + "with preexisting context" + "Add `?a::()` to the context of the type signature for `fCaller`" + (ex "Eq ()") + (ex "Eq (), ?a::()"), + let ex = exampleCode "()" "?a::()" + in checkCodeAction "without preexisting context" "Add ?a::() to the context of fCaller" (ex "") (ex "?a::()") + ] + ] + where + mkContext "" = "" + mkContext contents = "(" <> contents <> ") => " + + exampleCode bodyBase contextBase contextCaller = + T.unlines + [ "{-# LANGUAGE FlexibleContexts, ImplicitParams #-}", + "module Testing where", + "fBase :: " <> mkContext contextBase <> "()", + "fBase = " <> bodyBase, + "fCaller :: " <> mkContext contextCaller <> "()", + "fCaller = fBase" + ] + +removeRedundantConstraintsTests :: TestTree +removeRedundantConstraintsTests = let + header = + [ "{-# OPTIONS_GHC -Wredundant-constraints #-}" + , "module Testing where" + , "" + ] + + headerExt :: [T.Text] -> [T.Text] + headerExt exts = + redunt : extTxt ++ ["module Testing where"] + where + redunt = "{-# OPTIONS_GHC -Wredundant-constraints #-}" + extTxt = map (\ext -> "{-# LANGUAGE " <> ext <> " #-}") exts + + redundantConstraintsCode :: Maybe T.Text -> T.Text + redundantConstraintsCode mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ header <> + [ "foo :: " <> constraint <> "a -> a" + , "foo = id" + ] + + redundantMixedConstraintsCode :: Maybe T.Text -> T.Text + redundantMixedConstraintsCode 
mConstraint = + let constraint = maybe "(Num a, Eq a)" (\c -> "(Num a, Eq a, " <> c <> ")") mConstraint + in T.unlines $ header <> + [ "foo :: " <> constraint <> " => a -> Bool" + , "foo x = x == 1" + ] + + typeSignatureSpaces :: Maybe T.Text -> T.Text + typeSignatureSpaces mConstraint = + let constraint = maybe "(Num a, Eq a)" (\c -> "(Num a, Eq a, " <> c <> ")") mConstraint + in T.unlines $ header <> + [ "foo :: " <> constraint <> " => a -> Bool" + , "foo x = x == 1" + ] + + redundantConstraintsForall :: Maybe T.Text -> T.Text + redundantConstraintsForall mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ headerExt ["RankNTypes"] <> + [ "foo :: forall a. " <> constraint <> "a -> a" + , "foo = id" + ] + + typeSignatureDo :: Maybe T.Text -> T.Text + typeSignatureDo mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ header <> + [ "f :: Int -> IO ()" + , "f n = do" + , " let foo :: " <> constraint <> "a -> IO ()" + , " foo _ = return ()" + , " r n" + ] + + typeSignatureNested :: Maybe T.Text -> T.Text + typeSignatureNested mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ header <> + [ "f :: Int -> ()" + , "f = g" + , " where" + , " g :: " <> constraint <> "a -> ()" + , " g _ = ()" + ] + + typeSignatureNested' :: Maybe T.Text -> T.Text + typeSignatureNested' mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ header <> + [ "f :: Int -> ()" + , "f =" + , " let" + , " g :: Int -> ()" + , " g = h" + , " where" + , " h :: " <> constraint <> "a -> ()" + , " h _ = ()" + , " in g" + ] + + typeSignatureNested'' :: Maybe T.Text -> T.Text + typeSignatureNested'' mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ header <> + [ "f :: Int -> ()" + , "f = g" + , " where" + , " g :: Int -> ()" + , " g = " + , " let" + , " h :: " <> constraint <> "a -> ()" + , " h _ = ()" + , " in h" + ] + + typeSignatureLined1 = T.unlines $ header <> + [ "foo :: Eq a =>" + , " a -> Bool" + , "foo _ = True" + ] + + typeSignatureLined2 = T.unlines $ header <> + [ "foo :: (Eq a, Show a)" + , " => a -> Bool" + , "foo _ = True" + ] + + typeSignatureOneLine = T.unlines $ header <> + [ "foo :: a -> Bool" + , "foo _ = True" + ] + + typeSignatureLined3 = T.unlines $ header <> + [ "foo :: ( Eq a" + , " , Show a" + , " )" + , " => a -> Bool" + , "foo x = x == x" + ] + + typeSignatureLined3' = T.unlines $ header <> + [ "foo :: ( Eq a" + , " )" + , " => a -> Bool" + , "foo x = x == x" + ] + + + check :: T.Text -> T.Text -> T.Text -> TestTree + check actionTitle originalCode expectedCode = + checkCodeAction (T.unpack actionTitle) actionTitle originalCode expectedCode + + in testGroup "remove redundant function constraints" + [ check + "Remove redundant constraint `Eq a` from the context of the type signature for `foo`" + (redundantConstraintsCode $ Just "Eq a") + (redundantConstraintsCode Nothing) + , check + "Remove redundant constraints `(Eq a, Monoid a)` from the context of the type signature for `foo`" + (redundantConstraintsCode $ Just "(Eq a, Monoid a)") + (redundantConstraintsCode Nothing) + , check + "Remove redundant constraints `(Monoid a, Show a)` from the context of the type signature for `foo`" + (redundantMixedConstraintsCode $ Just "Monoid a, Show a") + (redundantMixedConstraintsCode Nothing) + , check + "Remove redundant constraint `Eq a` from the context of the type signature for `g`" + 
(typeSignatureNested $ Just "Eq a") + (typeSignatureNested Nothing) + , check + "Remove redundant constraint `Eq a` from the context of the type signature for `h`" + (typeSignatureNested' $ Just "Eq a") + (typeSignatureNested' Nothing) + , check + "Remove redundant constraint `Eq a` from the context of the type signature for `h`" + (typeSignatureNested'' $ Just "Eq a") + (typeSignatureNested'' Nothing) + , check + "Remove redundant constraint `Eq a` from the context of the type signature for `foo`" + (redundantConstraintsForall $ Just "Eq a") + (redundantConstraintsForall Nothing) + , check + "Remove redundant constraint `Eq a` from the context of the type signature for `foo`" + (typeSignatureDo $ Just "Eq a") + (typeSignatureDo Nothing) + , check + "Remove redundant constraints `(Monoid a, Show a)` from the context of the type signature for `foo`" + (typeSignatureSpaces $ Just "Monoid a, Show a") + (typeSignatureSpaces Nothing) + , check + "Remove redundant constraint `Eq a` from the context of the type signature for `foo`" + typeSignatureLined1 + typeSignatureOneLine + , check + "Remove redundant constraints `(Eq a, Show a)` from the context of the type signature for `foo`" + typeSignatureLined2 + typeSignatureOneLine + , check + "Remove redundant constraint `Show a` from the context of the type signature for `foo`" + typeSignatureLined3 + typeSignatureLined3' + ] + +addSigActionTests :: TestTree +addSigActionTests = let + header = [ "{-# OPTIONS_GHC -Wmissing-signatures -Wmissing-pattern-synonym-signatures #-}" + , "{-# LANGUAGE PatternSynonyms,BangPatterns,GADTs #-}" + , "module Sigs where" + , "data T1 a where" + , " MkT1 :: (Show b) => a -> b -> T1 a" + ] + before def = T.unlines $ header ++ [def] + after' def sig = T.unlines $ header ++ [sig, def] + + def >:: sig = testSession (T.unpack $ T.replace "\n" "\\n" def) $ do + let originalCode = before def + let expectedCode = after' def sig + doc <- createDoc "Sigs.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 5 1) (Position 5 maxBound)) + chosenAction <- pickActionWithTitle ("add signature: " <> sig) actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + issue806 = if ghcVersion >= GHC910 then + "hello = print" >:: "hello :: GHC.Types.ZonkAny 0 -> IO ()" -- GHC now returns ZonkAny 0 instead of Any. https://gitlab.haskell.org/ghc/ghc/-/issues/25895 + else + "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806 + in + testGroup "add signature" + [ "abc = True" >:: "abc :: Bool" + , "foo a b = a + b" >:: "foo :: Num a => a -> a -> a" + , "bar a b = show $ a + b" >:: "bar :: (Show a, Num a) => a -> a -> String" + , "(!!!) a b = a > b" >:: "(!!!) 
:: Ord a => a -> a -> Bool" + , "a >>>> b = a + b" >:: "(>>>>) :: Num a => a -> a -> a" + , "a `haha` b = a b" >:: "haha :: (t1 -> t2) -> t1 -> t2" + , issue806 + , "pattern Some a = Just a" >:: "pattern Some :: a -> Maybe a" + , "pattern Some a <- Just a" >:: "pattern Some :: a -> Maybe a" + , "pattern Some a <- Just a\n where Some a = Just a" >:: "pattern Some :: a -> Maybe a" + , "pattern Some a <- Just !a\n where Some !a = Just a" >:: "pattern Some :: a -> Maybe a" + , "pattern Point{x, y} = (x, y)" >:: "pattern Point :: a -> b -> (a, b)" + , "pattern Point{x, y} <- (x, y)" >:: "pattern Point :: a -> b -> (a, b)" + , "pattern Point{x, y} <- (x, y)\n where Point x y = (x, y)" >:: "pattern Point :: a -> b -> (a, b)" + , "pattern MkT1' b = MkT1 42 b" >:: "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a" + , "pattern MkT1' b <- MkT1 42 b" >:: "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a" + , "pattern MkT1' b <- MkT1 42 b\n where MkT1' b = MkT1 42 b" >:: "pattern MkT1' :: (Eq a, Num a) => Show b => b -> T1 a" + ] + +exportUnusedTests :: TestTree +exportUnusedTests = testGroup "export unused actions" + [ testGroup "don't want suggestion" -- in this test group we check that no code actions are created + [ testSession "implicit exports" $ templateNoAction + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# OPTIONS_GHC -Wmissing-signatures #-}" + , "module A where" + , "foo = id" + ] + (R 3 0 3 3) + "Export ‘foo’" + , testSession "not top-level" $ templateNoAction + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (foo,bar) where" + , "foo = ()" + , " where bar = ()" + , "bar = ()" + ] + (R 2 0 2 11) + "Export ‘bar’" + , testSession "type is exported but not the constructor of same name" $ templateNoAction + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo) where" + , "data Foo = Foo" + ] + (R 2 0 2 8) + "Export ‘Foo’" + , testSession "unused data field" $ templateNoAction + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(Foo)) where" + , "data Foo = Foo {foo :: ()}" + ] + (R 2 0 2 20) + "Export ‘foo’" + ] + , testGroup "want suggestion" + [ testSession "empty exports" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (" + , ") where" + , "foo = id" + ] + (R 3 0 3 3) + "Export ‘foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (" + , "foo) where" + , "foo = id" + ] + , testSession "single line explicit exports" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (foo) where" + , "foo = id" + , "bar = foo" + ] + (R 3 0 3 3) + "Export ‘bar’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (foo, bar) where" + , "foo = id" + , "bar = foo" + ] + , testSession "multi line explicit exports" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (" + , " foo) where" + , "foo = id" + , "bar = foo" + ] + (R 5 0 5 3) + "Export ‘bar’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (" + , " foo, bar) where" + , "foo = id" + , "bar = foo" + ] + , testSession "export list ends in comma" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (foo," + , " ) where" + , "foo = id" + , "bar = foo" + ] + (R 5 0 5 3) + "Export ‘bar’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (foo," + , " bar) where" + , "foo = id" + , "bar = foo" + ] + , testSession "style of multiple exports is preserved 1" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " ( foo" + 
, " , bar" + , " ) where" + , "foo = id" + , "bar = foo" + , "baz = bar" + ] + (R 7 0 7 3) + "Export ‘baz’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " ( foo" + , " , bar" + , " , baz" + , " ) where" + , "foo = id" + , "bar = foo" + , "baz = bar" + ] + , testSession "style of multiple exports is preserved 2" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " ( foo," + , " bar" + , " ) where" + , "foo = id" + , "bar = foo" + , "baz = bar" + ] + (R 7 0 7 3) + "Export ‘baz’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " ( foo," + , " bar," + , " baz" + , " ) where" + , "foo = id" + , "bar = foo" + , "baz = bar" + ] + , testSession "style of multiple exports is preserved and selects smallest export separator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " ( foo" + , " , bar" + , " -- * For testing" + , " , baz" + , " ) where" + , "foo = id" + , "bar = foo" + , "baz = bar" + , "quux = bar" + ] + (R 10 0 10 4) + "Export ‘quux’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " ( foo" + , " , bar" + , " -- * For testing" + , " , baz" + , " , quux" + , " ) where" + , "foo = id" + , "bar = foo" + , "baz = bar" + , "quux = bar" + ] + , testSession "unused pattern synonym" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A () where" + , "pattern Foo a <- (a, _)" + ] + (R 3 0 3 10) + "Export ‘Foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A (pattern Foo) where" + , "pattern Foo a <- (a, _)" + ] + , testSession "unused pattern synonym operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A () where" + , "pattern x :+ y = (x, y)" + ] + (R 3 0 3 12) + "Export ‘:+’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A (pattern (:+)) where" + , "pattern x :+ y = (x, y)" + ] + , testSession "unused data type" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "data Foo = Foo" + ] + (R 2 0 2 7) + "Export ‘Foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(..)) where" + , "data Foo = Foo" + ] + , testSession "unused newtype" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "newtype Foo = Foo ()" + ] + (R 2 0 2 10) + "Export ‘Foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(..)) where" + , "newtype Foo = Foo ()" + ] + , testSession "unused type synonym" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "type Foo = ()" + ] + (R 2 0 2 7) + "Export ‘Foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo) where" + , "type Foo = ()" + ] + , testSession "unused type family" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "module A () where" + , "type family Foo p" + ] + (R 3 0 3 15) + "Export ‘Foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "module A (Foo) where" + , "type family Foo p" + ] + , testSession "unused typeclass" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "class Foo a" + ] + (R 2 0 2 8) + "Export ‘Foo’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(..)) where" + , "class Foo a" + ] + , testSession "infix" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A 
() where" + , "a `f` b = ()" + ] + (R 2 0 2 11) + "Export ‘f’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (f) where" + , "a `f` b = ()" + ] + , testSession "function operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "(<|) = ($)" + ] + (R 2 0 2 9) + "Export ‘<|’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A ((<|)) where" + , "(<|) = ($)" + ] + , testSession "type synonym operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "type (:<) = ()" + ] + (R 3 0 3 13) + "Export ‘:<’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A ((:<)) where" + , "type (:<) = ()" + ] + , testSession "type family operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "type family (:<)" + ] + (R 4 0 4 15) + "Export ‘:<’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)) where" + , "type family (:<)" + ] + , testSession "typeclass operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "class (:<) a" + ] + (R 3 0 3 11) + "Export ‘:<’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "class (:<) a" + ] + , testSession "newtype operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "newtype (:<) = Foo ()" + ] + (R 3 0 3 20) + "Export ‘:<’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "newtype (:<) = Foo ()" + ] + , testSession "data type operator" $ template + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "data (:<) = Foo ()" + ] + (R 3 0 3 17) + "Export ‘:<’" + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "data (:<) = Foo ()" + ] + ] + ] + where + template origLines range actionTitle expectedLines = + exportTemplate (Just range) origLines actionTitle (Just expectedLines) + templateNoAction origLines range actionTitle = + exportTemplate (Just range) origLines actionTitle Nothing + +exportTemplate :: Maybe Range -> [T.Text] -> T.Text -> Maybe [T.Text] -> Session () +exportTemplate mRange initialLines expectedAction expectedLines = do + doc <- createDoc "A.hs" "haskell" $ T.unlines initialLines + _ <- waitForDiagnostics + actions <- case mRange of + Nothing -> getAllCodeActions doc + Just range -> getCodeActions doc range + case expectedLines of + Just content -> do + action <- pickActionWithTitle expectedAction actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ T.unlines content @=? 
contentAfterAction + Nothing -> + liftIO $ [_title | InR CodeAction{_title} <- actions, _title == expectedAction ] @?= [] + +removeExportTests :: TestTree +removeExportTests = testGroup "remove export actions" + [ testSession "single export" $ template + [ "module A ( a ) where" + , "b :: ()" + , "b = ()" + ] + "Remove ‘a’ from export" + [ "module A ( ) where" + , "b :: ()" + , "b = ()" + ] + , testSession "ending comma" $ template + [ "module A ( a, ) where" + , "b :: ()" + , "b = ()" + ] + "Remove ‘a’ from export" + [ "module A ( ) where" + , "b :: ()" + , "b = ()" + ] + , testSession "multiple exports" $ template + [ "module A (a , c, b ) where" + , "a, c :: ()" + , "a = ()" + , "c = ()" + ] + "Remove ‘b’ from export" + [ "module A (a , c ) where" + , "a, c :: ()" + , "a = ()" + , "c = ()" + ] + , testSession "not in scope constructor" $ template + [ "module A (A (X,Y,Z,(:<)), ab) where" + , "data A = X Int | Y | (:<) Int" + , "ab :: ()" + , "ab = ()" + ] + "Remove ‘Z’ from export" + [ "module A (A (X,Y,(:<)), ab) where" + , "data A = X Int | Y | (:<) Int" + , "ab :: ()" + , "ab = ()" + ] + , testSession "multiline export" $ template + [ "module A (a" + , " , b" + , " , (:*:)" + , " , ) where" + , "a,b :: ()" + , "a = ()" + , "b = ()" + ] + "Remove ‘:*:’ from export" + [ "module A (a" + , " , b" + , " " + , " , ) where" + , "a,b :: ()" + , "a = ()" + , "b = ()" + ] + , testSession "qualified re-export" $ template + [ "module A (M.x,a) where" + , "import qualified Data.List as M" + , "a :: ()" + , "a = ()" + ] + "Remove ‘M.x’ from export" + [ "module A (a) where" + , "import qualified Data.List as M" + , "a :: ()" + , "a = ()" + ] + , testSession "qualified re-export ending in '.'" $ template + [ "module A ((M.@.),a) where" + , "import qualified Data.List as M" + , "a :: ()" + , "a = ()" + ] + "Remove ‘M.@.’ from export" + [ "module A (a) where" + , "import qualified Data.List as M" + , "a :: ()" + , "a = ()" + ] + , testSession "export module" $ template + [ "module A (module B) where" + , "a :: ()" + , "a = ()" + ] + "Remove ‘module B’ from export" + [ "module A () where" + , "a :: ()" + , "a = ()" + ] + , testSession "dodgy export" $ template + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A (A (..)) where" + , "data X = X" + , "type A = X" + ] + "Remove ‘A(..)’ from export" + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A () where" + , "data X = X" + , "type A = X" + ] + , testSession "dodgy export" $ template + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A (A (..)) where" + , "data X = X" + , "type A = X" + ] + "Remove ‘A(..)’ from export" + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A () where" + , "data X = X" + , "type A = X" + ] + , testSession "duplicate module export" $ template + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A (module L,module L) where" + , "import Data.List as L" + , "a :: ()" + , "a = ()" + ] + "Remove ‘Module L’ from export" + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A (module L) where" + , "import Data.List as L" + , "a :: ()" + , "a = ()" + ] + , testSession "remove all exports single" $ template + [ "module A (x) where" + , "a :: ()" + , "a = ()" + ] + "Remove all redundant exports" + [ "module A () where" + , "a :: ()" + , "a = ()" + ] + , testSession "remove all exports two" $ template + [ "module A (x,y) where" + , "a :: ()" + , "a = ()" + ] + "Remove all redundant exports" + [ "module A () where" + , "a :: ()" + , "a = ()" + ] + , testSession "remove all exports three" $ template + [ "module A (a,x,y) where" + , "a :: ()" + , "a = ()" + ] + "Remove all redundant 
exports" + [ "module A (a) where" + , "a :: ()" + , "a = ()" + ] + , testSession "remove all exports composite" $ template + [ "module A (x,y,b, module Ls, a, A(X,getW, Y, Z,(:-),getV), (-+), B(B)) where" + , "data A = X {getV :: Int} | Y {getV :: Int}" + , "data B = B" + , "a,b :: ()" + , "a = ()" + , "b = ()" + ] + "Remove all redundant exports" + [ "module A (b, a, A(X, Y,getV), B(B)) where" + , "data A = X {getV :: Int} | Y {getV :: Int}" + , "data B = B" + , "a,b :: ()" + , "a = ()" + , "b = ()" + ] + ] + where + template origLines actionTitle expectedLines = + exportTemplate Nothing origLines actionTitle (Just expectedLines) + + +codeActionHelperFunctionTests :: TestTree +codeActionHelperFunctionTests = testGroup "code action helpers" + [ extendImportTestsRegEx + ] + +extendImportTestsRegEx :: TestTree +extendImportTestsRegEx = testGroup "regex parsing" + [ testCase "parse invalid multiple imports" $ template "foo bar foo" Nothing + , testCase "parse malformed import list" $ template + "\n\8226 Perhaps you want to add \8216fromList\8217 to one of these import lists:\n \8216Data.Map\8217)" + Nothing + , testCase "parse multiple imports" $ template + (if ghcVersion >= GHC98 + then "\n\8226 Add \8216fromList\8217 to one of these import lists:\n \8216Data.Map\8217 (at app/testlsp.hs:7:1-18)\n \8216Data.HashMap.Strict\8217 (at app/testlsp.hs:8:1-29)" + else "\n\8226 Perhaps you want to add \8216fromList\8217 to one of these import lists:\n \8216Data.Map\8217 (app/testlsp.hs:7:1-18)\n \8216Data.HashMap.Strict\8217 (app/testlsp.hs:8:1-29)" + ) + $ Just ("fromList",[("Data.Map","app/testlsp.hs:7:1-18"),("Data.HashMap.Strict","app/testlsp.hs:8:1-29")]) + ] + where + template message expected = do + liftIO $ expected @=? matchRegExMultipleImports message + +pickActionWithTitle :: T.Text -> [Command |? CodeAction] -> Session CodeAction +pickActionWithTitle title actions = + case matches of + [] -> liftIO . assertFailure $ "CodeAction with title " <> show title <> " not found in " <> show titles + a:_ -> pure a + where + titles = + [ actionTitle + | InR CodeAction { _title = actionTitle } <- actions + ] + matches = + [ action + | InR action@CodeAction { _title = actionTitle } <- actions + , title == actionTitle + ] + +assertNoActionWithTitle :: T.Text -> [Command |? CodeAction] -> Session () +assertNoActionWithTitle title actions = + liftIO $ assertBool + ("Unexpected code action " <> show title <> " in " <> show titles) + (title `notElem` titles) + where + titles = + [ actionTitle + | InR CodeAction { _title = actionTitle } <- actions + ] + +assertActionWithTitle :: [Command |? CodeAction] -> T.Text -> Session () +assertActionWithTitle actions title = + liftIO $ assertBool + ("CodeAction with title " <> show title <>" not found in " <> show titles) + (title `elem` titles) + where + titles = + [ actionTitle + | InR CodeAction { _title = actionTitle } <- actions + ] + +testSession :: TestName -> Session () -> TestTree +testSession name = testCase name . run + +testSessionExpectFail + :: TestName + -> ExpectBroken 'Ideal (Session ()) + -> ExpectBroken 'Current (Session ()) + -> TestTree +testSessionExpectFail name _ = testSession name . unCurrent + +testSessionWithExtraFiles :: HasCallStack => FilePath -> TestName -> (FilePath -> Session ()) -> TestTree +testSessionWithExtraFiles prefix name = testCase name . 
runWithExtraFiles prefix + +runWithExtraFiles :: HasCallStack => FilePath -> (FilePath -> Session a) -> IO a +runWithExtraFiles prefix s = withTempDir $ \dir -> do + copyTestDataFiles dir prefix + runInDir dir (s dir) + +copyTestDataFiles :: HasCallStack => FilePath -> FilePath -> IO () +copyTestDataFiles dir prefix = do + -- Copy all the test data files to the temporary workspace + testDataFiles <- getDirectoryFilesIO ("plugins/hls-refactor-plugin/test/data" </> prefix) ["//*"] + for_ testDataFiles $ \f -> do + createDirectoryIfMissing True $ dir </> takeDirectory f + copyFile ("plugins/hls-refactor-plugin/test/data" </> prefix </> f) (dir </> f) + +run :: Session a -> IO a +run s = run' (const s) + +run' :: (FilePath -> Session a) -> IO a +run' s = withTempDir $ \dir -> runInDir dir (s dir) + +runInDir :: FilePath -> Session a -> IO a +runInDir dir act = + runSessionWithTestConfig def + { testDirLocation = Left dir + , testPluginDescriptor = refactorPlugin + , testConfigCaps = lspTestCaps } + $ const act + +lspTestCaps :: ClientCapabilities +lspTestCaps = fullLatestClientCaps { _window = Just $ WindowClientCapabilities (Just True) Nothing Nothing } + +pattern R :: UInt -> UInt -> UInt -> UInt -> Range +pattern R x y x' y' = Range (Position x y) (Position x' y') + +-- | Version of 'System.IO.Extra.withTempDir' that canonicalizes the path +-- Which we need to do on macOS since the $TMPDIR can be in @/private/var@ or +-- @/var@ +withTempDir :: (FilePath -> IO a) -> IO a +withTempDir f = System.IO.Extra.withTempDir $ (canonicalizePath >=> f) diff --git a/plugins/hls-refactor-plugin/test/Test/AddArgument.hs b/plugins/hls-refactor-plugin/test/Test/AddArgument.hs new file mode 100644 index 0000000000..a0bf8b004e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/Test/AddArgument.hs @@ -0,0 +1,73 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} + +module Test.AddArgument (tests) where + +import qualified Data.Text as T +import Development.IDE.Types.Location +import Language.LSP.Protocol.Types hiding (SemanticTokensEdit (_start), mkRange) +import Language.LSP.Test +import Test.Tasty +import Test.Tasty.HUnit + + +import Test.Hls +import qualified Test.Hls.FileSystem as FS + +import qualified Development.IDE.Plugin.CodeAction as Refactor +import System.FilePath ((<.>)) + +tests :: TestTree +tests = + testGroup + "add argument" + [ mkGoldenAddArgTest' "Hole" (r 0 0 0 50) "_new_def", + mkGoldenAddArgTest "NoTypeSuggestion" (r 0 0 0 50), + mkGoldenAddArgTest "MultipleDeclAlts" (r 0 0 0 50), + mkGoldenAddArgTest "AddArgWithSig" (r 1 0 1 50), + mkGoldenAddArgTest "AddArgWithSigAndDocs" (r 8 0 8 50), + mkGoldenAddArgTest "AddArgFromLet" (r 2 0 2 50), + mkGoldenAddArgTest "AddArgFromWhere" (r 3 0 3 50), + -- TODO can we make this work for GHC 9.10?
+ knownBrokenForGhcVersions [GHC910, GHC912] "In GHC 9.10 and 9.12 end-of-line comment annotation is in different place" $ + mkGoldenAddArgTest "AddArgFromWhereComments" (r 3 0 3 50), + mkGoldenAddArgTest "AddArgWithTypeSynSig" (r 2 0 2 50), + mkGoldenAddArgTest "AddArgWithTypeSynSigContravariant" (r 2 0 2 50), + mkGoldenAddArgTest "AddArgWithLambda" (r 1 0 1 50), + mkGoldenAddArgTest "MultiSigFirst" (r 2 0 2 50), + mkGoldenAddArgTest "MultiSigLast" (r 2 0 2 50), + mkGoldenAddArgTest "MultiSigMiddle" (r 2 0 2 50) + ] + where + r x y x' y' = Range (Position x y) (Position x' y') + +mkGoldenAddArgTest :: FilePath -> Range -> TestTree +mkGoldenAddArgTest testFileName range = mkGoldenAddArgTest' testFileName range "new_def" + +-- Make a golden test for the add argument action. Given varName is the name of the variable not yet defined. +mkGoldenAddArgTest' :: FilePath -> Range -> T.Text -> TestTree +mkGoldenAddArgTest' testFileName range varName = do + let action docB = do + _ <- waitForDiagnostics + let matchAction a = case a of + InR CodeAction {_title = t} -> "Add" `T.isPrefixOf` t + _ -> False + InR action@CodeAction {_title = actionTitle} : _ <- + filter matchAction <$> getCodeActions docB range + liftIO $ actionTitle @?= ("Add argument ‘" <> varName <> "’ to function") + executeCodeAction action + goldenWithHaskellDocInTmpDir + def + (mkPluginTestDescriptor Refactor.bindingsPluginDescriptor "ghcide-code-actions-bindings") + (testFileName <> " (golden)") + (FS.mkVirtualFileTree "plugins/hls-refactor-plugin/test/data/golden/add-arg" (FS.directProject $ testFileName <.> "hs")) + testFileName + "expected" + "hs" + action diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromLet.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromLet.expected.hs new file mode 100644 index 0000000000..f351aed465 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromLet.expected.hs @@ -0,0 +1,6 @@ +foo :: Bool -> _ -> Int +foo True new_def = + let bar = new_def + in bar + +foo False new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromLet.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromLet.hs new file mode 100644 index 0000000000..091613d232 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromLet.hs @@ -0,0 +1,6 @@ +foo :: Bool -> Int +foo True = + let bar = new_def + in bar + +foo False = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhere.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhere.expected.hs new file mode 100644 index 0000000000..d208452548 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhere.expected.hs @@ -0,0 +1,6 @@ +foo :: Bool -> _ -> Int +foo True new_def = bar + where + bar = new_def + +foo False new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhere.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhere.hs new file mode 100644 index 0000000000..0047eedb6e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhere.hs @@ -0,0 +1,6 @@ +foo :: Bool -> Int +foo True = bar + where + bar = new_def + +foo False = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhereComments.expected.hs 
b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhereComments.expected.hs new file mode 100644 index 0000000000..30c418cc7e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhereComments.expected.hs @@ -0,0 +1,6 @@ +foo -- c1 + -- | c2 + {- c3 -} True new_def -- c4 + = new_def + +foo False new_def = False diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhereComments.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhereComments.hs new file mode 100644 index 0000000000..ece25370a5 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgFromWhereComments.hs @@ -0,0 +1,6 @@ +foo -- c1 + -- | c2 + {- c3 -} True -- c4 + = new_def + +foo False = False diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithLambda.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithLambda.expected.hs new file mode 100644 index 0000000000..3fcc2dbb4c --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithLambda.expected.hs @@ -0,0 +1,4 @@ +foo :: Bool -> _ -> () -> Int +foo True new_def = \() -> new_def [True] + +foo False new_def = const 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithLambda.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithLambda.hs new file mode 100644 index 0000000000..d08c0ef496 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithLambda.hs @@ -0,0 +1,4 @@ +foo :: Bool -> () -> Int +foo True = \() -> new_def [True] + +foo False = const 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSig.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSig.expected.hs new file mode 100644 index 0000000000..f8082bd027 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSig.expected.hs @@ -0,0 +1,4 @@ +foo :: Bool -> _ -> Int +foo True new_def = new_def [True] + +foo False new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSig.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSig.hs new file mode 100644 index 0000000000..3fa44a6dfe --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSig.hs @@ -0,0 +1,4 @@ +foo :: Bool -> Int +foo True = new_def [True] + +foo False = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSigAndDocs.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSigAndDocs.expected.hs new file mode 100644 index 0000000000..12927c7dce --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSigAndDocs.expected.hs @@ -0,0 +1,11 @@ +foo :: + -- c1 + Bool -- c2 + -- c3 + -> -- c4 + -- | c5 + () -- c6 + -> _ -> Int +foo True () new_def = new_def [True] + +foo False () new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSigAndDocs.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSigAndDocs.hs new file mode 100644 index 0000000000..f9033dce3f --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithSigAndDocs.hs @@ -0,0 +1,11 @@ +foo :: + -- c1 + Bool -- c2 + -- c3 + -> -- c4 + -- | c5 + () -- c6 + -> Int +foo True () = new_def [True] + +foo False () = 1 \ No newline at 
end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSig.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSig.expected.hs new file mode 100644 index 0000000000..e36ca8f89d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSig.expected.hs @@ -0,0 +1,5 @@ +type FunctionTySyn = Bool -> Int +foo :: FunctionTySyn +foo True new_def = new_def [True] + +foo False new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSig.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSig.hs new file mode 100644 index 0000000000..1843a5d460 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSig.hs @@ -0,0 +1,5 @@ +type FunctionTySyn = Bool -> Int +foo :: FunctionTySyn +foo True = new_def [True] + +foo False = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSigContravariant.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSigContravariant.expected.hs new file mode 100644 index 0000000000..e9735428f2 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSigContravariant.expected.hs @@ -0,0 +1,5 @@ +type FunctionTySyn = Bool -> Int +foo :: FunctionTySyn -> () -> _ -> Int +foo True () new_def = new_def [True] + +foo False () new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSigContravariant.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSigContravariant.hs new file mode 100644 index 0000000000..cf2b67f63a --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/AddArgWithTypeSynSigContravariant.hs @@ -0,0 +1,5 @@ +type FunctionTySyn = Bool -> Int +foo :: FunctionTySyn -> () -> Int +foo True () = new_def [True] + +foo False () = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/Hole.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/Hole.expected.hs new file mode 100644 index 0000000000..1f440e9650 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/Hole.expected.hs @@ -0,0 +1 @@ +foo _new_def = _new_def \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/Hole.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/Hole.hs new file mode 100644 index 0000000000..31761e6934 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/Hole.hs @@ -0,0 +1 @@ +foo = _new_def \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigFirst.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigFirst.expected.hs new file mode 100644 index 0000000000..66611817ef --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigFirst.expected.hs @@ -0,0 +1,6 @@ +bar :: Bool -> Int +foo :: Bool -> _ -> Int +bar = const 1 +foo True new_def = new_def [True] + +foo False new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigFirst.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigFirst.hs new file mode 100644 index 0000000000..00ef9ba769 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigFirst.hs @@ -0,0 +1,5 @@ +foo, bar 
:: Bool -> Int +bar = const 1 +foo True = new_def [True] + +foo False = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigLast.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigLast.expected.hs new file mode 100644 index 0000000000..489f6c2ba8 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigLast.expected.hs @@ -0,0 +1,7 @@ +baz, bar :: Bool -> Int +foo :: Bool -> _ -> Int +bar = const 1 +foo True new_def = new_def [True] + +foo False new_def = 1 +baz = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigLast.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigLast.hs new file mode 100644 index 0000000000..d3e8846728 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigLast.hs @@ -0,0 +1,6 @@ +baz, bar, foo :: Bool -> Int +bar = const 1 +foo True = new_def [True] + +foo False = 1 +baz = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigMiddle.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigMiddle.expected.hs new file mode 100644 index 0000000000..489f6c2ba8 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigMiddle.expected.hs @@ -0,0 +1,7 @@ +baz, bar :: Bool -> Int +foo :: Bool -> _ -> Int +bar = const 1 +foo True new_def = new_def [True] + +foo False new_def = 1 +baz = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigMiddle.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigMiddle.hs new file mode 100644 index 0000000000..80cada1601 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultiSigMiddle.hs @@ -0,0 +1,6 @@ +baz, foo, bar :: Bool -> Int +bar = const 1 +foo True = new_def [True] + +foo False = 1 +baz = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultipleDeclAlts.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultipleDeclAlts.expected.hs new file mode 100644 index 0000000000..fce633e2b9 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultipleDeclAlts.expected.hs @@ -0,0 +1,2 @@ +foo True new_def = new_def +foo False new_def = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultipleDeclAlts.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultipleDeclAlts.hs new file mode 100644 index 0000000000..919ce56546 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/MultipleDeclAlts.hs @@ -0,0 +1,2 @@ +foo True = new_def +foo False = 1 \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/NoTypeSuggestion.expected.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/NoTypeSuggestion.expected.hs new file mode 100644 index 0000000000..e982cdf35e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/NoTypeSuggestion.expected.hs @@ -0,0 +1 @@ +foo new_def = new_def \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/golden/add-arg/NoTypeSuggestion.hs b/plugins/hls-refactor-plugin/test/data/golden/add-arg/NoTypeSuggestion.hs new file mode 100644 index 0000000000..cf9ade10dc --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/golden/add-arg/NoTypeSuggestion.hs @@ -0,0 +1 @@ +foo = new_def \ No newline at end of file diff --git 
a/plugins/hls-refactor-plugin/test/data/hiding/AVec.hs b/plugins/hls-refactor-plugin/test/data/hiding/AVec.hs new file mode 100644 index 0000000000..4c1ea30874 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/AVec.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE TypeOperators #-} +module AVec (Vec, type (@@@), (++), cons, fromList, snoc) where + +import Prelude hiding ((++)) + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined + +data (@@@) a b + +fromList :: [a] -> Vec a +fromList = undefined + +cons :: a -> Vec a -> Vec a +cons = undefined + +snoc :: Vec a -> a -> Vec a +snoc = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/BVec.hs b/plugins/hls-refactor-plugin/test/data/hiding/BVec.hs new file mode 100644 index 0000000000..e086bb6ff4 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/BVec.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE TypeOperators #-} +module BVec (Vec, type (@@@), (++), cons, fromList, snoc) where + +import Prelude hiding ((++)) + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined + +data (@@@) a b + +fromList :: [a] -> Vec a +fromList = undefined + +cons :: a -> Vec a -> Vec a +cons = undefined + +snoc :: Vec a -> a -> Vec a +snoc = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/CVec.hs b/plugins/hls-refactor-plugin/test/data/hiding/CVec.hs new file mode 100644 index 0000000000..4a5fd3e7e9 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/CVec.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE TypeOperators #-} +module CVec (Vec, type (@@@), (++), cons, fromList, snoc) where + +import Prelude hiding ((++)) + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined + +data (@@@) a b + +fromList :: [a] -> Vec a +fromList = undefined + +cons :: a -> Vec a -> Vec a +cons = undefined + +snoc :: Vec a -> a -> Vec a +snoc = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/DVec.hs b/plugins/hls-refactor-plugin/test/data/hiding/DVec.hs new file mode 100644 index 0000000000..a580ca907d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/DVec.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE TypeOperators #-} +module DVec (Vec, (++), type (@@@), cons, fromList, snoc) where + +import Prelude hiding ((++)) + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined + +data (@@@) a b + +fromList :: [a] -> Vec a +fromList = undefined + +cons :: a -> Vec a -> Vec a +cons = undefined + +snoc :: Vec a -> a -> Vec a +snoc = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/EVec.hs b/plugins/hls-refactor-plugin/test/data/hiding/EVec.hs new file mode 100644 index 0000000000..f5e0b2c269 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/EVec.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE TypeOperators #-} +module EVec (Vec, (++), type (@@@), cons, fromList, snoc) where + +import Prelude hiding ((++)) + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined + +data (@@@) a b + +fromList :: [a] -> Vec a +fromList = undefined + +cons :: a -> Vec a -> Vec a +cons = undefined + +snoc :: Vec a -> a -> Vec a +snoc = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/FVec.hs b/plugins/hls-refactor-plugin/test/data/hiding/FVec.hs new file mode 100644 index 0000000000..872bb1c373 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/FVec.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DuplicateRecordFields #-} + +module FVec (RecA(..), RecB(..)) where + +data Vec a + +newtype RecA a = RecA { fromList :: [a] -> Vec a } + +newtype RecB a = RecB { fromList :: 
[a] -> Vec a } diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.append.E.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.append.E.hs new file mode 100644 index 0000000000..3448baa4f4 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.append.E.hs @@ -0,0 +1,12 @@ +module HideFunction where + +import AVec (fromList) +import BVec (fromList) +import CVec hiding ((++), cons) +import DVec hiding ((++), cons, snoc) +import EVec as E +import Prelude hiding ((++)) + +theFun = fromList + +theOp = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.append.Prelude.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.append.Prelude.hs new file mode 100644 index 0000000000..78d1cd879b --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.append.Prelude.hs @@ -0,0 +1,11 @@ +module HideFunction where + +import AVec (fromList) +import BVec (fromList) +import CVec hiding ((++), cons) +import DVec hiding ((++), cons, snoc) +import EVec as E hiding ((++)) + +theFun = fromList + +theOp = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.fromList.A.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.fromList.A.hs new file mode 100644 index 0000000000..b91d83f98b --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.fromList.A.hs @@ -0,0 +1,11 @@ +module HideFunction where + +import AVec (fromList) +import BVec ( (++)) +import CVec hiding (fromList, cons) +import DVec hiding (fromList, cons, snoc) +import EVec as E hiding (fromList) + +theFun = fromList + +theOp = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.fromList.B.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.fromList.B.hs new file mode 100644 index 0000000000..e131d86c1c --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.fromList.B.hs @@ -0,0 +1,11 @@ +module HideFunction where + +import AVec () +import BVec (fromList, (++)) +import CVec hiding (fromList, cons) +import DVec hiding (fromList, cons, snoc) +import EVec as E hiding (fromList) + +theFun = fromList + +theOp = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.qualified.append.Prelude.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.qualified.append.Prelude.hs new file mode 100644 index 0000000000..505125f55a --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.qualified.append.Prelude.hs @@ -0,0 +1,11 @@ +module HideFunction where + +import AVec (fromList) +import BVec (fromList, (++)) +import CVec hiding (cons) +import DVec hiding (cons, snoc) +import EVec as E + +theFun = fromList + +theOp = (Prelude.++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.qualified.fromList.E.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.qualified.fromList.E.hs new file mode 100644 index 0000000000..e81909ce0f --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.expected.qualified.fromList.E.hs @@ -0,0 +1,11 @@ +module HideFunction where + +import AVec (fromList) +import BVec (fromList, (++)) +import CVec hiding (cons) +import DVec hiding (cons, snoc) +import EVec as E + +theFun = E.fromList + +theOp = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.hs 
b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.hs new file mode 100644 index 0000000000..ade8f63ac5 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunction.hs @@ -0,0 +1,11 @@ +module HideFunction where + +import AVec (fromList) +import BVec (fromList, (++)) +import CVec hiding (cons) +import DVec hiding (cons, snoc) +import EVec as E + +theFun = fromList + +theOp = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunctionWithoutLocal.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunctionWithoutLocal.expected.hs new file mode 100644 index 0000000000..82c57fd8ed --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunctionWithoutLocal.expected.hs @@ -0,0 +1,14 @@ +module HideFunctionWithoutLocal where + +import AVec (fromList) +import BVec (fromList) +import CVec hiding ((++), cons) +import DVec hiding ((++), cons, snoc) +import EVec as E hiding ((++)) +import Prelude hiding ((++)) + +theOp = (++) + +data Vec a + +(++) = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideFunctionWithoutLocal.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideFunctionWithoutLocal.hs new file mode 100644 index 0000000000..0168627d45 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideFunctionWithoutLocal.hs @@ -0,0 +1,13 @@ +module HideFunctionWithoutLocal where + +import AVec (fromList) +import BVec (fromList, (++)) +import CVec hiding (cons) +import DVec hiding (cons, snoc) +import EVec as E + +theOp = (++) + +data Vec a + +(++) = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeIndented.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeIndented.expected.hs new file mode 100644 index 0000000000..4218338bee --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeIndented.expected.hs @@ -0,0 +1,5 @@ +module HidePreludeIndented where + + import AVec + import Prelude hiding ((++)) + op = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeIndented.hs b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeIndented.hs new file mode 100644 index 0000000000..122b64a390 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeIndented.hs @@ -0,0 +1,4 @@ +module HidePreludeIndented where + + import AVec + op = (++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeLocalInfix.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeLocalInfix.expected.hs new file mode 100644 index 0000000000..b0b97c348d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeLocalInfix.expected.hs @@ -0,0 +1,9 @@ +module HidePreludeLocalInfix where +import Prelude hiding ((++)) + +infixed xs ys = xs ++ ys + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeLocalInfix.hs b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeLocalInfix.hs new file mode 100644 index 0000000000..2e2dfb5df8 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HidePreludeLocalInfix.hs @@ -0,0 +1,8 @@ +module HidePreludeLocalInfix where + +infixed xs ys = xs ++ ys + +data Vec a + +(++) :: Vec a -> Vec a -> Vec a +(++) = undefined diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFields.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFields.expected.hs new file mode 100644 index 0000000000..c41fae58c1 
--- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFields.expected.hs @@ -0,0 +1,10 @@ +module HideQualifyDuplicateRecordFields where + +import AVec +import BVec +import CVec +import DVec +import EVec +import FVec + +theFun = AVec.fromList \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFields.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFields.hs new file mode 100644 index 0000000000..7e3e16cd9f --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFields.hs @@ -0,0 +1,10 @@ +module HideQualifyDuplicateRecordFields where + +import AVec +import BVec +import CVec +import DVec +import EVec +import FVec + +theFun = fromList \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFieldsSelf.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFieldsSelf.hs new file mode 100644 index 0000000000..63e14db00a --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyDuplicateRecordFieldsSelf.hs @@ -0,0 +1,5 @@ +module HideQualifyDuplicateRecordFieldsSelf where + +import FVec + +x = fromList \ No newline at end of file diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyInfix.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyInfix.expected.hs new file mode 100644 index 0000000000..151171b2f4 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyInfix.expected.hs @@ -0,0 +1,5 @@ +module HideQualifyInfix where + +import AVec + +infixed xs ys = xs Prelude.++ ys diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyInfix.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyInfix.hs new file mode 100644 index 0000000000..3fffc4e804 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifyInfix.hs @@ -0,0 +1,5 @@ +module HideQualifyInfix where + +import AVec + +infixed xs ys = xs ++ ys diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionLeft.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionLeft.expected.hs new file mode 100644 index 0000000000..d029cf54bd --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionLeft.expected.hs @@ -0,0 +1,5 @@ +module HideQualifySectionLeft where + +import AVec + +sectLeft xs = (Prelude.++ xs) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionLeft.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionLeft.hs new file mode 100644 index 0000000000..598c95f191 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionLeft.hs @@ -0,0 +1,5 @@ +module HideQualifySectionLeft where + +import AVec + +sectLeft xs = (++ xs) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionRight.expected.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionRight.expected.hs new file mode 100644 index 0000000000..3f306a4254 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionRight.expected.hs @@ -0,0 +1,5 @@ +module HideQualifySectionRight where + +import AVec + +sectLeft xs = (xs Prelude.++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionRight.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionRight.hs new file mode 100644 index 0000000000..89876605c6 --- /dev/null +++ 
b/plugins/hls-refactor-plugin/test/data/hiding/HideQualifySectionRight.hs @@ -0,0 +1,5 @@ +module HideQualifySectionRight where + +import AVec + +sectLeft xs = (xs ++) diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideType.expected.A.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideType.expected.A.hs new file mode 100644 index 0000000000..a59de871b4 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideType.expected.A.hs @@ -0,0 +1,9 @@ +module HideType where + +import AVec (Vec, fromList) +import BVec (fromList, (++)) +import CVec hiding (Vec, cons) +import DVec hiding (Vec, cons, snoc) +import EVec as E hiding (Vec) + +type TheType = Vec diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideType.expected.E.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideType.expected.E.hs new file mode 100644 index 0000000000..51fa6610b5 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideType.expected.E.hs @@ -0,0 +1,9 @@ +module HideType where + +import AVec ( fromList) +import BVec (fromList, (++)) +import CVec hiding (Vec, cons) +import DVec hiding (Vec, cons, snoc) +import EVec as E + +type TheType = Vec diff --git a/plugins/hls-refactor-plugin/test/data/hiding/HideType.hs b/plugins/hls-refactor-plugin/test/data/hiding/HideType.hs new file mode 100644 index 0000000000..926cedf15d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/HideType.hs @@ -0,0 +1,9 @@ +module HideType where + +import AVec (Vec, fromList) +import BVec (fromList, (++)) +import CVec hiding (cons) +import DVec hiding (cons, snoc) +import EVec as E + +type TheType = Vec diff --git a/plugins/hls-refactor-plugin/test/data/hiding/hie.yaml b/plugins/hls-refactor-plugin/test/data/hiding/hie.yaml new file mode 100644 index 0000000000..6ad33cd35e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hiding/hie.yaml @@ -0,0 +1,10 @@ +cradle: + direct: + arguments: + - -Wall + - AVec.hs + - BVec.hs + - CVec.hs + - DVec.hs + - EVec.hs + - FVec.hs diff --git a/plugins/hls-refactor-plugin/test/data/hover/Bar.hs b/plugins/hls-refactor-plugin/test/data/hover/Bar.hs new file mode 100644 index 0000000000..f9fde2a7cc --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hover/Bar.hs @@ -0,0 +1,4 @@ +module Bar (Bar(..)) where + +-- | Bar Haddock +data Bar = Bar diff --git a/plugins/hls-refactor-plugin/test/data/hover/Foo.hs b/plugins/hls-refactor-plugin/test/data/hover/Foo.hs new file mode 100644 index 0000000000..489a6ccd6b --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hover/Foo.hs @@ -0,0 +1,6 @@ +module Foo (Bar, foo) where + +import Bar + +-- | foo Haddock +foo = Bar diff --git a/plugins/hls-refactor-plugin/test/data/hover/GotoHover.hs b/plugins/hls-refactor-plugin/test/data/hover/GotoHover.hs new file mode 100644 index 0000000000..e1802580e2 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hover/GotoHover.hs @@ -0,0 +1,66 @@ +{-# LANGUAGE OverloadedStrings, TemplateHaskell #-} +{- HLINT ignore -} +module GotoHover ( module GotoHover) where +import Data.Text (Text, pack) +import Foo (Bar, foo) + + +data TypeConstructor = DataConstructor + { fff :: Text + , ggg :: Int } +aaa :: TypeConstructor +aaa = DataConstructor + { fff = "dfgy" + , ggg = 832 + } +bbb :: TypeConstructor +bbb = DataConstructor "mjgp" 2994 +ccc :: (Text, Int) +ccc = (fff bbb, ggg aaa) +ddd :: Num a => a -> a -> a +ddd vv ww = vv +! ww +a +! 
b = a - b +hhh (Just a) (><) = a >< a +iii a b = a `b` a +jjj s = pack $ s <> s +class MyClass a where + method :: a -> Int +instance MyClass Int where + method = succ +kkk :: MyClass a => Int -> a -> Int +kkk n c = n + method c + +doBind :: Maybe () +doBind = do unwrapped <- Just () + return unwrapped + +listCompBind :: [Char] +listCompBind = [ succ c | c <- "ptfx" ] + +multipleClause :: Bool -> Char +multipleClause True = 't' +multipleClause False = 'f' + +-- | Recognizable docs: kpqz +documented :: Monad m => Either Int (m a) +documented = Left 7518 + +listOfInt = [ 8391 :: Int, 6268 ] + +outer :: Bool +outer = undefined inner where + + inner :: Char + inner = undefined + +imported :: Bar +imported = foo + +aa2 :: Bool +aa2 = $(id [| True |]) + +hole :: Int +hole = _ + +hole2 :: a -> Maybe a +hole2 = _ diff --git a/plugins/hls-refactor-plugin/test/data/hover/RecordDotSyntax.hs b/plugins/hls-refactor-plugin/test/data/hover/RecordDotSyntax.hs new file mode 100644 index 0000000000..3680d08a3c --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hover/RecordDotSyntax.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE OverloadedRecordDot, DuplicateRecordFields, NoFieldSelectors #-} + +module RecordDotSyntax ( module RecordDotSyntax) where + +import qualified Data.Maybe as M + +data MyRecord = MyRecord + { a :: String + , b :: Integer + , c :: MyChild + } deriving (Eq, Show) + +newtype MyChild = MyChild + { z :: String + } deriving (Eq, Show) + +x = MyRecord { a = "Hello", b = 12, c = MyChild { z = "there" } } +y = x.a ++ show x.b ++ x.c.z diff --git a/plugins/hls-refactor-plugin/test/data/hover/hie.yaml b/plugins/hls-refactor-plugin/test/data/hover/hie.yaml new file mode 100644 index 0000000000..e2b3e97c5d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/hover/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["Foo", "Bar", "GotoHover", "RecordDotSyntax"]}} diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTop.expected.hs new file mode 100644 index 0000000000..2e7a43d73e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTop.expected.hs @@ -0,0 +1,9 @@ +module Test +( SomeData(..) +) where +import Data.Monoid + +-- | Some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTop.hs new file mode 100644 index 0000000000..a811d70cfa --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTop.hs @@ -0,0 +1,8 @@ +module Test +( SomeData(..) +) where + +-- | Some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTopMultipleComments.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTopMultipleComments.expected.hs new file mode 100644 index 0000000000..6589449194 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTopMultipleComments.expected.hs @@ -0,0 +1,12 @@ +module Test +( SomeData(..) 
+) where +import Data.Monoid + +-- | Another comment +data SomethingElse = SomethingElse + +-- | Some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTopMultipleComments.hs b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTopMultipleComments.hs new file mode 100644 index 0000000000..80b1e16359 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/CommentAtTopMultipleComments.hs @@ -0,0 +1,11 @@ +module Test +( SomeData(..) +) where + +-- | Another comment +data SomethingElse = SomethingElse + +-- | Some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/CommentCurlyBraceAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/CommentCurlyBraceAtTop.expected.hs new file mode 100644 index 0000000000..50a6954815 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/CommentCurlyBraceAtTop.expected.hs @@ -0,0 +1,9 @@ +module Test +( SomeData(..) +) where +import Data.Monoid + +{- Some comment -} +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/CommentCurlyBraceAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/CommentCurlyBraceAtTop.hs new file mode 100644 index 0000000000..dd4b0688c0 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/CommentCurlyBraceAtTop.hs @@ -0,0 +1,8 @@ +module Test +( SomeData(..) +) where + +{- Some comment -} +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/DataAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/DataAtTop.expected.hs new file mode 100644 index 0000000000..d2698e963b --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/DataAtTop.expected.hs @@ -0,0 +1,11 @@ +module Test +( SomeData(..) +) where +import Data.Monoid + +data Something = Something + +-- | some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/DataAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/DataAtTop.hs new file mode 100644 index 0000000000..8840738a51 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/DataAtTop.hs @@ -0,0 +1,10 @@ +module Test +( SomeData(..) +) where + +data Something = Something + +-- | some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ImportAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ImportAtTop.expected.hs new file mode 100644 index 0000000000..1f05d361d7 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ImportAtTop.expected.hs @@ -0,0 +1,14 @@ +module Test +( SomeData(..) +) where + +import Data.Char +import Data.Monoid + +{- Some multi + line comment +-} +class Semigroup a => SomeData a + +-- | a comment +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ImportAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ImportAtTop.hs new file mode 100644 index 0000000000..f4aa6780e0 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ImportAtTop.hs @@ -0,0 +1,13 @@ +module Test +( SomeData(..) 
+) where + +import Data.Char + +{- Some multi + line comment +-} +class Semigroup a => SomeData a + +-- | a comment +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ImportPostQualified.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ImportPostQualified.expected.hs new file mode 100644 index 0000000000..0ea06c3dcf --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ImportPostQualified.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE ImportQualifiedPost #-} +{-# OPTIONS_GHC -Wprepositive-qualified-module #-} +import Control.Monad qualified as Control +main :: IO () +main = Control.when True $ putStrLn "hello" diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ImportPostQualified.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ImportPostQualified.hs new file mode 100644 index 0000000000..1f225e77a5 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ImportPostQualified.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE ImportQualifiedPost #-} +{-# OPTIONS_GHC -Wprepositive-qualified-module #-} +main :: IO () +main = Control.when True $ putStrLn "hello" diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ImportQualified.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ImportQualified.expected.hs new file mode 100644 index 0000000000..5b9ce112ff --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ImportQualified.expected.hs @@ -0,0 +1,3 @@ +import qualified Control.Monad as Control +main :: IO () +main = Control.when True $ putStrLn "hello" diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ImportQualified.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ImportQualified.hs new file mode 100644 index 0000000000..9740fd0aa5 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ImportQualified.hs @@ -0,0 +1,2 @@ +main :: IO () +main = Control.when True $ putStrLn "hello" diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleAtTop.expected.hs new file mode 100644 index 0000000000..e7ea2779dd --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleAtTop.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test where +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleAtTop.hs new file mode 100644 index 0000000000..52ac5ac564 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleAtTop.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test where + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleExplicitExports.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleExplicitExports.expected.hs new file mode 100644 index 0000000000..097c3d2c56 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleExplicitExports.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test +( SomeData(..) 
+) where +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleExplicitExports.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleExplicitExports.hs new file mode 100644 index 0000000000..e528c43343 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleExplicitExports.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test +( SomeData(..) +) where + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleWithComment.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleWithComment.expected.hs new file mode 100644 index 0000000000..ad8e7aa4f1 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleWithComment.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test +( SomeData(..) +) where +import Data.Monoid + +-- comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleWithComment.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleWithComment.hs new file mode 100644 index 0000000000..bac9db1cd6 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LangPragmaModuleWithComment.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test +( SomeData(..) +) where + +-- comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTop.expected.hs new file mode 100644 index 0000000000..970f8dee59 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTop.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE OverloadedStrings #-} +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTop.hs new file mode 100644 index 0000000000..2d679bd537 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTop.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE OverloadedStrings #-} + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTopWithComment.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTopWithComment.expected.hs new file mode 100644 index 0000000000..3e6fc76050 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTopWithComment.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE OverloadedStrings #-} +import Data.Monoid + +-- | comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTopWithComment.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTopWithComment.hs new file mode 100644 index 0000000000..df1f85126b --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmaAtTopWithComment.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE OverloadedStrings #-} + +-- | comment +class Semigroup a => SomeData a + 
+instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmasThenShebangs.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmasThenShebangs.expected.hs new file mode 100644 index 0000000000..c3efb90b68 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmasThenShebangs.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid + +-- some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmasThenShebangs.hs b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmasThenShebangs.hs new file mode 100644 index 0000000000..916e465856 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/LanguagePragmasThenShebangs.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +-- some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ModuleDeclAndImports.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ModuleDeclAndImports.expected.hs new file mode 100644 index 0000000000..b9492d84ba --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ModuleDeclAndImports.expected.hs @@ -0,0 +1,10 @@ +module Test +( SomeData(..) +) where +import Data.Char +import Data.Array +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ModuleDeclAndImports.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ModuleDeclAndImports.hs new file mode 100644 index 0000000000..5cef735e54 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ModuleDeclAndImports.hs @@ -0,0 +1,9 @@ +module Test +( SomeData(..) +) where +import Data.Char +import Data.Array + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultiLineCommentAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLineCommentAtTop.expected.hs new file mode 100644 index 0000000000..4c96510899 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLineCommentAtTop.expected.hs @@ -0,0 +1,11 @@ +module Test +( SomeData(..) +) where +import Data.Monoid + +{- Some multi + line comment +-} +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultiLineCommentAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLineCommentAtTop.hs new file mode 100644 index 0000000000..8cf400d448 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLineCommentAtTop.hs @@ -0,0 +1,10 @@ +module Test +( SomeData(..) 
+) where + +{- Some multi + line comment +-} +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs new file mode 100644 index 0000000000..e9e8f4f604 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.expected.hs @@ -0,0 +1,13 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards, + OverloadedStrings #-} +{-# OPTIONS_GHC -Wall, +import Data.Monoid + -Wno-unused-imports #-} + + +-- some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.hs new file mode 100644 index 0000000000..e3e6187193 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultiLinePragma.hs @@ -0,0 +1,12 @@ +{-# OPTIONS_GHC -Wall #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards, + OverloadedStrings #-} +{-# OPTIONS_GHC -Wall, + -Wno-unused-imports #-} + + +-- some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultipleImportsAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleImportsAtTop.expected.hs new file mode 100644 index 0000000000..9a7d3d5214 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleImportsAtTop.expected.hs @@ -0,0 +1,14 @@ +module Test +( SomeData(..) +) where + +import Data.Char +import Data.Bool +import Data.Eq +import Data.Monoid + +-- | A comment +class Semigroup a => SomeData a + +-- | another comment +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultipleImportsAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleImportsAtTop.hs new file mode 100644 index 0000000000..1eab2c5685 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleImportsAtTop.hs @@ -0,0 +1,13 @@ +module Test +( SomeData(..) 
+) where + +import Data.Char +import Data.Bool +import Data.Eq + +-- | A comment +class Semigroup a => SomeData a + +-- | another comment +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultipleLanguagePragmasNoModuleDeclaration.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleLanguagePragmasNoModuleDeclaration.expected.hs new file mode 100644 index 0000000000..94ed4397a4 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleLanguagePragmasNoModuleDeclaration.expected.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE RankNTypes #-} +import Data.Monoid + +-- some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/MultipleLanguagePragmasNoModuleDeclaration.hs b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleLanguagePragmasNoModuleDeclaration.hs new file mode 100644 index 0000000000..352c2b2763 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/MultipleLanguagePragmasNoModuleDeclaration.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE RankNTypes #-} + +-- some comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NewTypeAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NewTypeAtTop.expected.hs new file mode 100644 index 0000000000..27ec825a79 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NewTypeAtTop.expected.hs @@ -0,0 +1,11 @@ +module Test +( SomeData(..) +) where +import Data.Monoid + +newtype Something = S { foo :: Int } + +-- | a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NewTypeAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NewTypeAtTop.hs new file mode 100644 index 0000000000..64a49358d9 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NewTypeAtTop.hs @@ -0,0 +1,10 @@ +module Test +( SomeData(..) 
+) where + +newtype Something = S { foo :: Int } + +-- | a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExportCommentAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExportCommentAtTop.expected.hs new file mode 100644 index 0000000000..6388cb99da --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExportCommentAtTop.expected.hs @@ -0,0 +1,7 @@ +module Test where +import Data.Monoid + +-- | a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExportCommentAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExportCommentAtTop.hs new file mode 100644 index 0000000000..761ea388b1 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExportCommentAtTop.hs @@ -0,0 +1,6 @@ +module Test where + +-- | a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExports.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExports.expected.hs new file mode 100644 index 0000000000..fb92a8309e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExports.expected.hs @@ -0,0 +1,8 @@ +module Test where +import Data.Monoid + +newtype Something = S { foo :: Int } + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExports.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExports.hs new file mode 100644 index 0000000000..8515a4ff35 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoExplicitExports.hs @@ -0,0 +1,7 @@ +module Test where + +newtype Something = S { foo :: Int } + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclaration.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclaration.expected.hs new file mode 100644 index 0000000000..2136db9018 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclaration.expected.hs @@ -0,0 +1,7 @@ +import Data.Monoid +newtype Something = S { foo :: Int } + +-- | a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclaration.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclaration.hs new file mode 100644 index 0000000000..5173bc31a6 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclaration.hs @@ -0,0 +1,6 @@ +newtype Something = S { foo :: Int } + +-- | a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclarationCommentAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclarationCommentAtTop.expected.hs new file mode 100644 index 0000000000..9336469265 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclarationCommentAtTop.expected.hs @@ -0,0 +1,5 @@ +import Data.Monoid +-- a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git 
a/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclarationCommentAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclarationCommentAtTop.hs new file mode 100644 index 0000000000..8727beedf1 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/NoModuleDeclarationCommentAtTop.hs @@ -0,0 +1,4 @@ +-- a comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs new file mode 100644 index 0000000000..8595bca913 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.expected.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE OverloadedStrings #-} + + +{-# LANGUAGE TupleSections #-} + + + + +class Semigroup a => SomeData a +instance SomeData All + +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.hs b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.hs new file mode 100644 index 0000000000..9b49dacfba --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsNotAtTopWithSpaces.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE OverloadedStrings #-} + + +{-# LANGUAGE TupleSections #-} + + + + +class Semigroup a => SomeData a +instance SomeData All + +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs new file mode 100644 index 0000000000..a92bbab580 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.expected.hs @@ -0,0 +1,8 @@ +class Semigroup a => SomeData a +instance SomeData All + +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.hs new file mode 100644 index 0000000000..ad7c01f4e7 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/OptionsPragmaNotAtTop.hs @@ -0,0 +1,7 @@ +class Semigroup a => SomeData a +instance SomeData All + +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs new file mode 100644 index 0000000000..cbe451714d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.expected.hs @@ -0,0 +1,23 @@ +{-# LANGUAGE OverloadedStrings #-} + +{-# OPTIONS_GHC -Wall, + OPTIONS_GHC -Wno-unrecognised-pragmas #-} +-- another comment +-- oh +{- multi line +comment +-} + +{-# LANGUAGE TupleSections #-} +{- some comment -} + +-- again +class Semigroup a => SomeData a +instance SomeData All + +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.hs new file mode 100644 index 0000000000..306af5aa71 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopMultipleComments.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE OverloadedStrings #-} + +{-# OPTIONS_GHC -Wall, + OPTIONS_GHC -Wno-unrecognised-pragmas #-} +-- another comment +-- oh +{- multi line +comment +-} + +{-# LANGUAGE TupleSections #-} +{- some comment -} + +-- again +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs new file mode 100644 index 0000000000..57ab794a7e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.expected.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE OverloadedStrings #-} + +{-# OPTIONS_GHC -Wall #-} +-- another comment + +{-# LANGUAGE TupleSections #-} +{- some comment -} + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.hs new file mode 100644 index 0000000000..c50fe08c85 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithCommentsAtTop.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE OverloadedStrings #-} + +{-# OPTIONS_GHC -Wall #-} +-- another comment + +{-# LANGUAGE TupleSections #-} +{- some comment -} + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithImports.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithImports.expected.hs new file mode 100644 index 0000000000..f2525d60e8 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithImports.expected.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} + +{-# LANGUAGE TupleSections #-} +module Test +( SomeData(..) +) where +import Data.Text +import Data.Monoid + + +class Semigroup a => SomeData a +instance SomeData All + +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithImports.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithImports.hs new file mode 100644 index 0000000000..849c029069 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithImports.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} + +{-# LANGUAGE TupleSections #-} +module Test +( SomeData(..) +) where +import Data.Text + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithModuleDecl.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithModuleDecl.expected.hs new file mode 100644 index 0000000000..d62e2e32f1 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithModuleDecl.expected.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE OverloadedStrings #-} + + +{-# OPTIONS_GHC -Wall #-} + +{-# LANGUAGE TupleSections #-} +module Test +( SomeData(..) +) where +import Data.Monoid + + + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithModuleDecl.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithModuleDecl.hs new file mode 100644 index 0000000000..6115d25973 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmaNotAtTopWithModuleDecl.hs @@ -0,0 +1,21 @@ +{-# LANGUAGE OverloadedStrings #-} + + +{-# OPTIONS_GHC -Wall #-} + +{-# LANGUAGE TupleSections #-} +module Test +( SomeData(..) +) where + + + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasAndShebangsNoComment.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasAndShebangsNoComment.expected.hs new file mode 100644 index 0000000000..7c0ce68d8f --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasAndShebangsNoComment.expected.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasAndShebangsNoComment.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasAndShebangsNoComment.hs new file mode 100644 index 0000000000..bdd2e9b69b --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasAndShebangsNoComment.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsAndModuleDecl.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsAndModuleDecl.expected.hs new file mode 100644 index 0000000000..a582ff69ac --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsAndModuleDecl.expected.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +module Test where +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsAndModuleDecl.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsAndModuleDecl.hs new file mode 100644 index 0000000000..e1eac4118d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsAndModuleDecl.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +module Test where + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsModuleExplicitExports.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsModuleExplicitExports.expected.hs new file mode 100644 index 0000000000..03271ccad2 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsModuleExplicitExports.expected.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +module Test +( SomeData(..) +) where +import Data.Monoid + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsModuleExplicitExports.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsModuleExplicitExports.hs new file mode 100644 index 0000000000..b59e9b22de --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasShebangsModuleExplicitExports.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +module Test +( SomeData(..) 
+) where + +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasThenShebangsMultilineComment.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasThenShebangsMultilineComment.expected.hs new file mode 100644 index 0000000000..95adbd87cd --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasThenShebangsMultilineComment.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid + +{- | some multiline + comment + ... -} +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/PragmasThenShebangsMultilineComment.hs b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasThenShebangsMultilineComment.hs new file mode 100644 index 0000000000..5f56f1c390 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/PragmasThenShebangsMultilineComment.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wall #-} +#! /usr/bin/env nix-shell +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +{- | some multiline + comment + ... -} +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs new file mode 100644 index 0000000000..230710232e --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.expected.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE OverloadedStrings #-} + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid + +f :: Int -> Int +f x = x * x diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.hs new file mode 100644 index 0000000000..ee512102d5 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTop.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE OverloadedStrings #-} + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +f :: Int -> Int +f x = x * x diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs new file mode 100644 index 0000000000..c5977503a6 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.expected.hs @@ -0,0 +1,8 @@ +class Semigroup a => SomeData a +instance SomeData All + +#! 
nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +import Data.Monoid + +f :: Int -> Int +f x = x * x diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.hs new file mode 100644 index 0000000000..5ba507a5e7 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopNoSpace.hs @@ -0,0 +1,7 @@ +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" + +f :: Int -> Int +f x = x * x diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs new file mode 100644 index 0000000000..8d358468da --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.expected.hs @@ -0,0 +1,21 @@ +{-# LANGUAGE OverloadedStrings #-} + + +{-# OPTIONS_GHC -Wall #-} + + + +{-# LANGUAGE TupleSections #-} + + + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} +import Data.Monoid + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.hs b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.hs new file mode 100644 index 0000000000..df312b2044 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/ShebangNotAtTopWithSpaces.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE OverloadedStrings #-} + + +{-# OPTIONS_GHC -Wall #-} + + + +{-# LANGUAGE TupleSections #-} + + + + +class Semigroup a => SomeData a +instance SomeData All + +#! nix-shell --pure -i runghc -p "haskellPackages.ghcWithPackages (hp: with hp; [ turtle ])" +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +addOne :: Int -> Int +addOne x = x + 1 diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/TwoDashOnlyComment.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/TwoDashOnlyComment.expected.hs new file mode 100644 index 0000000000..e2c4f7c230 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/TwoDashOnlyComment.expected.hs @@ -0,0 +1,9 @@ +module Test +( SomeData(..) +) where +import Data.Monoid + +-- no vertical bar comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/TwoDashOnlyComment.hs b/plugins/hls-refactor-plugin/test/data/import-placement/TwoDashOnlyComment.hs new file mode 100644 index 0000000000..87749ec8d2 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/TwoDashOnlyComment.hs @@ -0,0 +1,8 @@ +module Test +( SomeData(..) 
+) where + +-- no vertical bar comment +class Semigroup a => SomeData a + +instance SomeData All diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFile.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFile.expected.hs new file mode 100644 index 0000000000..53788dfaa8 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFile.expected.hs @@ -0,0 +1,18 @@ +module Asdf + (f + , where') + + where +import Data.Int + + + +f :: Int64 -> Int64 +f = id' + where id' = id + +g :: Int -> Int +g = id + +where' :: Int -> Int +where' = id diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFile.hs b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFile.hs new file mode 100644 index 0000000000..84f31f07a7 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFile.hs @@ -0,0 +1,17 @@ +module Asdf + (f + , where') + + where + + + +f :: Int64 -> Int64 +f = id' + where id' = id + +g :: Int -> Int +g = id + +where' :: Int -> Int +where' = id diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFileWithCommentsBeforeIt.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFileWithCommentsBeforeIt.expected.hs new file mode 100644 index 0000000000..9ea40643c9 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFileWithCommentsBeforeIt.expected.hs @@ -0,0 +1,20 @@ +module Asdf + (f + , where') +-- hello +-- world + + where +import Data.Int + + + +f :: Int64 -> Int64 +f = id' + where id' = id + +g :: Int -> Int +g = id + +where' :: Int -> Int +where' = id diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFileWithCommentsBeforeIt.hs b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFileWithCommentsBeforeIt.hs new file mode 100644 index 0000000000..d79ea57f21 --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/WhereDeclLowerInFileWithCommentsBeforeIt.hs @@ -0,0 +1,19 @@ +module Asdf + (f + , where') +-- hello +-- world + + where + + + +f :: Int64 -> Int64 +f = id' + where id' = id + +g :: Int -> Int +g = id + +where' :: Int -> Int +where' = id diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/WhereKeywordLowerInFileNoExports.expected.hs b/plugins/hls-refactor-plugin/test/data/import-placement/WhereKeywordLowerInFileNoExports.expected.hs new file mode 100644 index 0000000000..644fd3abab --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/WhereKeywordLowerInFileNoExports.expected.hs @@ -0,0 +1,16 @@ +module Asdf + + + where +import Data.Int + + +f :: Int64 -> Int64 +f = id' + where id' = id + +g :: Int -> Int +g = id + +where' :: Int -> Int +where' = id diff --git a/plugins/hls-refactor-plugin/test/data/import-placement/WhereKeywordLowerInFileNoExports.hs b/plugins/hls-refactor-plugin/test/data/import-placement/WhereKeywordLowerInFileNoExports.hs new file mode 100644 index 0000000000..caceb1d25d --- /dev/null +++ b/plugins/hls-refactor-plugin/test/data/import-placement/WhereKeywordLowerInFileNoExports.hs @@ -0,0 +1,15 @@ +module Asdf + + + where + + +f :: Int64 -> Int64 +f = id' + where id' = id + +g :: Int -> Int +g = id + +where' :: Int -> Int +where' = id diff --git a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs new file mode 100644 index 
0000000000..0ba6bc7975 --- /dev/null +++ b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs @@ -0,0 +1,294 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# OPTIONS_GHC -Wno-orphans #-} + +module Ide.Plugin.Rename (descriptor, E.Log) where + +import Control.Lens ((^.)) +import Control.Monad +import Control.Monad.Except (ExceptT, throwError) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Class (lift) +import Data.Either (rights) +import Data.Foldable (fold) +import Data.Generics +import Data.Hashable +import Data.HashSet (HashSet) +import qualified Data.HashSet as HS +import Data.List.NonEmpty (NonEmpty ((:|)), + groupWith) +import qualified Data.Map as M +import Data.Maybe +import Data.Mod.Word +import qualified Data.Text as T +import Development.IDE (Recorder, WithPriority, + usePropertyAction) +import Development.IDE.Core.FileStore (getVersionedTextDoc) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Compat.ExactPrint +import Development.IDE.GHC.Error +import Development.IDE.GHC.ExactPrint +import qualified Development.IDE.GHC.ExactPrint as E +import Development.IDE.Plugin.CodeAction +import Development.IDE.Spans.AtPoint +import Development.IDE.Types.Location +import GHC.Iface.Ext.Types (HieAST (..), + HieASTs (..), + NodeOrigin (..), + SourcedNodeInfo (..)) +import GHC.Iface.Ext.Utils (generateReferencesMap) +import HieDb ((:.) (..)) +import HieDb.Query +import HieDb.Types (RefRow (refIsGenerated)) +import Ide.Plugin.Error +import Ide.Plugin.Properties +import Ide.PluginUtils +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +instance Hashable (Mod a) where hash n = hash (unMod n) + +descriptor :: Recorder (WithPriority E.Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder pluginId = mkExactprintPluginDescriptor recorder $ + (defaultPluginDescriptor pluginId "Provides renaming of Haskell identifiers") + { pluginHandlers = mconcat + [ mkPluginHandler SMethod_TextDocumentRename renameProvider + , mkPluginHandler SMethod_TextDocumentPrepareRename prepareRenameProvider + ] + , pluginConfigDescriptor = defaultConfigDescriptor + { configCustomConfig = mkCustomConfig properties } + } + +prepareRenameProvider :: PluginMethodHandler IdeState Method_TextDocumentPrepareRename +prepareRenameProvider state _pluginId (PrepareRenameParams (TextDocumentIdentifier uri) pos _progressToken) = do + nfp <- getNormalizedFilePathE uri + namesUnderCursor <- getNamesAtPos state nfp pos + -- When this handler says that rename is invalid, VSCode shows "The element can't be renamed" + -- and doesn't even allow you to create full rename request. + -- This handler deliberately approximates "things that definitely can't be renamed" + -- to mean "there is no Name at given position". + -- + -- In particular it allows some cases through (e.g. cross-module renames), + -- so that the full rename handler can give more informative error about them. 
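+ -- For example, a request on a keyword or inside a comment finds no Name and is rejected here, while a rename of an imported (cross-module) name still reaches the full handler and gets a more specific error there.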
+ let renameValid = not $ null namesUnderCursor + pure $ InL $ PrepareRenameResult $ InR $ InR $ PrepareRenameDefaultBehavior renameValid + +renameProvider :: PluginMethodHandler IdeState Method_TextDocumentRename +renameProvider state pluginId (RenameParams _prog (TextDocumentIdentifier uri) pos newNameText) = do + nfp <- getNormalizedFilePathE uri + directOldNames <- getNamesAtPos state nfp pos + directRefs <- concat <$> mapM (refsAtName state nfp) directOldNames + + {- References in HieDB are not necessarily transitive. With `NamedFieldPuns`, we can have + indirect references through punned names. To find the transitive closure, we do a pass of + the direct references to find the references for any punned names. + See the `IndirectPuns` test for an example. -} + indirectOldNames <- concat . filter ((>1) . length) <$> + mapM (uncurry (getNamesAtPos state) <=< locToFilePos) directRefs + let oldNames = filter matchesDirect indirectOldNames ++ directOldNames + where + matchesDirect n = occNameFS (nameOccName n) `elem` directFS + directFS = map (occNameFS . nameOccName) directOldNames + + case oldNames of + -- There were no Names at given position (e.g. rename triggered within a comment or on a keyword) + [] -> throwError $ PluginInvalidParams "No symbol to rename at given position" + _ -> do + refs <- HS.fromList . concat <$> mapM (refsAtName state nfp) oldNames + + -- Validate rename + crossModuleEnabled <- liftIO $ runAction "rename: config" state $ usePropertyAction #crossModule pluginId properties + unless crossModuleEnabled $ failWhenImportOrExport state nfp refs oldNames + when (any isBuiltInSyntax oldNames) $ throwError $ PluginInternalError "Invalid rename of built-in syntax" + + -- Perform rename + let newName = mkTcOcc $ T.unpack newNameText + filesRefs = collectWith locToUri refs + getFileEdit (uri, locations) = do + verTxtDocId <- liftIO $ runAction "rename: getVersionedTextDoc" state $ getVersionedTextDoc (TextDocumentIdentifier uri) + getSrcEdit state verTxtDocId (replaceRefs newName locations) + fileEdits <- mapM getFileEdit filesRefs + pure $ InL $ fold fileEdits + +-- | Limit renaming across modules. +failWhenImportOrExport :: + IdeState -> + NormalizedFilePath -> + HashSet Location -> + [Name] -> + ExceptT PluginError (HandlerM config) () +failWhenImportOrExport state nfp refLocs names = do + pm <- runActionE "Rename.GetParsedModule" state + (useE GetParsedModule nfp) + let hsMod = unLoc $ pm_parsed_source pm + case (unLoc <$> hsmodName hsMod, hsmodExports hsMod) of + (mbModName, _) | not $ any (\n -> nameIsLocalOrFrom (replaceModName n mbModName) n) names + -> throwError $ PluginInternalError "Renaming of an imported name is unsupported" + (_, Just (L _ exports)) | any ((`HS.member` refLocs) . unsafeSrcSpanToLoc . getLoc) exports + -> throwError $ PluginInternalError "Renaming of an exported name is unsupported" + (Just _, Nothing) -> throwError $ PluginInternalError "Explicit export list required for renaming" + _ -> pure () + +--------------------------------------------------------------------------------------------------- +-- Source renaming + +-- | Apply a function to a `ParsedSource` for a given `Uri` to compute a `WorkspaceEdit`. +getSrcEdit :: + IdeState -> + VersionedTextDocumentIdentifier -> + (ParsedSource -> ParsedSource) -> + ExceptT PluginError (HandlerM config) WorkspaceEdit +getSrcEdit state verTxtDocId updatePs = do + ccs <- lift pluginGetClientCapabilities + nfp <- getNormalizedFilePathE (verTxtDocId ^. 
L.uri) + annAst <- runActionE "Rename.GetAnnotatedParsedSource" state + (useE GetAnnotatedParsedSource nfp) + let ps = annAst + src = T.pack $ exactPrint ps + res = T.pack $ exactPrint (updatePs ps) + pure $ diffText ccs (verTxtDocId, src) res IncludeDeletions + +-- | Replace names at every given `Location` (in a given `ParsedSource`) with a given new name. +replaceRefs :: + OccName -> + HashSet Location -> + ParsedSource -> + ParsedSource +replaceRefs newName refs = everywhere $ + -- there has to be a better way... + mkT (replaceLoc @AnnListItem) `extT` + -- replaceLoc @AnnList `extT` -- not needed + -- replaceLoc @AnnParen `extT` -- not needed + -- replaceLoc @AnnPragma `extT` -- not needed + -- replaceLoc @AnnContext `extT` -- not needed + -- replaceLoc @NoEpAnns `extT` -- not needed + replaceLoc @NameAnn + where + replaceLoc :: forall an. LocatedAn an RdrName -> LocatedAn an RdrName + replaceLoc (L srcSpan oldRdrName) + | isRef (locA srcSpan) = L srcSpan $ replace oldRdrName + replaceLoc lOldRdrName = lOldRdrName + replace :: RdrName -> RdrName + replace (Qual modName _) = Qual modName newName + replace _ = Unqual newName + + isRef :: SrcSpan -> Bool + isRef = (`HS.member` refs) . unsafeSrcSpanToLoc + +--------------------------------------------------------------------------------------------------- +-- Reference finding + +-- | Note: We only find exact name occurrences (i.e. type reference "depth" is 0). +refsAtName :: + MonadIO m => + IdeState -> + NormalizedFilePath -> + Name -> + ExceptT PluginError m [Location] +refsAtName state nfp name = do + ShakeExtras{withHieDb} <- liftIO $ runAction "Rename.HieDb" state getShakeExtras + ast <- handleGetHieAst state nfp + dbRefs <- case nameModule_maybe name of + Nothing -> pure [] + Just mod -> liftIO $ mapMaybe rowToLoc <$> withHieDb (\hieDb -> + -- See Note [Generated references] + filter (\(refRow HieDb.:. _) -> refIsGenerated refRow) <$> + findReferences + hieDb + True + (nameOccName name) + (Just $ moduleName mod) + (Just $ moduleUnit mod) + [fromNormalizedFilePath nfp] + ) + pure $ nameLocs name ast ++ dbRefs + +nameLocs :: Name -> HieAstResult -> [Location] +nameLocs name (HAR _ _ rm _ _) = + concatMap (map (realSrcSpanToLocation . fst)) + (M.lookup (Right name) rm) + +--------------------------------------------------------------------------------------------------- +-- Util + +getNamesAtPos :: MonadIO m => IdeState -> NormalizedFilePath -> Position -> ExceptT PluginError m [Name] +getNamesAtPos state nfp pos = do + HAR{hieAst} <- handleGetHieAst state nfp + pure $ getNamesAtPoint' hieAst pos + +handleGetHieAst :: + MonadIO m => + IdeState -> + NormalizedFilePath -> + ExceptT PluginError m HieAstResult +handleGetHieAst state nfp = + -- We explicitly do not want to allow a stale version here - we only want to rename if + -- the module compiles, otherwise we can't guarantee that we'll rename everything, + -- which is bad (see https://github.com/haskell/haskell-language-server/issues/3799) + fmap removeGenerated $ runActionE "Rename.GetHieAst" state $ useE GetHieAst nfp + +{- Note [Generated references] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +GHC inserts `Use`s of record constructor everywhere where its record selectors are used, +which leads to record fields being renamed whenever corresponding constructor is renamed. +see https://github.com/haskell/haskell-language-server/issues/2915 +To work around this, we filter out compiler-generated references. 
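+For example, given `data Foo = Foo { field :: Int }`, each use of the selector `field` also carries a generated `Use` of the constructor `Foo`, so renaming `Foo` would otherwise rewrite those `field` occurrences as well.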
+-} +removeGenerated :: HieAstResult -> HieAstResult +removeGenerated HAR{..} = + HAR{hieAst = sourceOnlyAsts, refMap = sourceOnlyRefMap, ..} + where + goAsts :: HieASTs a -> HieASTs a + goAsts (HieASTs asts) = HieASTs (fmap goAst asts) + + goAst :: HieAST a -> HieAST a + goAst (Node (SourcedNodeInfo sniMap) sp children) = + let sourceOnlyNodeInfos = SourcedNodeInfo $ M.delete GeneratedInfo sniMap + in Node sourceOnlyNodeInfos sp $ map goAst children + + sourceOnlyAsts = goAsts hieAst + -- Also need to regenerate the RefMap, because the one in HAR + -- is generated from HieASTs containing GeneratedInfo + sourceOnlyRefMap = generateReferencesMap $ getAsts sourceOnlyAsts + +collectWith :: (Hashable a, Eq b) => (a -> b) -> HashSet a -> [(b, HashSet a)] +collectWith f = map (\(a :| as) -> (f a, HS.fromList (a:as))) . groupWith f . HS.toList + +-- | A variant 'getNamesAtPoint' that does not expect a 'PositionMapping' +getNamesAtPoint' :: HieASTs a -> Position -> [Name] +getNamesAtPoint' hf pos = + concat $ pointCommand hf pos (rights . M.keys . getNodeIds) + +locToUri :: Location -> Uri +locToUri (Location uri _) = uri + +unsafeSrcSpanToLoc :: SrcSpan -> Location +unsafeSrcSpanToLoc srcSpan = + case srcSpanToLocation srcSpan of + Nothing -> error "Invalid conversion from UnhelpfulSpan to Location" + Just location -> location + +locToFilePos :: Monad m => Location -> ExceptT PluginError m (NormalizedFilePath, Position) +locToFilePos (Location uri (Range pos _)) = (,pos) <$> getNormalizedFilePathE uri + +replaceModName :: Name -> Maybe ModuleName -> Module +replaceModName name mbModName = + mkModule (moduleUnit $ nameModule name) (fromMaybe (mkModuleName "Main") mbModName) + +--------------------------------------------------------------------------------------------------- +-- Config + +properties :: Properties '[ 'PropertyKey "crossModule" 'TBoolean] +properties = emptyProperties + & defineBooleanProperty #crossModule + "Enable experimental cross-module renaming" False diff --git a/plugins/hls-rename-plugin/test/Main.hs b/plugins/hls-rename-plugin/test/Main.hs new file mode 100644 index 0000000000..b935e6563f --- /dev/null +++ b/plugins/hls-rename-plugin/test/Main.hs @@ -0,0 +1,152 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +module Main (main) where + +import Control.Lens ((^.)) +import Data.Aeson +import qualified Data.Map as M +import Data.Text (Text, pack) +import Ide.Plugin.Config +import qualified Ide.Plugin.Rename as Rename +import qualified Language.LSP.Protocol.Lens as L +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +renamePlugin :: PluginTestDescriptor Rename.Log +renamePlugin = mkPluginTestDescriptor Rename.descriptor "rename" + +tests :: TestTree +tests = testGroup "Rename" + [ goldenWithRename "Data constructor" "DataConstructor" $ \doc -> + rename doc (Position 0 15) "Op" + , goldenWithRename "Data constructor with fields" "DataConstructorWithFields" $ \doc -> + rename doc (Position 1 13) "FooRenamed" + , knownBrokenForGhcVersions [GHC96, GHC98] "renaming Constructor{..} with RecordWildcard removes .." 
$ + goldenWithRename "Data constructor with fields" "DataConstructorWithFieldsRecordWildcards" $ \doc -> + rename doc (Position 1 13) "FooRenamed" + , goldenWithRename "Exported function" "ExportedFunction" $ \doc -> + rename doc (Position 2 1) "quux" + , goldenWithRename "Field Puns" "FieldPuns" $ \doc -> + rename doc (Position 7 13) "bleh" + , goldenWithRename "Function argument" "FunctionArgument" $ \doc -> + rename doc (Position 3 4) "y" + , goldenWithRename "Function name" "FunctionName" $ \doc -> + rename doc (Position 3 1) "baz" + , goldenWithRename "GADT" "Gadt" $ \doc -> + rename doc (Position 6 37) "Expr" + , goldenWithRename "Hidden function" "HiddenFunction" $ \doc -> + rename doc (Position 0 32) "quux" + , goldenWithRename "Imported function" "ImportedFunction" $ \doc -> + rename doc (Position 3 8) "baz" + , goldenWithRename "Import hiding" "ImportHiding" $ \doc -> + rename doc (Position 0 22) "hiddenFoo" + , goldenWithRename "Indirect Puns" "IndirectPuns" $ \doc -> + rename doc (Position 4 23) "blah" + , goldenWithRename "Let expression" "LetExpression" $ \doc -> + rename doc (Position 5 11) "foobar" + , goldenWithRename "Qualified as" "QualifiedAs" $ \doc -> + rename doc (Position 3 10) "baz" + , goldenWithRename "Qualified shadowing" "QualifiedShadowing" $ \doc -> + rename doc (Position 3 12) "foobar" + , goldenWithRename "Qualified function" "QualifiedFunction" $ \doc -> + rename doc (Position 3 12) "baz" + , goldenWithRename "Realigns do block indentation" "RealignDo" $ \doc -> + rename doc (Position 0 2) "fooBarQuux" + , goldenWithRename "Record field" "RecordField" $ \doc -> + rename doc (Position 6 9) "number" + , goldenWithRename "Shadowed name" "ShadowedName" $ \doc -> + rename doc (Position 1 1) "baz" + , goldenWithRename "Typeclass" "Typeclass" $ \doc -> + rename doc (Position 8 15) "Equal" + , goldenWithRename "Type constructor" "TypeConstructor" $ \doc -> + rename doc (Position 2 17) "BinaryTree" + , goldenWithRename "Type variable" "TypeVariable" $ \doc -> + rename doc (Position 0 13) "b" + , goldenWithRename "Rename within comment" "Comment" $ \doc -> do + let expectedError = TResponseError + (InR ErrorCodes_InvalidParams) + "rename: Invalid Params: No symbol to rename at given position" + Nothing + renameExpectError expectedError doc (Position 0 10) "ImpossibleRename" + + , testCase "fails when module does not compile" $ runRenameSession "" $ do + doc <- openDoc "FunctionArgument.hs" "haskell" + expectNoMoreDiagnostics 3 doc "typecheck" + + -- Update the document so it doesn't compile + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 2 13) (Position 2 17) + , _rangeLength = Nothing + , _text = "A" + } + changeDoc doc [change] + diags@(tcDiag : _) <- waitForDiagnosticsFrom doc + + -- Make sure there's a typecheck error + liftIO $ do + length diags @?= 1 + tcDiag ^. L.range @?= Range (Position 2 13) (Position 2 14) + tcDiag ^. L.severity @?= Just DiagnosticSeverity_Error + tcDiag ^. L.source @?= Just "typecheck" + + -- Make sure renaming fails + renameErr <- expectRenameError doc (Position 3 0) "foo'" + liftIO $ do + renameErr ^. L.code @?= InL LSPErrorCodes_RequestFailed + renameErr ^. 
L.message @?= "rename: Rule Failed: GetHieAst" + + -- Update the document so it compiles + let change' = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 2 13) (Position 2 14) + , _rangeLength = Nothing + , _text = "Int" + } + changeDoc doc [change'] + expectNoMoreDiagnostics 3 doc "typecheck" + + -- Make sure renaming succeeds + rename doc (Position 3 0) "foo'" + ] + +goldenWithRename :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithRename title path act = + goldenWithHaskellDoc (def { plugins = M.fromList [("rename", def { plcConfig = "crossModule" .= True })] }) + renamePlugin title testDataDir path "expected" "hs" act + +renameExpectError :: TResponseError Method_TextDocumentRename -> TextDocumentIdentifier -> Position -> Text -> Session () +renameExpectError expectedError doc pos newName = do + let params = RenameParams Nothing doc pos newName + rsp <- request SMethod_TextDocumentRename params + case rsp ^. L.result of + Right _ -> liftIO $ assertFailure $ "Was expecting " <> show expectedError <> ", got success" + Left actualError -> liftIO $ assertEqual "ResponseError" expectedError actualError + +testDataDir :: FilePath +testDataDir = "plugins" </> "hls-rename-plugin" </> "test" </> "testdata" + +-- | Attempts to rename the term at the specified position, expecting a failure +expectRenameError :: + TextDocumentIdentifier -> + Position -> + String -> + Session (TResponseError Method_TextDocumentRename) +expectRenameError doc pos newName = do + let params = RenameParams Nothing doc pos (pack newName) + rsp <- request SMethod_TextDocumentRename params + case rsp ^. L.result of + Left err -> pure err + Right _ -> liftIO $ assertFailure $ + "Got unexpected successful rename response for " <> show (doc ^. L.uri) + +runRenameSession :: FilePath -> Session a -> IO a +runRenameSession subdir = failIfSessionTimeout + . runSessionWithTestConfig def + { testDirLocation = Left $ testDataDir </> subdir + , testPluginDescriptor = renamePlugin + , testConfigCaps = codeActionNoResolveCaps } + . 
const diff --git a/plugins/hls-rename-plugin/test/testdata/Comment.expected.hs b/plugins/hls-rename-plugin/test/testdata/Comment.expected.hs new file mode 100644 index 0000000000..d58fd349a8 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Comment.expected.hs @@ -0,0 +1 @@ +{- IShouldNotBeRenaemable -} diff --git a/plugins/hls-rename-plugin/test/testdata/Comment.hs b/plugins/hls-rename-plugin/test/testdata/Comment.hs new file mode 100644 index 0000000000..d58fd349a8 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Comment.hs @@ -0,0 +1 @@ +{- IShouldNotBeRenaemable -} diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructor.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructor.expected.hs new file mode 100644 index 0000000000..d1ee10a6d1 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructor.expected.hs @@ -0,0 +1,4 @@ +data Expr = Op Int Int + +plus :: Expr -> Expr +plus (Op n m) = Op (n + m) 0 diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructor.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructor.hs new file mode 100644 index 0000000000..b614d72291 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructor.hs @@ -0,0 +1,4 @@ +data Expr = Apply Int Int + +plus :: Expr -> Expr +plus (Apply n m) = Apply (n + m) 0 diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs new file mode 100644 index 0000000000..5fc38c7f01 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE NamedFieldPuns #-} +data Foo = FooRenamed { a :: Int, b :: Bool } + +foo1 :: Foo +foo1 = FooRenamed { a = 1, b = True } + +foo2 :: Foo +foo2 = FooRenamed 1 True + +fun1 :: Foo -> Int +fun1 FooRenamed {a} = a + +fun2 :: Foo -> Int +fun2 FooRenamed {a = i} = i diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs new file mode 100644 index 0000000000..abd8031096 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE NamedFieldPuns #-} +data Foo = Foo { a :: Int, b :: Bool } + +foo1 :: Foo +foo1 = Foo { a = 1, b = True } + +foo2 :: Foo +foo2 = Foo 1 True + +fun1 :: Foo -> Int +fun1 Foo {a} = a + +fun2 :: Foo -> Int +fun2 Foo {a = i} = i diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs new file mode 100644 index 0000000000..b5dd83cecb --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE RecordWildCards #-} +data Foo = FooRenamed { a :: Int, b :: Bool } + +fun :: Foo -> Int +fun FooRenamed {..} = a diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs new file mode 100644 index 0000000000..8e624b0816 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE RecordWildCards #-} +data Foo = Foo { a :: Int, b :: Bool } + +fun :: Foo -> Int +fun Foo {..} = a diff --git 
a/plugins/hls-rename-plugin/test/testdata/ExportedFunction.expected.hs b/plugins/hls-rename-plugin/test/testdata/ExportedFunction.expected.hs new file mode 100644 index 0000000000..568edb36db --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ExportedFunction.expected.hs @@ -0,0 +1,5 @@ +module ExportedFunction (quux) where + +quux :: Num p => [a] -> p +quux [] = 0 +quux xs = 1 diff --git a/plugins/hls-rename-plugin/test/testdata/ExportedFunction.hs b/plugins/hls-rename-plugin/test/testdata/ExportedFunction.hs new file mode 100644 index 0000000000..3adb72dc9f --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ExportedFunction.hs @@ -0,0 +1,5 @@ +module ExportedFunction (foo) where + +foo :: Num p => [a] -> p +foo [] = 0 +foo xs = 1 diff --git a/plugins/hls-rename-plugin/test/testdata/FieldPuns.expected.hs b/plugins/hls-rename-plugin/test/testdata/FieldPuns.expected.hs new file mode 100644 index 0000000000..f6618927b0 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/FieldPuns.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE NamedFieldPuns #-} + +module FieldPun () where + +newtype Foo = Foo { bleh :: Int } + +unFoo :: Foo -> Int +unFoo Foo{bleh} = bleh diff --git a/plugins/hls-rename-plugin/test/testdata/FieldPuns.hs b/plugins/hls-rename-plugin/test/testdata/FieldPuns.hs new file mode 100644 index 0000000000..2cd53d0026 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/FieldPuns.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE NamedFieldPuns #-} + +module FieldPun () where + +newtype Foo = Foo { field :: Int } + +unFoo :: Foo -> Int +unFoo Foo{field} = field diff --git a/plugins/hls-rename-plugin/test/testdata/Foo.hs b/plugins/hls-rename-plugin/test/testdata/Foo.hs new file mode 100644 index 0000000000..c4850149b4 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Foo.hs @@ -0,0 +1,4 @@ +module Foo where + +foo :: Int -> Int +foo x = 0 diff --git a/plugins/hls-rename-plugin/test/testdata/FunctionArgument.expected.hs b/plugins/hls-rename-plugin/test/testdata/FunctionArgument.expected.hs new file mode 100644 index 0000000000..bc10997ece --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/FunctionArgument.expected.hs @@ -0,0 +1,4 @@ +module FunctionArgument () where + +foo :: Int -> Int +foo y = y + 1 diff --git a/plugins/hls-rename-plugin/test/testdata/FunctionArgument.hs b/plugins/hls-rename-plugin/test/testdata/FunctionArgument.hs new file mode 100644 index 0000000000..d318cd80c4 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/FunctionArgument.hs @@ -0,0 +1,4 @@ +module FunctionArgument () where + +foo :: Int -> Int +foo x = x + 1 diff --git a/plugins/hls-rename-plugin/test/testdata/FunctionName.expected.hs b/plugins/hls-rename-plugin/test/testdata/FunctionName.expected.hs new file mode 100644 index 0000000000..c02f55937b --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/FunctionName.expected.hs @@ -0,0 +1,6 @@ +main = do + x <- return $ baz 42 + return (baz x) +baz, bar :: Int -> Int +baz x = x + 1 +bar = (+ 1) . baz diff --git a/plugins/hls-rename-plugin/test/testdata/FunctionName.hs b/plugins/hls-rename-plugin/test/testdata/FunctionName.hs new file mode 100644 index 0000000000..61bdc5e03c --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/FunctionName.hs @@ -0,0 +1,6 @@ +main = do + x <- return $ foo 42 + return (foo x) +foo, bar :: Int -> Int +foo x = x + 1 +bar = (+ 1) . 
foo diff --git a/plugins/hls-rename-plugin/test/testdata/Gadt.expected.hs b/plugins/hls-rename-plugin/test/testdata/Gadt.expected.hs new file mode 100644 index 0000000000..65115d42d7 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Gadt.expected.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE GADTs #-} + +data Expr a where + Number :: Int -> Expr Int + Boolean :: Bool -> Expr Bool + Not :: Expr Bool -> Expr Bool + Even :: Expr Int -> Expr Bool + Add :: Enum a => Expr a -> Expr a -> Expr Int + Max :: Ord a => Expr a -> Expr a -> Expr a + +evaluate :: Expr a -> a +evaluate (Number n) = n +evaluate (Boolean p) = p +evaluate (Add n m) = fromEnum (evaluate n) + fromEnum (evaluate m) +evaluate (Even n) = even $ evaluate n +evaluate (Not p) = not $ evaluate p +evaluate (Max x y) = max (evaluate x) (evaluate y) diff --git a/plugins/hls-rename-plugin/test/testdata/Gadt.hs b/plugins/hls-rename-plugin/test/testdata/Gadt.hs new file mode 100644 index 0000000000..408f516900 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Gadt.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE GADTs #-} + +data Expression a where + Number :: Int -> Expression Int + Boolean :: Bool -> Expression Bool + Not :: Expression Bool -> Expression Bool + Even :: Expression Int -> Expression Bool + Add :: Enum a => Expression a -> Expression a -> Expression Int + Max :: Ord a => Expression a -> Expression a -> Expression a + +evaluate :: Expression a -> a +evaluate (Number n) = n +evaluate (Boolean p) = p +evaluate (Add n m) = fromEnum (evaluate n) + fromEnum (evaluate m) +evaluate (Even n) = even $ evaluate n +evaluate (Not p) = not $ evaluate p +evaluate (Max x y) = max (evaluate x) (evaluate y) diff --git a/plugins/hls-rename-plugin/test/testdata/HiddenFunction.expected.hs b/plugins/hls-rename-plugin/test/testdata/HiddenFunction.expected.hs new file mode 100644 index 0000000000..3195291c66 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/HiddenFunction.expected.hs @@ -0,0 +1,4 @@ +import Foo hiding (quux) + +foo :: Int -> Int +foo x = 0 diff --git a/plugins/hls-rename-plugin/test/testdata/HiddenFunction.hs b/plugins/hls-rename-plugin/test/testdata/HiddenFunction.hs new file mode 100644 index 0000000000..eacb9d1a4c --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/HiddenFunction.hs @@ -0,0 +1,4 @@ +import Foo hiding (foo) + +foo :: Int -> Int +foo x = 0 diff --git a/plugins/hls-rename-plugin/test/testdata/ImportHiding.expected.hs b/plugins/hls-rename-plugin/test/testdata/ImportHiding.expected.hs new file mode 100644 index 0000000000..e1b600aa1c --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ImportHiding.expected.hs @@ -0,0 +1,4 @@ +import Foo hiding (hiddenFoo) + +foo :: Int -> Int +foo _ = 5 diff --git a/plugins/hls-rename-plugin/test/testdata/ImportHiding.hs b/plugins/hls-rename-plugin/test/testdata/ImportHiding.hs new file mode 100644 index 0000000000..c14099e68b --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ImportHiding.hs @@ -0,0 +1,4 @@ +import Foo hiding (foo) + +foo :: Int -> Int +foo _ = 5 diff --git a/plugins/hls-rename-plugin/test/testdata/ImportedFunction.expected.hs b/plugins/hls-rename-plugin/test/testdata/ImportedFunction.expected.hs new file mode 100644 index 0000000000..8f0cbcf888 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ImportedFunction.expected.hs @@ -0,0 +1,4 @@ +import Foo (baz) + +bar :: Int -> Int +bar = baz diff --git a/plugins/hls-rename-plugin/test/testdata/ImportedFunction.hs b/plugins/hls-rename-plugin/test/testdata/ImportedFunction.hs new 
file mode 100644 index 0000000000..56361fc64b --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ImportedFunction.hs @@ -0,0 +1,4 @@ +import Foo (foo) + +bar :: Int -> Int +bar = foo diff --git a/plugins/hls-rename-plugin/test/testdata/IndirectPuns.expected.hs b/plugins/hls-rename-plugin/test/testdata/IndirectPuns.expected.hs new file mode 100644 index 0000000000..cf181c7215 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/IndirectPuns.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE NamedFieldPuns #-} + +module IndirectPuns () where + +newtype Foo = Foo { blah :: Int } + +unFoo :: Foo -> Int +unFoo Foo{blah} = blah diff --git a/plugins/hls-rename-plugin/test/testdata/IndirectPuns.hs b/plugins/hls-rename-plugin/test/testdata/IndirectPuns.hs new file mode 100644 index 0000000000..c823126a76 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/IndirectPuns.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE NamedFieldPuns #-} + +module IndirectPuns () where + +newtype Foo = Foo { field :: Int } + +unFoo :: Foo -> Int +unFoo Foo{field} = field diff --git a/plugins/hls-rename-plugin/test/testdata/LetExpression.expected.hs b/plugins/hls-rename-plugin/test/testdata/LetExpression.expected.hs new file mode 100644 index 0000000000..213b49c20f --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/LetExpression.expected.hs @@ -0,0 +1,10 @@ +module Let () where + +import Foo + +bar :: Int +bar = let foobar = 5 in + foobar * foobar + +quux :: Int +quux = Foo.foo 4 diff --git a/plugins/hls-rename-plugin/test/testdata/LetExpression.hs b/plugins/hls-rename-plugin/test/testdata/LetExpression.hs new file mode 100644 index 0000000000..ec9f58bcea --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/LetExpression.hs @@ -0,0 +1,10 @@ +module Let () where + +import Foo + +bar :: Int +bar = let foo = 5 in + foo * foo + +quux :: Int +quux = Foo.foo 4 diff --git a/plugins/hls-rename-plugin/test/testdata/QualifiedAs.expected.hs b/plugins/hls-rename-plugin/test/testdata/QualifiedAs.expected.hs new file mode 100644 index 0000000000..a864119ef2 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/QualifiedAs.expected.hs @@ -0,0 +1,4 @@ +import qualified Foo as F + +bar :: Int -> Int +bar = F.baz diff --git a/plugins/hls-rename-plugin/test/testdata/QualifiedAs.hs b/plugins/hls-rename-plugin/test/testdata/QualifiedAs.hs new file mode 100644 index 0000000000..022b2f8e31 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/QualifiedAs.hs @@ -0,0 +1,4 @@ +import qualified Foo as F + +bar :: Int -> Int +bar = F.foo diff --git a/plugins/hls-rename-plugin/test/testdata/QualifiedFunction.expected.hs b/plugins/hls-rename-plugin/test/testdata/QualifiedFunction.expected.hs new file mode 100644 index 0000000000..808c12b066 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/QualifiedFunction.expected.hs @@ -0,0 +1,4 @@ +import qualified Foo + +bar :: Int -> Int +bar = Foo.baz diff --git a/plugins/hls-rename-plugin/test/testdata/QualifiedFunction.hs b/plugins/hls-rename-plugin/test/testdata/QualifiedFunction.hs new file mode 100644 index 0000000000..01581c0c8d --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/QualifiedFunction.hs @@ -0,0 +1,4 @@ +import qualified Foo + +bar :: Int -> Int +bar = Foo.foo diff --git a/plugins/hls-rename-plugin/test/testdata/QualifiedShadowing.expected.hs b/plugins/hls-rename-plugin/test/testdata/QualifiedShadowing.expected.hs new file mode 100644 index 0000000000..52dbcea2b0 --- /dev/null +++ 
b/plugins/hls-rename-plugin/test/testdata/QualifiedShadowing.expected.hs @@ -0,0 +1,7 @@ +import qualified Foo as F + +bar :: Int -> Int +bar x = F.foobar x + foo x + +foo :: Int -> Int +foo _ = 5 diff --git a/plugins/hls-rename-plugin/test/testdata/QualifiedShadowing.hs b/plugins/hls-rename-plugin/test/testdata/QualifiedShadowing.hs new file mode 100644 index 0000000000..aa5e50caf6 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/QualifiedShadowing.hs @@ -0,0 +1,7 @@ +import qualified Foo as F + +bar :: Int -> Int +bar x = F.foo x + foo x + +foo :: Int -> Int +foo _ = 5 diff --git a/plugins/hls-rename-plugin/test/testdata/RealignDo.expected.hs b/plugins/hls-rename-plugin/test/testdata/RealignDo.expected.hs new file mode 100644 index 0000000000..9033a89d87 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/RealignDo.expected.hs @@ -0,0 +1,4 @@ +fooBarQuux :: Maybe Integer +fooBarQuux = do x <- Just 5 + t <- Just 10 + pure $ x + t diff --git a/plugins/hls-rename-plugin/test/testdata/RealignDo.hs b/plugins/hls-rename-plugin/test/testdata/RealignDo.hs new file mode 100644 index 0000000000..aa121ac984 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/RealignDo.hs @@ -0,0 +1,4 @@ +foo :: Maybe Integer +foo = do x <- Just 5 + t <- Just 10 + pure $ x + t diff --git a/plugins/hls-rename-plugin/test/testdata/RecordField.expected.hs b/plugins/hls-rename-plugin/test/testdata/RecordField.expected.hs new file mode 100644 index 0000000000..6646df611c --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/RecordField.expected.hs @@ -0,0 +1,7 @@ +data Bam = Bam { + number :: Int, + s :: String +} + +foo :: Bam -> Bam +foo Bam {number = y} = Bam {number = y + 5, s = ""} diff --git a/plugins/hls-rename-plugin/test/testdata/RecordField.hs b/plugins/hls-rename-plugin/test/testdata/RecordField.hs new file mode 100644 index 0000000000..873150935d --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/RecordField.hs @@ -0,0 +1,7 @@ +data Bam = Bam { + n :: Int, + s :: String +} + +foo :: Bam -> Bam +foo Bam {n = y} = Bam {n = y + 5, s = ""} diff --git a/plugins/hls-rename-plugin/test/testdata/ShadowedName.expected.hs b/plugins/hls-rename-plugin/test/testdata/ShadowedName.expected.hs new file mode 100644 index 0000000000..7c6391176a --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ShadowedName.expected.hs @@ -0,0 +1,4 @@ +baz :: Int -> Int +baz x = foo + 10 + where + foo = 20 diff --git a/plugins/hls-rename-plugin/test/testdata/ShadowedName.hs b/plugins/hls-rename-plugin/test/testdata/ShadowedName.hs new file mode 100644 index 0000000000..513f8fa89f --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/ShadowedName.hs @@ -0,0 +1,4 @@ +foo :: Int -> Int +foo x = foo + 10 + where + foo = 20 diff --git a/plugins/hls-rename-plugin/test/testdata/TypeConstructor.expected.hs b/plugins/hls-rename-plugin/test/testdata/TypeConstructor.expected.hs new file mode 100644 index 0000000000..0c46ffa077 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/TypeConstructor.expected.hs @@ -0,0 +1,5 @@ +data BinaryTree a = Node a (BinaryTree a) (BinaryTree a) | Leaf a + +rotateRight :: BinaryTree a -> BinaryTree a +rotateRight (Node v (Node v' l' r') r) = Node v' l' (Node v r' r) +rotateRight t = t diff --git a/plugins/hls-rename-plugin/test/testdata/TypeConstructor.hs b/plugins/hls-rename-plugin/test/testdata/TypeConstructor.hs new file mode 100644 index 0000000000..4e728aa1a4 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/TypeConstructor.hs @@ -0,0 +1,5 @@ 
+data Tree a = Node a (Tree a) (Tree a) | Leaf a + +rotateRight :: Tree a -> Tree a +rotateRight (Node v (Node v' l' r') r) = Node v' l' (Node v r' r) +rotateRight t = t diff --git a/plugins/hls-rename-plugin/test/testdata/TypeVariable.expected.hs b/plugins/hls-rename-plugin/test/testdata/TypeVariable.expected.hs new file mode 100644 index 0000000000..75891f4290 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/TypeVariable.expected.hs @@ -0,0 +1,2 @@ +bar :: Maybe b -> Maybe b +bar a = a diff --git a/plugins/hls-rename-plugin/test/testdata/TypeVariable.hs b/plugins/hls-rename-plugin/test/testdata/TypeVariable.hs new file mode 100644 index 0000000000..782be9a7f3 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/TypeVariable.hs @@ -0,0 +1,2 @@ +bar :: Maybe a -> Maybe a +bar a = a diff --git a/plugins/hls-rename-plugin/test/testdata/Typeclass.expected.hs b/plugins/hls-rename-plugin/test/testdata/Typeclass.expected.hs new file mode 100644 index 0000000000..6a021a3a52 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Typeclass.expected.hs @@ -0,0 +1,10 @@ +module Typeclass () where + +class Equal a where + equals :: a -> a -> Bool + +instance Equal Int where + equals = (==) + +allEqual :: Equal a => [a] -> Bool +allEqual = all =<< equals . head diff --git a/plugins/hls-rename-plugin/test/testdata/Typeclass.hs b/plugins/hls-rename-plugin/test/testdata/Typeclass.hs new file mode 100644 index 0000000000..b92a591321 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/Typeclass.hs @@ -0,0 +1,10 @@ +module Typeclass () where + +class Equality a where + equals :: a -> a -> Bool + +instance Equality Int where + equals = (==) + +allEqual :: Equality a => [a] -> Bool +allEqual = all =<< equals . head diff --git a/plugins/hls-rename-plugin/test/testdata/hie.yaml b/plugins/hls-rename-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..892a7d675f --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/hie.yaml @@ -0,0 +1,24 @@ +cradle: + direct: + arguments: + - "DataConstructor" + - "ExportedFunction" + - "FieldPuns" + - "Foo" + - "FunctionArgument" + - "FunctionName" + - "Gadt" + - "HiddenFunction" + - "ImportHiding" + - "ImportedFunction" + - "IndirectPuns" + - "LetExpression" + - "QualifiedAs" + - "QualifiedFunction" + - "QualifiedShadowing" + - "RealignDo" + - "RecordField" + - "ShadowedName" + - "TypeClass" + - "TypeConstructor" + - "TypeVariable" diff --git a/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs b/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs new file mode 100644 index 0000000000..2e39ffcd98 --- /dev/null +++ b/plugins/hls-retrie-plugin/src/Ide/Plugin/Retrie.hs @@ -0,0 +1,789 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +{-# OPTIONS -Wno-orphans #-} + +module Ide.Plugin.Retrie (descriptor, Log) where + +import Control.Concurrent.STM (readTVarIO) +import Control.Exception.Safe (Exception (..), + SomeException, assert, + catch, throwIO, try) +import Control.Lens.Operators +import Control.Monad (forM, unless, when) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.IO.Class (MonadIO (liftIO)) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..), runExceptT) + 
+import Control.Monad.Trans.Maybe (MaybeT) +import Data.Aeson (FromJSON (..), + ToJSON (..)) +import Data.Bifunctor (second) +import qualified Data.ByteString as BS +import Data.Data +import Data.Either (partitionEithers) +import Data.Hashable (unhashed) +import qualified Data.HashSet as Set +import Data.IORef.Extra (atomicModifyIORef'_, + newIORef, readIORef) +import Data.List.Extra (find, nubOrdOn) +import qualified Data.Map as Map +import Data.Maybe (catMaybes) +import Data.Monoid (First (First)) +import Data.String (IsString) +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE hiding (pluginHandlers) +import Development.IDE.Core.Actions (lookupMod) +import Development.IDE.Core.PluginUtils +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.Shake (ShakeExtras (ShakeExtras, knownTargetsVar), + getShakeExtras, + hiedbWriter, + toKnownFiles, withHieDb) +import Development.IDE.GHC.Compat (GRHSs (GRHSs), + GenLocated (L), GhcPs, + GhcRn, + HsBindLR (FunBind), + HsExpr (HsApp, OpApp), + HsGroup (..), + HsValBindsLR (..), + HscEnv, ImportDecl (..), + LHsExpr, LRuleDecls, + Match, ModIface, + ModSummary (ModSummary, ms_hspp_buf, ms_mod), + Outputable, ParsedModule, + RuleDecl (HsRule), + RuleDecls (HsRules), + SourceText (..), + TyClDecl (SynDecl), + TyClGroup (..), fun_id, + isQual, isQual_maybe, + locA, mi_fixities, + moduleNameString, + ms_hspp_opts, + nameModule_maybe, + nameOccName, nameRdrName, + noLocA, occNameFS, + occNameString, + pattern IsBoot, + pattern NotBoot, + pattern RealSrcSpan, + pm_parsed_source, + rdrNameOcc, rds_rules, + srcSpanFile, topDir, + unLoc, unLocA) +import qualified Development.IDE.GHC.Compat as GHC +import Development.IDE.GHC.Compat.Util hiding (catch, try) +import Development.IDE.GHC.ExactPrint (GetAnnotatedParsedSource (GetAnnotatedParsedSource), + TransformT) +import Development.IDE.Spans.AtPoint (LookupModule, + nameToLocation) +import Development.IDE.Types.Shake (WithHieDb) +import qualified GHC as GHCGHC +import GHC.Generics (Generic) +import Ide.Plugin.Error (PluginError (PluginInternalError), + getNormalizedFilePathE) +import Ide.PluginUtils +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message as LSP +import Language.LSP.Protocol.Types as LSP +import Language.LSP.Server (ProgressCancellable (Cancellable)) +import Retrie (Annotated (astA), + AnnotatedModule, + Fixity (Fixity), + FixityDirection (InfixL), + Options, Options_ (..), + Verbosity (Loud), + addImports, apply, + applyWithUpdate) +import Retrie.Context +import Retrie.CPP (CPP (NoCPP), parseCPP) +import Retrie.ExactPrint (fix, makeDeltaAst, + transformA, unsafeMkA) +import Retrie.Expr (mkLocatedHsVar) +import Retrie.Fixity (FixityEnv, lookupOp, + mkFixityEnv) +import Retrie.Monad (getGroundTerms, + runRetrie) +import Retrie.Options (defaultOptions, + getTargetFiles) +import Retrie.Replace (Change (..), + Replacement (..)) +import Retrie.Rewrites +import Retrie.Rewrites.Function (matchToRewrites) +import Retrie.SYB (everything, extQ, + listify, mkQ) +import Retrie.Types +import Retrie.Universe (Universe) + +import GHC.Types.PkgQual + +data Log + = LogParsingModule FilePath + +instance Pretty Log where + pretty = \case + LogParsingModule fp -> "Parsing module:" <+> pretty fp + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides 
code actions to inline Haskell definitions") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction provider, + pluginCommands = [retrieCommand recorder, retrieInlineThisCommand recorder] + } + +retrieCommandId :: CommandId +retrieCommandId = "retrieCommand" + +retrieInlineThisCommandId :: CommandId +retrieInlineThisCommandId = "retrieInlineThisCommand" + +retrieCommand :: Recorder (WithPriority Log) -> PluginCommand IdeState +retrieCommand recorder = + PluginCommand retrieCommandId "run the refactoring" (runRetrieCmd recorder) + +retrieInlineThisCommand :: Recorder (WithPriority Log) -> PluginCommand IdeState +retrieInlineThisCommand recorder = + PluginCommand retrieInlineThisCommandId "inline function call" + (runRetrieInlineThisCmd recorder) + +-- | Parameters for the runRetrie PluginCommand. +data RunRetrieParams = RunRetrieParams + { description :: T.Text, + rewrites :: [RewriteSpec], + originatingFile :: Uri, + restrictToOriginatingFile :: Bool + } + deriving (Eq, Show, Generic, FromJSON, ToJSON) + +runRetrieCmd :: Recorder (WithPriority Log) -> CommandFunction IdeState RunRetrieParams +runRetrieCmd recorder state token RunRetrieParams{originatingFile = uri, ..} = ExceptT $ + pluginWithIndefiniteProgress description token Cancellable $ \_updater -> do + _ <- runExceptT $ do + nfp <- getNormalizedFilePathE uri + (session, _) <- + runActionE "Retrie.GhcSessionDeps" state $ + useWithStaleE GhcSessionDeps + nfp + (ms, binds, _, _, _) <- runActionE "Retrie.getBinds" state $ getBinds nfp + let importRewrites = concatMap (extractImports ms binds) rewrites + (errors, edits) <- liftIO $ + callRetrie + recorder + state + (hscEnv session) + (map Right rewrites <> map Left importRewrites) + nfp + restrictToOriginatingFile + unless (null errors) $ + lift $ pluginSendNotification SMethod_WindowShowMessage $ + ShowMessageParams MessageType_Warning $ + T.unlines $ + "## Found errors during rewrite:" : + ["-" <> T.pack (show e) | e <- errors] + _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edits) (\_ -> pure ()) + return () + return $ Right $ InR Null + +data RunRetrieInlineThisParams = RunRetrieInlineThisParams + { inlineIntoThisLocation :: !Location, + inlineFromThisLocation :: !Location, + inlineThisDefinition :: !T.Text + } + deriving (Eq, Show, Generic, FromJSON, ToJSON) + +runRetrieInlineThisCmd :: Recorder (WithPriority Log) -> CommandFunction IdeState RunRetrieInlineThisParams +runRetrieInlineThisCmd recorder state _token RunRetrieInlineThisParams{..} = do + nfp <- getNormalizedFilePathE $ getLocationUri inlineIntoThisLocation + nfpSource <- getNormalizedFilePathE $ getLocationUri inlineFromThisLocation + -- What we do here: + -- Find the identifier in the given position + -- Construct an inline rewrite for it + -- Run retrie to get a list of changes + -- Select the change that inlines the identifier in the given position + -- Apply the edit + astSrc <- runActionE "retrie" state $ + useE GetAnnotatedParsedSource nfpSource + let fromRange = rangeToRealSrcSpan nfpSource $ getLocationRange inlineFromThisLocation + intoRange = rangeToRealSrcSpan nfp $ getLocationRange inlineIntoThisLocation + inlineRewrite <- liftIO $ constructInlineFromIdentifer (unsafeMkA astSrc 0) fromRange + when (null inlineRewrite) $ throwError $ PluginInternalError "Empty rewrite" + (session, _) <- runActionE "retrie" state $ + useWithStaleE GhcSessionDeps nfp + (fixityEnv, cpp) <- liftIO $ getCPPmodule recorder state (hscEnv session) $ fromNormalizedFilePath nfp + 
result <- liftIO $ try @_ @SomeException $ + runRetrie fixityEnv (applyWithUpdate myContextUpdater inlineRewrite) cpp + case result of + Left err -> throwError $ PluginInternalError $ "Retrie - crashed with: " <> T.pack (show err) + Right (_,_,NoChange) -> throwError $ PluginInternalError "Retrie - inline produced no changes" + Right (_,_,Change replacements imports) -> do + let edits = asEditMap $ asTextEdits $ Change ourReplacement imports + wedit = WorkspaceEdit (Just edits) Nothing Nothing + ourReplacement = [ r + | r@Replacement{..} <- replacements + , RealSrcSpan intoRange Nothing `GHC.isSubspanOf` replLocation] + _ <- lift $ pluginSendRequest SMethod_WorkspaceApplyEdit + (ApplyWorkspaceEditParams Nothing wedit) (\_ -> pure ()) + return $ InR Null + +-- Override to skip adding binders to the context, which prevents inlining +-- nested defined functions +myContextUpdater :: ContextUpdater +myContextUpdater c i = + updateContext c i + `extQ` (return . updExp) + `extQ` (skipUpdate @(GRHSs GhcPs (LHsExpr GhcPs))) + `extQ` (skipUpdate @(Match GhcPs (LHsExpr GhcPs))) + where + skipUpdate :: forall a m . Monad m => a -> TransformT m Context + skipUpdate _ = pure c + + -- override to skip the HsLet case + updExp :: HsExpr GhcPs -> Context + updExp HsApp{} = + c { ctxtParentPrec = HasPrec $ Retrie.Fixity (SourceText "HsApp") (10 + i - firstChild) InfixL } + -- Reason for 10 + i: (i is index of child, 0 = left, 1 = right) + -- In left child, prec is 10, so HsApp child will NOT get paren'd + -- In right child, prec is 11, so every child gets paren'd (unless atomic) + updExp (OpApp _ _ op _) = c { ctxtParentPrec = HasPrec $ lookupOp op (ctxtFixityEnv c) } + updExp _ = c { ctxtParentPrec = NeverParen } + -- Deal with Trees-That-Grow adding extension points + -- as the first child everywhere. + firstChild :: Int + firstChild = 1 + +extractImports :: ModSummary -> [HsBindLR GhcRn GhcRn] -> RewriteSpec -> [ImportSpec] +extractImports ModSummary{ms_mod} topLevelBinds (Unfold thing) + | Just FunBind {fun_matches} + <- find (\case FunBind{fun_id = L _ n} -> T.unpack (printOutputable n) == thing ; _ -> False) topLevelBinds + , names <- listify p fun_matches + = + [ AddImport {..} + | let ideclSource = False, + name <- names, + let r = nameRdrName name, + let ideclQualifiedBool = isQual r, + let ideclAsString = moduleNameString . fst <$> isQual_maybe r, + let ideclThing = Just (IEVar $ occNameString $ rdrNameOcc r), + Just ideclNameString <- + [moduleNameString . GHC.moduleName <$> nameModule_maybe name] + ] + where + p name = nameModule_maybe name /= Just ms_mod +-- TODO handle imports for all rewrites +extractImports _ _ _ = [] + +------------------------------------------------------------------------------- + +provider :: PluginMethodHandler IdeState Method_TextDocumentCodeAction +provider state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) range ca) = do + let (LSP.CodeActionContext _diags _monly _) = ca + nfp <- getNormalizedFilePathE uri + + (ModSummary{ms_mod}, topLevelBinds, posMapping, hs_ruleds, hs_tyclds) + <- runActionE "retrie" state $ + getBinds nfp + + extras@ShakeExtras{ withHieDb, hiedbWriter } <- liftIO $ runAction "" state getShakeExtras + + range <- fromCurrentRangeE posMapping range + let pos = range ^. 
L.start + let rewrites = + concatMap (suggestBindRewrites uri pos ms_mod) topLevelBinds + ++ concatMap (suggestRuleRewrites uri pos ms_mod) hs_ruleds + ++ [ r + | TyClGroup {group_tyclds} <- hs_tyclds, + L (locA -> l) g <- group_tyclds, + pos `isInsideSrcSpan` l, + r <- suggestTypeRewrites uri ms_mod g + ] + + retrieCommands <- lift $ + forM rewrites $ \(title, kind, params) -> liftIO $ do + let c = mkLspCommand plId retrieCommandId title (Just [toJSON params]) + return $ CodeAction title (Just kind) Nothing Nothing Nothing Nothing (Just c) Nothing + + inlineSuggestions <- liftIO $ runIdeAction "" extras $ + suggestBindInlines plId uri topLevelBinds range withHieDb (lookupMod hiedbWriter) + let inlineCommands = + [ Just $ + CodeAction _title (Just CodeActionKind_RefactorInline) Nothing Nothing Nothing Nothing (Just c) Nothing + | c@Command{..} <- inlineSuggestions + ] + return $ InL [InR c | c <- retrieCommands ++ catMaybes inlineCommands] + +getLocationUri :: Location -> Uri +getLocationUri Location{_uri} = _uri + +getLocationRange :: Location -> Range +getLocationRange Location{_range} = _range + +getBinds :: NormalizedFilePath -> ExceptT PluginError Action + ( ModSummary + , [HsBindLR GhcRn GhcRn] + , PositionMapping + , [LRuleDecls GhcRn] + , [TyClGroup GhcRn] + ) +getBinds nfp = do + (tm, posMapping) <- useWithStaleE TypeCheck nfp + -- we use the typechecked source instead of the parsed source + -- to be able to extract module names from the Ids, + -- so that we can include adding the required imports in the retrie command + let rn = tmrRenamed tm + case rn of +#if MIN_VERSION_ghc(9,9,0) + (HsGroup{hs_valds, hs_ruleds, hs_tyclds}, _, _, _, _) -> do +#else + (HsGroup{hs_valds, hs_ruleds, hs_tyclds}, _, _, _) -> do +#endif + topLevelBinds <- case hs_valds of + ValBinds{} -> throwError $ PluginInternalError "getBinds: ValBinds not supported" + XValBindsLR (GHC.NValBinds binds _sigs :: GHC.NHsValBindsLR GhcRn) -> + pure [ decl + | (_, bagBinds) <- binds + , L _ decl <- bagToList bagBinds + ] + return (tmrModSummary tm, topLevelBinds, posMapping, hs_ruleds, hs_tyclds) + +suggestBindRewrites :: + Uri -> + Position -> + GHC.Module -> + HsBindLR GhcRn GhcRn -> + [(T.Text, CodeActionKind, RunRetrieParams)] +suggestBindRewrites originatingFile pos ms_mod FunBind {fun_id = L (locA -> l') rdrName} + | pos `isInsideSrcSpan` l' = + let pprNameText = printOutputable rdrName + pprName = T.unpack pprNameText + unfoldRewrite restrictToOriginatingFile = + let rewrites = [Unfold (qualify ms_mod pprName)] + description = "Unfold " <> pprNameText <> describeRestriction restrictToOriginatingFile + in (description, CodeActionKind_RefactorInline, RunRetrieParams {..}) + foldRewrite restrictToOriginatingFile = + let rewrites = [Fold (qualify ms_mod pprName)] + description = "Fold " <> pprNameText <> describeRestriction restrictToOriginatingFile + in (description, CodeActionKind_RefactorExtract, RunRetrieParams {..}) + in [unfoldRewrite False, unfoldRewrite True, foldRewrite False, foldRewrite True] +suggestBindRewrites _ _ _ _ = [] + + -- find all the identifiers in the AST for which have source definitions +suggestBindInlines :: + PluginId + -> Uri + -> [HsBindLR GhcRn GhcRn] + -> Range + -> WithHieDb + -> (FilePath -> GHCGHC.ModuleName -> GHCGHC.Unit -> Bool -> MaybeT IdeAction Uri) + -> IdeAction [Command] +suggestBindInlines plId _uri binds range hie lookupMod = do + identifiers <- definedIdentifiers + return $ map (\(name, siteLoc, srcLoc) -> + let + title = "Inline " <> printedName + printedName = 
printOutputable name + params = RunRetrieInlineThisParams + { inlineIntoThisLocation = siteLoc + , inlineFromThisLocation = srcLoc + , inlineThisDefinition= printedName + } + in mkLspCommand plId retrieInlineThisCommandId title (Just [toJSON params]) + ) + (Set.toList identifiers) + where + definedIdentifiers = + -- we search for candidates to inline in RHSs only, skipping LHSs + everything (<>) (pure mempty `mkQ` getGRHSIdentifierDetails hie lookupMod) binds + + getGRHSIdentifierDetails :: + WithHieDb + -> (FilePath -> GHCGHC.ModuleName -> GHCGHC.Unit -> Bool -> MaybeT IdeAction Uri) + -> GRHSs GhcRn (LHsExpr GhcRn) + -> IdeAction (Set.HashSet (GHC.OccName, Location, Location)) + getGRHSIdentifierDetails a b it@GRHSs{} = + -- we only select candidates for which we have source code + everything (<>) (pure mempty `mkQ` getDefinedIdentifierDetailsViaHieDb a b) it + + getDefinedIdentifierDetailsViaHieDb :: WithHieDb -> LookupModule IdeAction -> GHC.LIdP GhcRn -> IdeAction (Set.HashSet (GHC.OccName, Location, Location)) + getDefinedIdentifierDetailsViaHieDb withHieDb lookupModule lname | name <- unLoc lname = + case srcSpanToLocation (GHC.getLocA lname) of + Just siteLoc + | siteRange <- getLocationRange siteLoc + , range `isSubrangeOf` siteRange -> do + mbSrcLocation <- nameToLocation withHieDb lookupModule name + return $ maybe mempty (Set.fromList . map (nameOccName name, siteLoc,)) mbSrcLocation + _ -> pure mempty + + +describeRestriction :: IsString p => Bool -> p +describeRestriction restrictToOriginatingFile = + if restrictToOriginatingFile then " in current file" else "" + +suggestTypeRewrites :: + Uri -> + GHC.Module -> + TyClDecl GhcRn -> + [(T.Text, CodeActionKind, RunRetrieParams)] +suggestTypeRewrites originatingFile ms_mod SynDecl {tcdLName} = + let pprNameText = printOutputable (unLocA tcdLName) + pprName = T.unpack pprNameText + unfoldRewrite restrictToOriginatingFile = + let rewrites = [TypeForward (qualify ms_mod pprName)] + description = "Unfold " <> pprNameText <> describeRestriction restrictToOriginatingFile + in (description, CodeActionKind_RefactorInline, RunRetrieParams {..}) + foldRewrite restrictToOriginatingFile = + let rewrites = [TypeBackward (qualify ms_mod pprName)] + description = "Fold " <> pprNameText <> describeRestriction restrictToOriginatingFile + in (description, CodeActionKind_RefactorExtract, RunRetrieParams {..}) + in [unfoldRewrite False, unfoldRewrite True, foldRewrite False, foldRewrite True] +suggestTypeRewrites _ _ _ = [] + +suggestRuleRewrites :: + Uri -> + Position -> + GHC.Module -> + LRuleDecls GhcRn -> + [(T.Text, CodeActionKind, RunRetrieParams)] +suggestRuleRewrites originatingFile pos ms_mod (L _ HsRules {rds_rules}) = + concat + [ [ forwardRewrite ruleName True + , forwardRewrite ruleName False + , backwardsRewrite ruleName True + , backwardsRewrite ruleName False + ] + | L (locA -> l) r <- rds_rules, + pos `isInsideSrcSpan` l, + let HsRule {rd_name = L _ rn} = r, + let ruleName = unpackFS rn + ] + where + forwardRewrite ruleName restrictToOriginatingFile = + let rewrites = [RuleForward (qualify ms_mod ruleName)] + description = "Apply rule " <> T.pack ruleName <> " forward" <> + describeRestriction restrictToOriginatingFile + + in ( description, + CodeActionKind_Refactor, + RunRetrieParams {..} + ) + backwardsRewrite ruleName restrictToOriginatingFile = + let rewrites = [RuleBackward (qualify ms_mod ruleName)] + description = "Apply rule " <> T.pack ruleName <> " backwards" <> + describeRestriction restrictToOriginatingFile + in ( 
description, + CodeActionKind_Refactor, + RunRetrieParams {..} + ) + +qualify :: Outputable mod => mod -> String -> String +qualify ms_mod x = T.unpack (printOutputable ms_mod) <> "." <> x + +------------------------------------------------------------------------------- +-- Retrie driving code + +data CallRetrieError + = CallRetrieInternalError String NormalizedFilePath + | NoParse NormalizedFilePath + | GHCParseError NormalizedFilePath String + | NoTypeCheck NormalizedFilePath + deriving (Eq) + +instance Show CallRetrieError where + show (CallRetrieInternalError msg f) = msg <> " - " <> fromNormalizedFilePath f + show (NoParse f) = "Cannot parse: " <> fromNormalizedFilePath f + show (GHCParseError f m) = "Cannot parse " <> fromNormalizedFilePath f <> " : " <> m + show (NoTypeCheck f) = "File does not typecheck: " <> fromNormalizedFilePath f + +instance Exception CallRetrieError + +callRetrie :: + Recorder (WithPriority Log) -> + IdeState -> + HscEnv -> + [Either ImportSpec RewriteSpec] -> + NormalizedFilePath -> + Bool -> + IO ([CallRetrieError], WorkspaceEdit) +callRetrie recorder state session rewrites origin restrictToOriginatingFile = do + knownFiles <- toKnownFiles . unhashed <$> readTVarIO (knownTargetsVar $ shakeExtras state) + let + -- TODO cover all workspaceFolders + target = "." + + retrieOptions :: Retrie.Options + retrieOptions = (defaultOptions target) + {Retrie.verbosity = Loud + ,Retrie.targetFiles = map fromNormalizedFilePath $ + if restrictToOriginatingFile + then [origin] + else Set.toList knownFiles + } + + (theImports, theRewrites) = partitionEithers rewrites + + annotatedImports = + unsafeMkA (map (noLocA . toImportDecl) theImports) 0 + + (originFixities, originParsedModule) <- reuseParsedModule state origin + retrie <- + (\specs -> apply specs >> addImports annotatedImports) + <$> parseSpecs state origin originParsedModule originFixities theRewrites + + targets <- getTargetFiles retrieOptions (getGroundTerms retrie) + + results <- forM targets $ \t -> runExceptT $ do + (fixityEnv, cpp) <- ExceptT $ try $ getCPPmodule recorder state session t + -- TODO add the imports to the resulting edits + (_user, _ast, change@(Change _replacements _imports)) <- + lift $ runRetrie fixityEnv retrie cpp + return $ asTextEdits change + + let (errors :: [CallRetrieError], replacements) = partitionEithers results + editParams :: WorkspaceEdit + editParams = + WorkspaceEdit (Just $ asEditMap $ concat replacements) Nothing Nothing + + return (errors, editParams) + +useOrFail :: + IdeRule r v => + IdeState -> + String -> + (NormalizedFilePath -> CallRetrieError) -> + r -> + NormalizedFilePath -> + IO (RuleResult r) +useOrFail state lbl mkException rule f = + useRule lbl state rule f >>= maybe (liftIO $ throwIO $ mkException f) return + +fixityEnvFromModIface :: ModIface -> FixityEnv +fixityEnvFromModIface modIface = + mkFixityEnv + [ (fs, (fs, fixity)) + | (n, fixity) <- mi_fixities modIface, + let fs = occNameFS n + ] + +fixFixities :: Data ast => + IdeState + -> NormalizedFilePath + -> Annotated ast + -> IO (FixityEnv, Annotated ast) +fixFixities state f pm = do + HiFileResult {hirModIface} <- + useOrFail state "GetModIface" NoTypeCheck GetModIface f + let fixities = fixityEnvFromModIface hirModIface + res <- transformA pm (fix fixities) + return (fixities, res) + +fixAnns :: ParsedModule -> Annotated GHC.ParsedSource +fixAnns GHC.ParsedModule{pm_parsed_source} = unsafeMkA (makeDeltaAst pm_parsed_source) 0 + +parseSpecs + :: IdeState + -> NormalizedFilePath + -> AnnotatedModule + -> 
FixityEnv + -> [RewriteSpec] + -> IO [Rewrite Universe] +parseSpecs state origin originParsedModule originFixities specs = do + -- retrie needs the libdir for `parseRewriteSpecs` + libdir <- topDir . ms_hspp_opts . msrModSummary <$> useOrFail state "Retrie.GetModSummary" (CallRetrieInternalError "file not found") GetModSummary origin + parseRewriteSpecs + libdir + (\_f -> return $ NoCPP originParsedModule) + originFixities + specs + +constructfromFunMatches :: + Annotated [GHCGHC.LocatedA (ImportDecl GhcPs)] + -> GHCGHC.LocatedN GHCGHC.RdrName + -> GHCGHC.MatchGroup GhcPs (GHCGHC.LocatedA (HsExpr GhcPs)) + -> TransformT IO [Rewrite Universe] +constructfromFunMatches imps fun_id fun_matches = do + fe <- mkLocatedHsVar fun_id + rewrites <- concat <$> + forM (unLoc $ GHC.mg_alts fun_matches) (matchToRewrites fe imps LeftToRight) + let urewrites = toURewrite <$> rewrites + -- traceShowM $ map showQuery urewrites + assert (not $ null urewrites) $ + return urewrites + +-- showQuery :: Rewrite Universe -> String +-- showQuery = ppRewrite +-- +-- showQuery :: Rewrite (LHsExpr GhcPs) -> String +-- showQuery q = unlines +-- [ "template: " <> show (hash (printOutputable . showAstData NoBlankSrcSpan . astA . tTemplate . fst . qResult $ q)) +-- , "quantifiers: " <> show (hash (T.pack (show(Ext.toList $ qQuantifiers q)))) +-- , "matcher: " <> show (hash (printOutputable . showAstData NoBlankSrcSpan . astA . qPattern $ q)) +-- ] +-- +-- s :: Data a => a -> String +-- s = T.unpack . printOutputable . showAstData NoBlankSrcSpan +-- NoBlankEpAnnotations + +constructInlineFromIdentifer :: Data a => Annotated (GenLocated l a) -> GHCGHC.RealSrcSpan -> IO [Rewrite Universe] +constructInlineFromIdentifer originParsedModule originSpan = do + -- traceM $ s $ astA originParsedModule + fmap astA $ transformA originParsedModule $ \(L _ m) -> do + let ast = everything (<>) (First Nothing `mkQ` matcher) m + matcher :: HsBindLR GhcPs GhcPs + -> First ( GHCGHC.LocatedN GHCGHC.RdrName + , GHCGHC.MatchGroup GhcPs (GHCGHC.LocatedA (HsExpr GhcPs)) + ) + matcher FunBind{fun_id, fun_matches} + -- trace (show (GHC.getLocA fun_id) <> ": " <> s fun_id) False = undefined + | RealSrcSpan sp _ <- GHC.getLocA fun_id + , sp == originSpan = + First $ Just (fun_id, fun_matches) + matcher _ = First Nothing + case ast of + First (Just (fun_id, fun_matches)) + -> + let imports = mempty in + constructfromFunMatches imports fun_id fun_matches + _ -> return $ error "could not find source code to inline" + +asEditMap :: [(Uri, TextEdit)] -> Map.Map Uri [TextEdit] +asEditMap = Map.fromListWith (++) . map (second pure) + +asTextEdits :: Change -> [(Uri, TextEdit)] +asTextEdits NoChange = [] +asTextEdits (Change reps _imports) = + [ (filePathToUri spanLoc, edit) + | Replacement {..} <- nubOrdOn (realSpan . replLocation) reps, + (RealSrcSpan rspan _) <- [replLocation], + let spanLoc = unpackFS $ srcSpanFile rspan, + let edit = TextEdit (realSrcSpanToRange rspan) (T.pack replReplacement) + ] + +------------------------------------------------------------------------------- +-- Rule wrappers + +_useRuleBlocking, + _useRuleStale, + useRule :: + (IdeRule k v) => + String -> + IdeState -> + k -> + NormalizedFilePath -> + IO (Maybe (RuleResult k)) +_useRuleBlocking label state rule f = runAction label state (use rule f) +_useRuleStale label state rule f = + fmap fst + <$> runIdeAction label (shakeExtras state) (useWithStaleFast rule f) + +-- | Chosen approach for calling ghcide Shake rules +useRule label = _useRuleStale ("Retrie." 
<> label) + +------------------------------------------------------------------------------- +-- Serialization wrappers and instances + +deriving instance Eq RewriteSpec + +deriving instance Show RewriteSpec + +deriving instance Generic RewriteSpec + +deriving instance FromJSON RewriteSpec + +deriving instance ToJSON RewriteSpec + +newtype IE name + = IEVar name + deriving stock (Eq, Show, Generic) + deriving anyclass (FromJSON, ToJSON) + + +data ImportSpec = AddImport + { ideclNameString :: String, + ideclSource :: Bool, + ideclQualifiedBool :: Bool, + ideclAsString :: Maybe String, + ideclThing :: Maybe (IE String) + } + deriving (Eq, Show, Generic, FromJSON, ToJSON) + +toImportDecl :: ImportSpec -> GHC.ImportDecl GHC.GhcPs +toImportDecl AddImport {..} = GHC.ImportDecl {ideclSource = ideclSource', ..} + where + ideclSource' = if ideclSource then IsBoot else NotBoot + toMod = noLocA . GHC.mkModuleName + ideclName = toMod ideclNameString + ideclSafe = False + ideclImplicit = False + ideclSourceSrc = NoSourceText + ideclAs = toMod <$> ideclAsString + ideclQualified = if ideclQualifiedBool then GHC.QualifiedPre else GHC.NotQualified + + ideclPkgQual = NoRawPkgQual + + ideclImportList = Nothing + ideclExt = GHCGHC.XImportDeclPass + { ideclAnn = +#if MIN_VERSION_ghc(9,9,0) + GHCGHC.noAnn +#else + GHCGHC.EpAnnNotUsed +#endif + , ideclSourceText = ideclSourceSrc + , ideclImplicit = ideclImplicit + } + +reuseParsedModule :: IdeState -> NormalizedFilePath -> IO (FixityEnv, Annotated GHCGHC.ParsedSource) +reuseParsedModule state f = do + pm <- useOrFail state "Retrie.GetParsedModule" NoParse GetParsedModule f + (fixities, pm') <- fixFixities state f (fixAnns pm) + return (fixities, pm') + +getCPPmodule :: Recorder (WithPriority Log) -> IdeState -> HscEnv -> FilePath -> IO (FixityEnv, CPP AnnotatedModule) +getCPPmodule recorder state session t = do + -- TODO: is it safe to drop this makeAbsolute? + let nt = toNormalizedFilePath' $ (toAbsolute $ rootDir state) t + let getParsedModule f contents = do + modSummary <- msrModSummary <$> + useOrFail state "Retrie.GetModSummary" (CallRetrieInternalError "file not found") GetModSummary nt + let ms' = + modSummary + { ms_hspp_buf = + Just (stringToStringBuffer contents) + } + logWith recorder Info $ LogParsingModule t + parsed <- evalGhcEnv session (GHCGHC.parseModule ms') + `catch` \e -> throwIO (GHCParseError nt (show @SomeException e)) + (fixities, parsed) <- fixFixities state f (fixAnns parsed) + return (fixities, parsed) + + contents <- do + mbContentsVFS <- + runAction "Retrie.GetFileContents" state $ getFileContents nt + case mbContentsVFS of + Just contents -> return $ Rope.toText contents + Nothing -> T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath nt) + if any (T.isPrefixOf "#if" . 
T.toLower) (T.lines contents) + then do + fixitiesRef <- newIORef mempty + let parseModule x = do + (fix, res) <- getParsedModule nt x + atomicModifyIORef'_ fixitiesRef (fix <>) + return res + res <- parseCPP parseModule contents + fixities <- readIORef fixitiesRef + return (fixities, res) + else do + (fixities, pm) <- reuseParsedModule state nt + return (fixities, NoCPP pm) diff --git a/plugins/hls-retrie-plugin/test/Main.hs b/plugins/hls-retrie-plugin/test/Main.hs new file mode 100644 index 0000000000..96a25b0c4c --- /dev/null +++ b/plugins/hls-retrie-plugin/test/Main.hs @@ -0,0 +1,99 @@ +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} + +module Main (main) where + +import Control.Monad (void) +import qualified Data.Map as M +import Data.Text (Text) +import qualified Development.IDE.GHC.ExactPrint as ExactPrint +import qualified Development.IDE.Plugin.CodeAction as Refactor +import Ide.Logger +import Ide.Plugin.Config +import qualified Ide.Plugin.Retrie as Retrie +import System.FilePath +import Test.Hls + +data LogWrap + = RetrieLog Retrie.Log + | ExactPrintLog ExactPrint.Log + +instance Pretty LogWrap where + pretty = \case + RetrieLog msg -> pretty msg + ExactPrintLog msg -> pretty msg + +main :: IO () +main = defaultTestRunner tests + +retriePlugin :: PluginTestDescriptor LogWrap +retriePlugin = mkPluginTestDescriptor (Retrie.descriptor . cmapWithPrio RetrieLog) "retrie" + +refactorPlugin :: PluginTestDescriptor LogWrap +refactorPlugin = mkPluginTestDescriptor (Refactor.iePluginDescriptor . cmapWithPrio ExactPrintLog) "refactor" + +tests :: TestTree +tests = testGroup "Retrie" + [ inlineThisTests + ] + +inlineThisTests :: TestTree +inlineThisTests = testGroup "Inline this" + [ + testGroup "provider" [ + testProvider "lhs" "Identity" 4 1 ["Unfold function", "Unfold function in current file", "Fold function", "Fold function in current file"], + testProvider "identifier" "Identity" 4 16 ["Inline identity"], + testProvider "imported identifier" "Imported" 4 12 ["Inline identity"], + testProvider "nested where" "NestedWhere" 4 16 ["Inline identity"], + testProvider "nested let" "NestedLet" 6 12 ["Inline identity"], + testProvider "class member" "Class" 5 16 [], + testProvider "operator" "Operator" 4 16 ["Inline */"] + ], + testGroup "command" [ + testCommand "top level function" "Identity" 4 16, + testCommand "top level function in another file" "Imported" 4 12, + testCommand "nested where function" "NestedWhere" 4 16, + testCommand "nested let function" "NestedLet" 6 12, + testCommand "operator" "Operator" 4 16 + ] + ] + +testProvider :: TestName -> FilePath -> UInt -> UInt -> [Text] -> TestTree +testProvider title file line row expected = testCase title $ runWithRetrie $ do + adoc <- openDoc (file <.> "hs") "haskell" + _ <- waitForTypecheck adoc + let position = Position line row + codeActions <- getCodeActions adoc $ Range position position + liftIO $ map codeActionTitle codeActions @?= map Just expected + +testCommand :: TestName -> FilePath -> UInt -> UInt -> TestTree +testCommand title file row col = goldenWithRetrie title file $ \adoc -> do + _ <- waitForTypecheck adoc + let p = Position row col + codeActions <- getCodeActions adoc $ Range p p + case codeActions of + [InR ca] -> do + executeCodeAction ca + void $ skipManyTill anyMessage $ getDocumentEdit adoc + cas -> liftIO . assertFailure $ "One code action expected, got " <> show (length cas) + +codeActionTitle :: (Command |? 
CodeAction) -> Maybe Text +codeActionTitle (InR CodeAction {_title}) = Just _title +codeActionTitle _ = Nothing + +goldenWithRetrie :: TestName -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithRetrie title path act = + goldenWithHaskellDoc (def { plugins = M.singleton "retrie" def }) testPlugins title testDataDir path "expected" "hs" act + +runWithRetrie :: Session a -> IO a +runWithRetrie = runSessionWithServer def testPlugins testDataDir + +testPlugins :: PluginTestDescriptor LogWrap +testPlugins = + retriePlugin <> + refactorPlugin -- needed for the GetAnnotatedParsedSource rule + +testDataDir :: FilePath +testDataDir = "plugins" "hls-retrie-plugin" "test" "testdata" diff --git a/plugins/hls-retrie-plugin/test/testdata/Class.hs b/plugins/hls-retrie-plugin/test/testdata/Class.hs new file mode 100644 index 0000000000..644a647b5e --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Class.hs @@ -0,0 +1,7 @@ +module Class where + +class Identity x where + identity :: x -> x + identity x = x + +function x = identity x diff --git a/plugins/hls-retrie-plugin/test/testdata/Identity.expected.hs b/plugins/hls-retrie-plugin/test/testdata/Identity.expected.hs new file mode 100644 index 0000000000..8fbd1b5cea --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Identity.expected.hs @@ -0,0 +1,5 @@ +module Identity where + +identity x = x + +function x = x diff --git a/plugins/hls-retrie-plugin/test/testdata/Identity.hs b/plugins/hls-retrie-plugin/test/testdata/Identity.hs new file mode 100644 index 0000000000..3e81c40efa --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Identity.hs @@ -0,0 +1,5 @@ +module Identity where + +identity x = x + +function x = identity x diff --git a/plugins/hls-retrie-plugin/test/testdata/Imported.expected.hs b/plugins/hls-retrie-plugin/test/testdata/Imported.expected.hs new file mode 100644 index 0000000000..7670647d4d --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Imported.expected.hs @@ -0,0 +1,5 @@ +module Imported where + +import Identity + +f x = x diff --git a/plugins/hls-retrie-plugin/test/testdata/Imported.hs b/plugins/hls-retrie-plugin/test/testdata/Imported.hs new file mode 100644 index 0000000000..3773e396fc --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Imported.hs @@ -0,0 +1,5 @@ +module Imported where + +import Identity + +f x = identity x diff --git a/plugins/hls-retrie-plugin/test/testdata/Nested.expected.hs b/plugins/hls-retrie-plugin/test/testdata/Nested.expected.hs new file mode 100644 index 0000000000..8df3fbd2de --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Nested.expected.hs @@ -0,0 +1,7 @@ +module Nested where + + + +function x = x + where + identity x = x diff --git a/plugins/hls-retrie-plugin/test/testdata/NestedLet.expected.hs b/plugins/hls-retrie-plugin/test/testdata/NestedLet.expected.hs new file mode 100644 index 0000000000..0cd81093e9 --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/NestedLet.expected.hs @@ -0,0 +1,7 @@ +module NestedLet where + + + +function x = + let identity x = x + in x diff --git a/plugins/hls-retrie-plugin/test/testdata/NestedLet.hs b/plugins/hls-retrie-plugin/test/testdata/NestedLet.hs new file mode 100644 index 0000000000..ce7db202bd --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/NestedLet.hs @@ -0,0 +1,7 @@ +module NestedLet where + + + +function x = + let identity x = x + in identity x diff --git a/plugins/hls-retrie-plugin/test/testdata/NestedNested.hs 
b/plugins/hls-retrie-plugin/test/testdata/NestedNested.hs new file mode 100644 index 0000000000..e2935c4464 --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/NestedNested.hs @@ -0,0 +1,10 @@ + +module NestedNested where + + + +function x = meme x + where + meme x = identity x + identity x = x + diff --git a/plugins/hls-retrie-plugin/test/testdata/NestedWhere.expected.hs b/plugins/hls-retrie-plugin/test/testdata/NestedWhere.expected.hs new file mode 100644 index 0000000000..948779a429 --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/NestedWhere.expected.hs @@ -0,0 +1,7 @@ +module NestedWhere where + + + +function x = x + where + identity x = x diff --git a/plugins/hls-retrie-plugin/test/testdata/NestedWhere.hs b/plugins/hls-retrie-plugin/test/testdata/NestedWhere.hs new file mode 100644 index 0000000000..edde87bb26 --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/NestedWhere.hs @@ -0,0 +1,7 @@ +module NestedWhere where + + + +function x = identity x + where + identity x = x diff --git a/plugins/hls-retrie-plugin/test/testdata/Operator.expected.hs b/plugins/hls-retrie-plugin/test/testdata/Operator.expected.hs new file mode 100644 index 0000000000..4e351e4864 --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Operator.expected.hs @@ -0,0 +1,5 @@ +module Operator where + +x */ y = x + +function x = x diff --git a/plugins/hls-retrie-plugin/test/testdata/Operator.hs b/plugins/hls-retrie-plugin/test/testdata/Operator.hs new file mode 100644 index 0000000000..6c6b63522a --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/Operator.hs @@ -0,0 +1,5 @@ +module Operator where + +x */ y = x + +function x = x */ () diff --git a/plugins/hls-retrie-plugin/test/testdata/hie.yaml b/plugins/hls-retrie-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..bf059478ed --- /dev/null +++ b/plugins/hls-retrie-plugin/test/testdata/hie.yaml @@ -0,0 +1,11 @@ +cradle: + direct: + arguments: + - Class.hs + - Identity.hs + - Imported.hs + - Nested.hs + - NestedLet.hs + - NestedNested.hs + - NestedWhere.hs + - Operator.hs diff --git a/plugins/hls-semantic-tokens-plugin/README.md b/plugins/hls-semantic-tokens-plugin/README.md new file mode 100644 index 0000000000..5d6be35ef5 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/README.md @@ -0,0 +1,66 @@ +# Semantic tokens (LSP) plugin for Haskell language server + +## Purpose + +The purpose of this plugin is to provide semantic tokens for the Haskell language server, +according to the [LSP specification](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens). +It can be used to provide semantic highlighting for Haskell code in editors by giving semantic types and modifiers to some tokens. +Many editors support semantic highlighting through LSP, for example vscode, vim, emacs, etc. + +## Features + +### Semantic types and modifiers + +The plugin handles requests for semantic tokens for the whole file. +It supports semantic types, but not yet modifiers, from the LSP specification. + +The default semantic types defined in LSP diverge greatly from the ones used in GHC. +But using the default semantic types lets users get semantic highlighting with less configuration. +That is why we use the default semantic types for now, by mapping GHC semantic types to LSP semantic types. +The mapping is defined in the `Mappings.hs` file.
+ +### Delta semantic tokens, range semantic tokens and refresh + +It does not yet support capabilities for delta semantic tokens, which might be +crucial for performance. +This should be implemented in the future. + +## Checklist + +* Supported PluginMethodHandler + * [x] [textDocument/semanticTokens/full](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokens_fullRequest). + * [ ] [textDocument/semanticTokens/full/delta](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokens_deltaRequest) + * [ ] [workspace/semanticTokens/refresh](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokens_refreshRequest) + +* Supported semantic token types: + * [x] class and class method + * [x] type family name (data family) + * [x] data constructor name (not distinguishing record and normal data, and GADT) + * [x] type constructor name (GADT) + * [x] record field name + * [x] type synonym + * [x] pattern synonym + * [x] ~~pattern bindings~~ In favor of distinguishing functions from non-functions by their type + * [x] ~~value bindings~~ In favor of distinguishing functions from non-functions by their type + * [x] functions + * [x] non-function variables + * [x] imported name + +* Supported modifiers (planned): + * [future] declaration (as in class declarations, type definitions and type families) + * [future] definition (as in class instance declarations, left-hand-side value bindings, and type family instances) + * [future] modification (as in record field updates) + +## Implementation details + +* [x] Compute visible names from the renamed source +* [x] Compute `NameSemanticMap` for imported and top-level name tokens using `HscEnv` (with deps) and the type-checked result +* [x] Compute the current module's `NameSemanticMap` using `RefMap a` from the result of `GetHieAst` +* [x] Compute all visible `(Name, Span)` in the current module, and in turn compute their semantic tokens using the combination of the above two `NameSemanticMap`s +* [x] use the default legend; the candidate mapping from our token types to the default token types: [Maps to default token types](https://github.com/soulomoon/haskell-language-server/blob/master/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs) +* [x] add args support to turn the plugin on and off +* [x] enhance tests +* [x] enhance error reporting +* [x] computation of semantic tokens is pushed into the rule `getSemanticTokensRule` +* [future] make use of modifiers +* [future] handling customized legends using server capabilities (how?)
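For reference, the `textDocument/semanticTokens/full` response mentioned in the checklist above carries its tokens in a flat `data` array of unsigned integers, five per token: a line delta, a start-character delta, the token length, an index into the legend's token types, and a modifier bit set (LSP 3.17). The sketch below is a minimal, illustrative decoder for that encoding; it is not taken from this patch, and the names `AbsToken` and `decodeTokens` are invented here.

```haskell
import Data.List (unfoldr)
import Data.Word (Word32)

-- A token with its absolute position, recovered from the relative encoding
-- used in the LSP "data" array.
data AbsToken = AbsToken
  { absLine      :: Word32 -- 0-based line
  , absStartChar :: Word32 -- 0-based start column
  , absLength    :: Word32 -- token length
  , absTokenType :: Word32 -- index into the legend's tokenTypes
  , absModifiers :: Word32 -- bit set over the legend's tokenModifiers
  } deriving Show

-- Decode groups of five integers into tokens with absolute positions.
decodeTokens :: [Word32] -> [AbsToken]
decodeTokens = go 0 0 . chunksOf5
  where
    chunksOf5 = unfoldr $ \xs -> case splitAt 5 xs of
      (chunk@[_, _, _, _, _], rest) -> Just (chunk, rest)
      _                             -> Nothing -- ignore a trailing partial group
    go _ _ [] = []
    go prevLine prevStart ([dLine, dStart, len, ty, mods] : rest) =
      let line  = prevLine + dLine
          -- the start delta is relative to the previous token only when both
          -- tokens sit on the same line; otherwise it is relative to the line start
          start = if dLine == 0 then prevStart + dStart else dStart
      in AbsToken line start len ty mods : go line start rest
    go prevLine prevStart (_ : rest) = go prevLine prevStart rest

main :: IO ()
main = mapM_ print (decodeTokens [0, 0, 6, 1, 0, 0, 7, 3, 2, 0, 2, 4, 5, 0, 0])
-- two tokens on line 0 (starting at columns 0 and 7), one on line 2 (column 4)
```

A `textDocument/semanticTokens/full/delta` response then only sends edits against a previously returned `data` array identified by its `resultId`, which is why the plugin below caches the last response per file (`getPreviousSemanticTokens`/`setSemanticTokens` in `Internal.hs`).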
diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens.hs new file mode 100644 index 0000000000..28e05f5e8c --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens.hs @@ -0,0 +1,24 @@ +{-# LANGUAGE OverloadedStrings #-} + + +module Ide.Plugin.SemanticTokens (descriptor) where + +import Development.IDE +import qualified Ide.Plugin.SemanticTokens.Internal as Internal +import Ide.Plugin.SemanticTokens.Types +import Ide.Types +import Language.LSP.Protocol.Message + +descriptor :: Recorder (WithPriority SemanticLog) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides semantic tokens") + { Ide.Types.pluginHandlers = + mkPluginHandler SMethod_TextDocumentSemanticTokensFull (Internal.semanticTokensFull recorder) + <> mkPluginHandler SMethod_TextDocumentSemanticTokensFullDelta (Internal.semanticTokensFullDelta recorder), + Ide.Types.pluginRules = Internal.getSemanticTokensRule recorder, + pluginConfigDescriptor = + defaultConfigDescriptor + { configInitialGenericConfig = (configInitialGenericConfig defaultConfigDescriptor) {plcGlobalOn = False} + , configCustomConfig = mkCustomConfig Internal.semanticConfigProperties + } + } diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs new file mode 100644 index 0000000000..1bbba24df2 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -0,0 +1,166 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UnicodeSyntax #-} + +-- | +-- This module provides the core functionality of the plugin. 
+module Ide.Plugin.SemanticTokens.Internal (semanticTokensFull, getSemanticTokensRule, semanticConfigProperties, semanticTokensFullDelta) where + +import Control.Concurrent.STM (stateTVar) +import Control.Concurrent.STM.Stats (atomically) +import Control.Lens ((^.)) +import Control.Monad.Except (ExceptT, liftEither, + withExceptT) +import Control.Monad.IO.Class (MonadIO (..)) +import Control.Monad.Trans (lift) +import Control.Monad.Trans.Except (runExceptT) +import qualified Data.Map.Strict as M +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (Action, + GetDocMap (GetDocMap), + GetHieAst (GetHieAst), + HieAstResult (HAR, hieAst, hieModule, refMap), + IdeResult, IdeState, + Priority (..), + Recorder, Rules, + WithPriority, + cmapWithPrio, define, + fromNormalizedFilePath, + hieKind) +import Development.IDE.Core.PluginUtils (runActionE, useE, + useWithStaleE) +import Development.IDE.Core.Rules (toIdeResult) +import Development.IDE.Core.RuleTypes (DocAndTyThingMap (..)) +import Development.IDE.Core.Shake (ShakeExtras (..), + getShakeExtras, + getVirtualFile) +import Development.IDE.GHC.Compat hiding (Warning) +import Development.IDE.GHC.Compat.Util (mkFastString) +import GHC.Iface.Ext.Types (HieASTs (getAsts), + pattern HiePath) +import Ide.Logger (logWith) +import Ide.Plugin.Error (PluginError (PluginInternalError), + getNormalizedFilePathE, + handleMaybe, + handleMaybeM) +import Ide.Plugin.SemanticTokens.Mappings +import Ide.Plugin.SemanticTokens.Query +import Ide.Plugin.SemanticTokens.SemanticConfig (mkSemanticConfigFunctions) +import Ide.Plugin.SemanticTokens.Tokenize (computeRangeHsSemanticTokenTypeList) +import Ide.Plugin.SemanticTokens.Types +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Message (MessageResult, + Method (Method_TextDocumentSemanticTokensFull, Method_TextDocumentSemanticTokensFullDelta)) +import Language.LSP.Protocol.Types (NormalizedFilePath, + SemanticTokens, + type (|?) (InL, InR)) +import Prelude hiding (span) +import qualified StmContainers.Map as STM + + +$mkSemanticConfigFunctions + +----------------------- +---- the api +----------------------- + +computeSemanticTokens :: Recorder (WithPriority SemanticLog) -> PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError Action SemanticTokens +computeSemanticTokens recorder pid _ nfp = do + config <- lift $ useSemanticConfigAction pid + logWith recorder Debug (LogConfig config) + semanticId <- lift getAndIncreaseSemanticTokensId + (RangeHsSemanticTokenTypes {rangeSemanticList}, mapping) <- useWithStaleE GetSemanticTokens nfp + withExceptT PluginInternalError $ liftEither $ rangeSemanticsSemanticTokens semanticId config mapping rangeSemanticList + +semanticTokensFull :: Recorder (WithPriority SemanticLog) -> PluginMethodHandler IdeState 'Method_TextDocumentSemanticTokensFull +semanticTokensFull recorder state pid param = runActionE "SemanticTokens.semanticTokensFull" state computeSemanticTokensFull + where + computeSemanticTokensFull :: ExceptT PluginError Action (MessageResult Method_TextDocumentSemanticTokensFull) + computeSemanticTokensFull = do + nfp <- getNormalizedFilePathE (param ^. L.textDocument . 
L.uri) + items <- computeSemanticTokens recorder pid state nfp + lift $ setSemanticTokens nfp items + return $ InL items + + +semanticTokensFullDelta :: Recorder (WithPriority SemanticLog) -> PluginMethodHandler IdeState 'Method_TextDocumentSemanticTokensFullDelta +semanticTokensFullDelta recorder state pid param = do + nfp <- getNormalizedFilePathE (param ^. L.textDocument . L.uri) + let previousVersionFromParam = param ^. L.previousResultId + runActionE "SemanticTokens.semanticTokensFullDelta" state $ computeSemanticTokensFullDelta recorder previousVersionFromParam pid state nfp + where + computeSemanticTokensFullDelta :: Recorder (WithPriority SemanticLog) -> Text -> PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError Action (MessageResult Method_TextDocumentSemanticTokensFullDelta) + computeSemanticTokensFullDelta recorder previousVersionFromParam pid state nfp = do + semanticTokens <- computeSemanticTokens recorder pid state nfp + previousSemanticTokensMaybe <- lift $ getPreviousSemanticTokens nfp + lift $ setSemanticTokens nfp semanticTokens + case previousSemanticTokensMaybe of + Nothing -> return $ InL semanticTokens + Just previousSemanticTokens -> + if Just previousVersionFromParam == previousSemanticTokens^.L.resultId + then return $ InR $ InL $ makeSemanticTokensDeltaWithId (semanticTokens^.L.resultId) previousSemanticTokens semanticTokens + else do + logWith recorder Warning (LogSemanticTokensDeltaMisMatch previousVersionFromParam (previousSemanticTokens^.L.resultId)) + return $ InL semanticTokens + +-- | Defines the 'getSemanticTokensRule' function, compute semantic tokens for a Haskell source file. +-- +-- This Rule collects information from various sources, including: +-- +-- Imported name token type from Rule 'GetDocMap' +-- Local names token type from 'hieAst' +-- Name locations from 'hieAst' +-- Visible names from 'tmrRenamed' + +-- +-- It then combines this information to compute the semantic tokens for the file. +getSemanticTokensRule :: Recorder (WithPriority SemanticLog) -> Rules () +getSemanticTokensRule recorder = + define (cmapWithPrio LogShake recorder) $ \GetSemanticTokens nfp -> handleError recorder $ do + (HAR {..}) <- withExceptT LogDependencyError $ useE GetHieAst nfp + (DKMap {getTyThingMap}, _) <- withExceptT LogDependencyError $ useWithStaleE GetDocMap nfp + ast <- handleMaybe (LogNoAST $ show nfp) $ getAsts hieAst M.!? (HiePath . mkFastString . fromNormalizedFilePath) nfp + virtualFile <- handleMaybeM LogNoVF $ getVirtualFile nfp + let hsFinder = idSemantic getTyThingMap (hieKindFunMasksKind hieKind) refMap + return $ computeRangeHsSemanticTokenTypeList hsFinder virtualFile ast + + +-- taken from /haskell-language-server/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs + +-- | Handle error in 'Action'. Returns an 'IdeResult' with no value and no diagnostics on error. 
(but writes log) +handleError :: Recorder (WithPriority msg) -> ExceptT msg Action a -> Action (IdeResult a) +handleError recorder action' = do + valueEither <- runExceptT action' + case valueEither of + Left msg -> do + logWith recorder Warning msg + pure $ toIdeResult (Left []) + Right value -> pure $ toIdeResult (Right value) + +----------------------- +-- helper functions +----------------------- + +-- keep track of the semantic tokens response id +-- so that we can compute the delta between two versions +getAndIncreaseSemanticTokensId :: Action SemanticTokenId +getAndIncreaseSemanticTokensId = do + ShakeExtras{semanticTokensId} <- getShakeExtras + liftIO $ atomically $ do + i <- stateTVar semanticTokensId (\val -> (val, val+1)) + return $ T.pack $ show i + +getPreviousSemanticTokens :: NormalizedFilePath -> Action (Maybe SemanticTokens) +getPreviousSemanticTokens uri = getShakeExtras >>= liftIO . atomically . STM.lookup uri . semanticTokensCache + +setSemanticTokens :: NormalizedFilePath -> SemanticTokens -> Action () +setSemanticTokens uri tokens = getShakeExtras >>= liftIO . atomically . STM.insert tokens uri . semanticTokensCache diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs new file mode 100644 index 0000000000..e93cefb711 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -0,0 +1,252 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + + +-- | +-- This module provides mappings to convert token type information in the Haskell IDE plugin. It includes functions for: +-- +-- 1. Mapping semantic token type to and from the LSP default token type. +-- 2. Mapping from GHC type and tyThing to semantic token type. +-- 3. Mapping from hieAst identifier details to haskell semantic token type. +-- 4. Mapping from LSP tokens to SemanticTokenOriginal. +module Ide.Plugin.SemanticTokens.Mappings where + +import qualified Data.Array as A +import Data.List.Extra (chunksOf, (!?)) +import qualified Data.Map.Strict as Map +import Data.Maybe (mapMaybe) +import qualified Data.Set as Set +import Data.Text (Text, unpack) +import Development.IDE (HieKind (HieFresh, HieFromDisk)) +import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (BindType (..), + ContextInfo (..), + DeclType (..), HieType (..), + HieTypeFlat, TypeIndex) +import Ide.Plugin.SemanticTokens.Types +import Ide.Plugin.SemanticTokens.Utils (mkRange) +import Language.LSP.Protocol.Types (LspEnum (knownValues), + SemanticTokenAbsolute (SemanticTokenAbsolute), + SemanticTokenRelative (SemanticTokenRelative), + SemanticTokenTypes (..), + SemanticTokens (SemanticTokens), + UInt, absolutizeTokens) +import Language.LSP.VFS hiding (line) + +-- * 0. Mapping name to Hs semantic token type. + +nameInfixOperator :: Name -> Maybe HsSemanticTokenType +nameInfixOperator name | isSymOcc (nameOccName name) = Just TOperator +nameInfixOperator _ = Nothing + +-- * 1. Mapping semantic token type to and from the LSP default token type. 
+ +-- | map from haskell semantic token type to LSP default token type +toLspTokenType :: SemanticTokensConfig -> HsSemanticTokenType -> SemanticTokenTypes +toLspTokenType conf tk = case tk of + TFunction -> stFunction conf + TVariable -> stVariable conf + TClassMethod -> stClassMethod conf + TTypeVariable -> stTypeVariable conf + TDataConstructor -> stDataConstructor conf + TClass -> stClass conf + TTypeConstructor -> stTypeConstructor conf + TTypeSynonym -> stTypeSynonym conf + TTypeFamily -> stTypeFamily conf + TRecordField -> stRecordField conf + TPatternSynonym -> stPatternSynonym conf + TModule -> stModule conf + TOperator -> stOperator conf + +lspTokenReverseMap :: SemanticTokensConfig -> Map.Map SemanticTokenTypes HsSemanticTokenType +lspTokenReverseMap config + | length xs /= Map.size mr = error "lspTokenReverseMap: token type mapping is not bijection" + | otherwise = mr + where xs = enumFrom minBound + mr = Map.fromList $ map (\x -> (toLspTokenType config x, x)) xs + +lspTokenTypeHsTokenType :: SemanticTokensConfig -> SemanticTokenTypes -> Maybe HsSemanticTokenType +lspTokenTypeHsTokenType cf tk = Map.lookup tk (lspTokenReverseMap cf) + +-- * 2. Mapping from GHC type and tyThing to semantic token type. + +-- | tyThingSemantic +tyThingSemantic :: TyThing -> Maybe HsSemanticTokenType +tyThingSemantic ty | (Just hst) <- tyThingSemantic' ty = Just hst <> nameInfixOperator (getName ty) +tyThingSemantic _ = Nothing +tyThingSemantic' :: TyThing -> Maybe HsSemanticTokenType +tyThingSemantic' ty = case ty of + AnId vid + | isTyVar vid -> Just TTypeVariable + | isRecordSelector vid -> Just TRecordField + | isClassOpId vid -> Just TClassMethod + | isFunVar vid -> Just TFunction + | otherwise -> Just TVariable + AConLike con -> case con of + RealDataCon _ -> Just TDataConstructor + PatSynCon _ -> Just TPatternSynonym + ATyCon tyCon + | isTypeSynonymTyCon tyCon -> Just TTypeSynonym + | isTypeFamilyTyCon tyCon -> Just TTypeFamily + | isClassTyCon tyCon -> Just TClass + -- fall back to TTypeConstructor the result + | otherwise -> Just TTypeConstructor + ACoAxiom _ -> Nothing + where + isFunVar :: Var -> Bool + isFunVar var = isFunType $ varType var + +-- expand the type synonym https://hackage.haskell.org/package/ghc-9.8.1/docs/src/GHC.Core.Type.html +expandTypeSyn :: Type -> Type +expandTypeSyn ty + | Just ty' <- coreView ty = expandTypeSyn ty' + | otherwise = ty + +isFunType :: Type -> Bool +isFunType a = case expandTypeSyn a of + ForAllTy _ t -> isFunType t + -- Development.IDE.GHC.Compat.Core.FunTy(pattern synonym), FunTyFlag which is used to distinguish + -- (->, =>, etc..) 
+  FunTy flg _ rhs -> isVisibleFunArg flg || isFunType rhs
+  _x -> isFunTy a
+
+
+hieKindFunMasksKind :: HieKind a -> HieFunMaskKind a
+hieKindFunMasksKind hieKind = case hieKind of
+  HieFresh -> HieFreshFun
+  HieFromDisk full_file -> HieFromDiskFun $ recoverFunMaskArray (hie_types full_file)
+
+-- wz1000 offered
+-- the idea from https://gitlab.haskell.org/ghc/haddock/-/blob/b0b0e0366457c9aefebcc94df74e5de4d00e17b7/haddock-api/src/Haddock/Backends/Hyperlinker/Utils.hs#L107
+-- optimized version of looking for which types are functions without unfolding the whole type
+recoverFunMaskArray ::
+  -- | flat types
+  A.Array TypeIndex HieTypeFlat ->
+  -- | array of bool indicating whether the type is a function
+  A.Array TypeIndex Bool
+recoverFunMaskArray flattened = unflattened
+  where
+    -- The recursion in 'unflattened' is crucial - it's what gives us sharing
+    -- in the function indicator check.
+    unflattened :: A.Array TypeIndex Bool
+    unflattened = fmap (go . fmap (unflattened A.!)) flattened
+
+    -- Unfold an 'HieType' whose sub-terms have already been unfolded
+    go :: HieType Bool -> Bool
+    go (HTyVarTy _name) = False
+    go (HAppTy _f _x) = False
+    go (HLitTy _lit) = False
+    go (HForAllTy ((_n, _k), _af) b) = b
+    go (HFunTy {}) = True
+    go (HQualTy _constraint b) = b
+    go (HCastTy b) = b
+    go HCoercionTy = False
+    -- we do not have enough information to expand the type synonym
+    go (HTyConApp _ _) = False
+
+typeSemantic :: HieFunMaskKind hType -> hType -> Maybe HsSemanticTokenType
+typeSemantic kind t = case kind of
+  HieFreshFun -> if isFunType t then Just TFunction else Nothing
+  HieFromDiskFun arr -> if arr A.! t then Just TFunction else Nothing
+
+-- * 3. Mapping from hieAst ContextInfo to haskell semantic token type.
+
+infoTokenType :: ContextInfo -> Maybe HsSemanticTokenType
+infoTokenType x = case x of
+  Use -> Nothing
+  MatchBind -> Nothing
+  IEThing _ -> Nothing
+  TyDecl -> Nothing -- type signature
+  ValBind RegularBind _ _ -> Just TVariable
+  ValBind InstanceBind _ _ -> Just TClassMethod
+  PatternBind {} -> Just TVariable
+  ClassTyDecl _ -> Just TClassMethod
+  TyVarBind _ _ -> Just TTypeVariable
+  RecField _ _ -> Just TRecordField
+  -- data constructor, type constructor, type synonym, type family
+  Decl ClassDec _ -> Just TClass
+  Decl DataDec _ -> Just TTypeConstructor
+  Decl ConDec _ -> Just TDataConstructor
+  Decl SynDec _ -> Just TTypeSynonym
+  Decl FamDec _ -> Just TTypeFamily
+  -- instance dec is class method
+  Decl InstDec _ -> Just TClassMethod
+  Decl PatSynDec _ -> Just TPatternSynonym
+  EvidenceVarUse -> Nothing
+  EvidenceVarBind {} -> Nothing
+
+-- * 4. Mapping from LSP tokens to SemanticTokenOriginal.
+
+-- | recoverSemanticTokens
+-- for debug and test.
+-- this function is used to recover the original tokens (with tokens in the haskell token type zone)
+-- from the lsp semantic tokens (with tokens in the lsp token type zone)
+-- the `SemanticTokensConfig` used should be a map with the bijection property
+recoverSemanticTokens :: SemanticTokensConfig -> VirtualFile -> SemanticTokens -> Either Text [SemanticTokenOriginal HsSemanticTokenType]
+recoverSemanticTokens config v s = do
+  tks <- recoverLspSemanticTokens v s
+  return $ map (lspTokenHsToken config) tks
+
+-- | lspTokenHsToken
+-- for debug and test.
+-- use the `SemanticTokensConfig` to convert lsp token type to haskell token type
+-- the `SemanticTokensConfig` used should be a map with the bijection property
+lspTokenHsToken :: SemanticTokensConfig -> SemanticTokenOriginal SemanticTokenTypes -> SemanticTokenOriginal HsSemanticTokenType
+lspTokenHsToken config (SemanticTokenOriginal tokenType location name) =
+  case lspTokenTypeHsTokenType config tokenType of
+    Just t -> SemanticTokenOriginal t location name
+    Nothing -> error "recoverSemanticTokens: unknown lsp token type"
+
+-- | recoverLspSemanticTokens
+-- for debug and test.
+-- this function is used to recover the original tokens (with tokens in the standard lsp token type zone)
+-- from the lsp semantic tokens (with tokens in the lsp token type zone)
+recoverLspSemanticTokens :: VirtualFile -> SemanticTokens -> Either Text [SemanticTokenOriginal SemanticTokenTypes]
+recoverLspSemanticTokens vsf (SemanticTokens _ xs) = do
+  tokens <- dataActualToken xs
+  return $ mapMaybe (tokenOrigin sourceCode) tokens
+  where
+    sourceCode = unpack $ virtualFileText vsf
+    tokenOrigin :: [Char] -> SemanticTokenAbsolute -> Maybe (SemanticTokenOriginal SemanticTokenTypes)
+    tokenOrigin sourceCode' (SemanticTokenAbsolute line startChar len tokenType _tokenModifiers) = do
+      -- convert back to count from 1
+      let range = mkRange line startChar len
+      CodePointRange (CodePointPosition x y) (CodePointPosition _ y1) <- rangeToCodePointRange vsf range
+      let line' = x
+      let startChar' = y
+      let len' = y1 - y
+      let tLine = lines sourceCode' !? fromIntegral line'
+      let name = maybe "no source" (take (fromIntegral len') . drop (fromIntegral startChar')) tLine
+      return $ SemanticTokenOriginal tokenType (Loc (line' + 1) (startChar' + 1) len') name
+
+    dataActualToken :: [UInt] -> Either Text [SemanticTokenAbsolute]
+    dataActualToken dt =
+      maybe decodeError (Right . absolutizeTokens) $
+        mapM fromTuple (chunksOf 5 $ map fromIntegral dt)
+      where
+        decodeError = Left "recoverSemanticTokenRelative: wrong token data"
+        fromTuple [a, b, c, d, _] = SemanticTokenRelative a b c <$> fromInt (fromIntegral d) <*> return []
+        fromTuple _ = Nothing
+
+
+    -- legends :: SemanticTokensLegend
+    fromInt :: Int -> Maybe SemanticTokenTypes
+    fromInt i = Set.toAscList knownValues !? i
+
+-- Note [Semantic information from Multiple Sources]
+-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+-- We group Names into 2 categories since the information source is different:
+-- 1. Locally defined Name
+-- Information source is the current module's HieAst,
+-- either from ContextInfo (everything except distinguishing functions from non-functions)
+-- or from the Hie type (distinguishing function variables from non-function variables)
+-- 2. Imported Name
+-- Information source is the `TyThing` for the `Name`, looked up in `HscEnv` (with all imported things loaded).
+-- `TyThing` is information-rich, since it is used to represent the things that a name can refer to in GHC.
+-- The reason why we need special handling for imported names is that, up to GHC 9.8:
+-- 1. For Hie types, IfaceTyCon in the hie type does not contain enough information to distinguish class, type synonym, type family etc.
+-- 2. Most imported names are only annotated as [Use] in the ContextInfo from hie.
+-- 3. `namespace` in `Name` is limited: we can only classify `VarName, FldName, DataName, TvName, TcClsName`.
+-- 4. WiredIn `Name`s have a `TyThing` attached, but not many names are WiredIn names.
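+
+-- A minimal round-trip sketch of the mappings above (illustration only, not used by the plugin):
+-- with the default 'SemanticTokensConfig' the Haskell-to-LSP token type mapping is a bijection,
+-- so mapping a token type to its LSP token type and back is the identity. It assumes 'def' from
+-- "Data.Default" is in scope (this module does not import it itself).
+--
+-- >>> import Data.Default (def)
+-- >>> all (\tk -> lspTokenTypeHsTokenType def (toLspTokenType def tk) == Just tk) [minBound .. maxBound :: HsSemanticTokenType]
+-- True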
diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs new file mode 100644 index 0000000000..5875ebfa8d --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -0,0 +1,95 @@ +-- | +-- The query module is used to query the semantic tokens from the AST +module Ide.Plugin.SemanticTokens.Query where + +import Control.Applicative ((<|>)) +import Data.Foldable (fold) +import qualified Data.Map.Strict as M +import Data.Maybe (listToMaybe, mapMaybe) +import qualified Data.Set as Set +import Data.Text (Text) +import Development.IDE.Core.PositionMapping (PositionMapping, + toCurrentRange) +import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (ContextInfo, Identifier, + IdentifierDetails (..)) +import GHC.Iface.Ext.Utils (RefMap) +import Ide.Plugin.SemanticTokens.Mappings +import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, + HsSemanticTokenType (TModule), + RangeSemanticTokenTypeList, + SemanticTokenId, + SemanticTokensConfig) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), + SemanticTokenAbsolute (SemanticTokenAbsolute), + SemanticTokens (SemanticTokens), + SemanticTokensDelta (SemanticTokensDelta), + defaultSemanticTokensLegend, + makeSemanticTokens, + makeSemanticTokensDelta) +import Prelude hiding (length, span) + +--------------------------------------------------------- + +-- * extract semantic + +--------------------------------------------------------- + +idSemantic :: forall a. NameEnv TyThing -> HieFunMaskKind a -> RefMap a -> Identifier -> Maybe HsSemanticTokenType +idSemantic _ _ _ (Left _) = Just TModule +idSemantic tyThingMap hieKind rm (Right n) = + nameSemanticFromHie hieKind rm n -- local name + <|> (lookupNameEnv tyThingMap n >>= tyThingSemantic) -- global name + + +--------------------------------------------------------- + +-- * extract semantic from HieAst for local variables + +--------------------------------------------------------- + +nameSemanticFromHie :: forall a. HieFunMaskKind a -> RefMap a -> Name -> Maybe HsSemanticTokenType +nameSemanticFromHie hieKind rm n = idSemanticFromRefMap rm (Right n) + where + idSemanticFromRefMap :: RefMap a -> Identifier -> Maybe HsSemanticTokenType + idSemanticFromRefMap rm' name' = do + spanInfos <- M.lookup name' rm' + let typeTokenType = foldMap (typeSemantic hieKind) $ listToMaybe $ mapMaybe (identType . snd) spanInfos + contextInfoTokenType <- foldMap (contextInfosMaybeTokenType . identInfo . snd) spanInfos + fold [typeTokenType, Just contextInfoTokenType, nameInfixOperator n] + + contextInfosMaybeTokenType :: Set.Set ContextInfo -> Maybe HsSemanticTokenType + contextInfosMaybeTokenType details = foldMap infoTokenType (Set.toList details) + + +------------------------------------------------- + +-- * extract lsp semantic tokens from RangeSemanticTokenTypeList + +------------------------------------------------- + +rangeSemanticsSemanticTokens :: SemanticTokenId -> SemanticTokensConfig -> PositionMapping -> RangeSemanticTokenTypeList -> Either Text SemanticTokens +rangeSemanticsSemanticTokens sid stc mapping = + makeSemanticTokensWithId (Just sid) . 
mapMaybe (\(ran, tk) -> toAbsSemanticToken <$> toCurrentRange mapping ran <*> return tk) + where + toAbsSemanticToken :: Range -> HsSemanticTokenType -> SemanticTokenAbsolute + toAbsSemanticToken (Range (Position startLine startColumn) (Position _endLine endColumn)) tokenType = + let len = endColumn - startColumn + in SemanticTokenAbsolute + (fromIntegral startLine) + (fromIntegral startColumn) + (fromIntegral len) + (toLspTokenType stc tokenType) + [] + +makeSemanticTokensWithId :: Maybe SemanticTokenId -> [SemanticTokenAbsolute] -> Either Text SemanticTokens +makeSemanticTokensWithId sid tokens = do + (SemanticTokens _ tokens) <- makeSemanticTokens defaultSemanticTokensLegend tokens + return $ SemanticTokens sid tokens + +makeSemanticTokensDeltaWithId :: Maybe SemanticTokenId -> SemanticTokens -> SemanticTokens -> SemanticTokensDelta +makeSemanticTokensDeltaWithId sid previousTokens currentTokens = + let (SemanticTokensDelta _ stEdits) = makeSemanticTokensDelta previousTokens currentTokens + in SemanticTokensDelta sid stEdits + diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs new file mode 100644 index 0000000000..be793cfe7a --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -0,0 +1,134 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskellQuotes #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.SemanticTokens.SemanticConfig where + +import Data.Char (toLower) +import Data.Default (def) +import qualified Data.Set as S +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (Action, usePropertyAction) +import GHC.TypeLits (KnownSymbol) +import Ide.Plugin.Properties (KeyNameProxy, NotElem, + Properties, + PropertyKey (PropertyKey), + PropertyType (TEnum), + defineEnumProperty, + emptyProperties) +import Ide.Plugin.SemanticTokens.Types +import Ide.Types (PluginId) +import Language.Haskell.TH +import Language.LSP.Protocol.Types (LspEnum (..), + SemanticTokenTypes) + +docName :: HsSemanticTokenType -> T.Text +docName tt = case tt of + TVariable -> "variables" + TFunction -> "functions" + TDataConstructor -> "data constructors" + TTypeVariable -> "type variables" + TClassMethod -> "typeclass methods" + TPatternSynonym -> "pattern synonyms" + TTypeConstructor -> "type constructors" + TClass -> "typeclasses" + TTypeSynonym -> "type synonyms" + TTypeFamily -> "type families" + TRecordField -> "record fields" + TModule -> "modules" + TOperator -> "operators" + +toConfigName :: String -> String +toConfigName = ("st" <>) + +type LspTokenTypeDescriptions = [(SemanticTokenTypes, T.Text)] + +lspTokenTypeDescriptions :: LspTokenTypeDescriptions +lspTokenTypeDescriptions = + map + ( \x -> + (x, "LSP Semantic Token Type: " <> toEnumBaseType x) + ) + $ S.toList knownValues + +allHsTokenTypes :: [HsSemanticTokenType] +allHsTokenTypes = enumFrom minBound + +lowerFirst :: String -> String +lowerFirst [] = [] +lowerFirst (x : xs) = toLower x : xs + +allHsTokenNameStrings :: [String] +allHsTokenNameStrings = map (drop 1 . 
show) allHsTokenTypes + +defineSemanticProperty :: + (NotElem s r, KnownSymbol s) => + (KeyNameProxy s, Text, SemanticTokenTypes) -> + Properties r -> + Properties ('PropertyKey s (TEnum SemanticTokenTypes) : r) +defineSemanticProperty (lb, tokenType, st) = + defineEnumProperty + lb + tokenType + lspTokenTypeDescriptions + st + +semanticDef :: SemanticTokensConfig +semanticDef = def + +-- | it produces the following functions: +-- semanticConfigProperties :: Properties '[ +-- 'PropertyKey "Variable" ('TEnum SemanticTokenTypes), +-- ... +-- ] +-- useSemanticConfigAction :: PluginId -> Action SemanticTokensConfig +mkSemanticConfigFunctions :: Q [Dec] +mkSemanticConfigFunctions = do + let pid = mkName "pid" + let semanticConfigPropertiesName = mkName "semanticConfigProperties" + let useSemanticConfigActionName = mkName "useSemanticConfigAction" + let allLabelStrs = map ((<> "Token") . lowerFirst) allHsTokenNameStrings + allLabels = map (LabelE . (<> "Token") . lowerFirst) allHsTokenNameStrings + allFieldsNames = map (mkName . toConfigName) allHsTokenNameStrings + allVariableNames = map (mkName . ("_variable_" <>) . toConfigName) allHsTokenNameStrings + -- <- useSemanticConfigAction label pid config + mkGetProperty (variable, label) = + BindS + (VarP variable) + (AppE (VarE 'usePropertyAction) label `AppE` VarE pid `AppE` VarE semanticConfigPropertiesName) + getProperties = zipWith (curry mkGetProperty) allVariableNames allLabels + recordUpdate = + RecUpdE (VarE 'semanticDef) $ + zipWith (\fieldName variableName -> (fieldName, VarE variableName)) allFieldsNames allVariableNames + -- get and then update record + bb = DoE Nothing $ getProperties ++ [NoBindS $ AppE (VarE 'return) recordUpdate] + let useSemanticConfigAction = FunD useSemanticConfigActionName [Clause [VarP pid] (NormalB bb) []] + let useSemanticConfigActionSig = SigD useSemanticConfigActionName (ArrowT `AppT` ConT ''PluginId `AppT` (ConT ''Action `AppT` ConT ''SemanticTokensConfig)) + + -- SemanticConfigProperties + nameAndDescList <- + mapM + ( \(lb, x) -> do + desc <- [|"LSP semantic token type to use for " <> docName x|] + lspToken <- [|toLspTokenType def x|] + return $ TupE [Just lb, Just desc, Just lspToken] + ) + $ zip allLabels allHsTokenTypes + let body = foldr (AppE . 
AppE (VarE 'defineSemanticProperty)) (VarE 'emptyProperties) nameAndDescList + let propertiesType = + foldr + ( \la -> + AppT + ( PromotedConsT + `AppT` (AppT (ConT 'PropertyKey) (LitT (StrTyLit la)) `AppT` AppT (ConT 'TEnum) (ConT ''SemanticTokenTypes)) + ) + ) + PromotedNilT + allLabelStrs + let semanticConfigProperties = FunD semanticConfigPropertiesName [Clause [] (NormalB body) []] + let semanticConfigPropertiesSig = SigD semanticConfigPropertiesName (AppT (ConT ''Properties) propertiesType) + return [semanticConfigPropertiesSig, semanticConfigProperties, useSemanticConfigActionSig, useSemanticConfigAction] diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs new file mode 100644 index 0000000000..b6142fb39c --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -0,0 +1,204 @@ +{-# LANGUAGE OverloadedRecordDot #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.SemanticTokens.Tokenize (computeRangeHsSemanticTokenTypeList) where + +import Control.Lens (Identity (runIdentity)) +import Control.Monad (foldM, guard) +import Control.Monad.State.Strict (MonadState (get), + MonadTrans (lift), + evalStateT, modify, put) +import Control.Monad.Trans.State.Strict (StateT, runStateT) +import Data.Char (isAlphaNum) +import Data.DList (DList) +import qualified Data.DList as DL +import qualified Data.Map.Strict as M +import qualified Data.Map.Strict as Map +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.Rope as Char +import qualified Data.Text.Utf16.Rope as Utf16 +import Data.Text.Utf16.Rope.Mixed (Rope) +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import GHC.Iface.Ext.Types (HieAST (..), Identifier, + NodeInfo (..), + NodeOrigin (..), + SourcedNodeInfo (..)) +import Ide.Plugin.SemanticTokens.Types (HsSemanticTokenType (TModule), + RangeHsSemanticTokenTypes (..)) +import Language.LSP.Protocol.Types (Position (Position), + Range (Range), UInt, mkRange) +import Language.LSP.VFS hiding (line) +import Prelude hiding (length, span) + +type Tokenizer m a = StateT PTokenState m a +type HsSemanticLookup = Identifier -> Maybe HsSemanticTokenType + + +data PTokenState = PTokenState + { + rope :: !Rope -- the remains of rope we are working on + , cursor :: !Char.Position -- the cursor position of the current rope to the start of the original file in code point position + , columnsInUtf16 :: !UInt -- the column of the start of the current rope in utf16 + } + +data SplitResult + = NoSplit (Text, Range) -- does not need to split, token text, token range + | Split (Text, Range, Range) -- token text, prefix range(module range), token range + deriving (Show) + +getSplitTokenText :: SplitResult -> Text +getSplitTokenText (NoSplit (t, _)) = t +getSplitTokenText (Split (t, _, _)) = t + + +mkPTokenState :: VirtualFile -> PTokenState +mkPTokenState vf = + PTokenState + { + rope = vf._file_text, + cursor = Char.Position 0 0, + columnsInUtf16 = 0 + } + +-- lift a Tokenizer Maybe a to Tokenizer m a, +-- if the Maybe is Nothing, do nothing, recover the state, and return the mempty value +-- if the Maybe is Just x, do the action, and keep the state, and return x +liftMaybeM :: (Monad m, Monoid a) => Tokenizer Maybe a -> Tokenizer m a +liftMaybeM p = do + st <- get + maybe (return mempty) (\(ans, st') -> put st' >> 
return ans) $ runStateT p st + +foldMapM :: (Monad m, Monoid b, Foldable t) => (a -> m b) -> t a -> m b +foldMapM f ta = foldM (\b a -> mappend b <$> f a) mempty ta + +computeRangeHsSemanticTokenTypeList :: HsSemanticLookup -> VirtualFile -> HieAST a -> RangeHsSemanticTokenTypes +computeRangeHsSemanticTokenTypeList lookupHsTokenType vf ast = + RangeHsSemanticTokenTypes $ DL.toList $ runIdentity $ evalStateT (foldAst lookupHsTokenType ast) (mkPTokenState vf) +-- | foldAst +-- visit every leaf node in the ast in depth first order +foldAst :: (Monad m) => HsSemanticLookup -> HieAST t -> Tokenizer m (DList (Range, HsSemanticTokenType)) +foldAst lookupHsTokenType ast = if null (nodeChildren ast) + then liftMaybeM (visitLeafIds lookupHsTokenType ast) + else foldMapM (foldAst lookupHsTokenType) $ nodeChildren ast + +visitLeafIds :: HsSemanticLookup -> HieAST t -> Tokenizer Maybe (DList (Range, HsSemanticTokenType)) +visitLeafIds lookupHsTokenType leaf = liftMaybeM $ do + let span = nodeSpan leaf + (ran, token) <- focusTokenAt leaf + -- if `focusTokenAt` succeed, we can safely assume we have shift the cursor correctly + -- we do not need to recover the cursor state, even if the following computation failed + liftMaybeM $ do + -- only handle the leaf node with single column token + guard $ srcSpanStartLine span == srcSpanEndLine span + splitResult <- lift $ splitRangeByText token ran + foldMapM (combineNodeIds lookupHsTokenType ran splitResult) $ Map.filterWithKey (\k _ -> k == SourceInfo) $ getSourcedNodeInfo $ sourcedNodeInfo leaf + where + combineNodeIds :: (Monad m) => HsSemanticLookup -> Range -> SplitResult -> NodeInfo a -> Tokenizer m (DList (Range, HsSemanticTokenType)) + combineNodeIds lookupHsTokenType ran ranSplit (NodeInfo _ _ bd) = + case (maybeTokenType, ranSplit) of + (Nothing, _) -> return mempty + (Just TModule, _) -> return $ DL.singleton (ran, TModule) + (Just tokenType, NoSplit (_, tokenRan)) -> return $ DL.singleton (tokenRan, tokenType) + (Just tokenType, Split (_, ranPrefix, tokenRan)) -> return $ DL.fromList [(ranPrefix, TModule),(tokenRan, tokenType)] + where maybeTokenType = foldMap (getIdentifier lookupHsTokenType ranSplit) (M.keys bd) + + getIdentifier :: HsSemanticLookup -> SplitResult -> Identifier -> Maybe HsSemanticTokenType + getIdentifier lookupHsTokenType ranSplit idt = do + case idt of + Left _moduleName -> Just TModule + Right name -> do + occStr <- T.pack <$> case (occNameString . 
nameOccName) name of + -- the generated selector name with {-# LANGUAGE DuplicateRecordFields #-} + '$' : 's' : 'e' : 'l' : ':' : xs -> Just $ takeWhile (/= ':') xs + -- other generated names that should not be visible + '$' : c : _ | isAlphaNum c -> Nothing + c : ':' : _ | isAlphaNum c -> Nothing + ns -> Just ns + guard $ getSplitTokenText ranSplit == occStr + lookupHsTokenType idt + + +focusTokenAt :: + -- | leaf node we want to focus on + HieAST a -> + -- | (token, remains) + Tokenizer Maybe (Range, Text) +focusTokenAt leaf = do + PTokenState{cursor, rope, columnsInUtf16} <- get + let span = nodeSpan leaf + let (tokenStartPos, tokenEndPos) = srcSpanCharPositions span + -- tokenStartOff: the offset position of the token start position to the cursor position + tokenStartOff <- lift $ tokenStartPos `sub` cursor + -- tokenOff: the offset position of the token end position to the token start position + tokenOff <- lift $ tokenEndPos `sub` tokenStartPos + (gap, tokenStartRope) <- lift $ charSplitAtPositionMaybe tokenStartOff rope + (token, remains) <- lift $ charSplitAtPositionMaybe tokenOff tokenStartRope + -- ncs: token start column in utf16 + let ncs = newColumn columnsInUtf16 gap + -- nce: token end column in utf16 + let nce = newColumn ncs token + -- compute the new range for utf16, tuning the columns is enough + let ran = codePointRangeToRangeWith ncs nce $ realSrcSpanToCodePointRange span + modify $ \s -> s {columnsInUtf16 = nce, rope = remains, cursor = tokenEndPos} + return (ran, token) + where + srcSpanCharPositions :: RealSrcSpan -> (Char.Position, Char.Position) + srcSpanCharPositions real = + ( realSrcLocRopePosition $ realSrcSpanStart real, + realSrcLocRopePosition $ realSrcSpanEnd real + ) + charSplitAtPositionMaybe :: Char.Position -> Rope -> Maybe (Text, Rope) + charSplitAtPositionMaybe tokenOff rpe = do + let (prefix, suffix) = Rope.charSplitAtPosition tokenOff rpe + guard $ Rope.charLengthAsPosition prefix == tokenOff + return (Rope.toText prefix, suffix) + sub :: Char.Position -> Char.Position -> Maybe Char.Position + sub (Char.Position l1 c1) (Char.Position l2 c2) + | l1 == l2 && c1 >= c2 = Just $ Char.Position 0 (c1 - c2) + | l1 > l2 = Just $ Char.Position (l1 - l2) c1 + | otherwise = Nothing + realSrcLocRopePosition :: RealSrcLoc -> Char.Position + realSrcLocRopePosition real = Char.Position (fromIntegral $ srcLocLine real - 1) (fromIntegral $ srcLocCol real - 1) + -- | newColumn + -- rope do not treat single \n in our favor + -- for example, the row length of "123\n" and "123" are both 1 + -- we are forced to use text to compute new column + newColumn :: UInt -> Text -> UInt + newColumn n rp = case T.breakOnEnd "\n" rp of + ("", nEnd) -> n + utf16Length nEnd + (_, nEnd) -> utf16Length nEnd + codePointRangeToRangeWith :: UInt -> UInt -> CodePointRange -> Range + codePointRangeToRangeWith newStartCol newEndCol (CodePointRange (CodePointPosition startLine _) (CodePointPosition endLine _)) = + Range (Position startLine newStartCol) (Position endLine newEndCol) + +-- | splitRangeByText +-- split a qualified identifier into module name and identifier and/or strip the (), `` +-- for `ModuleA.b`, break it into `ModuleA.` and `b` +-- for `(b)`, strip `()`, and get `b` +-- for `(ModuleA.b)`, strip `()` and break it into `ModuleA.` and `b` +splitRangeByText :: Text -> Range -> Maybe SplitResult +splitRangeByText tk ran = do + let (ran', tk') = case T.uncons tk of + Just ('(', xs) -> (subOneRange ran, T.takeWhile (/= ')') xs) + Just ('`', xs) -> (subOneRange ran, T.takeWhile (/= 
'`') xs) + _ -> (ran, tk) + let (prefix, tk'') = T.breakOnEnd "." tk' + splitRange tk'' (utf16PositionPosition $ Rope.utf16LengthAsPosition $ Rope.fromText prefix) ran' + where + splitRange :: Text -> Position -> Range -> Maybe SplitResult + splitRange tx (Position l c) r@(Range (Position l1 c1) (Position l2 c2)) + | l1 + l > l2 || (l1 + l == l2 && c > c2) = Nothing -- out of range + | l==0 && c==0 = Just $ NoSplit (tx, r) + | otherwise = let c' = if l <= 0 then c1+c else c + in Just $ Split (tx, mkRange l1 c1 (l1 + l) c', mkRange (l1 + l) c' l2 c2) + subOneRange :: Range -> Range + subOneRange (Range (Position l1 c1) (Position l2 c2)) = Range (Position l1 (c1 + 1)) (Position l2 (c2 - 1)) + utf16PositionPosition :: Utf16.Position -> Position + utf16PositionPosition (Utf16.Position l c) = Position (fromIntegral l) (fromIntegral c) + + +utf16Length :: Integral i => Text -> i +utf16Length = fromIntegral . Utf16.length . Utf16.fromText diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs new file mode 100644 index 0000000000..da59c28d29 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingVia #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE StrictData #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.SemanticTokens.Types where + +import Control.DeepSeq (NFData (rnf), rwhnf) +import qualified Data.Array as A +import Data.Default (Default (def)) +import Data.Text (Text) +import Development.IDE (Pretty (pretty), RuleResult) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat hiding (loc) +import Development.IDE.Graph.Classes (Hashable) +import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (TypeIndex) +import Ide.Plugin.Error (PluginError) +import Language.Haskell.TH.Syntax (Lift) +import Language.LSP.Protocol.Types + + +-- !!!! order of declarations matters deriving enum and ord +-- since token may come from different source and we want to keep the most specific one +-- and we might want to merge them. 
+data HsSemanticTokenType + = TVariable -- none function variable + | TFunction -- function + | TDataConstructor -- Data constructor + | TTypeVariable -- Type variable + | TClassMethod -- Class method + | TPatternSynonym -- Pattern synonym + | TTypeConstructor -- Type (Type constructor) + | TClass -- Type class + | TTypeSynonym -- Type synonym + | TTypeFamily -- type family + | TRecordField -- from match bind + | TOperator-- operator + | TModule -- module name + deriving (Eq, Ord, Show, Enum, Bounded, Generic, Lift) + +-- type SemanticTokensConfig = SemanticTokensConfig_ Identity +instance Default SemanticTokensConfig where + def = STC + { stFunction = SemanticTokenTypes_Function + , stVariable = SemanticTokenTypes_Variable + , stDataConstructor = SemanticTokenTypes_EnumMember + , stTypeVariable = SemanticTokenTypes_TypeParameter + , stClassMethod = SemanticTokenTypes_Method + -- pattern syn is like a limited version of macro of constructing a term + , stPatternSynonym = SemanticTokenTypes_Macro + -- normal data type is a tagged union type look like enum type + -- and a record is a product type like struct + -- but we don't distinguish them yet + , stTypeConstructor = SemanticTokenTypes_Enum + , stClass = SemanticTokenTypes_Class + , stTypeSynonym = SemanticTokenTypes_Type + , stTypeFamily = SemanticTokenTypes_Interface + , stRecordField = SemanticTokenTypes_Property + , stModule = SemanticTokenTypes_Namespace + , stOperator = SemanticTokenTypes_Operator + } +-- | SemanticTokensConfig_ is a configuration for the semantic tokens plugin. +-- it contains map between the hs semantic token type and default token type. +data SemanticTokensConfig = STC + { stFunction :: !SemanticTokenTypes + , stVariable :: !SemanticTokenTypes + , stDataConstructor :: !SemanticTokenTypes + , stTypeVariable :: !SemanticTokenTypes + , stClassMethod :: !SemanticTokenTypes + , stPatternSynonym :: !SemanticTokenTypes + , stTypeConstructor :: !SemanticTokenTypes + , stClass :: !SemanticTokenTypes + , stTypeSynonym :: !SemanticTokenTypes + , stTypeFamily :: !SemanticTokenTypes + , stRecordField :: !SemanticTokenTypes + , stModule :: !SemanticTokenTypes + , stOperator :: !SemanticTokenTypes + } deriving (Generic, Show) + + +instance Semigroup HsSemanticTokenType where + -- one in higher enum is more specific + a <> b = max a b + +data SemanticTokenOriginal tokenType = SemanticTokenOriginal + { _tokenType :: tokenType, + _loc :: Loc, + _name :: String + } + deriving (Eq, Ord) + +-- +instance (Show tokenType) => Show (SemanticTokenOriginal tokenType) where + show (SemanticTokenOriginal tk loc name) = show loc <> " " <> show tk <> " " <> show name + +data Loc = Loc + { _line :: UInt, + _startChar :: UInt, + _len :: UInt + } + deriving (Eq, Ord) + +instance Show Loc where + show (Loc line startChar len) = show line <> ":" <> show startChar <> "-" <> show (startChar + len) + +data GetSemanticTokens = GetSemanticTokens + deriving (Eq, Show, Generic) + +instance Hashable GetSemanticTokens + +instance NFData GetSemanticTokens + +type RangeSemanticTokenTypeList = [(Range, HsSemanticTokenType)] + +newtype RangeHsSemanticTokenTypes = RangeHsSemanticTokenTypes {rangeSemanticList :: RangeSemanticTokenTypeList} + +instance NFData RangeHsSemanticTokenTypes where + rnf :: RangeHsSemanticTokenTypes -> () + rnf (RangeHsSemanticTokenTypes a) = rwhnf a + +instance Show RangeHsSemanticTokenTypes where + show (RangeHsSemanticTokenTypes xs) = unlines $ map showRangeToken xs + +showRangeToken :: (Range, HsSemanticTokenType) -> String 
+showRangeToken (ran, tk) = showRange ran <> " " <> show tk +showRange :: Range -> String +showRange (Range (Position l1 c1) (Position l2 c2)) = show l1 <> ":" <> show c1 <> "-" <> show l2 <> ":" <> show c2 + +type instance RuleResult GetSemanticTokens = RangeHsSemanticTokenTypes + +data HieFunMaskKind kind where + HieFreshFun :: HieFunMaskKind Type + HieFromDiskFun :: A.Array TypeIndex Bool -> HieFunMaskKind TypeIndex + +data SemanticLog + = LogShake Shake.Log + | LogDependencyError PluginError + | LogNoAST FilePath + | LogConfig SemanticTokensConfig + | LogMsg String + | LogNoVF + | LogSemanticTokensDeltaMisMatch Text (Maybe Text) + +instance Pretty SemanticLog where + pretty theLog = case theLog of + LogShake shakeLog -> pretty shakeLog + LogNoAST path -> "no HieAst exist for file" <> pretty path + LogNoVF -> "no VirtualSourceFile exist for file" + LogConfig config -> "SemanticTokensConfig_: " <> pretty (show config) + LogMsg msg -> "SemanticLog Debug Message: " <> pretty msg + LogSemanticTokensDeltaMisMatch previousIdFromRequest previousIdFromCache + -> "SemanticTokensDeltaMisMatch: previousIdFromRequest: " <> pretty previousIdFromRequest + <> " previousIdFromCache: " <> pretty previousIdFromCache + LogDependencyError err -> "SemanticTokens' dependency error: " <> pretty err + + +type SemanticTokenId = Text diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs new file mode 100644 index 0000000000..c545d8941a --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -0,0 +1,103 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS_GHC -Wno-orphans #-} + + +module Ide.Plugin.SemanticTokens.Utils where + +import Data.ByteString (ByteString) +import Data.ByteString.Char8 (unpack) +import qualified Data.Map.Strict as Map +import Development.IDE (Position (..), Range (..)) +import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (BindType (..), ContextInfo (..), + DeclType (..), Identifier, + IdentifierDetails (..), + RecFieldContext (..), Span) +import GHC.Iface.Ext.Utils (RefMap) +import Prelude hiding (length, span) + +deriving instance Show DeclType +deriving instance Show BindType +deriving instance Show RecFieldContext + +instance Show ContextInfo where + show x = case x of + Use -> "Use" + MatchBind -> "MatchBind" + IEThing _ -> "IEThing IEType" -- imported + TyDecl -> "TyDecl" + ValBind bt _ sp -> "ValBind of " <> show bt <> show sp + PatternBind {} -> "PatternBind" + ClassTyDecl _ -> "ClassTyDecl" + Decl d _ -> "Decl of " <> show d + TyVarBind _ _ -> "TyVarBind" + RecField c _ -> "RecField of " <> show c + EvidenceVarBind {} -> "EvidenceVarBind" + EvidenceVarUse -> "EvidenceVarUse" + +showCompactRealSrc :: RealSrcSpan -> String +showCompactRealSrc x = show (srcSpanStartLine x) <> ":" <> show (srcSpanStartCol x) <> "-" <> show (srcSpanEndCol x) + +-- type RefMap a = M.Map Identifier [(Span, IdentifierDetails a)] +showRefMap :: RefMap a -> String +showRefMap m = unlines + [ + showIdentifier idn ++ ":" + ++ "\n" ++ unlines [showSDocUnsafe (ppr span) ++ "\n" ++ showIdentifierDetails v | (span, v) <- spans] + | (idn, spans) <- Map.toList m] + +showIdentifierDetails :: IdentifierDetails a -> String +showIdentifierDetails x = show $ identInfo x + +showIdentifier :: Identifier -> String +showIdentifier (Left x) = showSDocUnsafe (ppr x) +showIdentifier (Right x) = nameStableString x + +showLocatedNames :: [LIdP 
GhcRn] -> String +showLocatedNames xs = unlines + [ showSDocUnsafe (ppr locName) ++ " " ++ show (getLoc locName) + | locName <- xs] + +showClearName :: Name -> String +showClearName name = occNameString (occName name) <> ":" <> showSDocUnsafe (ppr name) <> ":" <> showNameType name + +showName :: Name -> String +showName name = showSDocUnsafe (ppr name) <> ":" <> showNameType name + +showNameType :: Name -> String +showNameType name + | isWiredInName name = "WiredInName" + | isSystemName name = "SystemName" + | isInternalName name = "InternalName" + | isExternalName name = "ExternalName" + | otherwise = "UnknownName" + +bytestringString :: ByteString -> String +bytestringString = map (toEnum . fromEnum) . unpack + +spanNamesString :: [(Span, Name)] -> String +spanNamesString xs = unlines + [ showSDocUnsafe (ppr span) ++ " " ++ showSDocUnsafe (ppr name) + | (span, name) <- xs] + +nameTypesString :: [(Name, Type)] -> String +nameTypesString xs = unlines + [ showSDocUnsafe (ppr span) ++ " " ++ showSDocUnsafe (ppr name) + | (span, name) <- xs] + + +showSpan :: RealSrcSpan -> String +showSpan x = show (srcSpanStartLine x) <> ":" <> show (srcSpanStartCol x) <> "-" <> show (srcSpanEndCol x) + + +-- rangeToCodePointRange +mkRange :: (Integral a1, Integral a2) => a1 -> a2 -> a2 -> Range +mkRange startLine startCol len = + Range (Position (fromIntegral startLine) (fromIntegral startCol)) (Position (fromIntegral startLine) (fromIntegral $ startCol + len)) + + +rangeShortStr :: Range -> String +rangeShortStr (Range (Position startLine startColumn) (Position endLine endColumn)) = + show startLine <> ":" <> show startColumn <> "-" <> show endLine <> ":" <> show endColumn + diff --git a/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs new file mode 100644 index 0000000000..a0d1648fb3 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/SemanticTokensTest.hs @@ -0,0 +1,305 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DisambiguateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +import Control.Lens ((^.), (^?)) +import Data.Aeson (KeyValue (..), Object) +import qualified Data.Aeson.KeyMap as KV +import Data.Default +import Data.Functor (void) +import qualified Data.List as T +import Data.Map.Strict as Map hiding (map) +import Data.String (fromString) +import Data.Text (Text) +import qualified Data.Text as Text +import qualified Data.Text.Utf16.Rope.Mixed as Rope +import Data.Version (Version (..)) +import Development.IDE (Pretty) +import Development.IDE.Plugin.Test (WaitForIdeRuleResult (..)) +import Ide.Plugin.SemanticTokens +import Ide.Plugin.SemanticTokens.Mappings +import Ide.Plugin.SemanticTokens.Types +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types +import qualified Language.LSP.Test as Test +import Language.LSP.VFS (VirtualFile (..)) +import System.FilePath +import System.Info (compilerVersion) +import Test.Hls +import qualified Test.Hls.FileSystem as FS +import Test.Hls.FileSystem (file, text) + +testDataDir :: FilePath +testDataDir = "plugins" "hls-semantic-tokens-plugin" "test" "testdata" testVersionDir + +mkFs :: [FS.FileTree] -> FS.VirtualFileTree +mkFs = FS.mkVirtualFileTree testDataDir + +semanticTokensPlugin :: Test.Hls.PluginTestDescriptor SemanticLog +semanticTokensPlugin = Test.Hls.mkPluginTestDescriptor enabledSemanticDescriptor "SemanticTokens" + where + enabledSemanticDescriptor recorder plId = + let semanticDescriptor = 
Ide.Plugin.SemanticTokens.descriptor recorder plId + in semanticDescriptor + { pluginConfigDescriptor = + (pluginConfigDescriptor semanticDescriptor) + { configInitialGenericConfig = + (configInitialGenericConfig (pluginConfigDescriptor semanticDescriptor)) + { plcGlobalOn = True + } + } + } + +-- if 9_10 and after we change the directory to the testdata/before_9_10 directory +-- if 9_10 and after we change the directory to the testdata/after_9_10 directory + +testVersionDir :: FilePath +testVersionDir + | compilerVersion >= Version [9, 10] [] = "after_9_10" + | otherwise = "before_9_10" + +goldenWithHaskellAndCapsOutPut :: (Pretty b) => Config -> PluginTestDescriptor b -> TestName -> FS.VirtualFileTree -> FilePath -> String -> (TextDocumentIdentifier -> Session String) -> TestTree +goldenWithHaskellAndCapsOutPut config plugin title tree path desc act = + goldenGitDiff title (FS.vftOriginalRoot tree path <.> desc) $ + fromString + <$> ( runSessionWithServerInTmpDir config plugin tree $ + do + doc <- openDoc (path <.> "hs") "haskell" + void waitForBuildQueue + act doc + ) + +goldenWithSemanticTokensWithDefaultConfig :: TestName -> FilePath -> TestTree +goldenWithSemanticTokensWithDefaultConfig title path = + goldenWithHaskellAndCapsOutPut + def + semanticTokensPlugin + title + (mkFs $ FS.directProject (path <.> "hs")) + path + "expected" + (docSemanticTokensString def) + +docSemanticTokensString :: SemanticTokensConfig -> TextDocumentIdentifier -> Session String +docSemanticTokensString cf doc = do + xs <- map (lspTokenHsToken cf) <$> docLspSemanticTokensString doc + return $ unlines . map show $ xs + +docLspSemanticTokensString :: (HasCallStack) => TextDocumentIdentifier -> Session [SemanticTokenOriginal Language.LSP.Protocol.Types.SemanticTokenTypes] +docLspSemanticTokensString doc = do + res <- Test.getSemanticTokens doc + textContent <- documentContents doc + let vfs = VirtualFile 0 0 (Rope.fromText textContent) + case res ^? Language.LSP.Protocol.Types._L of + Just tokens -> do + either (error . show) pure $ recoverLspSemanticTokens vfs tokens + _noTokens -> error "No tokens found" + +-- | Pass a param and return the response from `semanticTokensFull` +-- getSemanticTokensFullDelta :: TextDocumentIdentifier -> Session _ +getSemanticTokensFullDelta :: TextDocumentIdentifier -> Text -> Session (SemanticTokens |? (SemanticTokensDelta |? Null)) +getSemanticTokensFullDelta doc lastResultId = do + let params = SemanticTokensDeltaParams Nothing Nothing doc lastResultId + rsp <- request SMethod_TextDocumentSemanticTokensFullDelta params + case rsp ^. 
L.result of + Right x -> return x + _ -> error "No tokens found" + +semanticTokensClassTests :: TestTree +semanticTokensClassTests = + testGroup + "type class" + [ goldenWithSemanticTokensWithDefaultConfig "golden type class" "TClass", + goldenWithSemanticTokensWithDefaultConfig "imported class method InstanceClassMethodBind" "TInstanceClassMethodBind", + goldenWithSemanticTokensWithDefaultConfig "imported class method TInstanceClassMethodUse" "TInstanceClassMethodUse", + goldenWithSemanticTokensWithDefaultConfig "imported deriving" "TClassImportedDeriving" + ] + +semanticTokensValuePatternTests :: TestTree +semanticTokensValuePatternTests = + testGroup + "value and patterns " + [ goldenWithSemanticTokensWithDefaultConfig "value bind" "TValBind", + goldenWithSemanticTokensWithDefaultConfig "pattern match" "TPatternMatch", + goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternbind" + ] + +mkSemanticConfig :: Object -> Config +mkSemanticConfig setting = def {plugins = Map.insert "SemanticTokens" conf (plugins def)} + where + conf = def {plcConfig = setting} + +directFile :: FilePath -> Text -> [FS.FileTree] +directFile fp content = + [ FS.directCradle [Text.pack fp], + file fp (text content) + ] + +semanticTokensConfigTest :: TestTree +semanticTokensConfigTest = + testGroup + "semantic token config test" + [ testCase "function to variable" $ do + let content = Text.unlines ["module Hello where", "go _ = 1"] + let fs = mkFs $ directFile "Hello.hs" content + let funcVar = KV.fromList ["functionToken" .= var] + var :: String + var = "variable" + Test.Hls.runSessionWithTestConfig def + { testPluginDescriptor = semanticTokensPlugin + , testConfigSession = def + { ignoreConfigurationRequests = False + } + , testConfigCaps = fullLatestClientCaps + , testDirLocation = Right fs + , testLspConfig = mkSemanticConfig funcVar + } + $ const $ do + -- modifySemantic funcVar + void waitForBuildQueue + doc <- openDoc "Hello.hs" "haskell" + void waitForBuildQueue + result1 <- docLspSemanticTokensString doc + liftIO $ unlines (map show result1) @?= + T.unlines (["1:8-13 SemanticTokenTypes_Namespace \"Hello\"" | compilerVersion >= Version [9, 10] []] + ++ ["2:1-3 SemanticTokenTypes_Variable \"go\""]) + ] + + +semanticTokensFullDeltaTests :: TestTree +semanticTokensFullDeltaTests = + testGroup "semanticTokensFullDeltaTests" + [ testCase "null delta since unchanged" $ do + let file1 = "TModuleA.hs" + let expectDelta = InR (InL (SemanticTokensDelta (Just "1") [])) + Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1]) $ do + doc1 <- openDoc file1 "haskell" + _ <- waitForAction "TypeCheck" doc1 + _ <- Test.getSemanticTokens doc1 + delta <- getSemanticTokensFullDelta doc1 "0" + liftIO $ delta @?= expectDelta, + testCase "add tokens" $ do + let file1 = "TModuleA.hs" + let expectDelta + | compilerVersion >= Version [9, 10] [] = InR (InL (SemanticTokensDelta (Just "1") [SemanticTokensEdit 25 0 (Just [2, 0, 3, 8, 0])])) + | otherwise = InR (InL (SemanticTokensDelta (Just "1") [SemanticTokensEdit 20 0 (Just [2, 0, 3, 8, 0])])) + -- r c l t m + -- where r = row, c = column, l = length, t = token, m = modifier + Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1]) $ do + doc1 <- openDoc file1 "haskell" + _ <- waitForAction "TypeCheck" doc1 + _ <- Test.getSemanticTokens doc1 + -- open the file and append a line to it + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = 
Range (Position 4 0) (Position 4 6) + , _rangeLength = Nothing + , _text = "foo = 1" + } + changeDoc doc1 [change] + _ <- waitForAction "TypeCheck" doc1 + delta <- getSemanticTokensFullDelta doc1 "0" + liftIO $ delta @?= expectDelta, + testCase "remove tokens" $ do + let file1 = "TModuleA.hs" + let expectDelta + | compilerVersion >= Version [9, 10] [] = InR (InL (SemanticTokensDelta (Just "1") [SemanticTokensEdit 5 20 (Just [])])) + | otherwise = InR (InL (SemanticTokensDelta (Just "1") [SemanticTokensEdit 0 20 (Just [])])) + -- delete all tokens + Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1]) $ do + doc1 <- openDoc file1 "haskell" + _ <- waitForAction "TypeCheck" doc1 + _ <- Test.getSemanticTokens doc1 + -- open the file and append a line to it + let change = TextDocumentContentChangeEvent $ InL TextDocumentContentChangePartial + { _range = Range (Position 2 0) (Position 2 28) + , _rangeLength = Nothing + , _text = Text.replicate 28 " " + } + changeDoc doc1 [change] + _ <- waitForAction "TypeCheck" doc1 + delta <- getSemanticTokensFullDelta doc1 "0" + liftIO $ delta @?= expectDelta + ] + +semanticTokensTests :: TestTree +semanticTokensTests = + testGroup "other semantic Token test" + [ testCase "module import test" $ do + let file1 = "TModuleA.hs" + let file2 = "TModuleB.hs" + Test.Hls.runSessionWithServerInTmpDir def semanticTokensPlugin (mkFs $ FS.directProjectMulti [file1, file2]) $ do + doc1 <- openDoc file1 "haskell" + doc2 <- openDoc file2 "haskell" + check1 <- waitForAction "TypeCheck" doc1 + check2 <- waitForAction "TypeCheck" doc2 + case check1 of + Right (WaitForIdeRuleResult _) -> return () + Left _ -> error "TypeCheck1 failed" + case check2 of + Right (WaitForIdeRuleResult _) -> return () + Left _ -> error "TypeCheck2 failed" + + result <- docSemanticTokensString def doc2 + let expect = + unlines + ( + -- > 9.10 have module name in the token + (["1:8-16 TModule \"TModuleB\"" | compilerVersion >= Version [9, 10] []]) + ++ + [ + "3:8-16 TModule \"TModuleA\"", + "4:18-26 TModule \"TModuleA\"", + "6:1-3 TVariable \"go\"", + "6:6-10 TDataConstructor \"Game\"", + "8:1-5 TVariable \"a\\66560bb\"", + "8:8-17 TModule \"TModuleA.\"", + "8:17-20 TRecordField \"a\\66560b\"", + "8:21-23 TVariable \"go\"" + ]) + liftIO $ result @?= expect, + goldenWithSemanticTokensWithDefaultConfig "mixed constancy test result generated from one ghc version" "T1", + goldenWithSemanticTokensWithDefaultConfig "pattern bind" "TPatternSynonym", + goldenWithSemanticTokensWithDefaultConfig "type family" "TTypefamily", + goldenWithSemanticTokensWithDefaultConfig "TUnicodeSyntax" "TUnicodeSyntax", + goldenWithSemanticTokensWithDefaultConfig "TQualifiedName" "TQualifiedName", + goldenWithSemanticTokensWithDefaultConfig "TDoc" "TDoc" + ] + +semanticTokensDataTypeTests :: TestTree +semanticTokensDataTypeTests = + testGroup + "get semantic Tokens" + [ goldenWithSemanticTokensWithDefaultConfig "simple datatype" "TDataType", + goldenWithSemanticTokensWithDefaultConfig "record" "TRecord", + goldenWithSemanticTokensWithDefaultConfig "record With DuplicateRecordFields" "TRecordDuplicateRecordFields", + goldenWithSemanticTokensWithDefaultConfig "datatype import" "TDatatypeImported", + goldenWithSemanticTokensWithDefaultConfig "datatype family" "TDataFamily", + goldenWithSemanticTokensWithDefaultConfig "GADT" "TGADT" + ] + +semanticTokensFunctionTests :: TestTree +semanticTokensFunctionTests = + testGroup + "get semantic of functions" + [ 
goldenWithSemanticTokensWithDefaultConfig "functions" "TFunction", + goldenWithSemanticTokensWithDefaultConfig "local functions" "TFunctionLocal", + goldenWithSemanticTokensWithDefaultConfig "functions under type synonym" "TFunctionUnderTypeSynonym", + goldenWithSemanticTokensWithDefaultConfig "function in let binding" "TFunctionLet", + goldenWithSemanticTokensWithDefaultConfig "negative case non-function with constraint" "TNoneFunctionWithConstraint", + goldenWithSemanticTokensWithDefaultConfig "TOperator" "TOperator" + ] + +main :: IO () +main = + defaultTestRunner $ + testGroup + "Semantic tokens" + [ semanticTokensTests, + semanticTokensClassTests, + semanticTokensDataTypeTests, + semanticTokensValuePatternTests, + semanticTokensFunctionTests, + semanticTokensConfigTest, + semanticTokensFullDeltaTests + ] diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/T1.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/T1.expected new file mode 100644 index 0000000000..eff5c79768 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/T1.expected @@ -0,0 +1,82 @@ +4:8-12 TModule "Main" +9:6-9 TTypeConstructor "Foo" +9:12-15 TDataConstructor "Foo" +9:18-21 TRecordField "foo" +9:25-28 TTypeConstructor "Int" +11:7-10 TClass "Boo" +11:11-12 TTypeVariable "a" +12:3-6 TClassMethod "boo" +12:10-11 TTypeVariable "a" +12:15-16 TTypeVariable "a" +14:10-13 TClass "Boo" +14:14-17 TTypeConstructor "Int" +15:5-8 TClassMethod "boo" +15:9-10 TVariable "x" +15:13-14 TVariable "x" +15:15-16 TOperator "+" +17:6-8 TTypeConstructor "Dd" +17:11-13 TDataConstructor "Dd" +17:14-17 TTypeConstructor "Int" +19:9-12 TPatternSynonym "One" +19:15-18 TDataConstructor "Foo" +21:1-4 TVariable "ggg" +21:7-10 TPatternSynonym "One" +23:6-9 TTypeConstructor "Doo" +23:12-15 TDataConstructor "Doo" +23:16-24 TModule "Prelude." +23:24-27 TTypeConstructor "Int" +24:6-10 TTypeSynonym "Bar1" +24:13-16 TTypeConstructor "Int" +25:6-10 TTypeSynonym "Bar2" +25:13-16 TTypeConstructor "Doo" +27:1-3 TFunction "bb" +27:8-11 TClass "Boo" +27:12-13 TTypeVariable "a" +27:18-19 TTypeVariable "a" +27:23-24 TTypeVariable "a" +28:1-3 TFunction "bb" +28:4-5 TVariable "x" +28:9-12 TClassMethod "boo" +28:13-14 TVariable "x" +29:1-3 TFunction "aa" +29:7-11 TTypeVariable "cool" +29:15-18 TTypeConstructor "Int" +29:22-26 TTypeVariable "cool" +30:1-3 TFunction "aa" +30:4-5 TVariable "x" +30:9-10 TVariable "c" +30:14-16 TFunction "aa" +30:17-18 TVariable "x" +30:19-20 TVariable "c" +31:12-14 TVariable "xx" +31:16-18 TVariable "yy" +32:11-13 TVariable "dd" +34:2-4 TVariable "zz" +34:6-8 TVariable "kk" +35:1-3 TFunction "cc" +35:7-10 TTypeConstructor "Foo" +35:15-18 TTypeConstructor "Int" +35:20-23 TTypeConstructor "Int" +35:28-31 TTypeConstructor "Int" +36:1-3 TFunction "cc" +36:4-5 TVariable "f" +36:7-9 TVariable "gg" +36:11-13 TVariable "vv" +37:10-12 TVariable "gg" +38:14-17 TRecordField "foo" +38:18-19 TOperator "$" +38:20-21 TVariable "f" +38:24-27 TRecordField "foo" +39:14-17 TRecordField "foo" +39:18-19 TOperator "$" +39:20-21 TVariable "f" +39:24-27 TRecordField "foo" +41:1-3 TFunction "go" +41:6-9 TRecordField "foo" +42:1-4 TFunction "add" +42:8-16 TModule "Prelude." 
+42:16-17 TOperator "+" +47:1-5 TVariable "main" +47:9-11 TTypeConstructor "IO" +48:1-5 TVariable "main" +48:8-16 TFunction "putStrLn" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/T1.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/T1.hs new file mode 100644 index 0000000000..07b0476c1e --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/T1.hs @@ -0,0 +1,48 @@ +-- patter syn +{-# LANGUAGE PatternSynonyms #-} + +module Main where + +-- import Data.Set (Set, insert) + + +data Foo = Foo { foo :: Int } + +class Boo a where + boo :: a -> a + +instance Boo Int where + boo x = x + 1 + +data Dd = Dd Int + +pattern One = Foo 1 + +ggg = One + +data Doo = Doo Prelude.Int +type Bar1 = Int +type Bar2 = Doo + +bb :: (Boo a) => a -> a +bb x = boo x +aa :: cool -> Int -> cool +aa x = \c -> aa x c + where (xx, yy) = (1, 2) + dd = 1 + +(zz, kk) = (1, 2) +cc :: Foo -> (Int, Int) -> Int +cc f (gg, vv)= + case gg of + 1 -> foo $ f { foo = 1 } + 2 -> foo $ f { foo = 1 } + +go = foo +add = (Prelude.+) + +-- sub :: Int -> Int -> Int +-- sub x y = add x y + +main :: IO () +main = putStrLn "Hello, Haskell!" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClass.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClass.expected new file mode 100644 index 0000000000..f7bb4cd513 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClass.expected @@ -0,0 +1,6 @@ +1:8-14 TModule "TClass" +4:7-10 TClass "Foo" +4:11-12 TTypeVariable "a" +5:3-6 TClassMethod "foo" +5:10-11 TTypeVariable "a" +5:15-18 TTypeConstructor "Int" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClass.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClass.hs new file mode 100644 index 0000000000..692754ec71 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClass.hs @@ -0,0 +1,6 @@ +module TClass where + + +class Foo a where + foo :: a -> Int + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClassImportedDeriving.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClassImportedDeriving.expected new file mode 100644 index 0000000000..9ca97d9082 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClassImportedDeriving.expected @@ -0,0 +1,4 @@ +2:8-30 TModule "TClassImportedDeriving" +4:6-9 TTypeConstructor "Foo" +4:12-15 TDataConstructor "Foo" +4:26-30 TClass "Show" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClassImportedDeriving.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClassImportedDeriving.hs new file mode 100644 index 0000000000..8afd8afbd9 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TClassImportedDeriving.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE StandaloneDeriving #-} +module TClassImportedDeriving where +-- deriving method source span of Show occurrence +data Foo = Foo deriving (Show) + +-- standalone deriving method not in the same position +-- deriving instance Eq Foo + +-- a :: Foo -> Foo -> Bool +-- a = (==) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataFamily.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataFamily.expected new file mode 100644 index 0000000000..b3b477e541 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataFamily.expected @@ -0,0 +1,13 @@ +2:8-19 TModule 
"TDatafamily" +5:13-18 TTypeFamily "XList" +5:19-20 TTypeVariable "a" +8:15-20 TTypeFamily "XList" +8:21-25 TTypeConstructor "Char" +8:28-33 TDataConstructor "XCons" +8:35-39 TTypeConstructor "Char" +8:42-47 TTypeFamily "XList" +8:48-52 TTypeConstructor "Char" +8:56-60 TDataConstructor "XNil" +11:15-20 TTypeFamily "XList" +11:26-35 TDataConstructor "XListUnit" +11:37-40 TTypeConstructor "Int" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataFamily.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataFamily.hs new file mode 100644 index 0000000000..b9047a72d2 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataFamily.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TypeFamilies #-} +module TDatafamily where + +-- Declare a list-like data family +data family XList a + +-- Declare a list-like instance for Char +data instance XList Char = XCons !Char !(XList Char) | XNil + +-- Declare a number-like instance for () +data instance XList () = XListUnit !Int diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataType.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataType.expected new file mode 100644 index 0000000000..7f03f4ed54 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataType.expected @@ -0,0 +1,5 @@ +1:8-17 TModule "TDataType" +3:6-9 TTypeConstructor "Foo" +3:12-15 TDataConstructor "Foo" +3:16-19 TTypeConstructor "Int" +3:30-32 TClass "Eq" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataType.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataType.hs new file mode 100644 index 0000000000..894065e391 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDataType.hs @@ -0,0 +1,3 @@ +module TDataType where + +data Foo = Foo Int deriving (Eq) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDatatypeImported.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDatatypeImported.expected new file mode 100644 index 0000000000..78ebf2bc22 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDatatypeImported.expected @@ -0,0 +1,6 @@ +1:8-25 TModule "TDatatypeImported" +3:8-17 TModule "System.IO" +5:1-3 TVariable "go" +5:7-9 TTypeConstructor "IO" +6:1-3 TVariable "go" +6:6-11 TFunction "print" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDatatypeImported.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDatatypeImported.hs new file mode 100644 index 0000000000..f6ac8996d9 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDatatypeImported.hs @@ -0,0 +1,6 @@ +module TDatatypeImported where + +import System.IO + +go :: IO () +go = print 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDoc.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDoc.expected new file mode 100644 index 0000000000..30b1cdb345 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDoc.expected @@ -0,0 +1,6 @@ +1:8-12 TModule "TDoc" +4:5-10 TVariable "hello" +5:1-6 TVariable "hello" +5:10-13 TTypeConstructor "Int" +6:1-6 TVariable "hello" +6:9-15 TClassMethod "length" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDoc.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDoc.hs new file mode 100644 index 0000000000..dc5801b0e6 --- /dev/null +++ 
b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TDoc.hs @@ -0,0 +1,9 @@ +module TDoc where + +-- | +-- `hello` +hello :: Int +hello = length "Hello, Haskell!" + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunction.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunction.expected new file mode 100644 index 0000000000..2b715e0a40 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunction.expected @@ -0,0 +1,12 @@ +1:8-17 TModule "TFunction" +3:1-2 TFunction "f" +3:13-14 TTypeVariable "a" +3:16-17 TTypeVariable "a" +3:21-22 TTypeVariable "a" +4:1-2 TFunction "f" +4:3-4 TVariable "x" +4:7-8 TVariable "x" +6:1-2 TVariable "x" +6:6-7 TTypeVariable "a" +7:1-2 TVariable "x" +7:5-14 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunction.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunction.hs new file mode 100644 index 0000000000..4efe5cecc4 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunction.hs @@ -0,0 +1,7 @@ +module TFunction where + +f :: forall a. a -> a +f x = x + +x :: a +x = undefined diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLet.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLet.expected new file mode 100644 index 0000000000..f51938a712 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLet.expected @@ -0,0 +1,6 @@ +1:8-20 TModule "TFunctionLet" +3:1-2 TVariable "y" +3:6-9 TTypeConstructor "Int" +4:1-2 TVariable "y" +4:9-10 TFunction "f" +4:11-12 TVariable "x" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLet.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLet.hs new file mode 100644 index 0000000000..96854c34ad --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLet.hs @@ -0,0 +1,4 @@ +module TFunctionLet where + +y :: Int +y = let f x = 1 in 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLocal.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLocal.expected new file mode 100644 index 0000000000..34e040d641 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLocal.expected @@ -0,0 +1,8 @@ +1:8-22 TModule "TFunctionLocal" +3:1-2 TFunction "f" +3:6-9 TTypeConstructor "Int" +3:13-16 TTypeConstructor "Int" +4:1-2 TFunction "f" +4:7-8 TFunction "g" +6:5-6 TFunction "g" +6:7-8 TVariable "x" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLocal.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLocal.hs new file mode 100644 index 0000000000..fed144b00c --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionLocal.hs @@ -0,0 +1,8 @@ +module TFunctionLocal where + +f :: Int -> Int +f 1 = g 1 + where + g x = 1 + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionUnderTypeSynonym.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionUnderTypeSynonym.expected new file mode 100644 index 0000000000..0779402a83 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionUnderTypeSynonym.expected @@ -0,0 +1,18 @@ +1:8-33 TModule "TFunctionUnderTypeSynonym" +3:6-8 TTypeSynonym "T1" +3:11-14 TTypeConstructor "Int" 
+3:18-21 TTypeConstructor "Int" +4:6-8 TTypeSynonym "T2" +4:18-19 TTypeVariable "a" +4:21-22 TTypeVariable "a" +4:26-27 TTypeVariable "a" +5:1-3 TFunction "f1" +5:7-9 TTypeSynonym "T1" +6:1-3 TFunction "f1" +6:4-5 TVariable "x" +6:8-9 TVariable "x" +7:1-3 TFunction "f2" +7:7-9 TTypeSynonym "T2" +8:1-3 TFunction "f2" +8:4-5 TVariable "x" +8:8-9 TVariable "x" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionUnderTypeSynonym.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionUnderTypeSynonym.hs new file mode 100644 index 0000000000..6485232394 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TFunctionUnderTypeSynonym.hs @@ -0,0 +1,9 @@ +module TFunctionUnderTypeSynonym where + +type T1 = Int -> Int +type T2 = forall a. a -> a +f1 :: T1 +f1 x = x +f2 :: T2 +f2 x = x + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TGADT.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TGADT.expected new file mode 100644 index 0000000000..3f07298543 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TGADT.expected @@ -0,0 +1,14 @@ +3:8-13 TModule "TGADT" +5:6-9 TTypeConstructor "Lam" +6:3-7 TDataConstructor "Lift" +6:11-12 TTypeVariable "a" +6:36-39 TTypeConstructor "Lam" +6:40-41 TTypeVariable "a" +7:3-6 TDataConstructor "Lam" +7:12-15 TTypeConstructor "Lam" +7:16-17 TTypeVariable "a" +7:21-24 TTypeConstructor "Lam" +7:25-26 TTypeVariable "b" +7:36-39 TTypeConstructor "Lam" +7:41-42 TTypeVariable "a" +7:46-47 TTypeVariable "b" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TGADT.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TGADT.hs new file mode 100644 index 0000000000..e0cccf8bed --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TGADT.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE GADTs #-} +module TGADT where + +data Lam :: * -> * where + Lift :: a -> Lam a -- ^ lifted value + Lam :: (Lam a -> Lam b) -> Lam (a -> b) -- ^ lambda abstraction diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodBind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodBind.expected new file mode 100644 index 0000000000..b93e340ac3 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodBind.expected @@ -0,0 +1,8 @@ +1:8-32 TModule "TInstanceClassMethodBind" +4:6-9 TTypeConstructor "Foo" +4:12-15 TDataConstructor "Foo" +4:16-19 TTypeConstructor "Int" +5:10-14 TClass "Show" +5:15-18 TTypeConstructor "Foo" +6:5-9 TClassMethod "show" +6:12-21 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodBind.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodBind.hs new file mode 100644 index 0000000000..33976a48c1 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodBind.hs @@ -0,0 +1,6 @@ +module TInstanceClassMethodBind where + + +data Foo = Foo Int +instance Show Foo where + show = undefined diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodUse.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodUse.expected new file mode 100644 index 0000000000..3fc60caab3 --- /dev/null +++ 
b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodUse.expected @@ -0,0 +1,3 @@ +1:8-31 TModule "TInstanceClassMethodUse" +4:1-3 TFunction "go" +4:8-12 TClassMethod "show" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodUse.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodUse.hs new file mode 100644 index 0000000000..689d1643d4 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TInstanceClassMethodUse.hs @@ -0,0 +1,5 @@ +module TInstanceClassMethodUse where + + +go = show + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TModuleA.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TModuleA.hs new file mode 100644 index 0000000000..d76f64fc1f --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TModuleA.hs @@ -0,0 +1,5 @@ +module TModuleA where + +data Game = Game {a𐐀b :: Int} + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TModuleB.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TModuleB.hs new file mode 100644 index 0000000000..d2bfe4b7fa --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TModuleB.hs @@ -0,0 +1,8 @@ +module TModuleB where + +import TModuleA +import qualified TModuleA + +go = Game 1 + +a𐐀bb = TModuleA.a𐐀b go diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TNoneFunctionWithConstraint.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TNoneFunctionWithConstraint.expected new file mode 100644 index 0000000000..a004142952 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TNoneFunctionWithConstraint.expected @@ -0,0 +1,7 @@ +1:8-35 TModule "TNoneFunctionWithConstraint" +3:1-2 TVariable "x" +3:7-9 TClass "Eq" +3:10-11 TTypeVariable "a" +3:16-17 TTypeVariable "a" +4:1-2 TVariable "x" +4:5-14 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TNoneFunctionWithConstraint.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TNoneFunctionWithConstraint.hs new file mode 100644 index 0000000000..9a7119dbdb --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TNoneFunctionWithConstraint.hs @@ -0,0 +1,5 @@ +module TNoneFunctionWithConstraint where + +x :: (Eq a) => a +x = undefined + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TOperator.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TOperator.expected new file mode 100644 index 0000000000..c8b2ecb29d --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TOperator.expected @@ -0,0 +1,34 @@ +1:8-17 TModule "TOperator" +4:1-3 TFunction "go" +4:4-5 TFunction "f" +4:6-7 TVariable "x" +4:10-11 TFunction "f" +4:11-12 TOperator "$" +4:12-13 TVariable "x" +6:2-6 TOperator "$$$$" +7:1-2 TVariable "x" +7:7-11 TOperator "$$$$" +8:6-7 TTypeVariable "a" +8:8-11 TOperator ":+:" +8:12-13 TTypeVariable "b" +8:16-19 TDataConstructor "Add" +8:20-21 TTypeVariable "a" +8:22-23 TTypeVariable "b" +9:7-10 TOperator ":-:" +9:12-13 TTypeVariable "a" +9:14-15 TTypeVariable "b" +9:19-20 TTypeVariable "a" +9:22-23 TTypeVariable "b" +11:1-4 TFunction "add" +11:8-11 TTypeConstructor "Int" +11:12-15 TOperator ":+:" +11:16-19 TTypeConstructor "Int" +11:23-26 TTypeConstructor "Int" +11:27-30 TOperator ":-:" +11:31-34 TTypeConstructor "Int" +13:1-4 TFunction "add" 
+13:6-9 TDataConstructor "Add" +13:10-11 TVariable "x" +13:12-13 TVariable "y" +13:18-19 TVariable "x" +13:21-22 TVariable "y" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TOperator.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TOperator.hs new file mode 100644 index 0000000000..e2f06c92fa --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TOperator.hs @@ -0,0 +1,13 @@ +module TOperator where + +-- imported operator +go f x = f$x +-- operator defined in local module +($$$$) = b +x = 1 $$$$ 2 +data a :+: b = Add a b +type (:-:) a b = (a, b) +-- type take precedence over operator +add :: Int :+: Int -> Int :-: Int +-- class method take precedence over operator +add (Add x y) = (x, y) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternMatch.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternMatch.expected new file mode 100644 index 0000000000..b17e52e27f --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternMatch.expected @@ -0,0 +1,3 @@ +1:8-21 TModule "TPatternMatch" +4:1-2 TFunction "g" +4:4-11 TDataConstructor "Nothing" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternMatch.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternMatch.hs new file mode 100644 index 0000000000..95e97c1abb --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternMatch.hs @@ -0,0 +1,6 @@ +module TPatternMatch where + + +g (Nothing, _) = 1 + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternSynonym.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternSynonym.expected new file mode 100644 index 0000000000..b9cff7321a --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternSynonym.expected @@ -0,0 +1,2 @@ +2:8-23 TModule "TPatternSynonym" +5:9-12 TPatternSynonym "Foo" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternSynonym.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternSynonym.hs new file mode 100644 index 0000000000..adff673ce8 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternSynonym.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE PatternSynonyms #-} +module TPatternSynonym where + + +pattern Foo = 1 + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternbind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternbind.expected new file mode 100644 index 0000000000..ab12539d12 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternbind.expected @@ -0,0 +1,8 @@ +1:8-17 TModule "TVariable" +3:2-3 TVariable "a" +3:5-6 TVariable "b" +5:1-2 TFunction "f" +5:3-4 TFunction "g" +5:5-6 TVariable "y" +5:9-10 TFunction "g" +5:11-12 TVariable "y" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternbind.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternbind.hs new file mode 100644 index 0000000000..49e642a35d --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TPatternbind.hs @@ -0,0 +1,9 @@ +module TVariable where + +(a, b) = (1, 2) + +f g y = g y + + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TQualifiedName.expected new file 
mode 100644 index 0000000000..df305195ed --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TQualifiedName.expected @@ -0,0 +1,13 @@ +1:8-22 TModule "TQualifiedName" +3:18-27 TModule "Data.List" +6:1-2 TVariable "a" +6:5-13 TModule "Prelude." +6:13-22 TVariable "undefined" +7:1-2 TVariable "b" +7:8-18 TModule "Data.List." +7:18-22 TClassMethod "elem" +8:1-2 TVariable "c" +8:6-14 TModule "Prelude." +8:14-15 TOperator "+" +9:1-2 TVariable "d" +9:6-7 TOperator "+" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TQualifiedName.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TQualifiedName.hs new file mode 100644 index 0000000000..5dbdcc1d52 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TQualifiedName.hs @@ -0,0 +1,9 @@ +module TQualifiedName where + +import qualified Data.List + + +a = Prelude.undefined +b = 1 `Data.List.elem` [1, 2] +c = (Prelude.+) 1 1 +d = (+) 1 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecord.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecord.expected new file mode 100644 index 0000000000..5be40a4a39 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecord.expected @@ -0,0 +1,5 @@ +1:8-15 TModule "TRecord" +4:6-9 TTypeConstructor "Foo" +4:12-15 TDataConstructor "Foo" +4:18-21 TRecordField "foo" +4:25-28 TTypeConstructor "Int" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecord.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecord.hs new file mode 100644 index 0000000000..b3176a154f --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecord.hs @@ -0,0 +1,7 @@ +module TRecord where + + +data Foo = Foo { foo :: Int } + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecordDuplicateRecordFields.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecordDuplicateRecordFields.expected new file mode 100644 index 0000000000..04ef050ab0 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecordDuplicateRecordFields.expected @@ -0,0 +1,5 @@ +3:8-36 TModule "TRecordDuplicateRecordFields" +5:6-9 TTypeConstructor "Foo" +5:12-15 TDataConstructor "Foo" +5:18-21 TRecordField "boo" +5:26-32 TTypeSynonym "String" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecordDuplicateRecordFields.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecordDuplicateRecordFields.hs new file mode 100644 index 0000000000..395a1d3731 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TRecordDuplicateRecordFields.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE DuplicateRecordFields #-} + +module TRecordDuplicateRecordFields where + +data Foo = Foo { boo :: !String } diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TTypefamily.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TTypefamily.expected new file mode 100644 index 0000000000..1aa6bf4687 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TTypefamily.expected @@ -0,0 +1,9 @@ +2:8-19 TModule "TTypefamily" +4:13-16 TTypeFamily "Foo" +4:17-18 TTypeVariable "a" +5:3-6 TTypeFamily "Foo" +5:7-10 TTypeConstructor "Int" +5:13-16 TTypeConstructor "Int" +6:3-6 TTypeFamily "Foo" +6:7-8 TTypeVariable "a" +6:11-17 TTypeSynonym "String" diff --git 
a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TTypefamily.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TTypefamily.hs new file mode 100644 index 0000000000..d8c925e370 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TTypefamily.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TypeFamilies #-} +module TTypefamily where + +type family Foo a where + Foo Int = Int + Foo a = String diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TUnicodeSyntax.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TUnicodeSyntax.expected new file mode 100644 index 0000000000..ad9f6ea762 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TUnicodeSyntax.expected @@ -0,0 +1,2 @@ +1:8-22 TModule "TUnicodeSyntax" +3:1-4 TVariable "a\66560b" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TUnicodeSyntax.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TUnicodeSyntax.hs new file mode 100644 index 0000000000..1b8c7c1baa --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TUnicodeSyntax.hs @@ -0,0 +1,5 @@ +module TUnicodeSyntax where + +a𐐀b = "a𐐀b" + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TValBind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TValBind.expected new file mode 100644 index 0000000000..700509c968 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TValBind.expected @@ -0,0 +1,5 @@ +1:8-16 TModule "TValBind" +4:1-6 TVariable "hello" +4:10-13 TTypeConstructor "Int" +5:1-6 TVariable "hello" +5:9-15 TClassMethod "length" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TValBind.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TValBind.hs new file mode 100644 index 0000000000..506af37a42 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/after_9_10/TValBind.hs @@ -0,0 +1,8 @@ +module TValBind where + + +hello :: Int +hello = length "Hello, Haskell!" + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/T1.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/T1.expected new file mode 100644 index 0000000000..cbf7699f19 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/T1.expected @@ -0,0 +1,81 @@ +9:6-9 TTypeConstructor "Foo" +9:12-15 TDataConstructor "Foo" +9:18-21 TRecordField "foo" +9:25-28 TTypeConstructor "Int" +11:7-10 TClass "Boo" +11:11-12 TTypeVariable "a" +12:3-6 TClassMethod "boo" +12:10-11 TTypeVariable "a" +12:15-16 TTypeVariable "a" +14:10-13 TClass "Boo" +14:14-17 TTypeConstructor "Int" +15:5-8 TClassMethod "boo" +15:9-10 TVariable "x" +15:13-14 TVariable "x" +15:15-16 TOperator "+" +17:6-8 TTypeConstructor "Dd" +17:11-13 TDataConstructor "Dd" +17:14-17 TTypeConstructor "Int" +19:9-12 TPatternSynonym "One" +19:15-18 TDataConstructor "Foo" +21:1-4 TVariable "ggg" +21:7-10 TPatternSynonym "One" +23:6-9 TTypeConstructor "Doo" +23:12-15 TDataConstructor "Doo" +23:16-24 TModule "Prelude." 
+23:24-27 TTypeConstructor "Int" +24:6-10 TTypeSynonym "Bar1" +24:13-16 TTypeConstructor "Int" +25:6-10 TTypeSynonym "Bar2" +25:13-16 TTypeConstructor "Doo" +27:1-3 TFunction "bb" +27:8-11 TClass "Boo" +27:12-13 TTypeVariable "a" +27:18-19 TTypeVariable "a" +27:23-24 TTypeVariable "a" +28:1-3 TFunction "bb" +28:4-5 TVariable "x" +28:9-12 TClassMethod "boo" +28:13-14 TVariable "x" +29:1-3 TFunction "aa" +29:7-11 TTypeVariable "cool" +29:15-18 TTypeConstructor "Int" +29:22-26 TTypeVariable "cool" +30:1-3 TFunction "aa" +30:4-5 TVariable "x" +30:9-10 TVariable "c" +30:14-16 TFunction "aa" +30:17-18 TVariable "x" +30:19-20 TVariable "c" +31:12-14 TVariable "xx" +31:16-18 TVariable "yy" +32:11-13 TVariable "dd" +34:2-4 TVariable "zz" +34:6-8 TVariable "kk" +35:1-3 TFunction "cc" +35:7-10 TTypeConstructor "Foo" +35:15-18 TTypeConstructor "Int" +35:20-23 TTypeConstructor "Int" +35:28-31 TTypeConstructor "Int" +36:1-3 TFunction "cc" +36:4-5 TVariable "f" +36:7-9 TVariable "gg" +36:11-13 TVariable "vv" +37:10-12 TVariable "gg" +38:14-17 TRecordField "foo" +38:18-19 TOperator "$" +38:20-21 TVariable "f" +38:24-27 TRecordField "foo" +39:14-17 TRecordField "foo" +39:18-19 TOperator "$" +39:20-21 TVariable "f" +39:24-27 TRecordField "foo" +41:1-3 TFunction "go" +41:6-9 TRecordField "foo" +42:1-4 TFunction "add" +42:8-16 TModule "Prelude." +42:16-17 TOperator "+" +47:1-5 TVariable "main" +47:9-11 TTypeConstructor "IO" +48:1-5 TVariable "main" +48:8-16 TFunction "putStrLn" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/T1.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/T1.hs new file mode 100644 index 0000000000..07b0476c1e --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/T1.hs @@ -0,0 +1,48 @@ +-- patter syn +{-# LANGUAGE PatternSynonyms #-} + +module Main where + +-- import Data.Set (Set, insert) + + +data Foo = Foo { foo :: Int } + +class Boo a where + boo :: a -> a + +instance Boo Int where + boo x = x + 1 + +data Dd = Dd Int + +pattern One = Foo 1 + +ggg = One + +data Doo = Doo Prelude.Int +type Bar1 = Int +type Bar2 = Doo + +bb :: (Boo a) => a -> a +bb x = boo x +aa :: cool -> Int -> cool +aa x = \c -> aa x c + where (xx, yy) = (1, 2) + dd = 1 + +(zz, kk) = (1, 2) +cc :: Foo -> (Int, Int) -> Int +cc f (gg, vv)= + case gg of + 1 -> foo $ f { foo = 1 } + 2 -> foo $ f { foo = 1 } + +go = foo +add = (Prelude.+) + +-- sub :: Int -> Int -> Int +-- sub x y = add x y + +main :: IO () +main = putStrLn "Hello, Haskell!" 
diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClass.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClass.expected new file mode 100644 index 0000000000..e369963b0e --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClass.expected @@ -0,0 +1,5 @@ +4:7-10 TClass "Foo" +4:11-12 TTypeVariable "a" +5:3-6 TClassMethod "foo" +5:10-11 TTypeVariable "a" +5:15-18 TTypeConstructor "Int" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClass.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClass.hs new file mode 100644 index 0000000000..692754ec71 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClass.hs @@ -0,0 +1,6 @@ +module TClass where + + +class Foo a where + foo :: a -> Int + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClassImportedDeriving.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClassImportedDeriving.expected new file mode 100644 index 0000000000..3bbeb3e66c --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClassImportedDeriving.expected @@ -0,0 +1,3 @@ +4:6-9 TTypeConstructor "Foo" +4:12-15 TDataConstructor "Foo" +4:26-30 TClass "Show" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClassImportedDeriving.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClassImportedDeriving.hs new file mode 100644 index 0000000000..8afd8afbd9 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TClassImportedDeriving.hs @@ -0,0 +1,10 @@ +{-# LANGUAGE StandaloneDeriving #-} +module TClassImportedDeriving where +-- deriving method source span of Show occurrence +data Foo = Foo deriving (Show) + +-- standalone deriving method not in the same position +-- deriving instance Eq Foo + +-- a :: Foo -> Foo -> Bool +-- a = (==) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataFamily.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataFamily.expected new file mode 100644 index 0000000000..c95c0689f0 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataFamily.expected @@ -0,0 +1,12 @@ +5:13-18 TTypeFamily "XList" +5:19-20 TTypeVariable "a" +8:15-20 TTypeFamily "XList" +8:21-25 TTypeConstructor "Char" +8:28-33 TDataConstructor "XCons" +8:35-39 TTypeConstructor "Char" +8:42-47 TTypeFamily "XList" +8:48-52 TTypeConstructor "Char" +8:56-60 TDataConstructor "XNil" +11:15-20 TTypeFamily "XList" +11:26-35 TDataConstructor "XListUnit" +11:37-40 TTypeConstructor "Int" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataFamily.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataFamily.hs new file mode 100644 index 0000000000..b9047a72d2 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataFamily.hs @@ -0,0 +1,11 @@ +{-# LANGUAGE TypeFamilies #-} +module TDatafamily where + +-- Declare a list-like data family +data family XList a + +-- Declare a list-like instance for Char +data instance XList Char = XCons !Char !(XList Char) | XNil + +-- Declare a number-like instance for () +data instance XList () = XListUnit !Int diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataType.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataType.expected new file mode 100644 index 0000000000..bdf280c45e --- 
/dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataType.expected @@ -0,0 +1,4 @@ +3:6-9 TTypeConstructor "Foo" +3:12-15 TDataConstructor "Foo" +3:16-19 TTypeConstructor "Int" +3:30-32 TClass "Eq" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataType.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataType.hs new file mode 100644 index 0000000000..894065e391 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDataType.hs @@ -0,0 +1,3 @@ +module TDataType where + +data Foo = Foo Int deriving (Eq) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDatatypeImported.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDatatypeImported.expected new file mode 100644 index 0000000000..2c2cd492a0 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDatatypeImported.expected @@ -0,0 +1,5 @@ +3:8-17 TModule "System.IO" +5:1-3 TVariable "go" +5:7-9 TTypeConstructor "IO" +6:1-3 TVariable "go" +6:6-11 TFunction "print" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDatatypeImported.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDatatypeImported.hs new file mode 100644 index 0000000000..f6ac8996d9 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDatatypeImported.hs @@ -0,0 +1,6 @@ +module TDatatypeImported where + +import System.IO + +go :: IO () +go = print 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDoc.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDoc.expected new file mode 100644 index 0000000000..405308c3c8 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDoc.expected @@ -0,0 +1,5 @@ +4:5-10 TVariable "hello" +5:1-6 TVariable "hello" +5:10-13 TTypeConstructor "Int" +6:1-6 TVariable "hello" +6:9-15 TClassMethod "length" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDoc.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDoc.hs new file mode 100644 index 0000000000..dc5801b0e6 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TDoc.hs @@ -0,0 +1,9 @@ +module TDoc where + +-- | +-- `hello` +hello :: Int +hello = length "Hello, Haskell!" + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunction.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunction.expected new file mode 100644 index 0000000000..f34510728b --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunction.expected @@ -0,0 +1,11 @@ +3:1-2 TFunction "f" +3:13-14 TTypeVariable "a" +3:16-17 TTypeVariable "a" +3:21-22 TTypeVariable "a" +4:1-2 TFunction "f" +4:3-4 TVariable "x" +4:7-8 TVariable "x" +6:1-2 TVariable "x" +6:6-7 TTypeVariable "a" +7:1-2 TVariable "x" +7:5-14 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunction.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunction.hs new file mode 100644 index 0000000000..4efe5cecc4 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunction.hs @@ -0,0 +1,7 @@ +module TFunction where + +f :: forall a. 
a -> a +f x = x + +x :: a +x = undefined diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLet.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLet.expected new file mode 100644 index 0000000000..3f27b723db --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLet.expected @@ -0,0 +1,5 @@ +3:1-2 TVariable "y" +3:6-9 TTypeConstructor "Int" +4:1-2 TVariable "y" +4:9-10 TFunction "f" +4:11-12 TVariable "x" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLet.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLet.hs new file mode 100644 index 0000000000..96854c34ad --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLet.hs @@ -0,0 +1,4 @@ +module TFunctionLet where + +y :: Int +y = let f x = 1 in 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLocal.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLocal.expected new file mode 100644 index 0000000000..176606e396 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLocal.expected @@ -0,0 +1,7 @@ +3:1-2 TFunction "f" +3:6-9 TTypeConstructor "Int" +3:13-16 TTypeConstructor "Int" +4:1-2 TFunction "f" +4:7-8 TFunction "g" +6:5-6 TFunction "g" +6:7-8 TVariable "x" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLocal.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLocal.hs new file mode 100644 index 0000000000..fed144b00c --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionLocal.hs @@ -0,0 +1,8 @@ +module TFunctionLocal where + +f :: Int -> Int +f 1 = g 1 + where + g x = 1 + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionUnderTypeSynonym.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionUnderTypeSynonym.expected new file mode 100644 index 0000000000..010cf0c613 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionUnderTypeSynonym.expected @@ -0,0 +1,17 @@ +3:6-8 TTypeSynonym "T1" +3:11-14 TTypeConstructor "Int" +3:18-21 TTypeConstructor "Int" +4:6-8 TTypeSynonym "T2" +4:18-19 TTypeVariable "a" +4:21-22 TTypeVariable "a" +4:26-27 TTypeVariable "a" +5:1-3 TFunction "f1" +5:7-9 TTypeSynonym "T1" +6:1-3 TFunction "f1" +6:4-5 TVariable "x" +6:8-9 TVariable "x" +7:1-3 TFunction "f2" +7:7-9 TTypeSynonym "T2" +8:1-3 TFunction "f2" +8:4-5 TVariable "x" +8:8-9 TVariable "x" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionUnderTypeSynonym.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionUnderTypeSynonym.hs new file mode 100644 index 0000000000..6485232394 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TFunctionUnderTypeSynonym.hs @@ -0,0 +1,9 @@ +module TFunctionUnderTypeSynonym where + +type T1 = Int -> Int +type T2 = forall a. 
a -> a +f1 :: T1 +f1 x = x +f2 :: T2 +f2 x = x + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TGADT.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TGADT.expected new file mode 100644 index 0000000000..ad3ac0f086 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TGADT.expected @@ -0,0 +1,13 @@ +5:6-9 TTypeConstructor "Lam" +6:3-7 TDataConstructor "Lift" +6:11-12 TTypeVariable "a" +6:36-39 TTypeConstructor "Lam" +6:40-41 TTypeVariable "a" +7:3-6 TDataConstructor "Lam" +7:12-15 TTypeConstructor "Lam" +7:16-17 TTypeVariable "a" +7:21-24 TTypeConstructor "Lam" +7:25-26 TTypeVariable "b" +7:36-39 TTypeConstructor "Lam" +7:41-42 TTypeVariable "a" +7:46-47 TTypeVariable "b" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TGADT.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TGADT.hs new file mode 100644 index 0000000000..e0cccf8bed --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TGADT.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE GADTs #-} +module TGADT where + +data Lam :: * -> * where + Lift :: a -> Lam a -- ^ lifted value + Lam :: (Lam a -> Lam b) -> Lam (a -> b) -- ^ lambda abstraction diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodBind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodBind.expected new file mode 100644 index 0000000000..a4a6ef98e0 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodBind.expected @@ -0,0 +1,7 @@ +4:6-9 TTypeConstructor "Foo" +4:12-15 TDataConstructor "Foo" +4:16-19 TTypeConstructor "Int" +5:10-14 TClass "Show" +5:15-18 TTypeConstructor "Foo" +6:5-9 TClassMethod "show" +6:12-21 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodBind.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodBind.hs new file mode 100644 index 0000000000..33976a48c1 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodBind.hs @@ -0,0 +1,6 @@ +module TInstanceClassMethodBind where + + +data Foo = Foo Int +instance Show Foo where + show = undefined diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodUse.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodUse.expected new file mode 100644 index 0000000000..2bf39be435 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodUse.expected @@ -0,0 +1,2 @@ +4:1-3 TFunction "go" +4:8-12 TClassMethod "show" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodUse.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodUse.hs new file mode 100644 index 0000000000..689d1643d4 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TInstanceClassMethodUse.hs @@ -0,0 +1,5 @@ +module TInstanceClassMethodUse where + + +go = show + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TModuleA.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TModuleA.hs new file mode 100644 index 0000000000..d76f64fc1f --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TModuleA.hs @@ -0,0 +1,5 @@ +module TModuleA where + +data 
Game = Game {a𐐀b :: Int} + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TModuleB.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TModuleB.hs new file mode 100644 index 0000000000..d2bfe4b7fa --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TModuleB.hs @@ -0,0 +1,8 @@ +module TModuleB where + +import TModuleA +import qualified TModuleA + +go = Game 1 + +a𐐀bb = TModuleA.a𐐀b go diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TNoneFunctionWithConstraint.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TNoneFunctionWithConstraint.expected new file mode 100644 index 0000000000..2dd89fd1da --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TNoneFunctionWithConstraint.expected @@ -0,0 +1,6 @@ +3:1-2 TVariable "x" +3:7-9 TClass "Eq" +3:10-11 TTypeVariable "a" +3:16-17 TTypeVariable "a" +4:1-2 TVariable "x" +4:5-14 TVariable "undefined" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TNoneFunctionWithConstraint.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TNoneFunctionWithConstraint.hs new file mode 100644 index 0000000000..9a7119dbdb --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TNoneFunctionWithConstraint.hs @@ -0,0 +1,5 @@ +module TNoneFunctionWithConstraint where + +x :: (Eq a) => a +x = undefined + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TOperator.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TOperator.expected new file mode 100644 index 0000000000..c19e7cb904 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TOperator.expected @@ -0,0 +1,33 @@ +4:1-3 TFunction "go" +4:4-5 TFunction "f" +4:6-7 TVariable "x" +4:10-11 TFunction "f" +4:11-12 TOperator "$" +4:12-13 TVariable "x" +6:2-6 TOperator "$$$$" +7:1-2 TVariable "x" +7:7-11 TOperator "$$$$" +8:6-7 TTypeVariable "a" +8:8-11 TOperator ":+:" +8:12-13 TTypeVariable "b" +8:16-19 TDataConstructor "Add" +8:20-21 TTypeVariable "a" +8:22-23 TTypeVariable "b" +9:7-10 TOperator ":-:" +9:12-13 TTypeVariable "a" +9:14-15 TTypeVariable "b" +9:19-20 TTypeVariable "a" +9:22-23 TTypeVariable "b" +11:1-4 TFunction "add" +11:8-11 TTypeConstructor "Int" +11:12-15 TOperator ":+:" +11:16-19 TTypeConstructor "Int" +11:23-26 TTypeConstructor "Int" +11:27-30 TOperator ":-:" +11:31-34 TTypeConstructor "Int" +13:1-4 TFunction "add" +13:6-9 TDataConstructor "Add" +13:10-11 TVariable "x" +13:12-13 TVariable "y" +13:18-19 TVariable "x" +13:21-22 TVariable "y" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TOperator.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TOperator.hs new file mode 100644 index 0000000000..e2f06c92fa --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TOperator.hs @@ -0,0 +1,13 @@ +module TOperator where + +-- imported operator +go f x = f$x +-- operator defined in local module +($$$$) = b +x = 1 $$$$ 2 +data a :+: b = Add a b +type (:-:) a b = (a, b) +-- type take precedence over operator +add :: Int :+: Int -> Int :-: Int +-- class method take precedence over operator +add (Add x y) = (x, y) diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternMatch.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternMatch.expected new file mode 100644 index 0000000000..0535662e63 --- /dev/null +++ 
b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternMatch.expected @@ -0,0 +1,2 @@ +4:1-2 TFunction "g" +4:4-11 TDataConstructor "Nothing" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternMatch.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternMatch.hs new file mode 100644 index 0000000000..95e97c1abb --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternMatch.hs @@ -0,0 +1,6 @@ +module TPatternMatch where + + +g (Nothing, _) = 1 + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternSynonym.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternSynonym.expected new file mode 100644 index 0000000000..7cdf5260cb --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternSynonym.expected @@ -0,0 +1 @@ +5:9-12 TPatternSynonym "Foo" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternSynonym.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternSynonym.hs new file mode 100644 index 0000000000..adff673ce8 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternSynonym.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE PatternSynonyms #-} +module TPatternSynonym where + + +pattern Foo = 1 + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternbind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternbind.expected new file mode 100644 index 0000000000..6c62634487 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternbind.expected @@ -0,0 +1,7 @@ +3:2-3 TVariable "a" +3:5-6 TVariable "b" +5:1-2 TFunction "f" +5:3-4 TFunction "g" +5:5-6 TVariable "y" +5:9-10 TFunction "g" +5:11-12 TVariable "y" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternbind.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternbind.hs new file mode 100644 index 0000000000..49e642a35d --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TPatternbind.hs @@ -0,0 +1,9 @@ +module TVariable where + +(a, b) = (1, 2) + +f g y = g y + + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TQualifiedName.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TQualifiedName.expected new file mode 100644 index 0000000000..0ca7cd7d5b --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TQualifiedName.expected @@ -0,0 +1,12 @@ +3:18-27 TModule "Data.List" +6:1-2 TVariable "a" +6:5-13 TModule "Prelude." +6:13-22 TVariable "undefined" +7:1-2 TVariable "b" +7:8-18 TModule "Data.List." +7:18-22 TClassMethod "elem" +8:1-2 TVariable "c" +8:6-14 TModule "Prelude." 
+8:14-15 TOperator "+" +9:1-2 TVariable "d" +9:6-7 TOperator "+" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TQualifiedName.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TQualifiedName.hs new file mode 100644 index 0000000000..5dbdcc1d52 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TQualifiedName.hs @@ -0,0 +1,9 @@ +module TQualifiedName where + +import qualified Data.List + + +a = Prelude.undefined +b = 1 `Data.List.elem` [1, 2] +c = (Prelude.+) 1 1 +d = (+) 1 1 diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecord.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecord.expected new file mode 100644 index 0000000000..43b8e4d3b0 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecord.expected @@ -0,0 +1,4 @@ +4:6-9 TTypeConstructor "Foo" +4:12-15 TDataConstructor "Foo" +4:18-21 TRecordField "foo" +4:25-28 TTypeConstructor "Int" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecord.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecord.hs new file mode 100644 index 0000000000..b3176a154f --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecord.hs @@ -0,0 +1,7 @@ +module TRecord where + + +data Foo = Foo { foo :: Int } + + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecordDuplicateRecordFields.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecordDuplicateRecordFields.expected new file mode 100644 index 0000000000..70fdc63e18 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecordDuplicateRecordFields.expected @@ -0,0 +1,4 @@ +5:6-9 TTypeConstructor "Foo" +5:12-15 TDataConstructor "Foo" +5:18-21 TRecordField "boo" +5:26-32 TTypeSynonym "String" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecordDuplicateRecordFields.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecordDuplicateRecordFields.hs new file mode 100644 index 0000000000..395a1d3731 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TRecordDuplicateRecordFields.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE DuplicateRecordFields #-} + +module TRecordDuplicateRecordFields where + +data Foo = Foo { boo :: !String } diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TTypefamily.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TTypefamily.expected new file mode 100644 index 0000000000..08019bc3f3 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TTypefamily.expected @@ -0,0 +1,8 @@ +4:13-16 TTypeFamily "Foo" +4:17-18 TTypeVariable "a" +5:3-6 TTypeFamily "Foo" +5:7-10 TTypeConstructor "Int" +5:13-16 TTypeConstructor "Int" +6:3-6 TTypeFamily "Foo" +6:7-8 TTypeVariable "a" +6:11-17 TTypeSynonym "String" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TTypefamily.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TTypefamily.hs new file mode 100644 index 0000000000..d8c925e370 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TTypefamily.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TypeFamilies #-} +module TTypefamily where + +type family Foo a where + Foo Int = Int + Foo a = String diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TUnicodeSyntax.expected 
b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TUnicodeSyntax.expected new file mode 100644 index 0000000000..0b94b7c045 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TUnicodeSyntax.expected @@ -0,0 +1 @@ +3:1-4 TVariable "a\66560b" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TUnicodeSyntax.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TUnicodeSyntax.hs new file mode 100644 index 0000000000..1b8c7c1baa --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TUnicodeSyntax.hs @@ -0,0 +1,5 @@ +module TUnicodeSyntax where + +a𐐀b = "a𐐀b" + + diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TValBind.expected b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TValBind.expected new file mode 100644 index 0000000000..ec20b01e56 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TValBind.expected @@ -0,0 +1,4 @@ +4:1-6 TVariable "hello" +4:10-13 TTypeConstructor "Int" +5:1-6 TVariable "hello" +5:9-15 TClassMethod "length" diff --git a/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TValBind.hs b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TValBind.hs new file mode 100644 index 0000000000..506af37a42 --- /dev/null +++ b/plugins/hls-semantic-tokens-plugin/test/testdata/before_9_10/TValBind.hs @@ -0,0 +1,8 @@ +module TValBind where + + +hello :: Int +hello = length "Hello, Haskell!" + + + diff --git a/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs new file mode 100644 index 0000000000..de468e2a87 --- /dev/null +++ b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice.hs @@ -0,0 +1,513 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MagicHash #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} + +module Ide.Plugin.Splice (descriptor) where + +import Control.Applicative (Alternative ((<|>))) +import Control.Arrow (Arrow (first)) +import Control.Exception (SomeException) +import qualified Control.Foldl as L +import Control.Lens (Identity (..), ix, view, + (%~), (<&>), (^.)) +import Control.Monad (forM, guard, unless) +import Control.Monad.Error.Class (MonadError (throwError)) +import Control.Monad.Extra (eitherM) +import qualified Control.Monad.Fail as Fail +import Control.Monad.IO.Unlift (MonadIO (..), + askRunInIO) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..), + runExceptT) +import Control.Monad.Trans.Maybe +import Data.Aeson hiding (Null) +import qualified Data.Bifunctor as B (first) +import Data.Function +import Data.Generics +import qualified Data.Kind as Kinds +import Data.List (sortOn) +import Data.Maybe (fromMaybe, listToMaybe, + mapMaybe) +import qualified Data.Text as T +import Development.IDE +import Development.IDE.Core.FileStore (getVersionedTextDoc) +import Development.IDE.Core.PluginUtils +import Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Compat.ExactPrint +import qualified Development.IDE.GHC.Compat.Util as Util +import Development.IDE.GHC.ExactPrint +import GHC.Exts +import qualified GHC.Runtime.Loader as Loader +import qualified GHC.Types.Error as Error +import Ide.Plugin.Error (PluginError 
(PluginInternalError)) +import Ide.Plugin.Splice.Types +import Ide.Types +import qualified Language.LSP.Protocol.Lens as J +import Language.LSP.Protocol.Message +import Language.LSP.Protocol.Types + +#if !MIN_VERSION_base(4,20,0) +import Data.Foldable (Foldable (foldl')) +#endif + +import GHC.Data.Bag (Bag) + +#if MIN_VERSION_ghc(9,9,0) +import GHC.Parser.Annotation (EpAnn (..)) +#else +import GHC.Parser.Annotation (SrcSpanAnn' (..)) +#endif + + +descriptor :: PluginId -> PluginDescriptor IdeState +descriptor plId = + (defaultPluginDescriptor plId "Provides a code action to evaluate a TemplateHaskell splice") + { pluginCommands = commands + , pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction codeAction + } + +commands :: [PluginCommand IdeState] +commands = + [ PluginCommand expandInplaceId inplaceCmdName $ expandTHSplice Inplace + -- , PluginCommand expandCommentedId commentedCmdName $ expandTHSplice Commented + ] + +newtype SubSpan = SubSpan {runSubSpan :: SrcSpan} + +instance Eq SubSpan where + (==) = (==) `on` runSubSpan + +instance Ord SubSpan where + (<=) = coerce isSubspanOf + +expandTHSplice :: + -- | Inplace? + ExpandStyle -> + CommandFunction IdeState ExpandSpliceParams +expandTHSplice _eStyle ideState _ params@ExpandSpliceParams {..} = ExceptT $ do + clientCapabilities <- pluginGetClientCapabilities + rio <- askRunInIO + let reportEditor :: ReportEditor + reportEditor msgTy msgs = liftIO $ rio $ pluginSendNotification SMethod_WindowShowMessage (ShowMessageParams msgTy (T.unlines msgs)) + expandManually :: NormalizedFilePath -> ExceptT PluginError IO WorkspaceEdit + expandManually fp = do + mresl <- + liftIO $ runAction "expandTHSplice.fallback.TypeCheck (stale)" ideState $ useWithStale TypeCheck fp + (TcModuleResult {..}, _) <- + maybe + (throwError $ PluginInternalError "Splice expansion: Type-checking information not found in cache.\nYou can once delete or replace the macro with placeholder, convince the type checker and then revert to original (erroneous) macro and expand splice again." + ) + pure mresl + reportEditor + MessageType_Warning + [ "Expansion in type-checking phase failed;" + , "trying to expand manually, but note that it is less rigorous." + ] + pm <- runActionE "expandTHSplice.fallback.GetParsedModule" ideState $ + useE GetParsedModule fp + (ps, hscEnv, _dflags) <- setupHscEnv ideState fp pm + + manualCalcEdit + clientCapabilities + reportEditor + range + ps + hscEnv + tmrTypechecked + spliceSpan + _eStyle + params + + withTypeChecked fp TcModuleResult {..} = do + (ps, _hscEnv, dflags) <- setupHscEnv ideState fp tmrParsed + let Splices {..} = tmrTopLevelSplices + let exprSuperSpans = + listToMaybe $ findSubSpansDesc srcSpan exprSplices + _patSuperSpans = + listToMaybe $ findSubSpansDesc srcSpan patSplices + typeSuperSpans = + listToMaybe $ findSubSpansDesc srcSpan typeSplices + declSuperSpans = + listToMaybe $ findSubSpansDesc srcSpan declSplices + + graftSpliceWith :: + forall ast. + HasSplice AnnListItem ast => + Maybe (SrcSpan, LocatedAn AnnListItem (ast GhcPs)) -> + Maybe (Either String WorkspaceEdit) + graftSpliceWith expandeds = + expandeds <&> \(_, expanded) -> + transform + dflags + clientCapabilities + verTxtDocId + (graft (RealSrcSpan spliceSpan Nothing) expanded) + ps + maybe (throwError $ PluginInternalError "No splice information found") (either (throwError . PluginInternalError . 
T.pack) pure) $ + case spliceContext of + Expr -> graftSpliceWith exprSuperSpans + Pat -> + + graftSpliceWith _patSuperSpans + + HsType -> graftSpliceWith typeSuperSpans + HsDecl -> + declSuperSpans <&> \(_, expanded) -> + transform + dflags + clientCapabilities + verTxtDocId + (graftDecls (RealSrcSpan spliceSpan Nothing) expanded) + ps + <&> + -- FIXME: Why ghc-exactprint sweeps preceding comments? + adjustToRange (verTxtDocId ^. J.uri) range + + res <- liftIO $ runMaybeT $ do + + fp <- MaybeT $ pure $ uriToNormalizedFilePath $ toNormalizedUri (verTxtDocId ^. J.uri) + eedits <- + ( lift . runExceptT . withTypeChecked fp + =<< MaybeT + (runAction "expandTHSplice.TypeCheck" ideState $ use TypeCheck fp) + ) + <|> lift (runExceptT $ expandManually fp) + + case eedits of + Left err -> do + reportEditor + MessageType_Error + [T.pack $ "Error during expanding splice: " <> show (pretty err)] + pure (Left err) + Right edits -> + pure (Right edits) + case res of + Nothing -> pure $ Right $ InR Null + Just (Left err) -> pure $ Left err + Just (Right edit) -> do + _ <- pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + pure $ Right $ InR Null + + where + range = realSrcSpanToRange spliceSpan + srcSpan = RealSrcSpan spliceSpan Nothing + + +setupHscEnv + :: IdeState + -> NormalizedFilePath + -> ParsedModule + -> ExceptT PluginError IO (ParsedSource, HscEnv, DynFlags) +setupHscEnv ideState fp pm = do + hscEnvEq <- runActionE "expandTHSplice.fallback.ghcSessionDeps" ideState $ + useE GhcSessionDeps fp + let ps = annotateParsedSource pm + hscEnv0 = hscEnv hscEnvEq + modSum = pm_mod_summary pm + hscEnv <- liftIO $ setupDynFlagsForGHCiLike hscEnv0 $ ms_hspp_opts modSum + pure (ps, hscEnv, hsc_dflags hscEnv) + +setupDynFlagsForGHCiLike :: HscEnv -> DynFlags -> IO HscEnv +setupDynFlagsForGHCiLike env dflags = do + let dflags3 = setInterpreterLinkerOptions dflags + platform = targetPlatform dflags3 + dflags3a = setWays hostFullWays dflags3 + dflags3b = + foldl' gopt_set dflags3a $ + concatMap (wayGeneralFlags platform) hostFullWays + dflags3c = + foldl' gopt_unset dflags3b $ + concatMap (wayUnsetGeneralFlags platform) hostFullWays + dflags4 = + dflags3c + `gopt_set` Opt_ImplicitImportQualified + `gopt_set` Opt_IgnoreOptimChanges + `gopt_set` Opt_IgnoreHpcChanges + `gopt_unset` Opt_DiagnosticsShowCaret + Loader.initializePlugins (hscSetFlags dflags4 env) + +adjustToRange :: Uri -> Range -> WorkspaceEdit -> WorkspaceEdit +adjustToRange uri ran (WorkspaceEdit mhult mlt x) = + WorkspaceEdit (adjustWS <$> mhult) (fmap adjustDoc <$> mlt) x + where + adjustTextEdits :: Traversable f => f TextEdit -> f TextEdit + adjustTextEdits eds = + let minStart = + case L.fold (L.premap (view J.range) L.minimum) eds of + Nothing -> error "impossible" + Just v -> v + in adjustLine minStart <$> eds + + adjustATextEdits :: Traversable f => f (TextEdit |? AnnotatedTextEdit) -> f (TextEdit |? AnnotatedTextEdit) + adjustATextEdits = fmap $ \case + InL t -> InL $ runIdentity $ adjustTextEdits (Identity t) + InR AnnotatedTextEdit{_range, _newText, _annotationId} -> + let oldTE = TextEdit{_range,_newText} + in let TextEdit{_range,_newText} = runIdentity $ adjustTextEdits (Identity oldTE) + in InR $ AnnotatedTextEdit{_range,_newText,_annotationId} + + adjustWS = ix uri %~ adjustTextEdits + adjustDoc :: DocumentChange -> DocumentChange + adjustDoc (InR es) = InR es + adjustDoc (InL es) + | es ^. J.textDocument . 
J.uri == uri = + InL $ es & J.edits %~ adjustATextEdits + | otherwise = InL es + + adjustLine :: Range -> TextEdit -> TextEdit + adjustLine bad = + J.range %~ \r -> + if r == bad then ran else bad + +-- Define a pattern to get hold of a `SrcSpan` from the location part of a +-- `GenLocated`. In GHC >= 9.2 this will be a SrcSpanAnn', with annotations; +-- earlier it will just be a plain `SrcSpan`. +{-# COMPLETE AsSrcSpan #-} +#if MIN_VERSION_ghc(9,9,0) +pattern AsSrcSpan :: SrcSpan -> EpAnn ann +pattern AsSrcSpan locA <- (getLoc -> locA) +#else +pattern AsSrcSpan :: SrcSpan -> SrcSpanAnn' a +pattern AsSrcSpan locA <- SrcSpanAnn {locA} +#endif + +findSubSpansDesc :: SrcSpan -> [(LHsExpr GhcTc, a)] -> [(SrcSpan, a)] +findSubSpansDesc srcSpan = + sortOn (Down . SubSpan . fst) + . mapMaybe + ( \(L (AsSrcSpan spn) _, e) -> do + guard (spn `isSubspanOf` srcSpan) + pure (spn, e) + ) + +data SpliceClass where + OneToOneAST :: HasSplice AnnListItem ast => Proxy# ast -> SpliceClass + IsHsDecl :: SpliceClass + +data HsSpliceCompat pass + = UntypedSplice (HsUntypedSplice pass) + | TypedSplice (LHsExpr pass) + + +class (Outputable (ast GhcRn), ASTElement l (ast GhcPs)) => HasSplice l ast where + type SpliceOf ast :: Kinds.Type -> Kinds.Type + matchSplice :: Proxy# ast -> ast GhcPs -> Maybe (SpliceOf ast GhcPs) + expandSplice :: Proxy# ast -> SpliceOf ast GhcPs -> RnM (Either (ast GhcPs) (ast GhcRn), FreeVars) + +instance HasSplice AnnListItem HsExpr where + type SpliceOf HsExpr = HsSpliceCompat + matchSplice _ (HsUntypedSplice _ spl) = Just (UntypedSplice spl) + matchSplice _ (HsTypedSplice _ spl) = Just (TypedSplice spl) + matchSplice _ _ = Nothing + expandSplice _ (UntypedSplice e) = fmap (first Right) $ rnUntypedSpliceExpr e + expandSplice _ (TypedSplice e) = fmap (first Right) $ rnTypedSplice e + +instance HasSplice AnnListItem Pat where + type SpliceOf Pat = HsUntypedSplice + matchSplice _ (SplicePat _ spl) = Just spl + matchSplice _ _ = Nothing + expandSplice _ = + fmap (first (Left . unLoc . utsplice_result . snd )) . + rnSplicePat + + +instance HasSplice AnnListItem HsType where + type SpliceOf HsType = HsUntypedSplice + matchSplice _ (HsSpliceTy _ spl) = Just spl + matchSplice _ _ = Nothing + expandSplice _ = fmap (first Right) . rnSpliceType + +classifyAST :: SpliceContext -> SpliceClass +classifyAST = \case + Expr -> OneToOneAST @HsExpr proxy# + HsDecl -> IsHsDecl + Pat -> OneToOneAST @Pat proxy# + HsType -> OneToOneAST @HsType proxy# + +type ReportEditor = forall m. MonadIO m => MessageType -> [T.Text] -> m () + +manualCalcEdit :: + ClientCapabilities -> + ReportEditor -> + Range -> + ParsedSource -> + HscEnv -> + TcGblEnv -> + RealSrcSpan -> + ExpandStyle -> + ExpandSpliceParams -> + ExceptT PluginError IO WorkspaceEdit +manualCalcEdit clientCapabilities reportEditor ran ps hscEnv typechkd srcSpan _eStyle ExpandSpliceParams {..} = do + (warns, resl) <- + ExceptT $ do + (msgs, eresl) <- + initTcWithGbl hscEnv typechkd srcSpan $ + case classifyAST spliceContext of + IsHsDecl -> fmap (fmap $ adjustToRange (verTxtDocId ^. J.uri) ran) $ + flip (transformM dflags clientCapabilities verTxtDocId) ps $ + graftDeclsWithM (RealSrcSpan srcSpan Nothing) $ \case + (L _spn (SpliceD _ (SpliceDecl _ (L _ spl) _))) -> do + eExpr <- + eitherM (fail . 
show) pure + $ TransformT $ lift + ( lift $ + Util.try @_ @SomeException $ + (fst <$> rnTopSpliceDecls spl) + ) + pure $ Just eExpr + _ -> pure Nothing + OneToOneAST astP -> + flip (transformM dflags clientCapabilities verTxtDocId) ps $ + graftWithM (RealSrcSpan srcSpan Nothing) $ \case + (L _spn (matchSplice astP -> Just spl)) -> do + eExpr <- + eitherM (fail . show) pure + $ TransformT $ lift + ( lift $ + Util.try @_ @SomeException $ + (fst <$> expandSplice astP spl) + ) + Just <$> case eExpr of + Left x -> pure $ L _spn x + Right y -> unRenamedE dflags y + _ -> pure Nothing + let (warns, errs) = + (Error.getWarningMessages msgs, Error.getErrorMessages msgs) + pure $ (warns,) <$> maybe (throwError $ PluginInternalError $ T.pack $ showErrors errs) + (B.first (PluginInternalError . T.pack)) eresl + + unless + (null warns) + $ reportEditor + MessageType_Warning + [ "Warning during expanding: " + , "" + , T.pack (showErrors warns) + ] + pure resl + where + dflags = hsc_dflags hscEnv + showErrors = showBag + +showBag :: Error.Diagnostic a => Bag (Error.MsgEnvelope a) -> String +showBag = show . fmap (fmap toDiagnosticMessage) + +toDiagnosticMessage :: forall a. Error.Diagnostic a => a -> Error.DiagnosticMessage +toDiagnosticMessage message = + Error.DiagnosticMessage + { diagMessage = Error.diagnosticMessage + (Error.defaultDiagnosticOpts @a) + message + + , diagReason = Error.diagnosticReason message + , diagHints = Error.diagnosticHints message + } + +-- | FIXME: Is thereAny "clever" way to do this exploiting TTG? +unRenamedE :: + forall ast m l. + (Fail.MonadFail m, HasSplice l ast) => + DynFlags -> + ast GhcRn -> + TransformT m (LocatedAn l (ast GhcPs)) +unRenamedE dflags expr = do + uniq <- show <$> uniqueSrcSpanT + expr' <- + either (fail . showErrors) pure $ + parseAST @_ @(ast GhcPs) dflags uniq $ + showSDoc dflags $ ppr expr + pure expr' + where + showErrors = showBag . Error.getMessages + +data SearchResult r = + Continue | Stop | Here r + deriving (Read, Show, Eq, Ord, Data) + +fromSearchResult :: SearchResult a -> Maybe a +fromSearchResult (Here r) = Just r +fromSearchResult _ = Nothing + +-- TODO: workaround when HieAst unavailable (e.g. when the module itself errors) +-- TODO: Declaration Splices won't appear in HieAst; perhaps we must just use Parsed/Renamed ASTs? +codeAction :: PluginMethodHandler IdeState Method_TextDocumentCodeAction +codeAction state plId (CodeActionParams _ _ docId ran _) = do + verTxtDocId <- liftIO $ runAction "splice.codeAction.getVersionedTextDoc" state $ getVersionedTextDoc docId + liftIO $ fmap (fromMaybe ( InL [])) $ + runMaybeT $ do + fp <- MaybeT $ pure $ uriToNormalizedFilePath $ toNormalizedUri theUri + ParsedModule {..} <- + MaybeT . runAction "splice.codeAction.GitHieAst" state $ + use GetParsedModule fp + let spn = rangeToRealSrcSpan fp ran + mouterSplice = something' (detectSplice spn) pm_parsed_source + mcmds <- forM mouterSplice $ + \(spliceSpan, spliceContext) -> + forM expandStyles $ \(_, (title, cmdId)) -> do + let params = ExpandSpliceParams {verTxtDocId, ..} + act = mkLspCommand plId cmdId title (Just [toJSON params]) + pure $ + InR $ + CodeAction title (Just CodeActionKind_RefactorRewrite) Nothing Nothing Nothing Nothing (Just act) Nothing + + pure $ InL $ fromMaybe mempty mcmds + where + theUri = docId ^. 
J.uri + detectSplice :: + RealSrcSpan -> + GenericQ (SearchResult (RealSrcSpan, SpliceContext)) + detectSplice spn = + let + spanIsRelevant x = RealSrcSpan spn Nothing `isSubspanOf` x + in + mkQ + Continue + ( \case + (L (AsSrcSpan l@(RealSrcSpan spLoc _)) expr :: LHsExpr GhcPs) + | spanIsRelevant l -> + case expr of + HsTypedSplice{} -> Here (spLoc, Expr) + HsUntypedSplice{} -> Here (spLoc, Expr) + _ -> Continue + _ -> Stop + ) + `extQ` \case + (L (AsSrcSpan l@(RealSrcSpan spLoc _)) pat :: LPat GhcPs) + | spanIsRelevant l -> + case pat of + SplicePat{} -> Here (spLoc, Pat) + _ -> Continue + _ -> Stop + `extQ` \case + (L (AsSrcSpan l@(RealSrcSpan spLoc _)) ty :: LHsType GhcPs) + | spanIsRelevant l -> + case ty of + HsSpliceTy {} -> Here (spLoc, HsType) + _ -> Continue + _ -> Stop + `extQ` \case + (L (AsSrcSpan l@(RealSrcSpan spLoc _)) decl :: LHsDecl GhcPs) + | spanIsRelevant l -> + case decl of + SpliceD {} -> Here (spLoc, HsDecl) + _ -> Continue + _ -> Stop + +-- | Like 'something', but performs top-down searching, cutoffs when 'Stop' received, +-- and picks innermost result. +something' :: forall a. GenericQ (SearchResult a) -> GenericQ (Maybe a) +something' f = go + where + go :: GenericQ (Maybe a) + go x = + case f x of + Stop -> Nothing + resl -> foldl' (flip (<|>)) (fromSearchResult resl) (gmapQ go x) diff --git a/plugins/hls-splice-plugin/src/Ide/Plugin/Splice/Types.hs b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice/Types.hs new file mode 100644 index 0000000000..8652762276 --- /dev/null +++ b/plugins/hls-splice-plugin/src/Ide/Plugin/Splice/Types.hs @@ -0,0 +1,55 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} + +module Ide.Plugin.Splice.Types where + +import Data.Aeson (FromJSON, ToJSON) +import qualified Data.Text as T + -- This import is needed for the ToJSON/FromJSON instances of RealSrcSpan +import Development.IDE () +import Development.IDE.GHC.Compat (RealSrcSpan) +import GHC.Generics (Generic) +import Ide.Types (CommandId) +import Language.LSP.Protocol.Types (VersionedTextDocumentIdentifier) + +-- | Parameter for the addMethods PluginCommand. +data ExpandSpliceParams = ExpandSpliceParams + { verTxtDocId :: VersionedTextDocumentIdentifier + , spliceSpan :: RealSrcSpan + , spliceContext :: SpliceContext + } + deriving (Show, Eq, Generic) + deriving anyclass (ToJSON, FromJSON) + +-- FIXME: HsDecl needs different treatment of splicing. 
+data SpliceContext = Expr | HsDecl | Pat | HsType + deriving (Read, Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +data ExpandStyle = Inplace | Commented + deriving (Read, Show, Eq, Ord, Generic) + +expandStyles :: [(ExpandStyle, (T.Text, CommandId))] +expandStyles = + [ (Inplace, (inplaceCmdName, expandInplaceId)) + -- , (Commented, commentedCmdName, expandCommentedId) + ] + +toExpandCmdTitle :: ExpandStyle -> T.Text +toExpandCmdTitle Inplace = inplaceCmdName +toExpandCmdTitle Commented = commentedCmdName + +toCommandId :: ExpandStyle -> CommandId +toCommandId Inplace = expandInplaceId +toCommandId Commented = expandCommentedId + +expandInplaceId, expandCommentedId :: CommandId +expandInplaceId = "expandTHSpliceInplace" +expandCommentedId = "expandTHSpliceCommented" + +inplaceCmdName :: T.Text +inplaceCmdName = "expand TemplateHaskell Splice (in-place)" + +commentedCmdName :: T.Text +commentedCmdName = "expand TemplateHaskell Splice (commented-out)" diff --git a/plugins/hls-splice-plugin/test/Main.hs b/plugins/hls-splice-plugin/test/Main.hs new file mode 100644 index 0000000000..38cbd4d5da --- /dev/null +++ b/plugins/hls-splice-plugin/test/Main.hs @@ -0,0 +1,119 @@ +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +module Main + ( main + ) where + +import Control.Monad (void) +import Data.List (find) +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.IO as T +import qualified Ide.Plugin.Splice as Splice +import Ide.Plugin.Splice.Types +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +splicePlugin :: PluginTestDescriptor () +splicePlugin = mkPluginTestDescriptor' Splice.descriptor "splice" + +tests :: TestTree +tests = testGroup "splice" + [ goldenTest "TSimpleExp" Inplace 6 15 + , goldenTest "TSimpleExp" Inplace 6 24 + , goldenTest "TTypeAppExp" Inplace 7 5 + , goldenTest "TErrorExp" Inplace 6 15 + , goldenTest "TErrorExp" Inplace 6 51 + , goldenTest "TQQExp" Inplace 6 17 + , goldenTest "TQQExp" Inplace 6 25 + , goldenTest "TQQExpError" Inplace 6 13 + , goldenTest "TQQExpError" Inplace 6 22 + , testGroup "Pattern Splices" + [ goldenTest "TSimplePat" Inplace 6 3 + , goldenTest "TSimplePat" Inplace 6 22 + , goldenTest "TSimplePat" Inplace 6 3 + , goldenTest "TSimplePat" Inplace 6 22 + , goldenTest "TErrorPat" Inplace 6 3 + , goldenTest "TErrorPat" Inplace 6 18 + , goldenTest "TQQPat" Inplace 6 3 + , goldenTest "TQQPat" Inplace 6 11 + , goldenTest "TQQPatError" Inplace 6 3 + , goldenTest "TQQPatError" Inplace 6 11 + ] + , goldenTest "TSimpleType" Inplace 5 12 + , goldenTest "TSimpleType" Inplace 5 22 + , goldenTest "TTypeTypeError" Inplace 7 12 + , goldenTest "TTypeTypeError" Inplace 7 52 + , goldenTest "TQQType" Inplace 8 19 + , goldenTest "TQQType" Inplace 8 28 + , goldenTest "TQQTypeTypeError" Inplace 8 19 + , goldenTest "TQQTypeTypeError" Inplace 8 28 + , goldenTest "TSimpleDecl" Inplace 8 1 + , goldenTest "TQQDecl" Inplace 5 1 + , goldenTestWithEdit "TTypeKindError" ( + if ghcVersion >= GHC96 then + "96-expected" + else + "expected" + ) Inplace 7 9 + , goldenTestWithEdit "TDeclKindError" "expected" Inplace 8 1 + ] + +goldenTest :: FilePath -> ExpandStyle -> Int -> Int -> TestTree +goldenTest fp tc line col = + goldenWithHaskellDoc def splicePlugin (fp <> " (golden)") testDataDir fp "expected" "hs" $ \doc -> do + -- wait for the entire build to finish, so that code actions that + -- use stale data will get uptodate stuff + void 
waitForBuildQueue + actions <- getCodeActions doc $ pointRange line col + case find ((== Just (toExpandCmdTitle tc)) . codeActionTitle) actions of + Just (InR CodeAction {_command = Just c}) -> do + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + _ -> liftIO $ assertFailure "No CodeAction detected" + +goldenTestWithEdit :: FilePath -> FilePath -> ExpandStyle -> Int -> Int -> TestTree +goldenTestWithEdit fp expect tc line col = + goldenWithHaskellDoc def splicePlugin (fp <> " (golden)") testDataDir fp expect "hs" $ \doc -> do + orig <- documentContents doc + let + lns = T.lines orig + theRange = + Range + { _start = Position 0 0 + , _end = Position (fromIntegral $ length lns + 1) 1 + } + + void waitForDiagnostics + void waitForBuildQueue + alt <- liftIO $ T.readFile (testDataDir </> fp <.> "error.hs") + void $ applyEdit doc $ TextEdit theRange alt + changeDoc doc [TextDocumentContentChangeEvent $ InL + TextDocumentContentChangePartial {_range = theRange, _rangeLength = Nothing, _text = alt} + ] + + void waitForDiagnostics + -- wait for the entire build to finish + void waitForBuildQueue + actions <- getCodeActions doc $ pointRange line col + case find ((== Just (toExpandCmdTitle tc)) . codeActionTitle) actions of + Just (InR CodeAction {_command = Just c}) -> do + executeCommand c + void $ skipManyTill anyMessage (message SMethod_WorkspaceApplyEdit) + _ -> liftIO $ assertFailure "No CodeAction detected" + +testDataDir :: FilePath +testDataDir = "plugins" </> "hls-splice-plugin" </> "test" </> "testdata" + +pointRange :: Int -> Int -> Range +pointRange (subtract 1 -> fromIntegral -> line) (subtract 1 -> fromIntegral -> col) = + Range (Position line col) (Position line $ col + 1) + +-- | Get the title of a code action. +codeActionTitle :: (Command |? CodeAction) -> Maybe Text +codeActionTitle InL {} = Nothing +codeActionTitle (InR CodeAction {_title}) = Just _title diff --git a/plugins/hls-splice-plugin/test/testdata/QQ.hs b/plugins/hls-splice-plugin/test/testdata/QQ.hs new file mode 100644 index 0000000000..bf5efeb1b5 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/QQ.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE TemplateHaskell #-} +module QQ (str) where + +import Language.Haskell.TH + ( mkName, + stringL, + litP, + clause, + litE, + normalB, + funD, + sigD, + litT, + strTyLit ) +import Language.Haskell.TH.Quote (QuasiQuoter (..)) + +str :: QuasiQuoter +str = + QuasiQuoter + { quoteExp = litE . stringL + , quotePat = litP . stringL + , quoteType = litT .
strTyLit + , quoteDec = \name -> + sequence + [ sigD (mkName name) [t|String|] + , funD (mkName name) [clause [] (normalB $ litE $ stringL name) []] + ] + } diff --git a/plugins/hls-splice-plugin/test/testdata/TDeclKindError.error.hs b/plugins/hls-splice-plugin/test/testdata/TDeclKindError.error.hs new file mode 100644 index 0000000000..e21e057ed1 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TDeclKindError.error.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE QuasiQuotes #-} +module TSimpleDecl where +import Language.Haskell.TH ( mkName, clause, normalB, funD, sigD ) + +-- Foo +-- Bar +$(sequence + [sigD (mkName "foo") [t|Int|] + ,funD (mkName "foo") [clause [] (normalB [|42|]) []] + ,sigD (mkName "bar") [t|Int|] + ] + ) +-- Bar +-- ee +-- dddd diff --git a/plugins/hls-splice-plugin/test/testdata/TDeclKindError.expected.hs b/plugins/hls-splice-plugin/test/testdata/TDeclKindError.expected.hs new file mode 100644 index 0000000000..b1f0250b41 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TDeclKindError.expected.hs @@ -0,0 +1,13 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE QuasiQuotes #-} +module TSimpleDecl where +import Language.Haskell.TH ( mkName, clause, normalB, funD, sigD ) + +-- Foo +-- Bar +foo :: Int +foo = 42 +bar :: Int +-- Bar +-- ee +-- dddd diff --git a/plugins/hls-splice-plugin/test/testdata/TDeclKindError.hs b/plugins/hls-splice-plugin/test/testdata/TDeclKindError.hs new file mode 100644 index 0000000000..027d4f83dd --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TDeclKindError.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE QuasiQuotes #-} +module TSimpleDecl where +import Language.Haskell.TH ( mkName, clause, normalB, funD, sigD ) + +-- Foo +-- Bar +$(sequence + [sigD (mkName "foo") [t|Int|] + ,funD (mkName "foo") [clause [] (normalB [|42|]) []] + ] + ) +-- Bar +-- ee +-- dddd diff --git a/plugins/hls-splice-plugin/test/testdata/TErrorExp.expected.hs b/plugins/hls-splice-plugin/test/testdata/TErrorExp.expected.hs new file mode 100644 index 0000000000..420d9834ea --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TErrorExp.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TErrorExp where +import Language.Haskell.TH ( tupE, litE, integerL ) + +main :: IO () +main = return (42, ()) diff --git a/plugins/hls-splice-plugin/test/testdata/TErrorExp.hs b/plugins/hls-splice-plugin/test/testdata/TErrorExp.hs new file mode 100644 index 0000000000..fb696dc2dd --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TErrorExp.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TErrorExp where +import Language.Haskell.TH ( tupE, litE, integerL ) + +main :: IO () +main = return $(tupE [litE $ integerL 42, tupE []]) diff --git a/plugins/hls-splice-plugin/test/testdata/TErrorPat.expected.hs b/plugins/hls-splice-plugin/test/testdata/TErrorPat.expected.hs new file mode 100644 index 0000000000..184c9bd9eb --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TErrorPat.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TErrorPat where +import Language.Haskell.TH ( conP ) + +f :: () -> () +f True = x diff --git a/plugins/hls-splice-plugin/test/testdata/TErrorPat.hs b/plugins/hls-splice-plugin/test/testdata/TErrorPat.hs new file mode 100644 index 0000000000..87f3d2c9cb --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TErrorPat.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TErrorPat where +import Language.Haskell.TH ( conP 
) + +f :: () -> () +f $(conP 'True []) = x diff --git a/plugins/hls-splice-plugin/test/testdata/TQQDecl.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQDecl.expected.hs new file mode 100644 index 0000000000..781f23e12d --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQDecl.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQDecl where +import QQ (str) + +foo :: String +foo = "foo" diff --git a/plugins/hls-splice-plugin/test/testdata/TQQDecl.hs b/plugins/hls-splice-plugin/test/testdata/TQQDecl.hs new file mode 100644 index 0000000000..90a05ce7d3 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQDecl.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQDecl where +import QQ (str) + +[str|foo|] diff --git a/plugins/hls-splice-plugin/test/testdata/TQQExp.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQExp.expected.hs new file mode 100644 index 0000000000..26f1169513 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQExp.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQExp where +import QQ + +main :: IO () +main = putStrLn "str" diff --git a/plugins/hls-splice-plugin/test/testdata/TQQExp.hs b/plugins/hls-splice-plugin/test/testdata/TQQExp.hs new file mode 100644 index 0000000000..b600df586a --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQExp.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQExp where +import QQ + +main :: IO () +main = putStrLn [str|str|] diff --git a/plugins/hls-splice-plugin/test/testdata/TQQExpError.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQExpError.expected.hs new file mode 100644 index 0000000000..16c7678d0d --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQExpError.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQExpError where +import QQ + +main :: IO () +main = pure "str" diff --git a/plugins/hls-splice-plugin/test/testdata/TQQExpError.hs b/plugins/hls-splice-plugin/test/testdata/TQQExpError.hs new file mode 100644 index 0000000000..56897837da --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQExpError.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQExpError where +import QQ + +main :: IO () +main = pure [str|str|] diff --git a/plugins/hls-splice-plugin/test/testdata/TQQPat.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQPat.expected.hs new file mode 100644 index 0000000000..eb99524050 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQPat.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQPat where +import QQ + +f :: String -> IO () +f "str" = putStrLn "is str" +f _ = putStrLn " not str" diff --git a/plugins/hls-splice-plugin/test/testdata/TQQPat.hs b/plugins/hls-splice-plugin/test/testdata/TQQPat.hs new file mode 100644 index 0000000000..e1ada41287 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQPat.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQPat where +import QQ + +f :: String -> IO () +f [str|str|] = putStrLn "is str" +f _ = putStrLn " not str" diff --git a/plugins/hls-splice-plugin/test/testdata/TQQPatError.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQPatError.expected.hs new file mode 100644 index 0000000000..0f928feab7 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQPatError.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQPatError where +import QQ + +f :: () -> IO () +f "str" = putStrLn "is str" +f _ = putStrLn " not str" diff --git 
a/plugins/hls-splice-plugin/test/testdata/TQQPatError.hs b/plugins/hls-splice-plugin/test/testdata/TQQPatError.hs new file mode 100644 index 0000000000..d89141a875 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQPatError.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE QuasiQuotes #-} +module TQQPatError where +import QQ + +f :: () -> IO () +f [str|str|] = putStrLn "is str" +f _ = putStrLn " not str" diff --git a/plugins/hls-splice-plugin/test/testdata/TQQType.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQType.expected.hs new file mode 100644 index 0000000000..f93798e01e --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQType.expected.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE QuasiQuotes #-} +module TQQType where +import Language.Haskell.TH ( appT, numTyLit, litT, conT ) +import Data.Proxy ( Proxy(..) ) +import QQ + +main :: IO (Proxy "str") +main = return Proxy diff --git a/plugins/hls-splice-plugin/test/testdata/TQQType.hs b/plugins/hls-splice-plugin/test/testdata/TQQType.hs new file mode 100644 index 0000000000..2c670793e2 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQType.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE QuasiQuotes #-} +module TQQType where +import Language.Haskell.TH ( appT, numTyLit, litT, conT ) +import Data.Proxy ( Proxy(..) ) +import QQ + +main :: IO (Proxy [str|str|]) +main = return Proxy diff --git a/plugins/hls-splice-plugin/test/testdata/TQQTypeTypeError.expected.hs b/plugins/hls-splice-plugin/test/testdata/TQQTypeTypeError.expected.hs new file mode 100644 index 0000000000..70e37e7701 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQTypeTypeError.expected.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE QuasiQuotes #-} +module TQQTypeTypeError where +import Language.Haskell.TH ( appT, numTyLit, litT, conT ) +import Data.Proxy ( Proxy(..) ) +import QQ + +main :: IO (Proxy "str") +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TQQTypeTypeError.hs b/plugins/hls-splice-plugin/test/testdata/TQQTypeTypeError.hs new file mode 100644 index 0000000000..3f644a3288 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TQQTypeTypeError.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE QuasiQuotes #-} +module TQQTypeTypeError where +import Language.Haskell.TH ( appT, numTyLit, litT, conT ) +import Data.Proxy ( Proxy(..) 
) +import QQ + +main :: IO (Proxy [str|str|]) +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TSimpleDecl.expected.hs b/plugins/hls-splice-plugin/test/testdata/TSimpleDecl.expected.hs new file mode 100644 index 0000000000..90c2bf1b09 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimpleDecl.expected.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE QuasiQuotes #-} +module TSimpleDecl where +import Language.Haskell.TH ( mkName, clause, normalB, funD, sigD ) + +-- Foo +-- Bar +foo :: Int +foo = 42 +-- Bar +-- ee +-- dddd diff --git a/plugins/hls-splice-plugin/test/testdata/TSimpleDecl.hs b/plugins/hls-splice-plugin/test/testdata/TSimpleDecl.hs new file mode 100644 index 0000000000..027d4f83dd --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimpleDecl.hs @@ -0,0 +1,15 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE QuasiQuotes #-} +module TSimpleDecl where +import Language.Haskell.TH ( mkName, clause, normalB, funD, sigD ) + +-- Foo +-- Bar +$(sequence + [sigD (mkName "foo") [t|Int|] + ,funD (mkName "foo") [clause [] (normalB [|42|]) []] + ] + ) +-- Bar +-- ee +-- dddd diff --git a/plugins/hls-splice-plugin/test/testdata/TSimpleExp.expected.hs b/plugins/hls-splice-plugin/test/testdata/TSimpleExp.expected.hs new file mode 100644 index 0000000000..fb8967b504 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimpleExp.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TSimpleExp where +import Language.Haskell.TH ( tupE, litE, integerL ) + +main :: IO () +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TSimpleExp.hs b/plugins/hls-splice-plugin/test/testdata/TSimpleExp.hs new file mode 100644 index 0000000000..7f5db568ac --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimpleExp.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TSimpleExp where +import Language.Haskell.TH ( tupE, litE, integerL ) + +main :: IO () +main = return $(tupE []) diff --git a/plugins/hls-splice-plugin/test/testdata/TSimplePat.expected.hs b/plugins/hls-splice-plugin/test/testdata/TSimplePat.expected.hs new file mode 100644 index 0000000000..82c4891d3b --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimplePat.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TSimplePat where +import Language.Haskell.TH ( varP, mkName ) + +f :: x -> x +f x = x diff --git a/plugins/hls-splice-plugin/test/testdata/TSimplePat.hs b/plugins/hls-splice-plugin/test/testdata/TSimplePat.hs new file mode 100644 index 0000000000..ee6f1d3ed3 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimplePat.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TSimplePat where +import Language.Haskell.TH ( varP, mkName ) + +f :: x -> x +f $(varP $ mkName "x") = x diff --git a/plugins/hls-splice-plugin/test/testdata/TSimpleType.expected.hs b/plugins/hls-splice-plugin/test/testdata/TSimpleType.expected.hs new file mode 100644 index 0000000000..8975b4f926 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimpleType.expected.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module TSimpleType where +import Language.Haskell.TH ( tupleT ) + +main :: IO () +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TSimpleType.hs b/plugins/hls-splice-plugin/test/testdata/TSimpleType.hs new file mode 100644 index 0000000000..55b5c59d05 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TSimpleType.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE 
TemplateHaskell #-} +module TSimpleType where +import Language.Haskell.TH ( tupleT ) + +main :: IO $(tupleT 0) +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeAppExp.expected.hs b/plugins/hls-splice-plugin/test/testdata/TTypeAppExp.expected.hs new file mode 100644 index 0000000000..b74d153ee2 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeAppExp.expected.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +module TTypeAppExp where +import Data.Proxy + +f :: Proxy Int +f = Proxy @Int diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeAppExp.hs b/plugins/hls-splice-plugin/test/testdata/TTypeAppExp.hs new file mode 100644 index 0000000000..0cc071a08d --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeAppExp.hs @@ -0,0 +1,7 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +module TTypeAppExp where +import Data.Proxy + +f :: Proxy Int +f = $([|Proxy @Int|]) diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeKindError.96-expected.hs b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.96-expected.hs new file mode 100644 index 0000000000..101e12e402 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.96-expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TemplateHaskell #-} +module TTypeKindError where +import Language.Haskell.TH ( numTyLit, litT ) +import Data.Proxy ( Proxy ) + +main :: 42 +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeKindError.error.hs b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.error.hs new file mode 100644 index 0000000000..58631e8464 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.error.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TemplateHaskell #-} +module TTypeKindError where +import Language.Haskell.TH ( numTyLit, litT ) +import Data.Proxy ( Proxy ) + +main :: $(litT (numTyLit 42)) +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeKindError.expected.hs b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.expected.hs new file mode 100644 index 0000000000..ef04a42611 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TemplateHaskell #-} +module TTypeKindError where +import Language.Haskell.TH ( numTyLit, litT ) +import Data.Proxy ( Proxy ) + +main :: (42) +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeKindError.hs b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.hs new file mode 100644 index 0000000000..c14dc0e68c --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeKindError.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TemplateHaskell #-} +module TTypeKindError where +import Language.Haskell.TH ( numTyLit, litT ) +import Data.Proxy ( Proxy ) + +main :: IO () +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/TTypeTypeError.expected.hs b/plugins/hls-splice-plugin/test/testdata/TTypeTypeError.expected.hs new file mode 100644 index 0000000000..f19e495e6d --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeTypeError.expected.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TemplateHaskell #-} +module TTypeTypeError where +import Language.Haskell.TH ( appT, numTyLit, litT, conT ) +import Data.Proxy ( Proxy ) + +main :: IO (Proxy 42) +main = return () diff --git 
a/plugins/hls-splice-plugin/test/testdata/TTypeTypeError.hs b/plugins/hls-splice-plugin/test/testdata/TTypeTypeError.hs new file mode 100644 index 0000000000..37a8b3c931 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/TTypeTypeError.hs @@ -0,0 +1,8 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TemplateHaskell #-} +module TTypeTypeError where +import Language.Haskell.TH ( appT, numTyLit, litT, conT ) +import Data.Proxy ( Proxy ) + +main :: IO $(conT ''Proxy `appT` litT (numTyLit 42)) +main = return () diff --git a/plugins/hls-splice-plugin/test/testdata/hie.yaml b/plugins/hls-splice-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..39bd673f43 --- /dev/null +++ b/plugins/hls-splice-plugin/test/testdata/hie.yaml @@ -0,0 +1,21 @@ +cradle: + direct: + arguments: + - QQ.hs + - TQQExpError.hs + - TSimpleExp.hs + - TTypeAppExp.hs + - TDeclKindError.hs + - TQQPat.hs + - TSimplePat.hs + - TErrorExp.hs + - TQQPatError.hs + - TSimpleType.hs + - TErrorPat.hs + - TQQType.hs + - TTypeKindError.hs + - TQQDecl.hs + - TQQTypeTypeError.hs + - TTypeTypeError.hs + - TQQExp.hs + - TSimpleDecl.hs diff --git a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs new file mode 100644 index 0000000000..77c9817dba --- /dev/null +++ b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs @@ -0,0 +1,204 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE PatternSynonyms #-} +module Ide.Plugin.Stan (descriptor, Log) where + +import Control.DeepSeq (NFData) +import Control.Monad (void) +import Control.Monad.IO.Class (liftIO) +import Data.Foldable (toList) +import Data.Hashable (Hashable) +import qualified Data.HashMap.Strict as HM +import Data.Maybe (mapMaybe) +import qualified Data.Text as T +import Development.IDE +import Development.IDE.Core.Rules (getHieFile) +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat (HieFile (..)) +import GHC.Generics (Generic) +import Ide.Plugin.Config (PluginConfig (..)) +import Ide.Types (PluginDescriptor (..), PluginId, + configHasDiagnostics, + configInitialGenericConfig, + defaultConfigDescriptor, + defaultPluginDescriptor) +import qualified Language.LSP.Protocol.Types as LSP +import Stan (createCabalExtensionsMap, + getStanConfig) +import Stan.Analysis (Analysis (..), runAnalysis) +import Stan.Category (Category (..)) +import Stan.Cli (StanArgs (..)) +import Stan.Config (Config, ConfigP (..), applyConfig) +import Stan.Config.Pretty (prettyConfigCli) +import Stan.Core.Id (Id (..)) +import Stan.EnvVars (EnvVars (..), envVarsToText) +import Stan.Inspection (Inspection (..)) +import Stan.Inspection.All (inspectionsIds, inspectionsMap) +import Stan.Observation (Observation (..)) +import Stan.Report.Settings (OutputSettings (..), + ToggleSolution (..), + Verbosity (..)) +import Stan.Toml (usedTomlFiles) +import System.Directory (makeRelativeToCurrentDirectory) +import Trial (Fatality, Trial (..), fiasco, + pattern FiascoL, pattern ResultL, + prettyTrial, prettyTrialWith) + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId desc) + { pluginRules = rules recorder plId + , pluginConfigDescriptor = defConfigDescriptor + { configHasDiagnostics = True + -- We disable this plugin by default because users have been complaining about + -- the diagnostics, see https://github.com/haskell/haskell-language-server/issues/3916 + , configInitialGenericConfig = (configInitialGenericConfig 
defConfigDescriptor) + { plcGlobalOn = False + } + } + } + where + defConfigDescriptor = defaultConfigDescriptor + desc = "Provides stan diagnostics. Built with stan-" <> VERSION_stan + +data Log = LogShake !Shake.Log + | LogWarnConf ![(Fatality, T.Text)] + | LogDebugStanConfigResult ![FilePath] !(Trial T.Text Config) + | LogDebugStanEnvVars !EnvVars + +-- We use this function to remove the terminal escape sequences emitted by Trial pretty printing functions. +-- See https://github.com/kowainik/trial/pull/73#issuecomment-1868233235 +stripModifiers :: T.Text -> T.Text +stripModifiers = go "" + where + go acc txt = + case T.findIndex (== '\x1B') txt of + Nothing -> acc <> txt + Just index -> let (beforeEsc, afterEsc) = T.splitAt index txt + in go (acc <> beforeEsc) (consumeEscapeSequence afterEsc) + consumeEscapeSequence :: T.Text -> T.Text + consumeEscapeSequence txt = + case T.findIndex (== 'm') txt of + Nothing -> txt + Just index -> T.drop (index + 1) txt + +instance Pretty Log where + pretty = \case + LogShake log -> pretty log + LogWarnConf errs -> "Fiasco encountered when trying to load stan configuration. Using default inspections:" + <> line <> (pretty $ show errs) + LogDebugStanConfigResult fps t -> "Config result using: " + <> pretty fps <> line <> pretty (stripModifiers $ prettyTrialWith (T.unpack . prettyConfigCli) t) + LogDebugStanEnvVars envVars -> "EnvVars " <> + case envVars of + EnvVars trial@(FiascoL _) -> pretty (stripModifiers $ prettyTrial trial) + + -- if the envVars are not set, 'envVarsToText' returns an empty string + _ -> "found: " <> (pretty $ envVarsToText envVars) + +data GetStanDiagnostics = GetStanDiagnostics + deriving (Eq, Show, Generic) + +instance Hashable GetStanDiagnostics + +instance NFData GetStanDiagnostics + +type instance RuleResult GetStanDiagnostics = () + +rules :: Recorder (WithPriority Log) -> PluginId -> Rules () +rules recorder plId = do + define (cmapWithPrio LogShake recorder) $ + \GetStanDiagnostics file -> do + config <- getPluginConfigAction plId + if plcGlobalOn config && plcDiagnosticsOn config then do + maybeHie <- getHieFile file + case maybeHie of + Nothing -> return ([], Nothing) + Just hie -> do + let isLoud = False -- in Stan: notJson = not isLoud + let stanArgs = + StanArgs + { stanArgsHiedir = "" -- :: !FilePath -- ^ Directory with HIE files + , stanArgsCabalFilePath = [] -- :: ![FilePath] -- ^ Path to @.cabal@ files. + , stanArgsOutputSettings = OutputSettings NonVerbose ShowSolution -- :: !OutputSettings -- ^ Settings for output terminal report + -- doesn't matter, because it is silenced by isLoud + , stanArgsReport = Nothing -- :: !(Maybe ReportArgs) -- ^ @HTML@ report settings + , stanArgsUseDefaultConfigFile = fiasco "" -- :: !(TaggedTrial Text Bool) -- ^ Use default @.stan.toml@ file + , stanArgsConfigFile = Nothing -- :: !(Maybe FilePath) -- ^ Path to a custom configurations file. + , stanArgsConfig = ConfigP + { configChecks = fiasco "'hls-stan-plugin' doesn't receive CLI options for: checks" + , configRemoved = fiasco "'hls-stan-plugin' doesn't receive CLI options for: remove" + , configIgnored = fiasco "'hls-stan-plugin' doesn't receive CLI options for: ignore" + } + -- if they are not fiascos, .stan.toml's aren't taken into account + , stanArgsJsonOut = not isLoud -- :: !Bool -- ^ Output the machine-readable output in JSON format instead.
+ } + + (configTrial, useDefConfig, env) <- liftIO $ getStanConfig stanArgs isLoud + tomlsUsedByStan <- liftIO $ usedTomlFiles useDefConfig (stanArgsConfigFile stanArgs) + logWith recorder Debug (LogDebugStanConfigResult tomlsUsedByStan configTrial) + + -- If envVar is set to 'False', stan will ignore all local and global .stan.toml files + logWith recorder Debug (LogDebugStanEnvVars env) + + -- Note that Stan works in terms of relative paths, but the HIE files come in as absolute. Without + -- making its path relative, the file name(s) won't line up with the associated Map keys. + relativeHsFilePath <- liftIO $ makeRelativeToCurrentDirectory $ fromNormalizedFilePath file + let hieRelative = hie{hie_hs_file=relativeHsFilePath} + + (checksMap, ignoredObservations) <- case configTrial of + FiascoL es -> do + logWith recorder Development.IDE.Warning (LogWarnConf es) + -- If we can't read the config file, default to using all inspections: + let allInspections = HM.singleton relativeHsFilePath inspectionsIds + pure (allInspections, []) + ResultL _warnings stanConfig -> do + -- HashMap of *relative* file paths to info about enabled checks for those file paths. + let checksMap = applyConfig [relativeHsFilePath] stanConfig + pure (checksMap, configIgnored stanConfig) + + -- A Map from *relative* file paths (just one, in this case) to language extension info: + cabalExtensionsMap <- liftIO $ createCabalExtensionsMap isLoud (stanArgsCabalFilePath stanArgs) [hieRelative] + let analysis = runAnalysis cabalExtensionsMap checksMap ignoredObservations [hieRelative] + return (analysisToDiagnostics file analysis, Just ()) + else return ([], Nothing) + + action $ do + files <- getFilesOfInterestUntracked + void $ uses GetStanDiagnostics $ HM.keys files + where + analysisToDiagnostics :: NormalizedFilePath -> Analysis -> [FileDiagnostic] + analysisToDiagnostics file = mapMaybe (observationToDiagnostic file) . toList . analysisObservations + observationToDiagnostic :: NormalizedFilePath -> Observation -> Maybe FileDiagnostic + observationToDiagnostic file Observation {observationSrcSpan, observationInspectionId} = + do + inspection <- HM.lookup observationInspectionId inspectionsMap + let + -- Looking similar to Stan CLI output + -- We do not use `prettyShowInspection` because Id is redundant here + -- `prettyShowSeverity` and `prettyShowCategory` would contain color + -- codes and are replaced, too + message :: T.Text + message = + T.unlines $ + [ " ✲ Name: " <> inspectionName inspection, + " ✲ Description: " <> inspectionDescription inspection, + " ✲ Severity: " <> (T.pack $ show $ inspectionSeverity inspection), + " ✲ Category: " <> T.intercalate " " + (map (("#" <>) .
unCategory) $ toList $ inspectionCategory inspection), + "Possible solutions:" + ] + ++ map (" - " <>) (inspectionSolution inspection) + return $ + ideErrorFromLspDiag + LSP.Diagnostic + { _range = realSrcSpanToRange observationSrcSpan, + _severity = Just LSP.DiagnosticSeverity_Hint, + _code = Just (LSP.InR $ unId (inspectionId inspection)), + _source = Just "stan", + _message = message, + _relatedInformation = Nothing, + _tags = Nothing, + _codeDescription = Nothing, + _data_ = Nothing + } + file + Nothing diff --git a/plugins/hls-stan-plugin/test/Main.hs b/plugins/hls-stan-plugin/test/Main.hs new file mode 100644 index 0000000000..231707d142 --- /dev/null +++ b/plugins/hls-stan-plugin/test/Main.hs @@ -0,0 +1,85 @@ +module Main + ( main, + ) +where + +import Control.Lens ((^.)) +import qualified Data.Text as T +import qualified Ide.Plugin.Stan as Stan +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +tests :: TestTree +tests = + testGroup + "stan suggestions" + [ testCase "provides diagnostics" $ + runStanSession "" $ do + doc <- openDoc "test.hs" "haskell" + diags@(reduceDiag : _) <- waitForDiagnosticsFromSource doc "stan" + liftIO $ do + length diags @?= 1 + reduceDiag ^. L.range @?= Range (Position 0 0) (Position 3 19) + reduceDiag ^. L.severity @?= Just DiagnosticSeverity_Hint + let expectedPrefix = " ✲ Name: " + assertBool "" $ T.isPrefixOf expectedPrefix (reduceDiag ^. L.message) + reduceDiag ^. L.source @?= Just "stan" + return () + , testCase "ignores diagnostics from .stan.toml" $ + runStanSession "" $ do + doc <- openDoc ("dir" </> "configTest.hs") "haskell" + diags <- waitForDiagnosticsFromSource doc "stan" + liftIO $ length diags @?= 0 + return () + , testCase "respects LANGUAGE pragmas in the source file" $ + runStanSession "" $ do + doc <- openDoc ("extensions-language-pragma" </> "LanguagePragmaTest.hs") "haskell" + diags <- waitForDiagnosticsFromSource doc "stan" + -- We must include at least one valid diagnostic in our test file to avoid + -- the false-positive case where Stan finds no analyses to perform due to a + -- bad mapping, which would also lead to zero diagnostics being returned. + liftIO $ length diags @?= 1 + return () + , testCase "respects language extensions defined in the .cabal file" $ + runStanSession "" $ do + doc <- openDoc ("extensions-cabal-file" </> "CabalFileTest.hs") "haskell" + diags <- waitForDiagnosticsFromSource doc "stan" + -- We need at least one valid diagnostic here too, for the same reason as above. + liftIO $ length diags @?= 1 + return () + ] + +testDir :: FilePath +testDir = "plugins" </> "hls-stan-plugin" </> "test" </> "testdata" + +stanPlugin :: PluginTestDescriptor Stan.Log +stanPlugin = mkPluginTestDescriptor enabledStanDescriptor "stan" + where + -- We have to explicitly enable the plugin as it is disabled by default as + -- per request: https://github.com/haskell/haskell-language-server/issues/3916 + -- + enabledStanDescriptor recorder plId = + let stanPluginDescriptor = Stan.descriptor recorder plId + in stanPluginDescriptor + { pluginConfigDescriptor = (pluginConfigDescriptor stanPluginDescriptor) + { configInitialGenericConfig = (configInitialGenericConfig (pluginConfigDescriptor stanPluginDescriptor)) + { plcGlobalOn = True + } + } + } + +runStanSession :: FilePath -> Session a -> IO a +runStanSession subdir = + failIfSessionTimeout + .
runSessionWithTestConfig def{ + testConfigCaps=codeActionNoResolveCaps + , testShiftRoot=True + , testPluginDescriptor=stanPlugin + , testDirLocation=Left (testDir </> subdir) + } + . const diff --git a/plugins/hls-stan-plugin/test/testdata/.stan.toml b/plugins/hls-stan-plugin/test/testdata/.stan.toml new file mode 100644 index 0000000000..ce73b7f29c --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/.stan.toml @@ -0,0 +1,22 @@ +# See https://github.com/kowainik/stan/issues/531 +# Unix +[[check]] +type = "Exclude" +id = "STAN-0103" +file = "dir/configTest.hs" + +[[check]] +type = "Exclude" +id = "STAN-0212" +directory = "dir/" + +# Windows +[[check]] +type = "Exclude" +id = "STAN-0103" +file = "dir\\configTest.hs" + +[[check]] +type = "Exclude" +id = "STAN-0212" +directory = "dir\\" diff --git a/plugins/hls-stan-plugin/test/testdata/dir/configTest.hs b/plugins/hls-stan-plugin/test/testdata/dir/configTest.hs new file mode 100644 index 0000000000..add256058b --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/dir/configTest.hs @@ -0,0 +1,3 @@ +a = length [1..] + +b = undefined diff --git a/plugins/hls-stan-plugin/test/testdata/extensions-cabal-file/CabalFileTest.hs b/plugins/hls-stan-plugin/test/testdata/extensions-cabal-file/CabalFileTest.hs new file mode 100644 index 0000000000..77b6dc3845 --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/extensions-cabal-file/CabalFileTest.hs @@ -0,0 +1,7 @@ +module CabalFileTest () where + +-- With `StrictData` enabled in the `.cabal` file, Stan shouldn't complain here: +data A = A Int Int + +-- ...but it should still complain here! +kewlFunc = undefined diff --git a/plugins/hls-stan-plugin/test/testdata/extensions-cabal-file/cabal-file-test.cabal b/plugins/hls-stan-plugin/test/testdata/extensions-cabal-file/cabal-file-test.cabal new file mode 100644 index 0000000000..094f06d1dd --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/extensions-cabal-file/cabal-file-test.cabal @@ -0,0 +1,9 @@ +cabal-version: 3.0 +name: cabal-file-test +version: 0.0.0.0 + +library + exposed-modules: CabalFileTest + hs-source-dirs: extensions-cabal-file + -- Specifically, we're testing that Stan respects the following extension definition: + default-extensions: StrictData diff --git a/plugins/hls-stan-plugin/test/testdata/extensions-language-pragma/LanguagePragmaTest.hs b/plugins/hls-stan-plugin/test/testdata/extensions-language-pragma/LanguagePragmaTest.hs new file mode 100644 index 0000000000..6f5631ac8c --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/extensions-language-pragma/LanguagePragmaTest.hs @@ -0,0 +1,9 @@ +{-# LANGUAGE StrictData #-} + +module LanguagePragmaTest () where + +-- With the above `StrictData` language pragma, Stan shouldn't complain here: +data A = A Int Int + +-- ...but it should still complain here! +kewlFunc = undefined diff --git a/plugins/hls-stan-plugin/test/testdata/extensions-language-pragma/language-pragma-test.cabal b/plugins/hls-stan-plugin/test/testdata/extensions-language-pragma/language-pragma-test.cabal new file mode 100644 index 0000000000..336388d4fa --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/extensions-language-pragma/language-pragma-test.cabal @@ -0,0 +1,11 @@ +cabal-version: 3.0 +name: language-pragma-test +version: 0.0.0.0 + +-- Without at least a minimal valid `.cabal` file, Stan won't bother building its +-- map of language extensions. This means it also won't detect LANGUAGE pragmas +-- without this file.
+ +library + exposed-modules: LanguagePragmaTest + hs-source-dirs: extensions-language-pragma diff --git a/plugins/hls-stan-plugin/test/testdata/hie.yaml b/plugins/hls-stan-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..577238428b --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/hie.yaml @@ -0,0 +1,4 @@ +cradle: + direct: + arguments: + - test.hs \ No newline at end of file diff --git a/plugins/hls-stan-plugin/test/testdata/test.hs b/plugins/hls-stan-plugin/test/testdata/test.hs new file mode 100644 index 0000000000..7a184b01f0 --- /dev/null +++ b/plugins/hls-stan-plugin/test/testdata/test.hs @@ -0,0 +1,4 @@ +orderPair x y + | x < y = 1 + | x > y = 2 + | otherwise = 3 diff --git a/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs new file mode 100644 index 0000000000..767cc061df --- /dev/null +++ b/plugins/hls-stylish-haskell-plugin/src/Ide/Plugin/StylishHaskell.hs @@ -0,0 +1,102 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +module Ide.Plugin.StylishHaskell + ( descriptor + , provider + , Log + ) +where + +import Control.Monad.Except (throwError) +import Control.Monad.IO.Class +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE hiding (getExtensions, + pluginHandlers) +import Development.IDE.Core.PluginUtils +import Development.IDE.GHC.Compat (ModSummary (ms_hspp_opts), + extensionFlags) +import qualified Development.IDE.GHC.Compat.Util as Util +import GHC.LanguageExtensions.Type +import Ide.Plugin.Error (PluginError (PluginInternalError)) +import Ide.PluginUtils +import Ide.Types hiding (Config) +import Language.Haskell.Stylish +import Language.LSP.Protocol.Types as LSP +import System.Directory +import System.FilePath + +data Log + = LogLanguageExtensionFromDynFlags + +instance Pretty Log where + pretty = \case + LogLanguageExtensionFromDynFlags -> "stylish-haskell uses the language extensions from DynFlags" + + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = (defaultPluginDescriptor plId desc) + { pluginHandlers = mkFormattingHandlers (provider recorder) + } + where + desc = "Provides formatting of Haskell files via stylish-haskell. Built with stylish-haskell-" <> VERSION_stylish_haskell + +-- | Formatter provider of stylish-haskell. +-- Formats the given source in either a given Range or the whole Document. +-- If the provider fails an error is returned that can be displayed to the user. 
+provider :: Recorder (WithPriority Log) -> FormattingHandler IdeState +provider recorder ide _token typ contents fp _opts = do + (msrModSummary -> ms_hspp_opts -> dyn) <- runActionE "stylish-haskell" ide $ useE GetModSummary fp + let file = fromNormalizedFilePath fp + config <- liftIO $ loadConfigFrom file + mergedConfig <- liftIO $ getMergedConfig dyn config + let (range, selectedContents) = case typ of + FormatText -> (fullRange contents, contents) + FormatRange r -> (normalize r, extractTextInRange (extendToFullLines r) contents) + result = runStylishHaskell file mergedConfig selectedContents + case result of + Left err -> throwError $ PluginInternalError $ T.pack $ "stylishHaskellCmd: " ++ err + Right new -> pure $ LSP.InL [TextEdit range new] + where + getMergedConfig dyn config + | null (configLanguageExtensions config) + = do + logWith recorder Info LogLanguageExtensionFromDynFlags + pure + $ config + { configLanguageExtensions = getExtensions dyn } + | otherwise + = pure config + + getExtensions = map showExtension . Util.toList . extensionFlags + + showExtension Cpp = "CPP" + showExtension other = show other + +-- | Recursively search in every directory of the given filepath for .stylish-haskell.yaml. +-- If no such file has been found, return default config. +loadConfigFrom :: FilePath -> IO Config +loadConfigFrom file = do +#if MIN_VERSION_stylish_haskell(0,15,0) + let configSearchStrategy = SearchFromDirectory (takeDirectory file) + config <- loadConfig (makeVerbose False) configSearchStrategy +#else + currDir <- getCurrentDirectory + setCurrentDirectory (takeDirectory file) + config <- loadConfig (makeVerbose False) Nothing + setCurrentDirectory currDir +#endif + pure config + +-- | Run stylish-haskell on the given text with the given configuration. +runStylishHaskell :: FilePath -- ^ Location of the file being formatted. Used for error message + -> Config -- ^ Configuration for stylish-haskell + -> Text -- ^ Text to format + -> Either String Text -- ^ Either formatted Text or an error message +runStylishHaskell file config = fmap fromLines . fmt . toLines + where + fromLines = T.pack . unlines + fmt = runSteps (configLanguageExtensions config) (Just file) (configSteps config) + toLines = lines . 
T.unpack diff --git a/plugins/hls-stylish-haskell-plugin/test/Main.hs b/plugins/hls-stylish-haskell-plugin/test/Main.hs new file mode 100644 index 0000000000..22e9499947 --- /dev/null +++ b/plugins/hls-stylish-haskell-plugin/test/Main.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE OverloadedStrings #-} +module Main + ( main + ) where + +import qualified Ide.Plugin.StylishHaskell as StylishHaskell +import System.FilePath +import Test.Hls + +main :: IO () +main = defaultTestRunner tests + +stylishHaskellPlugin :: PluginTestDescriptor StylishHaskell.Log +stylishHaskellPlugin = mkPluginTestDescriptor StylishHaskell.descriptor "stylishHaskell" + +tests :: TestTree +tests = testGroup "stylish-haskell" + [ goldenWithStylishHaskell "formats a document" "StylishHaskell" "formatted_document" $ \doc -> do + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + , goldenWithStylishHaskell "formats a range" "StylishHaskell" "formatted_range" $ \doc -> do + formatRange doc (FormattingOptions 2 True Nothing Nothing Nothing) (Range (Position 0 0) (Position 2 21)) + ] + +goldenWithStylishHaskell :: TestName -> FilePath -> FilePath -> (TextDocumentIdentifier -> Session ()) -> TestTree +goldenWithStylishHaskell title fp desc = goldenWithHaskellDocFormatter def stylishHaskellPlugin "stylishHaskell" def title testDataDir fp desc "hs" + +testDataDir :: FilePath +testDataDir = "plugins" </> "hls-stylish-haskell-plugin" </> "test" </> "testdata" diff --git a/test/testdata/StylishHaksell.format_document.hs b/plugins/hls-stylish-haskell-plugin/test/testdata/StylishHaskell.formatted_document.hs similarity index 100% rename from test/testdata/StylishHaksell.format_document.hs rename to plugins/hls-stylish-haskell-plugin/test/testdata/StylishHaskell.formatted_document.hs diff --git a/test/testdata/StylishHaksell.format_range.hs b/plugins/hls-stylish-haskell-plugin/test/testdata/StylishHaskell.formatted_range.hs similarity index 100% rename from test/testdata/StylishHaksell.format_range.hs rename to plugins/hls-stylish-haskell-plugin/test/testdata/StylishHaskell.formatted_range.hs diff --git a/test/testdata/StylishHaskell.hs b/plugins/hls-stylish-haskell-plugin/test/testdata/StylishHaskell.hs similarity index 100% rename from test/testdata/StylishHaskell.hs rename to plugins/hls-stylish-haskell-plugin/test/testdata/StylishHaskell.hs diff --git a/plugins/hls-stylish-haskell-plugin/test/testdata/hie.yaml b/plugins/hls-stylish-haskell-plugin/test/testdata/hie.yaml new file mode 100644 index 0000000000..824558147d --- /dev/null +++ b/plugins/hls-stylish-haskell-plugin/test/testdata/hie.yaml @@ -0,0 +1,3 @@ +cradle: + direct: + arguments: [] diff --git a/release/README.md b/release/README.md new file mode 100644 index 0000000000..035e07fda1 --- /dev/null +++ b/release/README.md @@ -0,0 +1,7 @@ +# Making and uploading the Gitlab release to downloads.haskell.org + +1. Run the gitlab release pipeline using https://gitlab.haskell.org/haskell/haskell-language-server/-/pipelines/new +2. Once the pipeline has completed, download the artifacts using `fetch_gitlab.py` + - For example for the `1.7.0.0` release: `python fetch_gitlab.py -p <pipeline-id> --output haskell-language-server-1.7.0.0 -r 1.7.0.0` + - Ensure all the artifacts in the output directory are accurate and add any missing/extra artifacts +3.
`cd` to the output directory created in the previous step, and run `SIGNING_KEY= ../upload.sh` diff --git a/release/fetch_gitlab.py b/release/fetch_gitlab.py new file mode 100644 index 0000000000..7aa18c9527 --- /dev/null +++ b/release/fetch_gitlab.py @@ -0,0 +1,77 @@ +# adapted from https://gitlab.haskell.org/bgamari/ghc-utils/-/blob/master/rel-eng/fetch-gitlab-artifacts/fetch_gitlab.py +import logging +from pathlib import Path +import subprocess +import gitlab + +logging.basicConfig(level=logging.INFO) + +def strip_prefix(s, prefix): + if s.startswith(prefix): + return s[len(prefix):] + else: + return None + +def fetch_artifacts(release: str, pipeline_id: int, + dest_dir: Path, gl: gitlab.Gitlab): + dest_dir.mkdir(exist_ok=True) + proj = gl.projects.get('haskell/haskell-language-server') + pipeline = proj.pipelines.get(pipeline_id) + tmpdir = Path("fetch-gitlab") + tmpdir.mkdir(exist_ok=True) + for pipeline_job in pipeline.jobs.list(all=True): + if len(pipeline_job.artifacts) == 0: + logging.info(f'job {pipeline_job.name} ({pipeline_job.id}) has no artifacts') + continue + + job = proj.jobs.get(pipeline_job.id) + platform = strip_prefix(job.name, 'tar-') + if not platform: + logging.info(f'Skipping {job.name} (not a tar job)') + continue + try: + destdir = tmpdir / job.name + zip_name = Path(f"{tmpdir}/{job.name}.zip") + if not zip_name.exists() or zip_name.stat().st_size == 0: + logging.info(f'downloading archive {zip_name} for job {job.name} (job {job.id})...') + with open(zip_name, 'wb') as f: + job.artifacts(streamed=True, action=f.write) + + if zip_name.stat().st_size == 0: + logging.info(f'artifact archive for job {job.name} (job {job.id}) is empty') + continue + + extension = 'zip' if job.name.endswith('windows') else 'tar.xz' + dest = dest_dir / f'haskell-language-server-{release}-{platform}.{extension}' + if dest.exists(): + logging.info(f'bindist {dest} already exists') + continue + + subprocess.run(['unzip', '-bo', zip_name, '-d', destdir]) + bindist_files = list(destdir.glob(f'*/haskell-language-server*.{extension}')) + if len(bindist_files) == 0: + logging.warn(f'Bindist does not exist') + continue + + bindist = bindist_files[0] + logging.info(f'extracted {job.name} to {dest}') + bindist.replace(dest) + except Exception as e: + logging.error(f'Error fetching job {job.name}: {e}') + pass + +def main(): + import argparse + parser = argparse.ArgumentParser() + parser.add_argument('--pipeline', '-p', required=True, type=int, help="pipeline id") + parser.add_argument('--release', '-r', required=True, type=str, help="release name") + parser.add_argument('--output', '-o', type=Path, default=Path.cwd(), help="output directory") + parser.add_argument('--profile', '-P', default='haskell', + help='python-gitlab.cfg profile name') + args = parser.parse_args() + gl = gitlab.Gitlab.from_config(args.profile) + fetch_artifacts(args.release, args.pipeline, + dest_dir=args.output, gl=gl) + +if __name__ == '__main__': + main() diff --git a/release/update_versions.sh b/release/update_versions.sh new file mode 100755 index 0000000000..ac9e9c752c --- /dev/null +++ b/release/update_versions.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -ex + +function replaceHlsVersion() { + # Update all `version:` fields + sed -ri "s/^version:( +)${1}/version:\1${2}/" ./*.cabal ./**/*.cabal + # Update all constraints expected to be in the form `== `. + # We usually don't force an exact version, so this is relatively unambiguous. 
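+    # Hypothetical example: `replaceHlsVersion 2.9.0.0 2.10.0.0` rewrites every
+    # `version: 2.9.0.0` field and `== 2.9.0.0` constraint to the 2.10.0.0 equivalent.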
+ # We could introduce some more ad-hoc parsing, if there is still ambiguity. + sed -ri "s/== ${1}/== ${2}/" ./*.cabal ./**/*.cabal +} + +if [ $# -ne 2 ]; +then + echo "USAGE: ./relase/update_versions.sh " +fi + +replaceHlsVersion "${1}" "${2}" diff --git a/release/upload.sh b/release/upload.sh new file mode 100755 index 0000000000..22dc6d438d --- /dev/null +++ b/release/upload.sh @@ -0,0 +1,154 @@ +#!/usr/bin/env bash + +set -e + +# This is a script for preparing and uploading a release of Haskell Language Server. +# Adapted from https://gitlab.haskell.org/bgamari/ghc-utils/-/commits/master/rel-eng/upload.sh +# +# Usage, +# 1. Set $SIGNING_KEY to your key id (prefixed with '=') +# 2. Create a directory called haskell-langauge-server- and place the binary tarballs there +# 4. Run this script from that directory +# +# You can also invoke the script with an argument to perform only +# a subset of the usual release, +# +# upload.sh gen_hashes generate signed hashes of the release +# tarballs +# upload.sh sign generate signed hashes of the release +# tarballs +# upload.sh upload upload the tarballs and documentation +# to downloads.haskell.org +# +# Prerequisites: moreutils + +# Infer release name from directory name +if [ -z "$rel_name" ]; then + rel_name="$(basename $(pwd))" +fi + +# Infer version from tarball names +if [ -z "$ver" ]; then + ver="$(ls haskell-language-server-*.tar.* | sed -ne 's/haskell-language-server-\([0-9]\+\.[0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).\+/\1/p' | head -n1)" + if [ -z "$ver" ]; then echo "Failed to infer \$ver"; exit 1; fi +fi + +echo HLS version $ver + +host="gitlab.haskell.org:2222" + +usage() { + echo "Usage: [rel_name=] SIGNING_KEY= $0 " + echo + echo "where," + echo " rel_name gives the release name (e.g. 1.7.0.0)" + echo "and is one of," + echo " [nothing] do everything below" + echo " gen_hashes generated hashes of the release tarballs" + echo " sign sign hashes of the release tarballs" + echo " upload upload the tarballs and documentation to downloads.haskell.org" + echo " purge_all purge entire release from the CDN" + echo " purge_file file purge a given file from the CDN" + echo " verify verify the signatures in this directory" + echo +} + +if [ -z "$ver" ]; then + usage + exit 1 +fi +if [ -z "$rel_name" ]; then + rel_name="$ver" +fi + +# returns the set of files that must have hashes generated. +function hash_files() { + echo $(find -maxdepth 1 \ + -iname '*.xz' \ + -o -iname '*.gz' \ + -o -iname '*.lz' \ + -o -iname '*.zip' \ + ) + echo $(find -maxdepth 1 -iname '*.patch') +} + +function gen_hashes() { + echo -n "Hashing..." + sha1sum $(hash_files) >| SHA1SUMS & + sha256sum $(hash_files) >| SHA256SUMS & + wait + echo "done" +} + +function sign() { + # Kill DISPLAY lest pinentry won't work + DISPLAY= + eval "$(gpg-agent --daemon --sh --pinentry-program $(which pinentry))" + for i in $(hash_files) SHA1SUMS SHA256SUMS; do + if [ -e $i -a -e $i.sig -a $i.sig -nt $i ]; then + echo "Skipping signing of $i" + continue + elif [ -e $i.sig ] && gpg2 --verify $i.sig; then + # Don't resign if current signature is valid + touch $i.sig + continue + fi + echo "Signing $i" + rm -f $i.sig + gpg2 --use-agent --detach-sign --local-user="$SIGNING_KEY" $i + done +} + +function verify() { + if [ $(find -iname '*.sig' | wc -l) -eq 0 ]; then + echo "No signatures to verify" + return + fi + + for i in *.sig; do + echo + echo Verifying $i + gpg2 --verify $i $(basename $i .sig) + done +} + +function upload() { + verify + chmod ugo+r,o-w -R . 
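+    # Strip a trailing "-release" suffix (if any) to get the remote directory name,
+    # then upload (reverse mirror) the current directory there via lftp over sftp.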
+ dir=$(echo $rel_name | sed s/-release//) + lftp -c " \ + open -u hls-downloads: sftp://$host && \ + mirror -P20 -c --reverse --exclude=fetch-gitlab --exclude=out . hls/$dir && \ + wait all;" + chmod ugo-w $(ls *.xz *.gz *.zip) +} + +function purge_all() { + # Purge CDN cache + curl -X PURGE http://downloads.haskell.org/hls/ + curl -X PURGE http://downloads.haskell.org/~hls/ + curl -X PURGE http://downloads.haskell.org/hls/$dir + curl -X PURGE http://downloads.haskell.org/hls/$dir/ + curl -X PURGE http://downloads.haskell.org/~hls/$dir + curl -X PURGE http://downloads.haskell.org/~hls/$dir/ + for i in *; do + purge_file $i + done +} + +function purge_file() { + curl -X PURGE http://downloads.haskell.org/~hls/$rel_name/$i + curl -X PURGE http://downloads.haskell.org/~hls/$rel_name/$i/ + curl -X PURGE http://downloads.haskell.org/hls/$rel_name/$i + curl -X PURGE http://downloads.haskell.org/hls/$rel_name/$i/ +} + + +if [ "x$1" == "x" ]; then + gen_hashes + sign + upload + purge_all +else + $@ +fi diff --git a/scripts/release/create-yaml-snippet.sh b/scripts/release/create-yaml-snippet.sh new file mode 100644 index 0000000000..6ee25b01b5 --- /dev/null +++ b/scripts/release/create-yaml-snippet.sh @@ -0,0 +1,107 @@ +#!/bin/bash + +set -eu +set -o pipefail + +RELEASE=$1 + +cd "gh-release-artifacts/haskell-language-server-${RELEASE}" + +cat < /dev/stdout + $RELEASE: + viTags: + - Latest + viChangeLog: https://github.com/haskell/haskell-language-server/blob/master/ChangeLog.md + viPostInstall: *hls-post-install + viSourceDL: + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-src.tar.gz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-src.tar.gz" | awk '{ print $1 }') + viArch: + A_64: + Linux_Debian: + '< 10': &hls-${RELEASE//./}-64-deb9 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb9.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb9.tar.xz" | awk '{ print $1 }') + '(>= 10 && < 11)': &hls-${RELEASE//./}-64-deb10 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb10.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb10.tar.xz" | awk '{ print $1 }') + '(>= 11 && < 12)': &hls-${RELEASE//./}-64-deb11 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz" | awk '{ print $1 }') + '>= 12': &hls-${RELEASE//./}-64-deb12 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb12.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb12.tar.xz" | awk '{ print $1 }') + unknown_versioning: 
&hls-${RELEASE//./}-64-deb11 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-deb11.tar.xz" | awk '{ print $1 }') + Linux_Ubuntu: + '( >= 16 && < 19 )': &hls-${RELEASE//./}-64-ubuntu18 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu1804.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu1804.tar.xz" | awk '{ print $1 }') + '( >= 20 && < 22 )': &hls-${RELEASE//./}-64-ubuntu20 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu2004.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu2004.tar.xz" | awk '{ print $1 }') + unknown_versioning: &hls-${RELEASE//./}-64-ubuntu22 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-ubuntu2204.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-ubuntu2204.tar.xz" | awk '{ print $1 }') + Linux_Mint: + '< 20': + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint193.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint193.tar.xz" | awk '{ print $1 }') + '(>= 20 && < 21)': + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint202.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint202.tar.xz" | awk '{ print $1 }') + '>= 21': + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-mint213.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-mint213.tar.xz" | awk '{ print $1 }') + Linux_Fedora: + '(>= 33 && < 40)': &hls-${RELEASE//./}-64-fedora33 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-fedora33.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-fedora33.tar.xz" | awk '{ print $1 }') + '>= 40': &hls-${RELEASE//./}-64-fedora40 + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-fedora40.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-fedora40.tar.xz" | awk '{ print $1 }') + unknown_versioning: *hls-${RELEASE//./}-64-unknown + Linux_UnknownLinux: + unknown_versioning: &hls-${RELEASE//./}-64-unknown + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-linux-unknown.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: 
$(sha256sum "haskell-language-server-$RELEASE-x86_64-linux-unknown.tar.xz" | awk '{ print $1 }') + Linux_RedHat: + unknown_versioning: *hls-${RELEASE//./}-64-unknown + Darwin: + unknown_versioning: + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-apple-darwin.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-apple-darwin.tar.xz" | awk '{ print $1 }') + Windows: + unknown_versioning: + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-x86_64-mingw64.zip + dlHash: $(sha256sum "haskell-language-server-$RELEASE-x86_64-mingw64.zip" | awk '{ print $1 }') + A_ARM64: + Linux_UnknownLinux: + unknown_versioning: + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-linux-ubuntu2004.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-aarch64-linux-ubuntu2004.tar.xz" | awk '{ print $1 }') + Darwin: + unknown_versioning: + dlUri: https://downloads.haskell.org/~hls/haskell-language-server-$RELEASE/haskell-language-server-$RELEASE-aarch64-apple-darwin.tar.xz + dlSubdir: haskell-language-server-$RELEASE + dlHash: $(sha256sum "haskell-language-server-$RELEASE-aarch64-apple-darwin.tar.xz" | awk '{ print $1 }') +EOF + diff --git a/scripts/release/download-gh-artifacts.sh b/scripts/release/download-gh-artifacts.sh new file mode 100644 index 0000000000..217422eedb --- /dev/null +++ b/scripts/release/download-gh-artifacts.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +set -eu +set -o pipefail + +RELEASE=$1 +SIGNER=$2 + +echo "RELEASE: $RELEASE" +echo "SIGNER: $SIGNER" + +for com in gh gpg curl sha256sum ; do + command -V ${com} >/dev/null 2>&1 +done + +[ ! -e "gh-release-artifacts/haskell-language-server-${RELEASE}" ] + +mkdir -p "gh-release-artifacts/haskell-language-server-${RELEASE}" + +cd "gh-release-artifacts/haskell-language-server-${RELEASE}" + +# github +gh release download "$RELEASE" + +## We can't do cirrus releases any more, as we build HLS releases with ghcup vanilla binaries. +## Vanilla means "upstream", aka GHC HQ, and GHC HQ does not provide bindists for FreeBSD. +## Until we start using ghcup's mainstream distribution channel, we can't even begin to build +## binaries for FreeBSD. We keep this here for the next generation or when the situation changes. +## +## We don't use ghcup's mainstream distribution channel, as we only provide vanilla binaries +## as requested by the ghcup distribution channel team. 
+# cirrus +# curl --fail -L -o "haskell-language-server-${RELEASE}-x86_64-freebsd.tar.xz" \ +# "https://api.cirrus-ci.com/v1/artifact/github/haskell/haskell-language-server/bindist/bindist/out/haskell-language-server-${RELEASE}-x86_64-freebsd.tar.xz?branch=${RELEASE}" + + +sha256sum haskell-language-server-* > SHA256SUMS +gpg --detach-sign -u "${SIGNER}" SHA256SUMS + +## see comment above +# gh release upload "$RELEASE" "haskell-language-server-${RELEASE}-x86_64-freebsd.tar.xz" SHA256SUMS SHA256SUMS.sig +gh release upload "$RELEASE" SHA256SUMS SHA256SUMS.sig diff --git a/shake-bench/LICENSE b/shake-bench/LICENSE new file mode 100644 index 0000000000..b4f377fc10 --- /dev/null +++ b/shake-bench/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020-2021 Jose Iborra Lopez + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/shake-bench/shake-bench.cabal b/shake-bench/shake-bench.cabal new file mode 100644 index 0000000000..c381089aba --- /dev/null +++ b/shake-bench/shake-bench.cabal @@ -0,0 +1,45 @@ +cabal-version: 2.2 +name: shake-bench +version: 0.2.0.0 +synopsis: Build rules for historical benchmarking +license: Apache-2.0 +license-file: LICENSE +author: Pepe Iborra +maintainer: pepeiborra@gmail.com +category: Development +build-type: Simple +-- description is a single line so that implicit-hie can parse it +description: A library Shake rules to build and run benchmarks for multiple revisions of a project. An example of usage can be found in the ghcide benchmark suite + +source-repository head + type: git + location: https://github.com/haskell/haskell-language-server.git + +library + if impl(ghc > 9.11) + buildable: False + exposed-modules: Development.Benchmark.Rules + hs-source-dirs: src + build-depends: + aeson, + base == 4.*, + bytestring, + Chart, + Chart-diagrams, + diagrams-contrib, + diagrams-core, + diagrams-lib, + diagrams-svg, + directory, + extra >= 1.7.2, + filepath, + lens, + lens-aeson, + mtl, + shake, + text + default-language: GHC2021 + default-extensions: + LambdaCase + RecordWildCards + ViewPatterns diff --git a/shake-bench/src/Development/Benchmark/Rules.hs b/shake-bench/src/Development/Benchmark/Rules.hs new file mode 100644 index 0000000000..8ba2b3f0df --- /dev/null +++ b/shake-bench/src/Development/Benchmark/Rules.hs @@ -0,0 +1,802 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ApplicativeDo #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +{- | + This module provides a bunch of Shake rules to build multiple revisions of a + project and analyse their performance. + + It assumes a project bench suite composed of examples that runs a fixed set + of experiments on every example + + Your code must implement all of the GetFoo oracles and the IsExample class, + instantiate the Shake rules, and probably 'want' a set of targets. 
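+   As a hypothetical sketch (example names are assumed, and the GetFoo oracle
+   definitions are omitted), a consumer might instantiate the rules like this:
+
+   > benchmarkRules :: Rules ()
+   > benchmarkRules = do
+   >   addGetParentOracle
+   >   buildRules "bench-results" myBuildRules   -- myBuildRules :: MkBuildRules BuildSystem (assumed)
+   >   benchRules "bench-results" myBenchRules   -- myBenchRules :: MkBenchRules BuildSystem MyExample (assumed)
+   >   csvRules @MyExample "bench-results"
+   >   svgRules "bench-results"
+   >   heapProfileRules "bench-results"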
+ + The results of the benchmarks and the analysis are recorded in the file + system, using the following structure: + + + ├── binaries + │ └── + │  ├── ghc.path - path to ghc used to build the executable + │  ├── - binary for this version + │  └── commitid - Git commit id for this reference + ├─ + │ ├── results.csv - aggregated results for all the versions and configurations + │ ├── .svg - graph of bytes over elapsed time, for all the versions and configurations + | └── + │ └── + │   ├── .gcStats.log - RTS -s output + │   ├── .csv - stats for the experiment + │   ├── .svg - Graph of bytes over elapsed time + │   ├── .diff.svg - idem, including the previous version + │   ├── .heap.svg - Heap profile + │   ├── .log - bench stdout + │   └── results.csv - results of all the experiments for the example + ├── results.csv - aggregated results of all the examples, experiments, versions and configurations + └── .svg - graph of bytes over elapsed time, for all the examples, experiments, versions and configurations + + For diff graphs, the "previous version" is the preceding entry in the list of versions + in the config file. A possible improvement is to obtain this info via `git rev-list`. + -} +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} +module Development.Benchmark.Rules + ( + buildRules, MkBuildRules(..), OutputFolder, ProjectRoot, + benchRules, MkBenchRules(..), BenchProject(..), ProfilingMode(..), + addGetParentOracle, + csvRules, + svgRules, + heapProfileRules, + phonyRules, + allTargetsForExample, + GetExample(..), GetExamples(..), + IsExample(..), RuleResultForExample, + GetExperiments(..), + GetVersions(..), + GetCommitId(..), + GetBuildSystem(..), + GetConfigurations(..), Configuration(..), + BuildSystem(..), findGhcForBuildSystem, + Escaped(..), Unescaped(..), escapeExperiment, unescapeExperiment, + GitCommit + + ) where + +import Control.Applicative +import Control.Lens (preview, view, (^.)) +import Control.Monad +import qualified Control.Monad.State as S +import Data.Aeson (FromJSON (..), + ToJSON (..), + Value (..), object, + (.!=), (.:?), (.=)) +import Data.Aeson.Lens (AsJSON (_JSON), + _Object, _String) +import Data.ByteString.Lazy (ByteString) +import Data.Char (isAlpha, isDigit) +import Data.List (find, intercalate, + isInfixOf, + isSuffixOf, + stripPrefix, + transpose) +import Data.List.Extra (lower, splitOn) +import Data.Maybe (fromMaybe) +import Data.String (fromString) +import Data.Text (Text) +import qualified Data.Text as T +import Development.Shake +import Development.Shake.Classes (Binary, Hashable, + NFData, Typeable) +import GHC.Exts (IsList (toList), + fromList) +import GHC.Generics (Generic) +import GHC.Stack (HasCallStack) +import qualified Graphics.Rendering.Chart.Backend.Diagrams as E +import qualified Graphics.Rendering.Chart.Easy as E +import Numeric.Natural +import System.Directory (createDirectoryIfMissing, + findExecutable, + renameFile) +import System.FilePath +import System.Time.Extra (Seconds) +import qualified Text.ParserCombinators.ReadP as P +import Text.Printf +import Text.Read (Read (..), get, + readMaybe, + readP_to_Prec) + +newtype GetExperiments = GetExperiments () deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetVersions = GetVersions () deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetParent = GetParent Text deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetCommitId = GetCommitId String deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetBuildSystem = GetBuildSystem () deriving 
newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetExample = GetExample String deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetExamples = GetExamples () deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetConfigurations = GetConfigurations () deriving newtype (Binary, Eq, Hashable, NFData, Show) + +type instance RuleResult GetExperiments = [Unescaped String] +type instance RuleResult GetVersions = [GitCommit] +type instance RuleResult GetParent = Text +type instance RuleResult GetCommitId = String +type instance RuleResult GetBuildSystem = BuildSystem + +type RuleResultForExample e = + ( RuleResult GetExample ~ Maybe e + , RuleResult GetExamples ~ [e] + , IsExample e) + +data Configuration = Configuration {confName :: String, confValue :: ByteString} + deriving (Binary, Eq, Generic, Hashable, NFData, Show) +type instance RuleResult GetConfigurations = [Configuration] + +-- | Knowledge needed to run an example +class (Binary e, Eq e, Hashable e, NFData e, Show e, Typeable e) => IsExample e where + getExampleName :: e -> String + +-------------------------------------------------------------------------------- + +allTargetsForExample :: IsExample e => ProfilingMode -> FilePath -> e -> Action [FilePath] +allTargetsForExample prof baseFolder ex = do + experiments <- askOracle $ GetExperiments () + versions <- askOracle $ GetVersions () + configurations <- askOracle $ GetConfigurations () + let buildFolder = baseFolder profilingPath prof + return $ + [ + buildFolder getExampleName ex "results.csv" + , buildFolder getExampleName ex "resultDiff.csv"] + ++ [ buildFolder getExampleName ex escaped (escapeExperiment e) <.> "svg" + | e <- experiments + ] + ++ [ buildFolder + getExampleName ex + T.unpack (humanName ver) + confName + escaped (escapeExperiment e) <.> + mode + | e <- experiments, + ver <- versions, + Configuration{confName} <- configurations, + mode <- ["svg", "diff.svg"] ++ ["heap.svg" | prof /= NoProfiling] + ] + +allBinaries :: FilePath -> String -> Action [FilePath] +allBinaries buildFolder executableName = do + versions <- askOracle $ GetVersions () + return $ + [ buildFolder "binaries" T.unpack (humanName ver) executableName + | ver <- versions] + +-- | Generate a set of phony rules: +-- * all +-- * for each example +phonyRules + :: (Traversable t, IsExample e) + => String -- ^ prefix + -> String -- ^ Executable name + -> ProfilingMode + -> FilePath + -> t e + -> Rules () +phonyRules prefix executableName prof buildFolder examples = do + forM_ examples $ \ex -> + phony (prefix <> getExampleName ex) $ need =<< + allTargetsForExample prof buildFolder ex + phony (prefix <> "all") $ do + exampleTargets <- forM examples $ \ex -> + allTargetsForExample prof buildFolder ex + need $ (buildFolder profilingPath prof "results.csv") + : concat exampleTargets + need $ (buildFolder profilingPath prof "resultDiff.csv") + : concat exampleTargets + phony (prefix <> "all-binaries") $ need =<< allBinaries buildFolder executableName +-------------------------------------------------------------------------------- +type OutputFolder = FilePath +type ProjectRoot = FilePath + +data MkBuildRules buildSystem = MkBuildRules + { -- | Return the path to the GHC executable to use for the project found in the cwd + findGhc :: buildSystem -> FilePath -> IO FilePath + -- | Name of the binary produced by 'buildProject' + , executableName :: String + -- | An action that captures the source dependencies, used for the HEAD build + , projectDepends :: Action () + -- | Build the 
project found in the given path and save the build artifacts in the output folder + , buildProject :: buildSystem + -> ProjectRoot + -> OutputFolder + -> Action () + } + +-- | Rules that drive a build system to build various revisions of a project +buildRules :: FilePattern -> MkBuildRules BuildSystem -> Rules () +-- TODO generalize BuildSystem +buildRules build MkBuildRules{..} = do + -- query git for the commitid for a version + build -/- "binaries/*/commitid" %> \out -> do + alwaysRerun + + let [_,_,ver,_] = splitDirectories out + mbEntry <- find ((== T.pack ver) . humanName) <$> askOracle (GetVersions ()) + let gitThing :: String + gitThing = maybe ver (T.unpack . gitName) mbEntry + Stdout commitid <- command [] "git" ["rev-list", "-n", "1", gitThing] + writeFileChanged out $ init commitid + + -- build rules for HEAD + priority 10 $ [ build -/- "binaries/HEAD/" <> executableName + , build -/- "binaries/HEAD/ghc.path" + ] + &%> \[out, ghcpath] -> do + projectDepends + liftIO $ createDirectoryIfMissing True $ dropFileName out + buildSystem <- askOracle $ GetBuildSystem () + buildProject buildSystem "." (takeDirectory out) + ghcLoc <- liftIO $ findGhc buildSystem "." + writeFile' ghcpath ghcLoc + + -- build rules for non HEAD revisions + [build -/- "binaries/*/" <> executableName + ,build -/- "binaries/*/ghc.path" + ] &%> \[out, ghcPath] -> do + let [_, _binaries, ver, _] = splitDirectories out + liftIO $ createDirectoryIfMissing True $ dropFileName out + commitid <- readFile' $ takeDirectory out "commitid" + cmd_ $ "git worktree add bench-temp-" ++ ver ++ " " ++ commitid + buildSystem <- askOracle $ GetBuildSystem () + flip actionFinally (cmd_ ("git worktree remove bench-temp-" <> ver <> " --force" :: String)) $ do + ghcLoc <- liftIO $ findGhc buildSystem ver + buildProject buildSystem ("bench-temp-" <> ver) (".." takeDirectory out) + writeFile' ghcPath ghcLoc + +-------------------------------------------------------------------------------- +data MkBenchRules buildSystem example = forall setup. MkBenchRules + { + -- | Workaround for Shake not allowing to call 'askOracle' from 'benchProject + setupProject :: Action setup + -- | An action that invokes the executable to run the benchmark + , benchProject :: setup -> buildSystem -> [CmdOption] -> BenchProject example -> Action () + -- | An action that performs any necessary warmup. Will only be invoked once + , warmupProject :: buildSystem -> FilePath -> [CmdOption] -> example -> Action () + -- | Name of the executable to benchmark. 
Should match the one used to 'MkBuildRules' + , executableName :: String + -- | Number of concurrent benchmarks to run + , parallelism :: Natural + } + +data BenchProject example = BenchProject + { outcsv :: FilePath -- ^ where to save the CSV output + , exePath :: FilePath -- ^ where to find the executable for benchmarking + , exeExtraArgs :: [String] -- ^ extra args for the executable + , example :: example -- ^ example to benchmark + , experiment :: Escaped String -- ^ experiment to run + , configuration :: ByteString -- ^ configuration to use + } + +data ProfilingMode = NoProfiling | CheapHeapProfiling Seconds + deriving (Eq) + +profilingP :: String -> Maybe ProfilingMode +profilingP "unprofiled" = Just NoProfiling +profilingP inp | Just delay <- stripPrefix "profiled-" inp, Just i <- readMaybe delay = Just $ CheapHeapProfiling i +profilingP _ = Nothing + +profilingPath :: ProfilingMode -> FilePath +profilingPath NoProfiling = "unprofiled" +profilingPath (CheapHeapProfiling i) = "profiled-" <> show i + +-- TODO generalize BuildSystem +benchRules :: RuleResultForExample example => FilePattern -> MkBenchRules BuildSystem example -> Rules () +benchRules build MkBenchRules{..} = do + + benchResource <- newResource "ghcide-bench" (fromIntegral parallelism) + -- warmup an example + build -/- "binaries/*/*.warmup" %> \out -> do + let [_, _, ver, exampleName] = splitDirectories (dropExtension out) + let exePath = build "binaries" ver executableName + ghcPath = build "binaries" ver "ghc.path" + need [exePath, ghcPath] + buildSystem <- askOracle $ GetBuildSystem () + example <- fromMaybe (error $ "Unknown example " <> exampleName) + <$> askOracle (GetExample exampleName) + let exeExtraArgs = [] + outcsv = "" + experiment = Escaped "hover" + withResource benchResource 1 $ warmupProject buildSystem exePath + [ EchoStdout False, + FileStdout out, + RemEnv "NIX_GHC_LIBDIR", + RemEnv "GHC_PACKAGE_PATH", + AddPath [takeDirectory ghcPath, "."] [] + ] + example + -- run an experiment + priority 0 $ + [ build -/- "*/*/*/*/*.csv", + build -/- "*/*/*/*/*.gcStats.log", + build -/- "*/*/*/*/*.output.log", + build -/- "*/*/*/*/*.eventlog", + build -/- "*/*/*/*/*.hp" + ] &%> \[outcsv, outGc, outLog, outEventlog, outHp] -> do + let [_, flavour, exampleName, ver, conf, exp] = splitDirectories outcsv + prof = fromMaybe (error $ "Not a valid profiling mode: " <> flavour) $ profilingP flavour + example <- fromMaybe (error $ "Unknown example " <> exampleName) + <$> askOracle (GetExample exampleName) + buildSystem <- askOracle $ GetBuildSystem () + configurations <- askOracle $ GetConfigurations () + setupRes <- setupProject + liftIO $ createDirectoryIfMissing True $ dropFileName outcsv + let exePath = build "binaries" ver executableName + exeExtraArgs = + [ "+RTS" + , "-l" + , "-ol" <> outEventlog + , "-S" <> outGc] + ++ concat + [[ "-h" + , "-i" <> show i + , "-po" <> dropExtension outHp + , "-qg"] + | CheapHeapProfiling i <- [prof]] + ++ ["-RTS"] + ghcPath = build "binaries" ver "ghc.path" + warmupPath = build "binaries" ver exampleName <.> "warmup" + experiment = Escaped $ dropExtension exp + Just Configuration{..} = find (\Configuration{confName} -> confName == conf) configurations + configuration = confValue + need [exePath, ghcPath, warmupPath] + ghcPath <- readFile' ghcPath + withResource benchResource 1 $ do + benchProject setupRes buildSystem + [ EchoStdout False, + FileStdout outLog, + RemEnv "NIX_GHC_LIBDIR", + RemEnv "GHC_PACKAGE_PATH", + AddPath [takeDirectory ghcPath, "."] [] + ] + BenchProject {..} + 
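+            -- With profiling disabled no .hp file is produced, so write a small
+            -- placeholder so that every output declared by this rule exists.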
liftIO $ case prof of + NoProfiling -> writeFile outHp dummyHp + _ -> return () + + -- extend csv output with allocation data + csvContents <- liftIO $ lines <$> readFile outcsv + let header = head csvContents + results = tail csvContents + header' = header <> ", maxResidency, allocatedBytes" + results' <- forM results $ \row -> do + (maxResidency, allocations) <- liftIO + (parseMaxResidencyAndAllocations <$> readFile outGc) + return $ printf "%s, %s, %s" row (showMB maxResidency) (showMB allocations) + let csvContents' = header' : results' + writeFileLines outcsv csvContents' + where + showMB :: Int -> String + showMB x = show (x `div` 2^(20::Int)) <> "MB" + +-- Parse the max residency and allocations in RTS -s output +parseMaxResidencyAndAllocations :: String -> (Int, Int) +parseMaxResidencyAndAllocations input = + (f "maximum residency", f "bytes allocated in the heap") + where + inps = reverse $ lines input + f label = case find (label `isInfixOf`) inps of + Just l -> read $ filter isDigit $ head $ words l + Nothing -> -1 + + +-------------------------------------------------------------------------------- +-- | oracles to get previous version of a given version +-- used for diff the results +addGetParentOracle :: Rules () +addGetParentOracle = void $ addOracle $ \(GetParent name) -> findPrev name <$> askOracle (GetVersions ()) +-- | Rules to aggregate the CSV output of individual experiments +csvRules :: forall example . RuleResultForExample example => FilePattern -> Rules () +csvRules build = do + let genConfig resultName prefixName prefixOracles out = do + configurations <- prefixOracles + let allResultFiles = [takeDirectory out c resultName | c <- configurations ] + allResults <- traverse readFileLines allResultFiles + let header = head $ head allResults + results = map tail allResults + header' = prefixName <> ", " <> header + results' = zipWith (\v -> map (\l -> v <> ", " <> l)) configurations results + writeFileChanged out $ unlines $ header' : interleave results' + -- build results for every experiment*example + priority 1 $ build -/- "*/*/*/*/results.csv" %> \out -> do + experiments <- askOracle $ GetExperiments () + let allResultFiles = [takeDirectory out escaped (escapeExperiment e) <.> "csv" | e <- experiments] + allResults <- traverse readFileLines allResultFiles + let header = head $ head allResults + results = map tail allResults + writeFileChanged out $ unlines $ header : concat results + priority 2 $ build -/- "*/*/*/*/resultDiff.csv" %> \out -> do + let out2@[b, flav, example, ver, conf, exp_] = splitDirectories out + prev <- fmap T.unpack $ askOracle $ GetParent $ T.pack ver + allResultsCur <- readFileLines $ joinPath [b ,flav, example, ver, conf] "results.csv" + allResultsPrev <- readFileLines $ joinPath [b ,flav, example, prev, conf] "results.csv" + let resultsPrev = tail allResultsPrev + let resultsCur = tail allResultsCur + let resultDiff = zipWith convertToDiffResults resultsCur resultsPrev + writeFileChanged out $ unlines $ head allResultsCur : resultDiff + -- aggregate all configurations for an experiment + priority 3 $ build -/- "*/*/*/results.csv" %> genConfig "results.csv" + "Configuration" (map confName <$> askOracle (GetConfigurations ())) + priority 3 $ build -/- "*/*/*/resultDiff.csv" %> genConfig "resultDiff.csv" + "Configuration" (map confName <$> askOracle (GetConfigurations ())) + -- aggregate all experiments for an example + priority 4 $ build -/- "*/*/results.csv" %> genConfig "results.csv" + "Version" (map (T.unpack . 
humanName) <$> askOracle (GetVersions ())) + priority 4 $ build -/- "*/*/resultDiff.csv" %> genConfig "resultDiff.csv" + "Version" (map (T.unpack . humanName) <$> askOracle (GetVersions ())) + -- aggregate all examples + priority 5 $ build -/- "*/results.csv" %> genConfig "results.csv" + "Example" (map getExampleName <$> askOracle (GetExamples ())) + priority 5 $ build -/- "*/resultDiff.csv" %> genConfig "resultDiff.csv" + "Example" (map getExampleName <$> askOracle (GetExamples ())) + +convertToDiffResults :: String -> String -> String +convertToDiffResults line baseLine = intercalate "," diffResults + where items = parseLine line + baseItems = parseLine baseLine + diffItems = zipWith diffItem items baseItems + diffResults = map showItemDiffResult diffItems + +showItemDiffResult :: (Item, Maybe Double) -> String +showItemDiffResult (ItemString x, _) = x +showItemDiffResult (_, Nothing) = "NA" +showItemDiffResult (Mem x, Just y) = printf "%.2f" (y * 100 - 100) <> "%" +showItemDiffResult (Time x, Just y) = printf "%.2f" (y * 100 - 100) <> "%" + +diffItem :: Item -> Item -> (Item, Maybe Double) +diffItem (Mem x) (Mem y) = (Mem x, Just $ fromIntegral x / fromIntegral y) +diffItem (Time x) (Time y) = (Time x, if y == 0 then Nothing else Just $ x / y) +diffItem (ItemString x) (ItemString y) = (ItemString x, Nothing) +diffItem _ _ = (ItemString "no match", Nothing) + +data Item = Mem Int | Time Double | ItemString String + deriving (Show) + +parseLine :: String -> [Item] +parseLine = map f . splitOn "," + where + f x + | "MB" `isSuffixOf` x = Mem $ read $ reverse $ drop 2 $ reverse x + | otherwise = + case readMaybe @Double x of + Just time -> Time time + Nothing -> ItemString x + +-------------------------------------------------------------------------------- + +-- | Rules to produce charts for the GC stats +svgRules :: FilePattern -> Rules () +svgRules build = do + -- chart GC stats for an experiment on a given revision + priority 1 $ + build -/- "*/*/*/*/*.svg" %> \out -> do + let [_, _, _example, ver, conf, _exp] = splitDirectories out + runLog <- loadRunLog (Escaped $ replaceExtension out "csv") ver conf + let diagram = Diagram Live [runLog] title + title = ver <> " live bytes over time" + plotDiagram True diagram out + + -- chart of GC stats for an experiment on this and the previous revision + priority 2 $ + build -/- "*/*/*/*/*.diff.svg" %> \out -> do + let [b, flav, example, ver, conf, exp_] = splitDirectories out + exp = Escaped $ dropExtension2 exp_ + prev <- fmap T.unpack $ askOracle $ GetParent $ T.pack ver + + runLog <- loadRunLog (Escaped $ replaceExtension (dropExtension out) "csv") ver conf + runLogPrev <- loadRunLog (Escaped $ joinPath [b,flav, example, prev, conf, replaceExtension (dropExtension exp_) "csv"]) prev conf + + let diagram = Diagram Live [runLog, runLogPrev] title + title = show (unescapeExperiment exp) <> " - live bytes over time compared" + plotDiagram True diagram out + + -- aggregated chart of GC stats for all the configurations + build -/- "*/*/*/*.svg" %> \out -> do + let exp = Escaped $ dropExtension $ takeFileName out + [b, flav, example, ver] = splitDirectories out + versions <- askOracle $ GetVersions () + configurations <- askOracle $ GetConfigurations () + + runLogs <- forM configurations $ \Configuration{confName} -> do + loadRunLog (Escaped $ takeDirectory out confName replaceExtension (takeFileName out) "csv") ver confName + + let diagram = Diagram Live runLogs title + title = show (unescapeExperiment exp) <> " - live bytes over time" + plotDiagram 
False diagram out + + -- aggregated chart of GC stats for all the revisions + build -/- "*/*/*.svg" %> \out -> do + let exp = Escaped $ dropExtension $ takeFileName out + versions <- askOracle $ GetVersions () + configurations <- askOracle $ GetConfigurations () + + runLogs <- forM (filter include versions) $ \v -> + forM configurations $ \Configuration{confName} -> do + let v' = T.unpack (humanName v) + loadRunLog (Escaped $ takeDirectory out v' confName replaceExtension (takeFileName out) "csv") v' confName + + let diagram = Diagram Live (concat runLogs) title + title = show (unescapeExperiment exp) <> " - live bytes over time" + plotDiagram False diagram out + +heapProfileRules :: FilePattern -> Rules () +heapProfileRules build = do + priority 3 $ + build -/- "*/*/*/*/*.heap.svg" %> \out -> do + let hpFile = dropExtension2 out <.> "hp" + need [hpFile] + cmd_ ("eventlog2html" :: String) ["--heap-profile", hpFile] + liftIO $ renameFile (dropExtension hpFile <.> "svg") out + +dropExtension2 :: FilePath -> FilePath +dropExtension2 = dropExtension . dropExtension +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- + +-- | Default build system that handles Cabal and Stack +data BuildSystem = Cabal | Stack + deriving (Eq, Read, Show, Generic) + deriving (Binary, Hashable, NFData) + +findGhcForBuildSystem :: BuildSystem -> FilePath -> IO FilePath +findGhcForBuildSystem Cabal _cwd = + liftIO $ fromMaybe (error "ghc is not in the PATH") <$> findExecutable "ghc" +findGhcForBuildSystem Stack cwd = do + Stdout ghcLoc <- cmd [Cwd cwd] ("stack exec which ghc" :: String) + return ghcLoc + +instance FromJSON BuildSystem where + parseJSON x = fromString . lower <$> parseJSON x + where + fromString "stack" = Stack + fromString "cabal" = Cabal + fromString other = error $ "Unknown build system: " <> other + +instance ToJSON BuildSystem where + toJSON = toJSON . show + +-------------------------------------------------------------------------------- + +data GitCommit = GitCommit + { -- | A git hash, tag or branch name (e.g. v0.1.0) + gitName :: Text, + -- | A human understandable name (e.g. fix-collisions-leak) + name :: Maybe Text, + -- | The human understandable name of the parent, if specified explicitly + parent :: Maybe Text, + -- | Whether to include this version in the top chart + include :: Bool + } + deriving (Binary, Eq, Hashable, Generic, NFData, Show) + +instance FromJSON GitCommit where + parseJSON (String s) = pure $ GitCommit s Nothing Nothing True + parseJSON o@(Object _) = do + let keymap = o ^. _Object + case toList keymap of + -- excuse the aeson 2.0 compatibility hack + [(preview _String . toJSON -> Just name, String gitName)] -> + pure $ GitCommit gitName (Just name) Nothing True + [(preview _String . toJSON -> Just name, Object props)] -> + GitCommit + <$> props .:? "git" .!= name + <*> pure (Just name) + <*> props .:? "parent" + <*> props .:? 
"include" .!= True + _ -> empty + parseJSON _ = empty + +instance ToJSON GitCommit where + toJSON GitCommit {..} = + case name of + Nothing -> String gitName + Just n -> object [fromString (T.unpack n) .= String gitName] + +humanName :: GitCommit -> Text +humanName GitCommit {..} = fromMaybe gitName name + +findPrev :: Text -> [GitCommit] -> Text +findPrev name (x : y : xx) + | humanName y == name = humanName x + | otherwise = findPrev name (y : xx) +findPrev name _ = name + +-------------------------------------------------------------------------------- + +-- | A line in the output of -S +data Frame = Frame + { allocated, copied, live :: !Int, + user, elapsed, totUser, totElapsed :: !Double, + generation :: !Int + } + deriving (Show) + +instance Read Frame where + readPrec = do + spaces + allocated <- readPrec @Int <* spaces + copied <- readPrec @Int <* spaces + live <- readPrec @Int <* spaces + user <- readPrec @Double <* spaces + elapsed <- readPrec @Double <* spaces + totUser <- readPrec @Double <* spaces + totElapsed <- readPrec @Double <* spaces + _ <- readPrec @Int <* spaces + _ <- readPrec @Int <* spaces + "(Gen: " <- replicateM 7 get + generation <- readPrec @Int + ')' <- get + return Frame {..} + where + spaces = readP_to_Prec $ const P.skipSpaces + +-- | A file path containing the output of -S for a given run +data RunLog = RunLog + { runVersion :: !String, + runConfiguration :: !String, + runFrames :: ![Frame], + runSuccess :: !Bool, + runFirstReponse :: !(Maybe Seconds) + } + +loadRunLog :: HasCallStack => Escaped FilePath -> String -> String -> Action RunLog +loadRunLog (Escaped csv_fp) ver conf = do + let log_fp = replaceExtension csv_fp "gcStats.log" + log <- readFileLines log_fp + csv <- readFileLines csv_fp + let frames = + [ f + | l <- log, + Just f <- [readMaybe l], + -- filter out gen 0 events as there are too many + generation f == 1 + ] + -- TODO this assumes a certain structure in the CSV file + (success, firstResponse) = case map (map T.strip . T.split (== ',') . T.pack) csv of + [header, row] + | let table = zip header row + timeForFirstResponse :: Maybe Seconds + timeForFirstResponse = readMaybe . T.unpack =<< lookup "firstBuildTime" table + , Just s <- lookup "success" table + , Just s <- readMaybe (T.unpack s) + -> (s,timeForFirstResponse) + _ -> error $ "Cannot parse: " <> csv_fp + return $ RunLog ver conf frames success firstResponse + +-------------------------------------------------------------------------------- + +data TraceMetric = Allocated | Copied | Live | User | Elapsed + deriving (Generic, Enum, Bounded, Read) + +instance Show TraceMetric where + show Allocated = "Allocated bytes" + show Copied = "Copied bytes" + show Live = "Live bytes" + show User = "User time" + show Elapsed = "Elapsed time" + +frameMetric :: TraceMetric -> Frame -> Double +frameMetric Allocated = fromIntegral . allocated +frameMetric Copied = fromIntegral . copied +frameMetric Live = fromIntegral . 
live +frameMetric Elapsed = elapsed +frameMetric User = user + +data Diagram = Diagram + { traceMetric :: TraceMetric, + runLogs :: [RunLog], + title :: String + } + deriving (Generic) + +plotDiagram :: Bool -> Diagram -> FilePath -> Action () +plotDiagram includeFailed t@Diagram {traceMetric, runLogs} out = do + let extract = frameMetric traceMetric + liftIO $ E.toFile E.def out $ do + E.layout_title E..= title t + E.setColors myColors + forM_ runLogs $ \rl -> + when (includeFailed || runSuccess rl) $ do + -- Get the color we are going to use + ~(c:_) <- E.liftCState $ S.gets (E.view E.colors) + E.plot $ do + lplot <- E.line + (runVersion rl ++ " " ++ runConfiguration rl ++ if runSuccess rl then "" else " (FAILED)") + [ [ (totElapsed f, extract f) + | f <- runFrames rl + ] + ] + return (lplot E.& E.plot_lines_style . E.line_width E.*~ 2) + case runFirstReponse rl of + Just t -> E.plot $ pure $ + E.vlinePlot ("First build: " ++ runVersion rl) (E.defaultPlotLineStyle E.& E.line_color E..~ c) t + _ -> pure () + +-------------------------------------------------------------------------------- + +newtype Escaped a = Escaped {escaped :: a} + +newtype Unescaped a = Unescaped {unescaped :: a} + deriving newtype (Show, FromJSON, ToJSON, Eq, NFData, Binary, Hashable) + +escapeExperiment :: Unescaped String -> Escaped String +escapeExperiment = Escaped . map f . unescaped + where + f ' ' = '_' + f other = other + +unescapeExperiment :: Escaped String -> Unescaped String +unescapeExperiment = Unescaped . map f . escaped + where + f '_' = ' ' + f other = other + +-------------------------------------------------------------------------------- + +(-/-) :: FilePattern -> FilePattern -> FilePattern +a -/- b = a <> "/" <> b + +interleave :: [[a]] -> [a] +interleave = concat . 
transpose
+
+--------------------------------------------------------------------------------
+
+myColors :: [E.AlphaColour Double]
+myColors = map E.opaque
+  [ E.blue
+  , E.green
+  , E.red
+  , E.orange
+  , E.yellow
+  , E.violet
+  , E.black
+  , E.gold
+  , E.brown
+  , E.hotpink
+  , E.aliceblue
+  , E.aqua
+  , E.beige
+  , E.bisque
+  , E.blueviolet
+  , E.burlywood
+  , E.cadetblue
+  , E.chartreuse
+  , E.coral
+  , E.crimson
+  , E.darkblue
+  , E.darkgray
+  , E.darkgreen
+  , E.darkkhaki
+  , E.darkmagenta
+  , E.deeppink
+  , E.dodgerblue
+  , E.firebrick
+  , E.forestgreen
+  , E.fuchsia
+  , E.greenyellow
+  , E.lightsalmon
+  , E.seagreen
+  , E.olive
+  , E.sandybrown
+  , E.sienna
+  , E.peru
+  ]
+
+dummyHp :: String
+dummyHp =
+  "JOB \"ghcide\" \
+  \DATE \"Sun Jan 31 09:30 2021\" \
+  \SAMPLE_UNIT \"seconds\" \
+  \VALUE_UNIT \"bytes\" \
+  \BEGIN_SAMPLE 0.000000 \
+  \END_SAMPLE 0.000000"
diff --git a/shell.nix b/shell.nix
index 2129b8b4b7..513b02ec3e 100644
--- a/shell.nix
+++ b/shell.nix
@@ -1,16 +1,11 @@
-with (import <nixpkgs> {});
-stdenv.mkDerivation {
-  name = "haskell-language-server";
-  buildInputs = [
-    gmp
-    zlib
-    ncurses
-
-    haskellPackages.cabal-install
-  ];
-  src = null;
-  shellHook = ''
-    export LD_LIBRARY_PATH=${gmp}/lib:${zlib}/lib:${ncurses}/lib
-    export PATH=$PATH:$HOME/.local/bin
-  '';
-}
+# This file is the compat layer of flakes: https://github.com/edolstra/flake-compat
+# See flake.nix for details
+(import (
+  let
+    lock = builtins.fromJSON (builtins.readFile ./flake.lock);
+  in fetchTarball {
+    url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
+    sha256 = lock.nodes.flake-compat.locked.narHash; }
+) {
+  src = ./.;
+}).shellNix
diff --git a/src/HlsPlugins.hs b/src/HlsPlugins.hs
new file mode 100644
index 0000000000..4c135fc48b
--- /dev/null
+++ b/src/HlsPlugins.hs
@@ -0,0 +1,252 @@
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE OverloadedStrings #-}
+module HlsPlugins where
+
+import Ide.Logger (Pretty (pretty), Recorder,
+                   WithPriority, cmapWithPrio)
+import Ide.PluginUtils (pluginDescToIdePlugins)
+import Ide.Types (IdePlugins,
+                  PluginId (PluginId))
+
+-- fixed plugins
+import Development.IDE (IdeState)
+import qualified Development.IDE.Plugin.HLS.GhcIde as GhcIde
+
+-- haskell-language-server optional plugins
+#if hls_qualifyImportedNames
+import qualified Ide.Plugin.QualifyImportedNames as QualifyImportedNames
+#endif
+
+#if hls_callHierarchy
+import qualified Ide.Plugin.CallHierarchy as CallHierarchy
+#endif
+#if hls_cabal
+import qualified Ide.Plugin.Cabal as Cabal
+#endif
+#if hls_class
+import qualified Ide.Plugin.Class as Class
+#endif
+
+#if hls_eval
+import qualified Ide.Plugin.Eval as Eval
+#endif
+
+#if hls_importLens
+import qualified Ide.Plugin.ExplicitImports as ExplicitImports
+#endif
+
+
+
+#if hls_rename
+import qualified Ide.Plugin.Rename as Rename
+#endif
+
+#if hls_retrie
+import qualified Ide.Plugin.Retrie as Retrie
+#endif
+
+#if hls_hlint
+import qualified Ide.Plugin.Hlint as Hlint
+#endif
+
+#if hls_stan
+import qualified Ide.Plugin.Stan as Stan
+#endif
+
+#if hls_moduleName
+import qualified Ide.Plugin.ModuleName as ModuleName
+#endif
+
+#if hls_pragmas
+import qualified Ide.Plugin.Pragmas as Pragmas
+#endif
+
+#if hls_splice
+import qualified Ide.Plugin.Splice as Splice
+#endif
+
+#if hls_alternateNumberFormat
+import qualified Ide.Plugin.AlternateNumberFormat as AlternateNumberFormat
+#endif
+
+#if hls_codeRange
+import qualified Ide.Plugin.CodeRange as CodeRange
+#endif
+
+#if
hls_changeTypeSignature +import qualified Ide.Plugin.ChangeTypeSignature as ChangeTypeSignature +#endif + +#if hls_gadt +import qualified Ide.Plugin.GADT as GADT +#endif + +#if explicitFixity +import qualified Ide.Plugin.ExplicitFixity as ExplicitFixity +#endif + +#if explicitFields +import qualified Ide.Plugin.ExplicitFields as ExplicitFields +#endif + +#if hls_overloaded_record_dot +import qualified Ide.Plugin.OverloadedRecordDot as OverloadedRecordDot +#endif + +#if hls_notes +import qualified Ide.Plugin.Notes as Notes +#endif + +-- formatters + +#if hls_floskell +import qualified Ide.Plugin.Floskell as Floskell +#endif + +#if hls_fourmolu +import qualified Ide.Plugin.Fourmolu as Fourmolu +#endif + +#if hls_cabalfmt +import qualified Ide.Plugin.CabalFmt as CabalFmt +#endif + +#if hls_cabalgild +import qualified Ide.Plugin.CabalGild as CabalGild +#endif + +#if hls_ormolu +import qualified Ide.Plugin.Ormolu as Ormolu +#endif + +#if hls_stylishHaskell +import qualified Ide.Plugin.StylishHaskell as StylishHaskell +#endif + +#if hls_refactor +import qualified Development.IDE.Plugin.CodeAction as Refactor +#endif + +#if hls_semanticTokens +import qualified Ide.Plugin.SemanticTokens as SemanticTokens +#endif + + +data Log = forall a. (Pretty a) => Log PluginId a + +instance Pretty Log where + pretty (Log (PluginId pId) a) = pretty pId <> ": " <> pretty a + +-- --------------------------------------------------------------------- + +-- | The plugins configured for use in this instance of the language +-- server. +-- These can be freely added or removed to tailor the available +-- features of the server. + +idePlugins :: Recorder (WithPriority Log) -> IdePlugins IdeState +idePlugins recorder = pluginDescToIdePlugins allPlugins + where + pluginRecorder :: forall log. 
(Pretty log) => PluginId -> Recorder (WithPriority log) + pluginRecorder pluginId = cmapWithPrio (Log pluginId) recorder + allPlugins = +#if hls_cabal + let pId = "cabal" in Cabal.descriptor (pluginRecorder pId) pId : + let caId = "cabalHaskellIntegration" in Cabal.haskellInteractionDescriptor (pluginRecorder caId) caId : +#endif +#if hls_pragmas + Pragmas.suggestPragmaDescriptor "pragmas-suggest" : + Pragmas.completionDescriptor "pragmas-completion" : + Pragmas.suggestDisableWarningDescriptor "pragmas-disable" : +#endif +#if hls_floskell + Floskell.descriptor "floskell" : +#endif +#if hls_fourmolu + let pId = "fourmolu" in Fourmolu.descriptor (pluginRecorder pId) pId: +#endif +#if hls_cabalfmt + let pId = "cabal-fmt" in CabalFmt.descriptor (pluginRecorder pId) pId: +#endif +#if hls_cabalgild + -- this pId needs to be kept in sync with the hardcoded + -- cabalFormattingProvider in the Default Config + let pId = "cabal-gild" in CabalGild.descriptor (pluginRecorder pId) pId: +#endif +#if hls_ormolu + -- this pId needs to be kept in sync with the hardcoded + -- haskellFormattingProvider in the Default Config + let pId = "ormolu" in Ormolu.descriptor (pluginRecorder pId) pId : +#endif +#if hls_stylishHaskell + let pId = "stylish-haskell" in StylishHaskell.descriptor (pluginRecorder pId) pId : +#endif +#if hls_rename + let pId = "rename" in Rename.descriptor (pluginRecorder pId) pId: +#endif +#if hls_retrie + let pId = "retrie" in Retrie.descriptor (pluginRecorder pId) pId : +#endif +#if hls_callHierarchy + CallHierarchy.descriptor "callHierarchy" : +#endif +#if hls_semanticTokens + let pId = "semanticTokens" in SemanticTokens.descriptor (pluginRecorder pId) pId: +#endif +#if hls_class + let pId = "class" in Class.descriptor (pluginRecorder pId) pId: +#endif +#if hls_eval + let pId = "eval" in Eval.descriptor (pluginRecorder pId) pId: +#endif +#if hls_importLens + let pId = "importLens" in ExplicitImports.descriptor (pluginRecorder pId) pId: +#endif +#if hls_qualifyImportedNames + QualifyImportedNames.descriptor "qualifyImportedNames" : +#endif +#if hls_moduleName + let pId = "moduleName" in ModuleName.descriptor (pluginRecorder pId) pId: +#endif +#if hls_hlint + let pId = "hlint" in Hlint.descriptor (pluginRecorder pId) pId: +#endif +#if hls_stan + let pId = "stan" in Stan.descriptor (pluginRecorder pId) pId : +#endif +#if hls_splice + Splice.descriptor "splice" : +#endif +#if hls_alternateNumberFormat + let pId = "alternateNumberFormat" in AlternateNumberFormat.descriptor (pluginRecorder pId) pId : +#endif +#if hls_codeRange + let pId = "codeRange" in CodeRange.descriptor (pluginRecorder pId) pId: +#endif +#if hls_changeTypeSignature + let pId = "changeTypeSignature" in ChangeTypeSignature.descriptor (pluginRecorder pId) pId : +#endif +#if hls_gadt + GADT.descriptor "gadt" : +#endif +#if hls_refactor + let pId = "ghcide-code-actions-imports-exports" in Refactor.iePluginDescriptor (pluginRecorder pId) pId : + let pId = "ghcide-code-actions-type-signatures" in Refactor.typeSigsPluginDescriptor (pluginRecorder pId) pId : + let pId = "ghcide-code-actions-bindings" in Refactor.bindingsPluginDescriptor (pluginRecorder pId) pId : + let pId = "ghcide-code-actions-fill-holes" in Refactor.fillHolePluginDescriptor (pluginRecorder pId) pId : + let pId = "ghcide-extend-import-action" in Refactor.extendImportPluginDescriptor (pluginRecorder pId) pId : +#endif +#if explicitFixity + let pId = "explicit-fixity" in ExplicitFixity.descriptor (pluginRecorder pId) pId : +#endif +#if explicitFields + let pId = 
"explicit-fields" in ExplicitFields.descriptor (pluginRecorder pId) pId : +#endif +#if hls_overloaded_record_dot + let pId = "overloaded-record-dot" in OverloadedRecordDot.descriptor (pluginRecorder pId) pId : +#endif +#if hls_notes + let pId = "notes" in Notes.descriptor (pluginRecorder pId) pId : +#endif + GhcIde.descriptors (pluginRecorder "ghcide") + diff --git a/src/Ide/Arguments.hs b/src/Ide/Arguments.hs new file mode 100644 index 0000000000..be7f35e455 --- /dev/null +++ b/src/Ide/Arguments.hs @@ -0,0 +1,209 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} + +module Ide.Arguments + ( Arguments(..) + , GhcideArguments(..) + , PrintVersion(..) + , BiosAction(..) + , getArguments + , haskellLanguageServerVersion + , haskellLanguageServerNumericVersion + ) where + +import Data.Version +import Development.IDE (IdeState) +import Development.IDE.Main (Command (..), commandP) +import GitHash (giHash, tGitInfoCwdTry) +import Ide.Logger (Priority (..)) +import Ide.Types (IdePlugins) +import Options.Applicative +import Paths_haskell_language_server +import System.Environment + +-- --------------------------------------------------------------------- + +data Arguments + = VersionMode PrintVersion + | ProbeToolsMode + | ListPluginsMode + | BiosMode BiosAction + | Ghcide GhcideArguments + | VSCodeExtensionSchemaMode + | PluginsCustomConfigMarkdownReferenceMode + | DefaultConfigurationMode + | PrintLibDir + +data GhcideArguments = GhcideArguments + { argsCommand :: Command + , argsCwd :: Maybe FilePath + , argsShakeProfiling :: Maybe FilePath + , argsTesting :: Bool + , argsExamplePlugin :: Bool + , argsLogLevel :: Priority + , argsLogFile :: Maybe String + -- ^ the minimum log level to show + , argsLogStderr :: Bool + , argsLogClient :: Bool + , argsThreads :: Int + , argsProjectGhcVersion :: Bool + } deriving Show + +data PrintVersion + = PrintVersion + | PrintNumericVersion + deriving (Show, Eq, Ord) + +data BiosAction + = PrintCradleType + deriving (Show, Eq, Ord) + +getArguments :: String -> IdePlugins IdeState -> IO Arguments +getArguments exeName plugins = execParser opts + where + opts = info (( + VersionMode <$> printVersionParser exeName + <|> probeToolsParser exeName + <|> hsubparser + ( command "vscode-extension-schema" extensionSchemaCommand + <> command "generate-default-config" generateDefaultConfigCommand + <> command "plugins-custom-config-markdown-reference" pluginsCustomConfigMarkdownReferenceCommand + ) + <|> listPluginsParser + <|> BiosMode <$> biosParser + <|> Ghcide <$> arguments plugins + <|> flag' PrintLibDir (long "print-libdir" <> help "Print project GHCs libdir") + ) + <**> helper) + ( fullDesc + <> progDesc "Used as a test bed to check your IDE Client will work" + <> header (exeName ++ " - GHC Haskell LSP server")) + + extensionSchemaCommand = + info (pure VSCodeExtensionSchemaMode) + (fullDesc <> progDesc "Print generic config schema for plugins (used in the package.json of haskell vscode extension)") + generateDefaultConfigCommand = + info (pure DefaultConfigurationMode) + (fullDesc <> progDesc "Print config supported by the server with default values") + pluginsCustomConfigMarkdownReferenceCommand = + info (pure PluginsCustomConfigMarkdownReferenceMode) + (fullDesc <> progDesc "Print markdown reference for plugins custom config") + +printVersionParser :: String -> Parser PrintVersion +printVersionParser exeName = + flag' 
PrintVersion + (long "version" <> help ("Show " ++ exeName ++ " and GHC versions")) + <|> + flag' PrintNumericVersion + (long "numeric-version" <> help ("Show numeric version of " ++ exeName)) + +biosParser :: Parser BiosAction +biosParser = + flag' PrintCradleType + (long "print-cradle" <> help "Print the project cradle type") + +probeToolsParser :: String -> Parser Arguments +probeToolsParser exeName = + flag' ProbeToolsMode + (long "probe-tools" <> help ("Show " ++ exeName ++ " version and other tools of interest")) + +listPluginsParser :: Parser Arguments +listPluginsParser = + flag' ListPluginsMode + (long "list-plugins" <> help "List all available plugins") + +arguments :: IdePlugins IdeState -> Parser GhcideArguments +arguments plugins = GhcideArguments + <$> (commandP plugins <|> lspCommand <|> checkCommand) + <*> optional (strOption $ long "cwd" <> metavar "DIR" + <> help "Change to this directory") + <*> optional (strOption $ long "shake-profiling" <> metavar "DIR" + <> help "Dump profiling reports to this directory") + <*> switch (long "test" + <> help "Enable additional lsp messages used by the testsuite") + <*> switch (long "example" + <> help "Include the Example Plugin. For Plugin devs only") + + <*> + (option @Priority auto + (long "log-level" + <> help "Only show logs at or above this log level" + <> metavar "LOG_LEVEL" + <> value Info + <> showDefault + ) + <|> + flag' Debug + (long "debug" + <> short 'd' + <> help "Sets the log level to Debug, alias for '--log-level Debug'" + ) + ) + -- This option is a little inconsistent with the other log options, since + -- it's not a boolean and there is no way to turn it off. That's okay + -- since the default is off. + <*> (optional (strOption + ( long "log-file" + <> metavar "LOGFILE" + <> help "Send logs to a file" + )) <|> (optional (strOption + ( long "logfile" + <> short 'l' + <> metavar "LOGFILE" + <> help "Send logs to a file" + -- deprecated alias so users don't need to update their CLI calls + -- immediately + <> internal + ))) + ) + -- Boolean option so we can toggle the default in a consistent way + <*> option auto + ( long "log-stderr" + <> help "Send logs to stderr" + <> metavar "BOOL" + <> value True + <> showDefault + ) + -- Boolean option so we can toggle the default in a consistent way + <*> option auto + ( long "log-client" + <> help "Send logs to the client using the window/logMessage LSP method" + <> metavar "BOOL" + -- This is off by default, since some clients will show duplicate logs + -- if we log both to stderr and the client + <> value False + <> showDefault + ) + <*> option auto + (short 'j' + <> help "Number of threads (0: automatic)" + <> metavar "NUM" + <> value 0 + <> showDefault + ) + <*> switch (long "project-ghc-version" + <> help "Work out the project GHC version and print it") + where + lspCommand = LSP <$ flag' True (long "lsp" <> help "Start talking to an LSP server") + checkCommand = Check <$> many (argument str (metavar "FILES/DIRS...")) + +-- --------------------------------------------------------------------- + +haskellLanguageServerNumericVersion :: String +haskellLanguageServerNumericVersion = showVersion version + +haskellLanguageServerVersion :: IO String +haskellLanguageServerVersion = do + path <- getExecutablePath + let gi = $$tGitInfoCwdTry + gitHashSection = case gi of + Right gi -> " (GIT hash: " <> giHash gi <> ")" + Left _ -> "" + return $ "haskell-language-server version: " <> haskellLanguageServerNumericVersion + <> " (GHC: " <> VERSION_ghc + <> ") (PATH: " <> path <> ")" + 
<> gitHashSection + diff --git a/src/Ide/Logger.hs b/src/Ide/Logger.hs deleted file mode 100644 index bd720ffc20..0000000000 --- a/src/Ide/Logger.hs +++ /dev/null @@ -1,43 +0,0 @@ -{- | Provides an implementation of the ghcide @Logger@ which uses - @System.Log.Logger@ under the hood. --} -module Ide.Logger - ( - hlsLogger - , logm - , debugm - , warningm - , errorm - ) where - -import Control.Monad.IO.Class -import qualified Data.Text as T -import qualified Development.IDE.Types.Logger as L -import System.Log.Logger - --- --------------------------------------------------------------------- - -hlsLogger :: L.Logger -hlsLogger = L.Logger $ \pri txt -> - case pri of - L.Telemetry -> logm (T.unpack txt) - L.Debug -> debugm (T.unpack txt) - L.Info -> logm (T.unpack txt) - L.Warning -> warningm (T.unpack txt) - L.Error -> errorm (T.unpack txt) - --- --------------------------------------------------------------------- - -logm :: MonadIO m => String -> m () -logm s = liftIO $ infoM "hls" s - -debugm :: MonadIO m => String -> m () -debugm s = liftIO $ debugM "hls" s - -warningm :: MonadIO m => String -> m () -warningm s = liftIO $ warningM "hls" s - -errorm :: MonadIO m => String -> m () -errorm s = liftIO $ errorM "hls" s - --- --------------------------------------------------------------------- diff --git a/src/Ide/Main.hs b/src/Ide/Main.hs new file mode 100644 index 0000000000..f122b53fa6 --- /dev/null +++ b/src/Ide/Main.hs @@ -0,0 +1,148 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Main(defaultMain, runLspMode, Log(..)) where + +import Control.Monad.Extra +import qualified Data.Aeson.Encode.Pretty as A +import Data.Coerce (coerce) +import Data.Default +import Data.Function ((&)) +import Data.List (sortOn) +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Text.IO as T (putStrLn) +import Data.Text.Lazy.Encoding (decodeUtf8) +import qualified Data.Text.Lazy.IO as LT +import Development.IDE.Core.Rules hiding (Log) +import Development.IDE.Core.Tracing (withTelemetryRecorder) +import Development.IDE.Main (isLSP) +import qualified Development.IDE.Main as IDEMain +import qualified Development.IDE.Session as Session +import qualified Development.IDE.Types.Options as Ghcide +import qualified HIE.Bios.Environment as HieBios +import HIE.Bios.Types hiding (Log) +import qualified HIE.Bios.Types as HieBios +import Ide.Arguments +import Ide.Logger as G +import Ide.Plugin.ConfigUtils (pluginsCustomConfigToMarkdownTables, + pluginsToDefaultConfig, + pluginsToVSCodeExtensionSchema) +import Ide.Types (IdePlugins, PluginId (PluginId), + describePlugin, ipMap, pluginId) +import Ide.Version +import Prettyprinter as PP +import System.Directory +import qualified System.Directory.Extra as IO +import System.FilePath + +data Log + = LogVersion !String + | LogDirectory !FilePath + | LogLspStart !GhcideArguments ![PluginId] + | LogIDEMain IDEMain.Log + | LogHieBios HieBios.Log + | LogSession Session.Log + | LogOther T.Text + deriving Show + +instance Pretty Log where + pretty log = case log of + LogVersion version -> pretty version + LogDirectory path -> "Directory:" <+> pretty path + LogLspStart ghcideArgs pluginIds -> + nest 2 $ + vsep + [ "Starting (haskell-language-server) LSP server..." 
+                    , viaShow ghcideArgs
+                    , "PluginIds:" <+> pretty (coerce @_ @[Text] pluginIds) ]
+        LogIDEMain iDEMainLog -> pretty iDEMainLog
+        LogHieBios hieBiosLog -> pretty hieBiosLog
+        LogSession sessionLog -> pretty sessionLog
+        LogOther t -> pretty t
+
+defaultMain :: Recorder (WithPriority Log) -> Arguments -> IdePlugins IdeState -> IO ()
+defaultMain recorder args idePlugins = do
+    -- WARNING: If you write to stdout before runLanguageServer
+    --          then the language server will not work
+
+    hlsVer <- haskellLanguageServerVersion
+    case args of
+        ProbeToolsMode -> do
+            programsOfInterest <- findProgramVersions
+            putStrLn hlsVer
+            putStrLn "Tool versions found on the $PATH"
+            putStrLn $ showProgramVersionOfInterest programsOfInterest
+
+        VersionMode PrintVersion ->
+            putStrLn hlsVer
+
+        VersionMode PrintNumericVersion ->
+            putStrLn haskellLanguageServerNumericVersion
+
+        ListPluginsMode -> do
+            let pluginSummary =
+                    PP.vsep
+                        $ map describePlugin
+                        $ sortOn pluginId
+                        $ ipMap idePlugins
+            print pluginSummary
+
+        BiosMode PrintCradleType -> do
+            dir <- IO.getCurrentDirectory
+            hieYaml <- Session.findCradle def (dir </> "a")
+            cradle <- Session.loadCradle def (cmapWithPrio LogSession recorder) hieYaml dir
+            print cradle
+
+        Ghcide ghcideArgs -> do
+            {- see WARNING above -}
+            logWith recorder Info $ LogVersion hlsVer
+            runLspMode recorder ghcideArgs idePlugins
+
+        VSCodeExtensionSchemaMode -> do
+            LT.putStrLn $ decodeUtf8 $ encodePrettySorted $ pluginsToVSCodeExtensionSchema idePlugins
+        PluginsCustomConfigMarkdownReferenceMode -> do
+            T.putStrLn $ pluginsCustomConfigToMarkdownTables idePlugins
+        DefaultConfigurationMode -> do
+            LT.putStrLn $ decodeUtf8 $ encodePrettySorted $ pluginsToDefaultConfig idePlugins
+        PrintLibDir -> do
+            d <- getCurrentDirectory
+            let initialFp = d </> "a"
+            hieYaml <- Session.findCradle def initialFp
+            cradle <- Session.loadCradle def (cmapWithPrio LogSession recorder) hieYaml d
+            (CradleSuccess libdir) <- HieBios.getRuntimeGhcLibDir cradle
+            putStr libdir
+    where
+        encodePrettySorted = A.encodePretty' A.defConfig
+            { A.confCompare = compare
+            }
+
+-- ---------------------------------------------------------------------
+
+runLspMode :: Recorder (WithPriority Log) -> GhcideArguments -> IdePlugins IdeState -> IO ()
+runLspMode recorder ghcideArgs@GhcideArguments{..} idePlugins = withTelemetryRecorder $ \telemetryRecorder' -> do
+    let log = logWith recorder
+    whenJust argsCwd IO.setCurrentDirectory
+    dir <- IO.getCurrentDirectory
+    log Info $ LogDirectory dir
+
+    when (isLSP argsCommand) $ do
+        log Info $ LogLspStart ghcideArgs (map pluginId $ ipMap idePlugins)
+
+    let args = (if argsTesting then IDEMain.testing else IDEMain.defaultArguments)
+                    (cmapWithPrio LogIDEMain recorder) dir idePlugins
+
+    let telemetryRecorder = telemetryRecorder' & cmapWithPrio pretty
+
+    IDEMain.defaultMain (cmapWithPrio LogIDEMain $ recorder <> telemetryRecorder) args
+        { IDEMain.argCommand = argsCommand
+        , IDEMain.argsThreads = if argsThreads == 0 then Nothing else Just $ fromIntegral argsThreads
+        , IDEMain.argsIdeOptions = \config sessionLoader ->
+            let defOptions = IDEMain.argsIdeOptions args config sessionLoader
+            in defOptions { Ghcide.optShakeProfiling = argsShakeProfiling }
+        }
+
diff --git a/src/Ide/Plugin.hs b/src/Ide/Plugin.hs
deleted file mode 100644
index 0fd249745c..0000000000
--- a/src/Ide/Plugin.hs
+++ /dev/null
@@ -1,581 +0,0 @@
-{-# LANGUAGE FlexibleInstances #-}
-{-# LANGUAGE MultiParamTypeClasses #-}
-{-# LANGUAGE DeriveAnyClass #-}
-{-# LANGUAGE GADTs #-}
-{-# LANGUAGE DeriveGeneric
#-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} - -module Ide.Plugin - ( - asGhcIdePlugin - , pluginDescToIdePlugins - , mkLspCommand - , allLspCmdIds - , allLspCmdIds' - , getPid - , responseError - ) where - -import Control.Lens ( (^.) ) -import Control.Monad -import qualified Data.Aeson as J -import qualified Data.Default -import Data.Either -import qualified Data.List as List -import qualified Data.Map as Map -import Data.Maybe -import qualified Data.Text as T -import Development.IDE.Core.Rules -import Development.IDE.Core.Shake -import Development.IDE.LSP.Server -import Development.IDE.Plugin hiding (pluginRules) -import Development.IDE.Types.Diagnostics as D -import Development.IDE.Types.Logger -import Development.Shake hiding ( Diagnostic, command ) -import GHC.Generics -import Ide.Plugin.Config -import Ide.Plugin.Formatter -import Ide.Types -import qualified Language.Haskell.LSP.Core as LSP -import Language.Haskell.LSP.Messages -import Language.Haskell.LSP.Types -import qualified Language.Haskell.LSP.Types as J -import qualified Language.Haskell.LSP.Types.Capabilities as C -import Language.Haskell.LSP.Types.Lens as L hiding (formatting, rangeFormatting) -import qualified Language.Haskell.LSP.VFS as VFS -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - --- | Map a set of plugins to the underlying ghcide engine. Main point is --- IdePlugins are arranged by kind of operation, 'Plugin' is arranged by message --- category ('Notifaction', 'Request' etc). -asGhcIdePlugin :: IdePlugins -> Plugin Config -asGhcIdePlugin mp = - mkPlugin rulesPlugins (Just . pluginRules) <> - mkPlugin executeCommandPlugins (Just . pluginCommands) <> - mkPlugin codeActionPlugins pluginCodeActionProvider <> - mkPlugin codeLensPlugins pluginCodeLensProvider <> - -- Note: diagnostics are provided via Rules from pluginDiagnosticProvider - mkPlugin hoverPlugins pluginHoverProvider <> - mkPlugin symbolsPlugins pluginSymbolsProvider <> - mkPlugin formatterPlugins pluginFormattingProvider <> - mkPlugin completionsPlugins pluginCompletionProvider <> - mkPlugin renamePlugins pluginRenameProvider - where - justs (p, Just x) = [(p, x)] - justs (_, Nothing) = [] - - ls = Map.toList (ipMap mp) - - mkPlugin :: ([(PluginId, b)] -> Plugin Config) -> (PluginDescriptor -> Maybe b) -> Plugin Config - mkPlugin maker selector - = maker $ concatMap (\(pid, p) -> justs (pid, selector p)) ls - - -pluginDescToIdePlugins :: [PluginDescriptor] -> IdePlugins -pluginDescToIdePlugins plugins = IdePlugins $ Map.fromList $ map (\p -> (pluginId p, p)) plugins - -allLspCmdIds' :: T.Text -> IdePlugins -> [T.Text] -allLspCmdIds' pid mp = mkPlugin (allLspCmdIds pid) (Just . 
pluginCommands) - where - justs (p, Just x) = [(p, x)] - justs (_, Nothing) = [] - - ls = Map.toList (ipMap mp) - - mkPlugin maker selector - = maker $ concatMap (\(pid, p) -> justs (pid, selector p)) ls - --- --------------------------------------------------------------------- - -rulesPlugins :: [(PluginId, Rules ())] -> Plugin Config -rulesPlugins rs = Plugin rules mempty - where - rules = mconcat $ map snd rs - -codeActionPlugins :: [(PluginId, CodeActionProvider)] -> Plugin Config -codeActionPlugins cas = Plugin codeActionRules (codeActionHandlers cas) - -codeActionRules :: Rules () -codeActionRules = mempty - -codeActionHandlers :: [(PluginId, CodeActionProvider)] -> PartialHandlers Config -codeActionHandlers cas = PartialHandlers $ \WithMessage{..} x -> return x - { LSP.codeActionHandler - = withResponse RspCodeAction (makeCodeAction cas) - } - -makeCodeAction :: [(PluginId, CodeActionProvider)] - -> LSP.LspFuncs Config -> IdeState - -> CodeActionParams - -> IO (Either ResponseError (List CAResult)) -makeCodeAction cas lf ideState (CodeActionParams docId range context _) = do - let caps = LSP.clientCapabilities lf - unL (List ls) = ls - r <- mapM (\(pid,provider) -> provider lf ideState pid docId range context) cas - let actions = filter wasRequested . concat $ map unL $ rights r - res <- send caps actions - return $ Right res - where - wasRequested :: CAResult -> Bool - wasRequested (CACommand _) = True - wasRequested (CACodeAction ca) - | Nothing <- only context = True - | Just (List allowed) <- only context - , Just caKind <- ca ^. kind = caKind `elem` allowed - | otherwise = False - - wrapCodeAction :: C.ClientCapabilities -> CAResult -> IO (Maybe CAResult) - wrapCodeAction _ (CACommand cmd) = return $ Just (CACommand cmd) - wrapCodeAction caps (CACodeAction action) = do - - let (C.ClientCapabilities _ textDocCaps _ _) = caps - let literalSupport = textDocCaps >>= C._codeAction >>= C._codeActionLiteralSupport - - case literalSupport of - Nothing -> do - let cmdParams = [J.toJSON (FallbackCodeActionParams (action ^. edit) (action ^. command))] - cmd <- mkLspCommand "hls" "fallbackCodeAction" (action ^. title) (Just cmdParams) - return $ Just (CACommand cmd) - Just _ -> return $ Just (CACodeAction action) - - send :: C.ClientCapabilities -> [CAResult] -> IO (List CAResult) - send caps codeActions = List . 
catMaybes <$> mapM (wrapCodeAction caps) codeActions - -data FallbackCodeActionParams = - FallbackCodeActionParams - { fallbackWorkspaceEdit :: Maybe WorkspaceEdit - , fallbackCommand :: Maybe Command - } - deriving (Generic, J.ToJSON, J.FromJSON) - --- ----------------------------------------------------------- - -codeLensPlugins :: [(PluginId, CodeLensProvider)] -> Plugin Config -codeLensPlugins cas = Plugin codeLensRules (codeLensHandlers cas) - -codeLensRules :: Rules () -codeLensRules = mempty - -codeLensHandlers :: [(PluginId, CodeLensProvider)] -> PartialHandlers Config -codeLensHandlers cas = PartialHandlers $ \WithMessage{..} x -> return x - { LSP.codeLensHandler - = withResponse RspCodeLens (makeCodeLens cas) - } - -makeCodeLens :: [(PluginId, CodeLensProvider)] - -> LSP.LspFuncs Config - -> IdeState - -> CodeLensParams - -> IO (Either ResponseError (List CodeLens)) -makeCodeLens cas lf ideState params = do - logInfo (ideLogger ideState) "Plugin.makeCodeLens (ideLogger)" -- AZ - let - makeLens (pid, provider) = do - r <- provider lf ideState pid params - return (pid, r) - breakdown :: [(PluginId, Either ResponseError a)] -> ([(PluginId, ResponseError)], [(PluginId, a)]) - breakdown ls = (concatMap doOneLeft ls, concatMap doOneRight ls) - where - doOneLeft (pid, Left err) = [(pid,err)] - doOneLeft (_, Right _) = [] - - doOneRight (pid, Right a) = [(pid,a)] - doOneRight (_, Left _) = [] - - r <- mapM makeLens cas - case breakdown r of - ([],[]) -> return $ Right $ List [] - (es,[]) -> return $ Left $ ResponseError InternalError (T.pack $ "codeLens failed:" ++ show es) Nothing - (_,rs) -> return $ Right $ List (concatMap (\(_,List cs) -> cs) rs) - --- ----------------------------------------------------------- - -executeCommandPlugins :: [(PluginId, [PluginCommand])] -> Plugin Config -executeCommandPlugins ecs = Plugin mempty (executeCommandHandlers ecs) - -executeCommandHandlers :: [(PluginId, [PluginCommand])] -> PartialHandlers Config -executeCommandHandlers ecs = PartialHandlers $ \WithMessage{..} x -> return x{ - LSP.executeCommandHandler = withResponseAndRequest RspExecuteCommand ReqApplyWorkspaceEdit (makeExecuteCommands ecs) - } - --- type ExecuteCommandProvider = IdeState --- -> ExecuteCommandParams --- -> IO (Either ResponseError Value, Maybe (ServerMethod, ApplyWorkspaceEditParams)) -makeExecuteCommands :: [(PluginId, [PluginCommand])] -> LSP.LspFuncs Config -> ExecuteCommandProvider -makeExecuteCommands ecs lf ide = do - let - pluginMap = Map.fromList ecs - parseCmdId :: T.Text -> Maybe (PluginId, CommandId) - parseCmdId x = case T.splitOn ":" x of - [plugin, command] -> Just (PluginId plugin, CommandId command) - [_, plugin, command] -> Just (PluginId plugin, CommandId command) - _ -> Nothing - - execCmd :: ExecuteCommandParams -> IO (Either ResponseError J.Value, Maybe (ServerMethod, ApplyWorkspaceEditParams)) - execCmd (ExecuteCommandParams cmdId args _) = do - -- The parameters to the HIE command are always the first element - let cmdParams :: J.Value - cmdParams = case args of - Just (J.List (x:_)) -> x - _ -> J.Null - - case parseCmdId cmdId of - -- Shortcut for immediately applying a applyWorkspaceEdit as a fallback for v3.8 code actions - Just ("hls", "fallbackCodeAction") -> - case J.fromJSON cmdParams of - J.Success (FallbackCodeActionParams mEdit mCmd) -> do - - -- Send off the workspace request if it has one - forM_ mEdit $ \edit -> do - let eParams = J.ApplyWorkspaceEditParams edit - -- TODO: Use lspfuncs to send an applyedit message. 
Or change - -- the API to allow a list of messages to be returned. - return (Right J.Null, Just(J.WorkspaceApplyEdit, eParams)) - - case mCmd of - -- If we have a command, continue to execute it - Just (J.Command _ innerCmdId innerArgs) - -> execCmd (ExecuteCommandParams innerCmdId innerArgs Nothing) - Nothing -> return (Right J.Null, Nothing) - - J.Error _str -> return (Right J.Null, Nothing) - -- Couldn't parse the fallback command params - -- _ -> liftIO $ - -- LSP.sendErrorResponseS (LSP.sendFunc lf) - -- (J.responseId (req ^. J.id)) - -- J.InvalidParams - -- "Invalid fallbackCodeAction params" - - -- Just an ordinary HIE command - Just (plugin, cmd) -> runPluginCommand pluginMap lf ide plugin cmd cmdParams - - -- Couldn't parse the command identifier - _ -> return (Left $ ResponseError InvalidParams "Invalid command identifier" Nothing, Nothing) - - execCmd - -{- - ReqExecuteCommand req -> do - liftIO $ U.logs $ "reactor:got ExecuteCommandRequest:" ++ show req - lf <- asks lspFuncs - - let params = req ^. J.params - - parseCmdId :: T.Text -> Maybe (PluginId, CommandId) - parseCmdId x = case T.splitOn ":" x of - [plugin, command] -> Just (PluginId plugin, CommandId command) - [_, plugin, command] -> Just (PluginId plugin, CommandId command) - _ -> Nothing - - callback obj = do - liftIO $ U.logs $ "ExecuteCommand response got:r=" ++ show obj - case fromDynJSON obj :: Maybe J.WorkspaceEdit of - Just v -> do - lid <- nextLspReqId - reactorSend $ RspExecuteCommand $ Core.makeResponseMessage req (A.Object mempty) - let msg = fmServerApplyWorkspaceEditRequest lid $ J.ApplyWorkspaceEditParams v - liftIO $ U.logs $ "ExecuteCommand sending edit: " ++ show msg - reactorSend $ ReqApplyWorkspaceEdit msg - Nothing -> reactorSend $ RspExecuteCommand $ Core.makeResponseMessage req $ dynToJSON obj - - execCmd cmdId args = do - -- The parameters to the HIE command are always the first element - let cmdParams = case args of - Just (J.List (x:_)) -> x - _ -> A.Null - - case parseCmdId cmdId of - -- Shortcut for immediately applying a applyWorkspaceEdit as a fallback for v3.8 code actions - Just ("hls", "fallbackCodeAction") -> do - case A.fromJSON cmdParams of - A.Success (FallbackCodeActionParams mEdit mCmd) -> do - - -- Send off the workspace request if it has one - forM_ mEdit $ \edit -> do - lid <- nextLspReqId - let eParams = J.ApplyWorkspaceEditParams edit - eReq = fmServerApplyWorkspaceEditRequest lid eParams - reactorSend $ ReqApplyWorkspaceEdit eReq - - case mCmd of - -- If we have a command, continue to execute it - Just (J.Command _ innerCmdId innerArgs) -> execCmd innerCmdId innerArgs - - -- Otherwise we need to send back a response oureslves - Nothing -> reactorSend $ RspExecuteCommand $ Core.makeResponseMessage req (A.Object mempty) - - -- Couldn't parse the fallback command params - _ -> liftIO $ - Core.sendErrorResponseS (Core.sendFunc lf) - (J.responseId (req ^. J.id)) - J.InvalidParams - "Invalid fallbackCodeAction params" - -- Just an ordinary HIE command - Just (plugin, cmd) -> - let preq = GReq tn "plugin" Nothing Nothing (Just $ req ^. J.id) callback (toDynJSON (Nothing :: Maybe J.WorkspaceEdit)) - $ runPluginCommand plugin cmd cmdParams - in makeRequest preq - - -- Couldn't parse the command identifier - _ -> liftIO $ - Core.sendErrorResponseS (Core.sendFunc lf) - (J.responseId (req ^. J.id)) - J.InvalidParams - "Invalid command identifier" - - execCmd (params ^. J.command) (params ^. 
J.arguments) --} - --- ----------------------------------------------------------- - --- | Runs a plugin command given a PluginId, CommandId and --- arguments in the form of a JSON object. -runPluginCommand :: Map.Map PluginId [PluginCommand] - -> LSP.LspFuncs Config - -> IdeState - -> PluginId - -> CommandId - -> J.Value - -> IO (Either ResponseError J.Value, - Maybe (ServerMethod, ApplyWorkspaceEditParams)) -runPluginCommand m lf ide p@(PluginId p') com@(CommandId com') arg = - case Map.lookup p m of - Nothing -> return - (Left $ ResponseError InvalidRequest ("Plugin " <> p' <> " doesn't exist") Nothing, Nothing) - Just xs -> case List.find ((com ==) . commandId) xs of - Nothing -> return (Left $ - ResponseError InvalidRequest ("Command " <> com' <> " isn't defined for plugin " <> p' - <> ". Legal commands are: " <> T.pack(show $ map commandId xs)) Nothing, Nothing) - Just (PluginCommand _ _ f) -> case J.fromJSON arg of - J.Error err -> return (Left $ - ResponseError InvalidParams ("error while parsing args for " <> com' <> " in plugin " <> p' - <> ": " <> T.pack err - <> "\narg = " <> T.pack (show arg)) Nothing, Nothing) - J.Success a -> f lf ide a - --- lsp-request: error while parsing args for typesignature.add in plugin ghcide: --- When parsing the record ExecuteCommandParams of type --- Language.Haskell.LSP.Types.DataTypesJSON.ExecuteCommandParams the key command --- was not present. - --- ----------------------------------------------------------- - -mkLspCommand :: PluginId -> CommandId -> T.Text -> Maybe [J.Value] -> IO Command -mkLspCommand plid cn title args' = do - pid <- getPid - let cmdId = mkLspCmdId pid plid cn - let args = List <$> args' - return $ Command title cmdId args - -mkLspCmdId :: T.Text -> PluginId -> CommandId -> T.Text -mkLspCmdId pid (PluginId plid) (CommandId cid) - = pid <> ":" <> plid <> ":" <> cid - -allLspCmdIds :: T.Text -> [(PluginId, [PluginCommand])] -> [T.Text] -allLspCmdIds pid commands = concat $ map go commands - where - go (plid, cmds) = map (mkLspCmdId pid plid . commandId) cmds - --- --------------------------------------------------------------------- - -hoverPlugins :: [(PluginId, HoverProvider)] -> Plugin Config -hoverPlugins hs = Plugin hoverRules (hoverHandlers hs) - -hoverRules :: Rules () -hoverRules = mempty - -hoverHandlers :: [(PluginId, HoverProvider)] -> PartialHandlers Config -hoverHandlers hps = PartialHandlers $ \WithMessage{..} x -> - return x{LSP.hoverHandler = withResponse RspHover (makeHover hps)} - -makeHover :: [(PluginId, HoverProvider)] - -> LSP.LspFuncs Config -> IdeState - -> TextDocumentPositionParams - -> IO (Either ResponseError (Maybe Hover)) -makeHover hps _lf ideState params - = do - mhs <- mapM (\(_,p) -> p ideState params) hps - -- TODO: We should support ServerCapabilities and declare that - -- we don't support hover requests during initialization if we - -- don't have any hover providers - -- TODO: maybe only have provider give MarkedString and - -- work out range here? - let hs = catMaybes (rights mhs) - r = listToMaybe $ mapMaybe (^. range) hs - h = case mconcat ((map (^. 
contents) hs) :: [HoverContents]) of - HoverContentsMS (List []) -> Nothing - hh -> Just $ Hover hh r - return $ Right h - --- --------------------------------------------------------------------- --- --------------------------------------------------------------------- - -symbolsPlugins :: [(PluginId, SymbolsProvider)] -> Plugin Config -symbolsPlugins hs = Plugin symbolsRules (symbolsHandlers hs) - -symbolsRules :: Rules () -symbolsRules = mempty - -symbolsHandlers :: [(PluginId, SymbolsProvider)] -> PartialHandlers Config -symbolsHandlers hps = PartialHandlers $ \WithMessage{..} x -> - return x {LSP.documentSymbolHandler = withResponse RspDocumentSymbols (makeSymbols hps)} - -makeSymbols :: [(PluginId, SymbolsProvider)] - -> LSP.LspFuncs Config - -> IdeState - -> DocumentSymbolParams - -> IO (Either ResponseError DSResult) -makeSymbols sps lf ideState params - = do - let uri' = params ^. textDocument . uri - (C.ClientCapabilities _ tdc _ _) = LSP.clientCapabilities lf - supportsHierarchy = fromMaybe False $ tdc >>= C._documentSymbol - >>= C._hierarchicalDocumentSymbolSupport - convertSymbols :: [DocumentSymbol] -> DSResult - convertSymbols symbs - | supportsHierarchy = DSDocumentSymbols $ List symbs - | otherwise = DSSymbolInformation (List $ concatMap (go Nothing) symbs) - where - go :: Maybe T.Text -> DocumentSymbol -> [SymbolInformation] - go parent ds = - let children' :: [SymbolInformation] - children' = concatMap (go (Just name')) (fromMaybe mempty (ds ^. children)) - loc = Location uri' (ds ^. range) - name' = ds ^. name - si = SymbolInformation name' (ds ^. kind) (ds ^. deprecated) loc parent - in [si] <> children' - - mhs <- mapM (\(_,p) -> p lf ideState params) sps - case rights mhs of - [] -> return $ Left $ responseError $ T.pack $ show $ lefts mhs - hs -> return $ Right $ convertSymbols $ concat hs - - --- --------------------------------------------------------------------- --- --------------------------------------------------------------------- - -renamePlugins :: [(PluginId, RenameProvider)] -> Plugin Config -renamePlugins providers = Plugin rules handlers - where - rules = mempty - handlers = PartialHandlers $ \WithMessage{..} x -> return x - { LSP.renameHandler = withResponse RspRename (renameWith providers)} - -renameWith :: - [(PluginId, RenameProvider)] -> - LSP.LspFuncs Config -> - IdeState -> - RenameParams -> - IO (Either ResponseError WorkspaceEdit) -renameWith providers lspFuncs state params = do - results <- mapM (\(_,p) -> p lspFuncs state params) providers - case partitionEithers results of - (errors, []) -> return $ Left $ responseError $ T.pack $ show $ errors - (_, edits) -> return $ Right $ mconcat edits - --- --------------------------------------------------------------------- --- --------------------------------------------------------------------- - -formatterPlugins :: [(PluginId, FormattingProvider IO)] -> Plugin Config -formatterPlugins providers - = Plugin formatterRules - (formatterHandlers (Map.fromList (("none",noneProvider):providers))) - -formatterRules :: Rules () -formatterRules = mempty - -formatterHandlers :: Map.Map PluginId (FormattingProvider IO) -> PartialHandlers Config -formatterHandlers providers = PartialHandlers $ \WithMessage{..} x -> return x - { LSP.documentFormattingHandler - = withResponse RspDocumentFormatting (formatting providers) - , LSP.documentRangeFormattingHandler - = withResponse RspDocumentRangeFormatting (rangeFormatting providers) - } - --- --------------------------------------------------------------------- 
--- --------------------------------------------------------------------- - -completionsPlugins :: [(PluginId, CompletionProvider)] -> Plugin Config -completionsPlugins cs = Plugin completionsRules (completionsHandlers cs) - -completionsRules :: Rules () -completionsRules = mempty - -completionsHandlers :: [(PluginId, CompletionProvider)] -> PartialHandlers Config -completionsHandlers cps = PartialHandlers $ \WithMessage{..} x -> - return x {LSP.completionHandler = withResponse RspCompletion (makeCompletions cps)} - -makeCompletions :: [(PluginId, CompletionProvider)] - -> LSP.LspFuncs Config - -> IdeState - -> CompletionParams - -> IO (Either ResponseError CompletionResponseResult) -makeCompletions sps lf ideState params@(CompletionParams (TextDocumentIdentifier doc) pos _context _mt) - = do - mprefix <- getPrefixAtPos lf doc pos - _snippets <- WithSnippets <$> completionSnippetsOn <$> (getClientConfig lf) - - let - combine :: [CompletionResponseResult] -> CompletionResponseResult - combine cs = go (Completions $ List []) cs - where - go acc [] = acc - go (Completions (List ls)) (Completions (List ls2):rest) - = go (Completions (List (ls <> ls2))) rest - go (Completions (List ls)) (CompletionList (CompletionListType complete (List ls2)):rest) - = go (CompletionList $ CompletionListType complete (List (ls <> ls2))) rest - go (CompletionList (CompletionListType complete (List ls))) (CompletionList (CompletionListType complete2 (List ls2)):rest) - = go (CompletionList $ CompletionListType (complete || complete2) (List (ls <> ls2))) rest - go (CompletionList (CompletionListType complete (List ls))) (Completions (List ls2):rest) - = go (CompletionList $ CompletionListType complete (List (ls <> ls2))) rest - - case mprefix of - Nothing -> return $ Right $ Completions $ List [] - Just _prefix -> do - mhs <- mapM (\(_,p) -> p lf ideState params) sps - case rights mhs of - [] -> return $ Left $ responseError $ T.pack $ show $ lefts mhs - hs -> return $ Right $ combine hs - -{- - ReqCompletion req -> do - liftIO $ U.logs $ "reactor:got CompletionRequest:" ++ show req - let (_, doc, pos) = reqParams req - - mprefix <- getPrefixAtPos doc pos - - let callback compls = do - let rspMsg = Core.makeResponseMessage req - $ J.Completions $ J.List compls - reactorSend $ RspCompletion rspMsg - case mprefix of - Nothing -> callback [] - Just prefix -> do - snippets <- Completions.WithSnippets <$> configVal completionSnippetsOn - let hreq = IReq tn "completion" (req ^. J.id) callback - $ lift $ Completions.getCompletions doc prefix snippets - makeRequest hreq --} - -getPrefixAtPos :: LSP.LspFuncs Config -> Uri -> Position -> IO (Maybe VFS.PosPrefixInfo) -getPrefixAtPos lf uri pos = do - mvf <- (LSP.getVirtualFileFunc lf) (J.toNormalizedUri uri) - case mvf of - Just vf -> VFS.getCompletionPrefix pos vf - Nothing -> return Nothing - --- --------------------------------------------------------------------- --- | Returns the current client configuration. It is not wise to permanently --- cache the returned value of this function, as clients can at runitime change --- their configuration. --- --- If no custom configuration has been set by the client, this function returns --- our own defaults. 
-getClientConfig :: LSP.LspFuncs Config -> IO Config -getClientConfig lf = fromMaybe Data.Default.def <$> LSP.config lf - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/Brittany.hs b/src/Ide/Plugin/Brittany.hs deleted file mode 100644 index 9d4fca2368..0000000000 --- a/src/Ide/Plugin/Brittany.hs +++ /dev/null @@ -1,99 +0,0 @@ -module Ide.Plugin.Brittany where - -import Control.Lens -import Control.Monad.IO.Class -import Control.Monad.Trans.Maybe (MaybeT, runMaybeT) -import Data.Coerce -import Data.Semigroup -import Data.Text (Text) -import qualified Data.Text as T -import Development.IDE.Types.Location -import Language.Haskell.Brittany -import Language.Haskell.LSP.Types as J -import qualified Language.Haskell.LSP.Types.Lens as J -import Ide.Plugin.Formatter -import Ide.PluginUtils -import Ide.Types - -import System.FilePath -import Data.Maybe (maybeToList) - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginFormattingProvider = Just provider - } - --- | Formatter provider of Brittany. --- Formats the given source in either a given Range or the whole Document. --- If the provider fails an error is returned that can be displayed to the user. -provider - :: FormattingProvider IO -provider _lf _ideState typ contents fp opts = do --- text uri formatType opts = pluginGetFile "brittanyCmd: " uri $ \fp -> do - confFile <- liftIO $ getConfFile fp - let (range, selectedContents) = case typ of - FormatText -> (fullRange contents, contents) - FormatRange r -> (normalize r, extractRange r contents) - - res <- formatText confFile opts selectedContents - case res of - Left err -> return $ Left $ responseError (T.pack $ "brittanyCmd: " ++ unlines (map showErr err)) - Right newText -> return $ Right $ J.List [TextEdit range newText] - --- | Primitive to format text with the given option. --- May not throw exceptions but return a Left value. --- Errors may be presented to the user. -formatText - :: MonadIO m - => Maybe FilePath -- ^ Path to configs. If Nothing, default configs will be used. - -> FormattingOptions -- ^ Options for the formatter such as indentation. - -> Text -- ^ Text to format - -> m (Either [BrittanyError] Text) -- ^ Either formatted Text or a error from Brittany. -formatText confFile opts text = - liftIO $ runBrittany tabSize confFile text - where tabSize = opts ^. J.tabSize - --- | Recursively search in every directory of the given filepath for brittany.yaml. --- If no such file has been found, return Nothing. -getConfFile :: NormalizedFilePath -> IO (Maybe FilePath) -getConfFile = findLocalConfigPath . takeDirectory . fromNormalizedFilePath - --- | Run Brittany on the given text with the given tab size and --- a configuration path. If no configuration path is given, a --- default configuration is chosen. The configuration may overwrite --- tab size parameter. --- --- Returns either a list of Brittany Errors or the reformatted text. --- May not throw an exception. 
-runBrittany :: Int -- ^ tab size - -> Maybe FilePath -- ^ local config file - -> Text -- ^ text to format - -> IO (Either [BrittanyError] Text) -runBrittany tabSize confPath text = do - let cfg = mempty - { _conf_layout = - mempty { _lconfig_indentAmount = opt (coerce tabSize) - } - , _conf_forward = - (mempty :: CForwardOptions Option) - { _options_ghc = opt (runIdentity ( _options_ghc forwardOptionsSyntaxExtsEnabled)) - } - } - - config <- fromMaybeT (pure staticDefaultConfig) (readConfigsWithUserConfig cfg (maybeToList confPath)) - parsePrintModule config text - -fromMaybeT :: Monad m => m a -> MaybeT m a -> m a -fromMaybeT def act = runMaybeT act >>= maybe def return - -opt :: a -> Option a -opt = Option . Just - -showErr :: BrittanyError -> String -showErr (ErrorInput s) = s -showErr (ErrorMacroConfig err input) - = "Error: parse error in inline configuration: " ++ err ++ " in the string \"" ++ input ++ "\"." -showErr (ErrorUnusedComment s) = s -showErr (LayoutWarning s) = s -showErr (ErrorUnknownNode s _) = s -showErr ErrorOutputCheck = "Brittany error - invalid output" diff --git a/src/Ide/Plugin/Config.hs b/src/Ide/Plugin/Config.hs deleted file mode 100644 index 7b8e372b28..0000000000 --- a/src/Ide/Plugin/Config.hs +++ /dev/null @@ -1,104 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TypeFamilies #-} -module Ide.Plugin.Config - ( - getInitialConfig - , getConfigFromNotification - , Config(..) - ) where - -import qualified Data.Aeson as A -import Data.Aeson hiding ( Error ) -import Data.Default -import qualified Data.Text as T -import Language.Haskell.LSP.Types - --- --------------------------------------------------------------------- - --- | Given a DidChangeConfigurationNotification message, this function returns the parsed --- Config object if possible. -getConfigFromNotification :: DidChangeConfigurationNotification -> Either T.Text Config -getConfigFromNotification (NotificationMessage _ _ (DidChangeConfigurationParams p)) = - case fromJSON p of - A.Success c -> Right c - A.Error err -> Left $ T.pack err - --- | Given an InitializeRequest message, this function returns the parsed --- Config object if possible. Otherwise, it returns the default configuration -getInitialConfig :: InitializeRequest -> Either T.Text Config -getInitialConfig (RequestMessage _ _ _ InitializeParams{_initializationOptions = Nothing }) = Right def -getInitialConfig (RequestMessage _ _ _ InitializeParams{_initializationOptions = Just opts}) = - case fromJSON opts of - A.Success c -> Right c - A.Error err -> Left $ T.pack err - --- --------------------------------------------------------------------- - --- | We (initially anyway) mirror the hie configuration, so that existing --- clients can simply switch executable and not have any nasty surprises. There --- will be surprises relating to config options being ignored, initially though. 
-data Config = - Config - { hlintOn :: Bool - , diagnosticsOnChange :: Bool - , maxNumberOfProblems :: Int - , diagnosticsDebounceDuration :: Int - , liquidOn :: Bool - , completionSnippetsOn :: Bool - , formatOnImportOn :: Bool - , formattingProvider :: T.Text - } deriving (Show,Eq) - -instance Default Config where - def = Config - { hlintOn = True - , diagnosticsOnChange = True - , maxNumberOfProblems = 100 - , diagnosticsDebounceDuration = 350000 - , liquidOn = False - , completionSnippetsOn = True - , formatOnImportOn = True - -- , formattingProvider = "brittany" - , formattingProvider = "ormolu" - -- , formattingProvider = "floskell" - -- , formattingProvider = "stylish-haskell" - } - --- TODO: Add API for plugins to expose their own LSP config options -instance A.FromJSON Config where - parseJSON = A.withObject "Config" $ \v -> do - s <- v .: "languageServerHaskell" - flip (A.withObject "Config.settings") s $ \o -> Config - <$> o .:? "hlintOn" .!= hlintOn def - <*> o .:? "diagnosticsOnChange" .!= diagnosticsOnChange def - <*> o .:? "maxNumberOfProblems" .!= maxNumberOfProblems def - <*> o .:? "diagnosticsDebounceDuration" .!= diagnosticsDebounceDuration def - <*> o .:? "liquidOn" .!= liquidOn def - <*> o .:? "completionSnippetsOn" .!= completionSnippetsOn def - <*> o .:? "formatOnImportOn" .!= formatOnImportOn def - <*> o .:? "formattingProvider" .!= formattingProvider def - --- 2017-10-09 23:22:00.710515298 [ThreadId 11] - ---> {"jsonrpc":"2.0","method":"workspace/didChangeConfiguration","params":{"settings":{"languageServerHaskell":{"maxNumberOfProblems":100,"hlintOn":true}}}} --- 2017-10-09 23:22:00.710667381 [ThreadId 15] - reactor:got didChangeConfiguration notification: --- NotificationMessage --- {_jsonrpc = "2.0" --- , _method = WorkspaceDidChangeConfiguration --- , _params = DidChangeConfigurationParams --- {_settings = Object (fromList [("languageServerHaskell",Object (fromList [("hlintOn",Bool True) --- ,("maxNumberOfProblems",Number 100.0)]))])}} - -instance A.ToJSON Config where - toJSON (Config h diag m d l c f fp) = object [ "languageServerHaskell" .= r ] - where - r = object [ "hlintOn" .= h - , "diagnosticsOnChange" .= diag - , "maxNumberOfProblems" .= m - , "diagnosticsDebounceDuration" .= d - , "liquidOn" .= l - , "completionSnippetsOn" .= c - , "formatOnImportOn" .= f - , "formattingProvider" .= fp - ] diff --git a/src/Ide/Plugin/Eval.hs b/src/Ide/Plugin/Eval.hs deleted file mode 100644 index 9b452ece61..0000000000 --- a/src/Ide/Plugin/Eval.hs +++ /dev/null @@ -1,338 +0,0 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DuplicateRecordFields #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TupleSections #-} - --- | A plugin inspired by the REPLoid feature of Dante[1] which allows --- to evaluate code in comment prompts and splice the results right below: --- --- > example :: [String] --- > example = ["This is an example", "of", "interactive", "evaluation"] --- > --- > -- >>> intercalate " " example --- > -- "This is an example of interactive evaluation" --- > -- --- --- [1] - https://github.com/jyp/dante -module Ide.Plugin.Eval where - -import Control.Monad (void) -import Control.Monad.IO.Class (MonadIO (liftIO)) -import Control.Monad.Trans.Class (MonadTrans (lift)) -import Control.Monad.Trans.Except (ExceptT (..), runExceptT, - throwE) -import Data.Aeson (FromJSON, ToJSON, 
Value (Null), - toJSON) -import Data.Bifunctor (Bifunctor (first)) -import qualified Data.HashMap.Strict as Map -import Data.String (IsString (fromString)) -import Data.Text (Text) -import qualified Data.Text as T -import Data.Time (getCurrentTime) -import Development.IDE.Core.Rules (runAction) -import Development.IDE.Core.RuleTypes (GetModSummary (..), - GhcSession (..)) -import Development.IDE.Core.Shake (use_) -import Development.IDE.GHC.Util (evalGhcEnv, hscEnv, - textToStringBuffer) -import Development.IDE.Types.Location (toNormalizedFilePath', - uriToFilePath') -import DynamicLoading (initializePlugins) -import DynFlags (targetPlatform) -import GHC (DynFlags, ExecResult (..), GeneralFlag (Opt_IgnoreHpcChanges, Opt_IgnoreOptimChanges, Opt_ImplicitImportQualified), - GhcLink (LinkInMemory), - GhcMode (CompManager), - HscTarget (HscInterpreted), - LoadHowMuch (LoadAllTargets), - SuccessFlag (..), - execLineNumber, execOptions, - execSourceFile, execStmt, - getContext, - getInteractiveDynFlags, - getSession, getSessionDynFlags, - ghcLink, ghcMode, hscTarget, - isImport, isStmt, load, - moduleName, packageFlags, - parseImportDecl, pkgDatabase, - pkgState, runDecls, setContext, - setInteractiveDynFlags, - setLogAction, - setSessionDynFlags, setTargets, - simpleImportDecl, ways) -import GHC.Generics (Generic) -import GhcMonad (modifySession) -import GhcPlugins (defaultLogActionHPutStrDoc, - gopt_set, gopt_unset, - interpWays, updateWays, - wayGeneralFlags, - wayUnsetGeneralFlags) -import HscTypes -import Ide.Plugin -import Ide.Types -import Language.Haskell.LSP.Core (LspFuncs (getVirtualFileFunc)) -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.VFS (virtualFileText) -import PrelNames (pRELUDE) -import System.FilePath -import System.IO (hClose) -import System.IO.Temp -import Data.Maybe (catMaybes) - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = - (defaultPluginDescriptor plId) - { pluginId = plId, - pluginCodeLensProvider = Just provider, - pluginCommands = [evalCommand] - } - -extractMatches :: Maybe Text -> [([(Text, Int)], Range)] -extractMatches = goSearch 0 . maybe [] T.lines - where - checkMatch = T.stripPrefix "-- >>> " - looksLikeSplice l - | Just l' <- T.stripPrefix "--" l = - not (" >>>" `T.isPrefixOf` l') - | otherwise = - False - - goSearch _ [] = [] - goSearch line (l : ll) - | Just match <- checkMatch l = - goAcc (line + 1) [(match, line)] ll - | otherwise = - goSearch (line + 1) ll - - goAcc line acc [] = [(reverse acc, Range p p)] where p = Position line 0 - goAcc line acc (l : ll) - | Just match <- checkMatch l = - goAcc (line + 1) ([(match, line)] <> acc) ll - | otherwise = - (reverse acc, r) : goSearch (line + 1) ll - where - r = Range p p' - p = Position line 0 - p' = Position (line + spliceLength) 0 - spliceLength = length (takeWhile looksLikeSplice (l : ll)) - -provider :: CodeLensProvider -provider lsp _state plId CodeLensParams {_textDocument} = response $ do - let TextDocumentIdentifier uri = _textDocument - contents <- liftIO $ getVirtualFileFunc lsp $ toNormalizedUri uri - let text = virtualFileText <$> contents - let matches = extractMatches text - - cmd <- liftIO $ mkLspCommand plId evalCommandName "Evaluate..." 
(Just []) - - let lenses = - [ CodeLens range (Just cmd') Nothing - | (m, r) <- matches, - let (_, startLine) = head m - (endLineContents, endLine) = last m - range = Range start end - start = Position startLine 0 - end = Position endLine (T.length endLineContents) - args = EvalParams m r _textDocument, - let cmd' = - (cmd :: Command) - { _arguments = Just (List [toJSON args]), - _title = if trivial r then "Evaluate..." else "Refresh..." - } - ] - - return $ List lenses - where - trivial (Range p p') = p == p' - -evalCommandName :: CommandId -evalCommandName = "evalCommand" - -evalCommand :: PluginCommand -evalCommand = - PluginCommand evalCommandName "evaluate" runEvalCmd - -data EvalParams = EvalParams - { statements :: [(Text, Int)], - editTarget :: !Range, - module_ :: !TextDocumentIdentifier - } - deriving (Eq, Show, Generic, FromJSON, ToJSON) - -runEvalCmd :: CommandFunction EvalParams -runEvalCmd lsp state EvalParams {..} = response' $ do - let TextDocumentIdentifier {_uri} = module_ - fp <- handleMaybe "uri" $ uriToFilePath' _uri - contents <- liftIO $ getVirtualFileFunc lsp $ toNormalizedUri _uri - text <- handleMaybe "contents" $ virtualFileText <$> contents - -{- Note: GhcSessionDeps - -Depending on GhcSession means we do need to reload all the module -dependencies in the GHC session(from interface files, hopefully). - -The GhcSessionDeps dependency would allow us to reuse a GHC session preloaded -with all the dependencies. Unfortunately, the ModSummary objects that -GhcSessionDeps puts in the GHC session are not suitable for reuse since they -clear out the timestamps; this is done to avoid internal ghcide bugs and -can probably be relaxed so that plugins like Eval can reuse them. Once that's -done, we want to switch back to GhcSessionDeps: - --- https://github.com/digital-asset/ghcide/pull/694 - - -} - session <- - liftIO $ - runAction "runEvalCmd.ghcSession" state $ - use_ GhcSession $ -- See the note on GhcSessionDeps - toNormalizedFilePath' $ - fp - - ms <- - liftIO $ - runAction "runEvalCmd.getModSummary" state $ - use_ GetModSummary $ - toNormalizedFilePath' $ - fp - - now <- liftIO getCurrentTime - - let tmp = withSystemTempFile (takeFileName fp) - - tmp $ \temp _h -> tmp $ \tempLog hLog -> do - liftIO $ hClose _h - let modName = moduleName $ ms_mod ms - thisModuleTarget = Target (TargetFile fp Nothing) False (Just (textToStringBuffer text, now)) - - hscEnv' <- ExceptT $ - evalGhcEnv (hscEnv session) $ do - df <- getSessionDynFlags - env <- getSession - df <- liftIO $ setupDynFlagsForGHCiLike env df - _lp <- setSessionDynFlags df - - -- copy the package state to the interactive DynFlags - idflags <- getInteractiveDynFlags - df <- getSessionDynFlags - setInteractiveDynFlags - idflags - { pkgState = pkgState df, - pkgDatabase = pkgDatabase df, - packageFlags = packageFlags df - } - - -- set up a custom log action - setLogAction $ \_df _wr _sev _span _style _doc -> - defaultLogActionHPutStrDoc _df hLog _doc _style - - -- load the module in the interactive environment - setTargets [thisModuleTarget] - loadResult <- load LoadAllTargets - case loadResult of - Failed -> liftIO $ do - hClose hLog - Left <$> readFile tempLog - Succeeded -> do - setContext [IIDecl (simpleImportDecl $ moduleName pRELUDE), IIModule modName] - Right <$> getSession - - df <- liftIO $ evalGhcEnv hscEnv' getSessionDynFlags - let eval (stmt, l) - | isStmt df stmt = do - -- set up a custom interactive print function - liftIO $ writeFile temp "" - ctxt <- getContext - setContext 
[IIDecl (simpleImportDecl $ moduleName pRELUDE)] - let printFun = "let ghcideCustomShow x = Prelude.writeFile " <> show temp <> " (Prelude.show x)" - interactivePrint <- - execStmt printFun execOptions >>= \case - ExecComplete (Right [interactivePrint]) _ -> pure interactivePrint - _ -> error "internal error binding print function" - modifySession $ \hsc -> hsc {hsc_IC = setInteractivePrintName (hsc_IC hsc) interactivePrint} - setContext ctxt - - let opts = - execOptions - { execSourceFile = fp, - execLineNumber = l - } - res <- execStmt stmt opts - case res of - ExecComplete (Left err) _ -> return $ Just $ T.pack $ pad $ show err - ExecComplete (Right _) _ -> do - out <- liftIO $ pad <$> readFile temp - -- Important to take the length in order to read the file eagerly - return $! if length out == 0 then Nothing else Just (T.pack out) - ExecBreak {} -> return $ Just $ T.pack $ pad "breakpoints are not supported" - - | isImport df stmt = do - ctxt <- getContext - idecl <- parseImportDecl stmt - setContext $ IIDecl idecl : ctxt - return Nothing - | otherwise = do - void $ runDecls stmt - return Nothing - - edits <- liftIO $ evalGhcEnv hscEnv' $ traverse (eval . first T.unpack) statements - - - let workspaceEditsMap = Map.fromList [(_uri, List [evalEdit])] - workspaceEdits = WorkspaceEdit (Just workspaceEditsMap) Nothing - evalEdit = TextEdit editTarget (T.intercalate "\n" $ catMaybes edits) - - return (WorkspaceApplyEdit, ApplyWorkspaceEditParams workspaceEdits) - -pad :: String -> String -pad = unlines . map ("-- " <>) . lines - -------------------------------------------------------------------------------- - -handleMaybe :: Monad m => e -> Maybe b -> ExceptT e m b -handleMaybe msg = maybe (throwE msg) return - -handleMaybeM :: Monad m => e -> m (Maybe b) -> ExceptT e m b -handleMaybeM msg act = maybe (throwE msg) return =<< lift act - -response :: ExceptT String IO a -> IO (Either ResponseError a) -response = - fmap (first (\msg -> ResponseError InternalError (fromString msg) Nothing)) - . 
runExceptT - -response' :: ExceptT String IO a -> IO (Either ResponseError Value, Maybe a) -response' act = do - res <- runExceptT act - case res of - Left e -> - return (Left (ResponseError InternalError (fromString e) Nothing), Nothing) - Right a -> return (Right Null, Just a) - -setupDynFlagsForGHCiLike :: HscEnv -> DynFlags -> IO DynFlags -setupDynFlagsForGHCiLike env dflags = do - let dflags3 = - dflags - { hscTarget = HscInterpreted, - ghcMode = CompManager, - ghcLink = LinkInMemory - } - platform = targetPlatform dflags3 - dflags3a = updateWays $ dflags3 {ways = interpWays} - dflags3b = - foldl gopt_set dflags3a $ - concatMap - (wayGeneralFlags platform) - interpWays - dflags3c = - foldl gopt_unset dflags3b $ - concatMap - (wayUnsetGeneralFlags platform) - interpWays - dflags4 = - dflags3c `gopt_set` Opt_ImplicitImportQualified - `gopt_set` Opt_IgnoreOptimChanges - `gopt_set` Opt_IgnoreHpcChanges - initializePlugins env dflags4 diff --git a/src/Ide/Plugin/Example.hs b/src/Ide/Plugin/Example.hs deleted file mode 100644 index 63f7083b37..0000000000 --- a/src/Ide/Plugin/Example.hs +++ /dev/null @@ -1,234 +0,0 @@ -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DuplicateRecordFields #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE TypeFamilies #-} - -module Ide.Plugin.Example - ( - descriptor - ) where - -import Control.DeepSeq ( NFData ) -import Control.Monad.Trans.Maybe -import Data.Aeson -import Data.Binary -import Data.Functor -import qualified Data.HashMap.Strict as Map -import qualified Data.HashSet as HashSet -import Data.Hashable -import qualified Data.Text as T -import Data.Typeable -import Development.IDE.Core.OfInterest -import Development.IDE.Core.RuleTypes -import Development.IDE.Core.Rules -import Development.IDE.Core.Service -import Development.IDE.Core.Shake -import Development.IDE.Types.Diagnostics as D -import Development.IDE.Types.Location -import Development.IDE.Types.Logger -import Development.Shake hiding ( Diagnostic ) -import GHC.Generics -import Ide.Plugin -import Ide.Types -import Language.Haskell.LSP.Types -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginRules = exampleRules - , pluginCommands = [PluginCommand "codelens.todo" "example adding" addTodoCmd] - , pluginCodeActionProvider = Just codeAction - , pluginCodeLensProvider = Just codeLens - , pluginHoverProvider = Just hover - , pluginSymbolsProvider = Just symbols - , pluginCompletionProvider = Just completion - } - --- --------------------------------------------------------------------- - -hover :: IdeState -> TextDocumentPositionParams -> IO (Either ResponseError (Maybe Hover)) -hover = request "Hover" blah (Right Nothing) foundHover - -blah :: NormalizedFilePath -> Position -> Action (Maybe (Maybe Range, [T.Text])) -blah _ (Position line col) - = return $ Just (Just (Range (Position line col) (Position (line+1) 0)), ["example hover 1\n"]) - --- --------------------------------------------------------------------- --- Generating Diagnostics via rules --- --------------------------------------------------------------------- - -data Example = Example - deriving (Eq, Show, Typeable, Generic) -instance Hashable Example -instance NFData Example -instance Binary Example - -type 
instance RuleResult Example = () - -exampleRules :: Rules () -exampleRules = do - define $ \Example file -> do - _pm <- getParsedModule file - let diag = mkDiag file "example" DsError (Range (Position 0 0) (Position 1 0)) "example diagnostic, hello world" - return ([diag], Just ()) - - action $ do - files <- getFilesOfInterest - void $ uses Example $ HashSet.toList files - -mkDiag :: NormalizedFilePath - -> DiagnosticSource - -> DiagnosticSeverity - -> Range - -> T.Text - -> FileDiagnostic -mkDiag file diagSource sev loc msg = (file, D.ShowDiag,) - Diagnostic - { _range = loc - , _severity = Just sev - , _source = Just diagSource - , _message = msg - , _code = Nothing - , _tags = Nothing - , _relatedInformation = Nothing - } - --- --------------------------------------------------------------------- --- code actions --- --------------------------------------------------------------------- - --- | Generate code actions. -codeAction :: CodeActionProvider -codeAction _lf _state _pid (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=List _xs} = do - let - title = "Add TODO Item 1" - tedit = [TextEdit (Range (Position 2 0) (Position 2 0)) - "-- TODO1 added by Example Plugin directly\n"] - edit = WorkspaceEdit (Just $ Map.singleton uri $ List tedit) Nothing - pure $ Right $ List - [ CACodeAction $ CodeAction title (Just CodeActionQuickFix) (Just $ List []) (Just edit) Nothing ] - --- --------------------------------------------------------------------- - -codeLens :: CodeLensProvider -codeLens _lf ideState plId CodeLensParams{_textDocument=TextDocumentIdentifier uri} = do - logInfo (ideLogger ideState) "Example.codeLens entered (ideLogger)" -- AZ - case uriToFilePath' uri of - Just (toNormalizedFilePath -> filePath) -> do - _ <- runIdeAction "Example.codeLens" (shakeExtras ideState) $ runMaybeT $ useE TypeCheck filePath - _diag <- getDiagnostics ideState - _hDiag <- getHiddenDiagnostics ideState - let - title = "Add TODO Item via Code Lens" - -- tedit = [TextEdit (Range (Position 3 0) (Position 3 0)) - -- "-- TODO added by Example Plugin via code lens action\n"] - -- edit = WorkspaceEdit (Just $ Map.singleton uri $ List tedit) Nothing - range = Range (Position 3 0) (Position 4 0) - let cmdParams = AddTodoParams uri "do abc" - cmd <- mkLspCommand plId "codelens.todo" title (Just [(toJSON cmdParams)]) - pure $ Right $ List [ CodeLens range (Just cmd) Nothing ] - Nothing -> pure $ Right $ List [] - --- --------------------------------------------------------------------- --- | Parameters for the addTodo PluginCommand. 
-data AddTodoParams = AddTodoParams - { file :: Uri -- ^ Uri of the file to add the pragma to - , todoText :: T.Text - } - deriving (Show, Eq, Generic, ToJSON, FromJSON) - -addTodoCmd :: CommandFunction AddTodoParams -addTodoCmd _lf _ide (AddTodoParams uri todoText) = do - let - pos = Position 3 0 - textEdits = List - [TextEdit (Range pos pos) - ("-- TODO:" <> todoText <> "\n") - ] - res = WorkspaceEdit - (Just $ Map.singleton uri textEdits) - Nothing - return (Right Null, Just (WorkspaceApplyEdit, ApplyWorkspaceEditParams res)) - --- --------------------------------------------------------------------- - -foundHover :: (Maybe Range, [T.Text]) -> Either ResponseError (Maybe Hover) -foundHover (mbRange, contents) = - Right $ Just $ Hover (HoverContents $ MarkupContent MkMarkdown - $ T.intercalate sectionSeparator contents) mbRange - - --- | Respond to and log a hover or go-to-definition request -request - :: T.Text - -> (NormalizedFilePath -> Position -> Action (Maybe a)) - -> Either ResponseError b - -> (a -> Either ResponseError b) - -> IdeState - -> TextDocumentPositionParams - -> IO (Either ResponseError b) -request label getResults notFound found ide (TextDocumentPositionParams (TextDocumentIdentifier uri) pos _) = do - mbResult <- case uriToFilePath' uri of - Just path -> logAndRunRequest label getResults ide pos path - Nothing -> pure Nothing - pure $ maybe notFound found mbResult - -logAndRunRequest :: T.Text -> (NormalizedFilePath -> Position -> Action b) - -> IdeState -> Position -> String -> IO b -logAndRunRequest label getResults ide pos path = do - let filePath = toNormalizedFilePath path - logInfo (ideLogger ide) $ - label <> " request at position " <> T.pack (showPosition pos) <> - " in file: " <> T.pack path - runAction "Example" ide $ getResults filePath pos - --- --------------------------------------------------------------------- - -symbols :: SymbolsProvider -symbols _lf _ide (DocumentSymbolParams _doc _mt) - = pure $ Right [r] - where - r = DocumentSymbol name detail kind deprecation range selR chList - name = "Example_symbol_name" - detail = Nothing - kind = SkVariable - deprecation = Nothing - range = Range (Position 2 0) (Position 2 5) - selR = range - chList = Nothing - --- --------------------------------------------------------------------- - -completion :: CompletionProvider -completion _lf _ide (CompletionParams _doc _pos _mctxt _mt) - = pure $ Right $ Completions $ List [r] - where - r = CompletionItem label kind tags detail documentation deprecated preselect - sortText filterText insertText insertTextFormat - textEdit additionalTextEdits commitCharacters - command xd - label = "Example completion" - kind = Nothing - tags = List [] - detail = Nothing - documentation = Nothing - deprecated = Nothing - preselect = Nothing - sortText = Nothing - filterText = Nothing - insertText = Nothing - insertTextFormat = Nothing - textEdit = Nothing - additionalTextEdits = Nothing - commitCharacters = Nothing - command = Nothing - xd = Nothing - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/Example2.hs b/src/Ide/Plugin/Example2.hs deleted file mode 100644 index acfdbffdd2..0000000000 --- a/src/Ide/Plugin/Example2.hs +++ /dev/null @@ -1,231 +0,0 @@ -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DuplicateRecordFields #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TupleSections #-} 
-{-# LANGUAGE TypeFamilies #-} - -module Ide.Plugin.Example2 - ( - descriptor - ) where - -import Control.DeepSeq ( NFData ) -import Control.Monad.Trans.Maybe -import Data.Aeson -import Data.Binary -import Data.Functor -import qualified Data.HashMap.Strict as Map -import qualified Data.HashSet as HashSet -import Data.Hashable -import qualified Data.Text as T -import Data.Typeable -import Development.IDE.Core.OfInterest -import Development.IDE.Core.RuleTypes -import Development.IDE.Core.Rules -import Development.IDE.Core.Service -import Development.IDE.Core.Shake -import Development.IDE.Types.Diagnostics as D -import Development.IDE.Types.Location -import Development.IDE.Types.Logger -import Development.Shake hiding ( Diagnostic ) -import GHC.Generics -import Ide.Plugin -import Ide.Types -import Language.Haskell.LSP.Types -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginRules = exampleRules - , pluginCommands = [PluginCommand "codelens.todo" "example adding" addTodoCmd] - , pluginCodeActionProvider = Just codeAction - , pluginCodeLensProvider = Just codeLens - , pluginHoverProvider = Just hover - , pluginSymbolsProvider = Just symbols - , pluginCompletionProvider = Just completion - } - --- --------------------------------------------------------------------- - -hover :: IdeState -> TextDocumentPositionParams -> IO (Either ResponseError (Maybe Hover)) -hover = request "Hover" blah (Right Nothing) foundHover - -blah :: NormalizedFilePath -> Position -> Action (Maybe (Maybe Range, [T.Text])) -blah _ (Position line col) - = return $ Just (Just (Range (Position line col) (Position (line+1) 0)), ["example hover 2\n"]) - --- --------------------------------------------------------------------- --- Generating Diagnostics via rules --- --------------------------------------------------------------------- - -data Example2 = Example2 - deriving (Eq, Show, Typeable, Generic) -instance Hashable Example2 -instance NFData Example2 -instance Binary Example2 - -type instance RuleResult Example2 = () - -exampleRules :: Rules () -exampleRules = do - define $ \Example2 file -> do - _pm <- getParsedModule file - let diag = mkDiag file "example2" DsError (Range (Position 0 0) (Position 1 0)) "example2 diagnostic, hello world" - return ([diag], Just ()) - - action $ do - files <- getFilesOfInterest - void $ uses Example2 $ HashSet.toList files - -mkDiag :: NormalizedFilePath - -> DiagnosticSource - -> DiagnosticSeverity - -> Range - -> T.Text - -> FileDiagnostic -mkDiag file diagSource sev loc msg = (file, D.ShowDiag,) - Diagnostic - { _range = loc - , _severity = Just sev - , _source = Just diagSource - , _message = msg - , _code = Nothing - , _tags = Nothing - , _relatedInformation = Nothing - } - --- --------------------------------------------------------------------- --- code actions --- --------------------------------------------------------------------- - --- | Generate code actions. 
-codeAction :: CodeActionProvider -codeAction _lf _state _pid (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=List _xs} = do - let - title = "Add TODO2 Item" - tedit = [TextEdit (Range (Position 3 0) (Position 3 0)) - "-- TODO2 added by Example2 Plugin directly\n"] - edit = WorkspaceEdit (Just $ Map.singleton uri $ List tedit) Nothing - pure $ Right $ List - [ CACodeAction $ CodeAction title (Just CodeActionQuickFix) (Just $ List []) (Just edit) Nothing ] - --- --------------------------------------------------------------------- - -codeLens :: CodeLensProvider -codeLens _lf ideState plId CodeLensParams{_textDocument=TextDocumentIdentifier uri} = do - logInfo (ideLogger ideState) "Example2.codeLens entered (ideLogger)" -- AZ - case uriToFilePath' uri of - Just (toNormalizedFilePath -> filePath) -> do - _ <- runIdeAction (fromNormalizedFilePath filePath) (shakeExtras ideState) $ runMaybeT $ useE TypeCheck filePath - _diag <- getDiagnostics ideState - _hDiag <- getHiddenDiagnostics ideState - let - title = "Add TODO2 Item via Code Lens" - range = Range (Position 3 0) (Position 4 0) - let cmdParams = AddTodoParams uri "do abc" - cmd <- mkLspCommand plId "codelens.todo" title (Just [toJSON cmdParams]) - pure $ Right $ List [ CodeLens range (Just cmd) Nothing ] - Nothing -> pure $ Right $ List [] - --- --------------------------------------------------------------------- --- | Parameters for the addTodo PluginCommand. -data AddTodoParams = AddTodoParams - { file :: Uri -- ^ Uri of the file to add the pragma to - , todoText :: T.Text - } - deriving (Show, Eq, Generic, ToJSON, FromJSON) - -addTodoCmd :: CommandFunction AddTodoParams -addTodoCmd _lf _ide (AddTodoParams uri todoText) = do - let - pos = Position 5 0 - textEdits = List - [TextEdit (Range pos pos) - ("-- TODO2:" <> todoText <> "\n") - ] - res = WorkspaceEdit - (Just $ Map.singleton uri textEdits) - Nothing - return (Right Null, Just (WorkspaceApplyEdit, ApplyWorkspaceEditParams res)) - --- --------------------------------------------------------------------- - -foundHover :: (Maybe Range, [T.Text]) -> Either ResponseError (Maybe Hover) -foundHover (mbRange, contents) = - Right $ Just $ Hover (HoverContents $ MarkupContent MkMarkdown - $ T.intercalate sectionSeparator contents) mbRange - - --- | Respond to and log a hover or go-to-definition request -request - :: T.Text - -> (NormalizedFilePath -> Position -> Action (Maybe a)) - -> Either ResponseError b - -> (a -> Either ResponseError b) - -> IdeState - -> TextDocumentPositionParams - -> IO (Either ResponseError b) -request label getResults notFound found ide (TextDocumentPositionParams (TextDocumentIdentifier uri) pos _) = do - mbResult <- case uriToFilePath' uri of - Just path -> logAndRunRequest label getResults ide pos path - Nothing -> pure Nothing - pure $ maybe notFound found mbResult - -logAndRunRequest :: T.Text -> (NormalizedFilePath -> Position -> Action b) - -> IdeState -> Position -> String -> IO b -logAndRunRequest label getResults ide pos path = do - let filePath = toNormalizedFilePath path - logInfo (ideLogger ide) $ - label <> " request at position " <> T.pack (showPosition pos) <> - " in file: " <> T.pack path - runAction "Example2" ide $ getResults filePath pos - --- --------------------------------------------------------------------- - -symbols :: SymbolsProvider -symbols _lf _ide (DocumentSymbolParams _doc _mt) - = pure $ Right [r] - where - r = DocumentSymbol name detail kind deprecation range selR chList - name = "Example2_symbol_name" - 
detail = Nothing - kind = SkVariable - deprecation = Nothing - range = Range (Position 4 1) (Position 4 7) - selR = range - chList = Nothing - --- --------------------------------------------------------------------- - -completion :: CompletionProvider -completion _lf _ide (CompletionParams _doc _pos _mctxt _mt) - = pure $ Right $ Completions $ List [r] - where - r = CompletionItem label kind tags detail documentation deprecated preselect - sortText filterText insertText insertTextFormat - textEdit additionalTextEdits commitCharacters - command xd - label = "Example2 completion" - kind = Nothing - tags = List [] - detail = Nothing - documentation = Nothing - deprecated = Nothing - preselect = Nothing - sortText = Nothing - filterText = Nothing - insertText = Nothing - insertTextFormat = Nothing - textEdit = Nothing - additionalTextEdits = Nothing - commitCharacters = Nothing - command = Nothing - xd = Nothing - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/Floskell.hs b/src/Ide/Plugin/Floskell.hs deleted file mode 100644 index b77f974e3e..0000000000 --- a/src/Ide/Plugin/Floskell.hs +++ /dev/null @@ -1,62 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE ViewPatterns #-} - -module Ide.Plugin.Floskell - ( - descriptor - , provider - ) -where - -import qualified Data.ByteString.Lazy as BS -import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import Development.IDE.Types.Diagnostics as D -import Development.IDE.Types.Location -import Floskell -import Ide.Plugin.Formatter -import Ide.Types -import Language.Haskell.LSP.Types -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginFormattingProvider = Just provider - } - --- --------------------------------------------------------------------- - --- | Format provider of Floskell. --- Formats the given source in either a given Range or the whole Document. --- If the provider fails an error is returned that can be displayed to the user. -provider :: FormattingProvider IO -provider _lf _ideState typ contents fp _ = do - let file = fromNormalizedFilePath fp - config <- findConfigOrDefault file - let (range, selectedContents) = case typ of - FormatText -> (fullRange contents, contents) - FormatRange r -> (r, extractRange r contents) - result = reformat config (Just file) (BS.fromStrict (T.encodeUtf8 selectedContents)) - case result of - Left err -> return $ Left $ responseError (T.pack $ "floskellCmd: " ++ err) - Right new -> return $ Right $ List [TextEdit range (T.decodeUtf8 (BS.toStrict new))] - --- | Find Floskell Config, user and system wide or provides a default style. --- Every directory of the filepath will be searched to find a user configuration. --- Also looks into places such as XDG_CONFIG_DIRECTORY. --- This function may not throw an exception and returns a default config. 
-findConfigOrDefault :: FilePath -> IO AppConfig -findConfigOrDefault file = do - mbConf <- findAppConfigIn file - case mbConf of - Just confFile -> readAppConfig confFile - Nothing -> - let gibiansky = head (filter (\s -> styleName s == "gibiansky") styles) - in return $ defaultAppConfig { appStyle = gibiansky } - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/Formatter.hs b/src/Ide/Plugin/Formatter.hs deleted file mode 100644 index 5fccb50b4a..0000000000 --- a/src/Ide/Plugin/Formatter.hs +++ /dev/null @@ -1,108 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE ViewPatterns #-} - -module Ide.Plugin.Formatter - ( - formatting - , rangeFormatting - , noneProvider - , responseError - , extractRange - , fullRange - ) -where - -import qualified Data.Map as Map -import qualified Data.Text as T -import Development.IDE.Core.FileStore -import Development.IDE.Core.Rules -import Development.IDE.Core.Shake --- import Development.IDE.LSP.Server --- import Development.IDE.Plugin -import Development.IDE.Types.Diagnostics as D -import Development.IDE.Types.Location --- import Development.Shake hiding ( Diagnostic ) --- import Ide.Logger -import Ide.Types -import Development.IDE.Types.Logger -import Ide.Plugin.Config -import qualified Language.Haskell.LSP.Core as LSP --- import Language.Haskell.LSP.Messages -import Language.Haskell.LSP.Types -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -formatting :: Map.Map PluginId (FormattingProvider IO) - -> LSP.LspFuncs Config -> IdeState -> DocumentFormattingParams - -> IO (Either ResponseError (List TextEdit)) -formatting providers lf ideState - (DocumentFormattingParams (TextDocumentIdentifier uri) params _mprogress) - = doFormatting lf providers ideState FormatText uri params - --- --------------------------------------------------------------------- - -rangeFormatting :: Map.Map PluginId (FormattingProvider IO) - -> LSP.LspFuncs Config -> IdeState -> DocumentRangeFormattingParams - -> IO (Either ResponseError (List TextEdit)) -rangeFormatting providers lf ideState - (DocumentRangeFormattingParams (TextDocumentIdentifier uri) range params _mprogress) - = doFormatting lf providers ideState (FormatRange range) uri params - --- --------------------------------------------------------------------- - -doFormatting :: LSP.LspFuncs Config -> Map.Map PluginId (FormattingProvider IO) - -> IdeState -> FormattingType -> Uri -> FormattingOptions - -> IO (Either ResponseError (List TextEdit)) -doFormatting lf providers ideState ft uri params = do - mc <- LSP.config lf - let mf = maybe "none" formattingProvider mc - case Map.lookup (PluginId mf) providers of - Just provider -> - case uriToFilePath uri of - Just (toNormalizedFilePath -> fp) -> do - (_, mb_contents) <- runAction "Formatter" ideState $ getFileContents fp - case mb_contents of - Just contents -> do - logDebug (ideLogger ideState) $ T.pack $ - "Formatter.doFormatting: contents=" ++ show contents -- AZ - provider lf ideState ft contents fp params - Nothing -> return $ Left $ responseError $ T.pack $ "Formatter plugin: could not get file contents for " ++ show uri - Nothing -> return $ Left $ responseError $ T.pack $ "Formatter plugin: uriToFilePath failed for: " ++ show uri - Nothing -> return $ Left $ responseError $ T.pack $ "Formatter plugin: no formatter found for:[" ++ T.unpack mf 
++ "]" - --- --------------------------------------------------------------------- - -noneProvider :: FormattingProvider IO -noneProvider _ _ _ _ _ _ = return $ Right (List []) - --- --------------------------------------------------------------------- - -responseError :: T.Text -> ResponseError -responseError txt = ResponseError InvalidParams txt Nothing - --- --------------------------------------------------------------------- - -extractRange :: Range -> T.Text -> T.Text -extractRange (Range (Position sl _) (Position el _)) s = newS - where focusLines = take (el-sl+1) $ drop sl $ T.lines s - newS = T.unlines focusLines - --- | Gets the range that covers the entire text -fullRange :: T.Text -> Range -fullRange s = Range startPos endPos - where startPos = Position 0 0 - endPos = Position lastLine 0 - {- - In order to replace everything including newline characters, - the end range should extend below the last line. From the specification: - "If you want to specify a range that contains a line including - the line ending character(s) then use an end position denoting - the start of the next line" - -} - lastLine = length $ T.lines s - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/GhcIde.hs b/src/Ide/Plugin/GhcIde.hs deleted file mode 100644 index 06ebbd5f75..0000000000 --- a/src/Ide/Plugin/GhcIde.hs +++ /dev/null @@ -1,66 +0,0 @@ -{-# LANGUAGE DuplicateRecordFields #-} -{-# LANGUAGE OverloadedStrings #-} -module Ide.Plugin.GhcIde - ( - descriptor - ) where - -import Data.Aeson -import Development.IDE.Core.Service -import Development.IDE.LSP.HoverDefinition -import Development.IDE.LSP.Outline -import Development.IDE.Plugin.CodeAction -import Development.IDE.Plugin.Completions -import Development.IDE.Types.Logger -import Ide.Plugin -import Ide.Types -import Language.Haskell.LSP.Types -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginCommands = [PluginCommand (CommandId "typesignature.add") "adds a signature" commandAddSignature] - , pluginCodeActionProvider = Just codeAction' - , pluginCodeLensProvider = Just codeLens' - , pluginHoverProvider = Just hover' - , pluginSymbolsProvider = Just symbolsProvider - , pluginCompletionProvider = Just getCompletionsLSP - } - --- --------------------------------------------------------------------- - -hover' :: HoverProvider -hover' ideState params = do - logInfo (ideLogger ideState) "GhcIde.hover entered (ideLogger)" -- AZ - hover ideState params - --- --------------------------------------------------------------------- - -commandAddSignature :: CommandFunction WorkspaceEdit -commandAddSignature lf ide params - = executeAddSignatureCommand lf ide (ExecuteCommandParams "typesignature.add" (Just (List [toJSON params])) Nothing) - --- --------------------------------------------------------------------- - -codeAction' :: CodeActionProvider -codeAction' lf ide _ doc range context = fmap List <$> codeAction lf ide doc range context - --- --------------------------------------------------------------------- - -codeLens' :: CodeLensProvider -codeLens' lf ide _ params = codeLens lf ide params - --- --------------------------------------------------------------------- - -symbolsProvider :: SymbolsProvider -symbolsProvider ls ide params = do - ds <- moduleOutline ls ide params - case ds of - Right (DSDocumentSymbols (List ls)) -> return $ Right ls 
- Right (DSSymbolInformation (List _si)) -> - return $ Left $ responseError "GhcIde.symbolsProvider: DSSymbolInformation deprecated" - Left err -> return $ Left err - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/Ormolu.hs b/src/Ide/Plugin/Ormolu.hs deleted file mode 100644 index aa337fbc8e..0000000000 --- a/src/Ide/Plugin/Ormolu.hs +++ /dev/null @@ -1,77 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE ViewPatterns #-} - -module Ide.Plugin.Ormolu - ( - descriptor - , provider - ) -where - -import Control.Exception -import qualified Data.Text as T -import Development.IDE.Core.Rules -import Development.IDE.Types.Diagnostics as D -import Development.IDE.Types.Location -import qualified DynFlags as D -import qualified EnumSet as S -import GHC -import Ide.Types -import Ide.PluginUtils -import Ide.Plugin.Formatter -import Language.Haskell.LSP.Types -import Ormolu -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginFormattingProvider = Just provider - } - --- --------------------------------------------------------------------- - -provider :: FormattingProvider IO -provider _lf ideState typ contents fp _ = do - let - fromDyn :: ParsedModule -> IO [DynOption] - fromDyn pmod = - let - df = ms_hspp_opts $ pm_mod_summary pmod - pp = - let p = D.sPgm_F $ D.settings df - in if null p then [] else ["-pgmF=" <> p] - pm = map (("-fplugin=" <>) . moduleNameString) $ D.pluginModNames df - ex = map (("-X" <>) . show) $ S.toList $ D.extensionFlags df - in - return $ map DynOption $ pp <> pm <> ex - - m_parsed <- runAction "Ormolu" ideState $ getParsedModule fp - fileOpts <- case m_parsed of - Nothing -> return [] - Just pm -> fromDyn pm - - let - fullRegion = RegionIndices Nothing Nothing - rangeRegion s e = RegionIndices (Just s) (Just e) - mkConf o region = defaultConfig { cfgDynOptions = o, cfgRegion = region } - fmt :: T.Text -> Config RegionIndices -> IO (Either OrmoluException T.Text) - fmt cont conf = - try @OrmoluException (ormolu conf (fromNormalizedFilePath fp) $ T.unpack cont) - - case typ of - FormatText -> ret <$> fmt contents (mkConf fileOpts fullRegion) - FormatRange r -> - let - Range (Position sl _) (Position el _) = normalize r - in - ret <$> fmt contents (mkConf fileOpts (rangeRegion sl el)) - where - ret :: Either OrmoluException T.Text -> Either ResponseError (List TextEdit) - ret (Left err) = Left - (responseError (T.pack $ "ormoluCmd: " ++ show err) ) - ret (Right new) = Right (makeDiffTextEdit contents new) diff --git a/src/Ide/Plugin/Pragmas.hs b/src/Ide/Plugin/Pragmas.hs deleted file mode 100644 index 534610f963..0000000000 --- a/src/Ide/Plugin/Pragmas.hs +++ /dev/null @@ -1,162 +0,0 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE OverloadedStrings #-} - --- | Provides code actions to add missing pragmas (whenever GHC suggests to) -module Ide.Plugin.Pragmas - ( - descriptor - -- , commands -- TODO: get rid of this - ) where - -import Control.Lens hiding (List) -import Data.Aeson -import qualified Data.HashMap.Strict as H -import qualified Data.Text as T -import Ide.Plugin -import Ide.Types -import qualified GHC.Generics as Generics -import qualified Language.Haskell.LSP.Types as J -import qualified 
Language.Haskell.LSP.Types.Lens as J -import Development.IDE.Types.Diagnostics as D -import Language.Haskell.LSP.Types - --- --------------------------------------------------------------------- - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginCommands = commands - , pluginCodeActionProvider = Just codeActionProvider - } - --- --------------------------------------------------------------------- - -commands :: [PluginCommand] -commands = [ PluginCommand "addPragma" "add the given pragma" addPragmaCmd - ] - --- --------------------------------------------------------------------- - --- | Parameters for the addPragma PluginCommand. -data AddPragmaParams = AddPragmaParams - { file :: J.Uri -- ^ Uri of the file to add the pragma to - , pragma :: T.Text -- ^ Name of the Pragma to add - } - deriving (Show, Eq, Generics.Generic, ToJSON, FromJSON) - --- | Add a Pragma to the given URI at the top of the file. --- Pragma is added to the first line of the Uri. --- It is assumed that the pragma name is a valid pragma, --- thus, not validated. -addPragmaCmd :: CommandFunction AddPragmaParams -addPragmaCmd _lf _ide (AddPragmaParams uri pragmaName) = do - let - pos = J.Position 0 0 - textEdits = J.List - [J.TextEdit (J.Range pos pos) - ("{-# LANGUAGE " <> pragmaName <> " #-}\n") - ] - res = J.WorkspaceEdit - (Just $ H.singleton uri textEdits) - Nothing - return (Right Null, Just (WorkspaceApplyEdit, ApplyWorkspaceEditParams res)) - --- --------------------------------------------------------------------- - --- | Offer to add a missing Language Pragma to the top of a file. --- Pragmas are defined by a curated list of known pragmas, see 'possiblePragmas'. -codeActionProvider :: CodeActionProvider -codeActionProvider _ _ plId docId _ (J.CodeActionContext (J.List diags) _monly) = do - cmds <- mapM mkCommand pragmas - -- cmds <- mapM mkCommand ("FooPragma":pragmas) - return $ Right $ List cmds - where - -- Filter diagnostics that are from ghcmod - ghcDiags = filter (\d -> d ^. J.source == Just "typecheck") diags - -- Get all potential Pragmas for all diagnostics. - pragmas = concatMap (\d -> findPragma (d ^. J.message)) ghcDiags - mkCommand pragmaName = do - let - -- | Code Action for the given command. - codeAction :: J.Command -> J.CAResult - codeAction cmd = J.CACodeAction $ J.CodeAction title (Just J.CodeActionQuickFix) (Just (J.List [])) Nothing (Just cmd) - title = "Add \"" <> pragmaName <> "\"" - cmdParams = [toJSON (AddPragmaParams (docId ^. J.uri) pragmaName )] - cmd <- mkLspCommand plId "addPragma" title (Just cmdParams) - return $ codeAction cmd - --- --------------------------------------------------------------------- - --- | Find all Pragmas are an infix of the search term. -findPragma :: T.Text -> [T.Text] -findPragma str = concatMap check possiblePragmas - where - check p = [p | T.isInfixOf p str] - --- --------------------------------------------------------------------- - --- | Possible Pragma names. --- Is non-exhaustive, and may be extended. 
-possiblePragmas :: [T.Text] -possiblePragmas = - [ - "ConstraintKinds" - , "DefaultSignatures" - , "DeriveAnyClass" - , "DeriveDataTypeable" - , "DeriveFoldable" - , "DeriveFunctor" - , "DeriveGeneric" - , "DeriveLift" - , "DeriveTraversable" - , "DerivingStrategies" - , "DerivingVia" - , "EmptyCase" - , "EmptyDataDecls" - , "EmptyDataDeriving" - , "FlexibleContexts" - , "FlexibleInstances" - , "GADTs" - , "GHCForeignImportPrim" - , "GeneralizedNewtypeDeriving" - , "IncoherentInstances" - , "InstanceSigs" - , "KindSignatures" - , "MultiParamTypeClasses" - , "MultiWayIf" - , "NamedFieldPuns" - , "NamedWildCards" - , "OverloadedStrings" - , "ParallelListComp" - , "PartialTypeSignatures" - , "PatternGuards" - , "PatternSignatures" - , "PatternSynonyms" - , "QuasiQuotes" - , "Rank2Types" - , "RankNTypes" - , "RecordPuns" - , "RecordWildCards" - , "RecursiveDo" - , "RelaxedPolyRec" - , "RoleAnnotations" - , "ScopedTypeVariables" - , "StandaloneDeriving" - , "StaticPointers" - , "TemplateHaskell" - , "TemplateHaskellQuotes" - , "TransformListComp" - , "TupleSections" - , "TypeApplications" - , "TypeFamilies" - , "TypeFamilyDependencies" - , "TypeInType" - , "TypeOperators" - , "TypeSynonymInstances" - , "UnboxedSums" - , "UndecidableInstances" - , "UndecidableSuperClasses" - , "ViewPatterns" - ] - --- --------------------------------------------------------------------- diff --git a/src/Ide/Plugin/StylishHaskell.hs b/src/Ide/Plugin/StylishHaskell.hs deleted file mode 100644 index 3fab035b91..0000000000 --- a/src/Ide/Plugin/StylishHaskell.hs +++ /dev/null @@ -1,59 +0,0 @@ -module Ide.Plugin.StylishHaskell - ( - descriptor - , provider - ) -where - -import Control.Monad.IO.Class -import Data.Text (Text) -import qualified Data.Text as T -import Ide.Plugin.Formatter -import Ide.PluginUtils -import Ide.Types -import Language.Haskell.Stylish -import Language.Haskell.LSP.Types as J - -import System.Directory -import System.FilePath - -descriptor :: PluginId -> PluginDescriptor -descriptor plId = (defaultPluginDescriptor plId) - { pluginFormattingProvider = Just provider - } - --- | Formatter provider of stylish-haskell. --- Formats the given source in either a given Range or the whole Document. --- If the provider fails an error is returned that can be displayed to the user. -provider :: FormattingProvider IO -provider _lf _ideState typ contents fp _opts = do - let file = fromNormalizedFilePath fp - config <- liftIO $ loadConfigFrom file - let (range, selectedContents) = case typ of - FormatText -> (fullRange contents, contents) - FormatRange r -> (normalize r, extractRange r contents) - result = runStylishHaskell file config selectedContents - case result of - Left err -> return $ Left $ responseError $ T.pack $ "stylishHaskellCmd: " ++ err - Right new -> return $ Right $ J.List [TextEdit range new] - --- | Recursively search in every directory of the given filepath for .stylish-haskell.yaml. --- If no such file has been found, return default config. -loadConfigFrom :: FilePath -> IO Config -loadConfigFrom file = do - currDir <- getCurrentDirectory - setCurrentDirectory (takeDirectory file) - config <- loadConfig (makeVerbose False) Nothing - setCurrentDirectory currDir - return config - --- | Run stylish-haskell on the given text with the given configuration. -runStylishHaskell :: FilePath -- ^ Location of the file being formatted. 
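findPragma above is just a substring scan of the typecheck diagnostic message against that curated pragma list. A tiny self-contained version (with a deliberately shortened list; knownPragmas is an illustrative name, not the plugin's) behaves the same way:

{-# LANGUAGE OverloadedStrings #-}
-- Sketch of findPragma above; the real plugin scans the full
-- possiblePragmas list and then offers an "Add ..." code action per hit.
import Data.Text (Text)
import qualified Data.Text as T

knownPragmas :: [Text]
knownPragmas = ["OverloadedStrings", "TupleSections", "LambdaCase"]

findPragma :: Text -> [Text]
findPragma msg = [p | p <- knownPragmas, p `T.isInfixOf` msg]

main :: IO ()
main =
  -- A diagnostic that mentions an extension name suggests that pragma.
  print (findPragma "Perhaps you intended to use TupleSections")
-- prints ["TupleSections"]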
Used for error message - -> Config -- ^ Configuration for stylish-haskell - -> Text -- ^ Text to format - -> Either String Text -- ^ Either formatted Text or an error message -runStylishHaskell file config = fmap fromLines . fmt . toLines - where - fromLines = T.pack . unlines - fmt = runSteps (configLanguageExtensions config) (Just file) (configSteps config) - toLines = lines . T.unpack diff --git a/src/Ide/PluginUtils.hs b/src/Ide/PluginUtils.hs deleted file mode 100644 index 41a295b639..0000000000 --- a/src/Ide/PluginUtils.hs +++ /dev/null @@ -1,112 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module Ide.PluginUtils where - -import qualified Data.Text as T -import Data.Maybe -import Data.Algorithm.DiffOutput -import Data.Algorithm.Diff -import qualified Data.HashMap.Strict as H -import Language.Haskell.LSP.Types.Capabilities -import qualified Language.Haskell.LSP.Types as J -import Language.Haskell.LSP.Types - --- --------------------------------------------------------------------- - --- | Extend to the line below and above to replace newline character. -normalize :: Range -> Range -normalize (Range (Position sl _) (Position el _)) = - Range (Position sl 0) (Position (el + 1) 0) - --- --------------------------------------------------------------------- - -data WithDeletions = IncludeDeletions | SkipDeletions - deriving Eq - --- | Generate a 'WorkspaceEdit' value from a pair of source Text -diffText :: ClientCapabilities -> (Uri,T.Text) -> T.Text -> WithDeletions -> WorkspaceEdit -diffText clientCaps old new withDeletions = - let - supports = clientSupportsDocumentChanges clientCaps - in diffText' supports old new withDeletions - -makeDiffTextEdit :: T.Text -> T.Text -> List TextEdit -makeDiffTextEdit f1 f2 = diffTextEdit f1 f2 IncludeDeletions - -makeDiffTextEditAdditive :: T.Text -> T.Text -> List TextEdit -makeDiffTextEditAdditive f1 f2 = diffTextEdit f1 f2 SkipDeletions - -diffTextEdit :: T.Text -> T.Text -> WithDeletions -> List TextEdit -diffTextEdit fText f2Text withDeletions = J.List r - where - r = map diffOperationToTextEdit diffOps - d = getGroupedDiff (lines $ T.unpack fText) (lines $ T.unpack f2Text) - - diffOps = filter (\x -> (withDeletions == IncludeDeletions) || not (isDeletion x)) - (diffToLineRanges d) - - isDeletion (Deletion _ _) = True - isDeletion _ = False - - - diffOperationToTextEdit :: DiffOperation LineRange -> J.TextEdit - diffOperationToTextEdit (Change fm to) = J.TextEdit range nt - where - range = calcRange fm - nt = T.pack $ init $ unlines $ lrContents to - - {- - In order to replace everything including newline characters, - the end range should extend below the last line. 
From the specification: - "If you want to specify a range that contains a line including - the line ending character(s) then use an end position denoting - the start of the next line" - -} - diffOperationToTextEdit (Deletion (LineRange (sl, el) _) _) = J.TextEdit range "" - where - range = J.Range (J.Position (sl - 1) 0) - (J.Position el 0) - - diffOperationToTextEdit (Addition fm l) = J.TextEdit range nt - -- fm has a range wrt to the changed file, which starts in the current file at l - -- So the range has to be shifted to start at l - where - range = J.Range (J.Position (l' - 1) 0) - (J.Position (l' - 1) 0) - l' = max l sl -- Needed to add at the end of the file - sl = fst $ lrNumbers fm - nt = T.pack $ unlines $ lrContents fm - - - calcRange fm = J.Range s e - where - sl = fst $ lrNumbers fm - sc = 0 - s = J.Position (sl - 1) sc -- Note: zero-based lines - el = snd $ lrNumbers fm - ec = length $ last $ lrContents fm - e = J.Position (el - 1) ec -- Note: zero-based lines - - --- | A pure version of 'diffText' for testing -diffText' :: Bool -> (Uri,T.Text) -> T.Text -> WithDeletions -> WorkspaceEdit -diffText' supports (f,fText) f2Text withDeletions = - if supports - then WorkspaceEdit Nothing (Just docChanges) - else WorkspaceEdit (Just h) Nothing - where - diff = diffTextEdit fText f2Text withDeletions - h = H.singleton f diff - docChanges = J.List [docEdit] - docEdit = J.TextDocumentEdit (J.VersionedTextDocumentIdentifier f (Just 0)) diff - --- --------------------------------------------------------------------- - -clientSupportsDocumentChanges :: ClientCapabilities -> Bool -clientSupportsDocumentChanges caps = - let ClientCapabilities mwCaps _ _ _ = caps - supports = do - wCaps <- mwCaps - WorkspaceEditClientCapabilities mDc <- _workspaceEdit wCaps - mDc - in - fromMaybe False supports \ No newline at end of file diff --git a/src/Ide/Types.hs b/src/Ide/Types.hs deleted file mode 100644 index 4f2bc29560..0000000000 --- a/src/Ide/Types.hs +++ /dev/null @@ -1,206 +0,0 @@ -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} - -module Ide.Types - ( - IdePlugins(..) - , PluginDescriptor(..) - , defaultPluginDescriptor - , PluginCommand(..) - , PluginId(..) - , CommandId(..) - , DiagnosticProvider(..) - , DiagnosticProviderFunc(..) - , SymbolsProvider - , FormattingType(..) - , FormattingProvider - , HoverProvider - , CodeActionProvider - , CodeLensProvider - , CommandFunction - , ExecuteCommandProvider - , CompletionProvider - , RenameProvider - , WithSnippets(..) 
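The diffTextEdit machinery above starts from a grouped line diff and turns each change into an LSP TextEdit. As a point of reference, this sketch shows only that first stage using the Diff package directly (nothing here is the PluginUtils code itself):

-- Sketch of the first stage of diffTextEdit above: group line-by-line
-- differences with the Diff package; the grouped hunks are what get turned
-- into TextEdits. Uses only Data.Algorithm.Diff.
import Data.Algorithm.Diff (getGroupedDiff)

main :: IO ()
main =
  print (getGroupedDiff (lines "a\nb\nc\n") (lines "a\nx\nc\n"))
-- prints something like:
--   [Both ["a"] ["a"],First ["b"],Second ["x"],Both ["c"] ["c"]]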
- ) where - -import Data.Aeson hiding (defaultOptions) -import qualified Data.Map as Map -import qualified Data.Set as S -import Data.String -import qualified Data.Text as T -import Development.IDE.Core.Rules -import Development.IDE.Types.Location -import Development.Shake -import Ide.Plugin.Config -import qualified Language.Haskell.LSP.Core as LSP -import Language.Haskell.LSP.Types -import Text.Regex.TDFA.Text() - --- --------------------------------------------------------------------- - -newtype IdePlugins = IdePlugins - { ipMap :: Map.Map PluginId PluginDescriptor - } - --- --------------------------------------------------------------------- - -data PluginDescriptor = - PluginDescriptor { pluginId :: !PluginId - , pluginRules :: !(Rules ()) - , pluginCommands :: ![PluginCommand] - , pluginCodeActionProvider :: !(Maybe CodeActionProvider) - , pluginCodeLensProvider :: !(Maybe CodeLensProvider) - , pluginDiagnosticProvider :: !(Maybe DiagnosticProvider) - -- ^ TODO: diagnostics are generally provided via rules, - -- this is probably redundant. - , pluginHoverProvider :: !(Maybe HoverProvider) - , pluginSymbolsProvider :: !(Maybe SymbolsProvider) - , pluginFormattingProvider :: !(Maybe (FormattingProvider IO)) - , pluginCompletionProvider :: !(Maybe CompletionProvider) - , pluginRenameProvider :: !(Maybe RenameProvider) - } - -defaultPluginDescriptor :: PluginId -> PluginDescriptor -defaultPluginDescriptor plId = - PluginDescriptor - plId - mempty - mempty - Nothing - Nothing - Nothing - Nothing - Nothing - Nothing - Nothing - Nothing - --- instance Show PluginCommand where --- show (PluginCommand i _ _) = "PluginCommand { name = " ++ show i ++ " }" - --- newtype CommandId = CommandId T.Text --- deriving (Show, Read, Eq, Ord) --- instance IsString CommandId where --- fromString = CommandId . T.pack - --- data PluginCommand = forall a b. (FromJSON a, ToJSON b, Typeable b) => --- PluginCommand { commandId :: CommandId --- , commandDesc :: T.Text --- , commandFunc :: a -> IO (Either ResponseError b) --- } - -newtype CommandId = CommandId T.Text - deriving (Show, Read, Eq, Ord) -instance IsString CommandId where - fromString = CommandId . T.pack - -data PluginCommand = forall a. 
(FromJSON a) => - PluginCommand { commandId :: CommandId - , commandDesc :: T.Text - , commandFunc :: CommandFunction a - } - --- --------------------------------------------------------------------- - -type CommandFunction a = LSP.LspFuncs Config - -> IdeState - -> a - -> IO (Either ResponseError Value, Maybe (ServerMethod, ApplyWorkspaceEditParams)) - -type CodeActionProvider = LSP.LspFuncs Config - -> IdeState - -> PluginId - -> TextDocumentIdentifier - -> Range - -> CodeActionContext - -> IO (Either ResponseError (List CAResult)) - -type CompletionProvider = LSP.LspFuncs Config - -> IdeState - -> CompletionParams - -> IO (Either ResponseError CompletionResponseResult) - - - -type CodeLensProvider = LSP.LspFuncs Config - -> IdeState - -> PluginId - -> CodeLensParams - -> IO (Either ResponseError (List CodeLens)) - -type RenameProvider = LSP.LspFuncs Config - -> IdeState - -> RenameParams - -> IO (Either ResponseError WorkspaceEdit) - -type DiagnosticProviderFuncSync - = DiagnosticTrigger -> Uri - -> IO (Either ResponseError (Map.Map Uri (S.Set Diagnostic))) - -type DiagnosticProviderFuncAsync - = DiagnosticTrigger -> Uri - -> (Map.Map Uri (S.Set Diagnostic) -> IO ()) - -> IO (Either ResponseError ()) - -data DiagnosticProviderFunc - = DiagnosticProviderSync DiagnosticProviderFuncSync - | DiagnosticProviderAsync DiagnosticProviderFuncAsync - - -data DiagnosticProvider = DiagnosticProvider - { dpTrigger :: S.Set DiagnosticTrigger -- AZ:should this be a NonEmptyList? - , dpFunc :: DiagnosticProviderFunc - } - -data DiagnosticTrigger = DiagnosticOnOpen - | DiagnosticOnChange - | DiagnosticOnSave - deriving (Show,Ord,Eq) - --- type HoverProvider = Uri -> Position -> IO (Either ResponseError [Hover]) -type HoverProvider = IdeState -> TextDocumentPositionParams -> IO (Either ResponseError (Maybe Hover)) - -type SymbolsProvider = LSP.LspFuncs Config - -> IdeState - -> DocumentSymbolParams - -> IO (Either ResponseError [DocumentSymbol]) - -type ExecuteCommandProvider = IdeState - -> ExecuteCommandParams - -> IO (Either ResponseError Value, Maybe (ServerMethod, ApplyWorkspaceEditParams)) - -newtype WithSnippets = WithSnippets Bool - --- --------------------------------------------------------------------- - -newtype PluginId = PluginId T.Text - deriving (Show, Read, Eq, Ord) -instance IsString PluginId where - fromString = PluginId . T.pack - --- --------------------------------------------------------------------- - - --- | Format the given Text as a whole or only a @Range@ of it. --- Range must be relative to the text to format. --- To format the whole document, read the Text from the file and use 'FormatText' --- as the FormattingType. -data FormattingType = FormatText - | FormatRange Range - - --- | To format a whole document, the 'FormatText' @FormattingType@ can be used. --- It is required to pass in the whole Document Text for that to happen, an empty text --- and file uri, does not suffice. 
-type FormattingProvider m - = LSP.LspFuncs Config - -> IdeState - -> FormattingType -- ^ How much to format - -> T.Text -- ^ Text to format - -> NormalizedFilePath -- ^ location of the file being formatted - -> FormattingOptions -- ^ Options for the formatter - -> m (Either ResponseError (List TextEdit)) -- ^ Result of the formatting - --- --------------------------------------------------------------------- diff --git a/src/Ide/Version.hs b/src/Ide/Version.hs index fcb9f2376b..d0af158587 100644 --- a/src/Ide/Version.hs +++ b/src/Ide/Version.hs @@ -1,18 +1,31 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} -- | Information and display strings for HIE's version -- and the current project's version module Ide.Version where -import Development.GitRev (gitCommitCount) -import Options.Applicative.Simple (simpleVersion) + +import Data.Maybe (listToMaybe) +import Data.Version +import GitHash (giCommitCount, tGitInfoCwdTry) +import Options.Applicative.Simple (simpleVersion) import qualified Paths_haskell_language_server as Meta +import System.Directory +import System.Exit import System.Info +import System.Process +import Text.ParserCombinators.ReadP +-- >>> hlsVersion hlsVersion :: String hlsVersion = - let commitCount = $gitCommitCount + let gi = $$tGitInfoCwdTry + commitCount = case gi of + Right gi -> show $ giCommitCount gi + Left _ -> "UNKNOWN" in concat $ concat [ [$(simpleVersion Meta.version)] -- Leave out number of commits for --depth=1 clone @@ -24,3 +37,55 @@ hlsVersion = ] where hlsGhcDisplayVersion = compilerName ++ "-" ++ VERSION_ghc + +data ProgramsOfInterest = ProgramsOfInterest + { cabalVersion :: Maybe Version + , stackVersion :: Maybe Version + , ghcVersion :: Maybe Version + } + +showProgramVersionOfInterest :: ProgramsOfInterest -> String +showProgramVersionOfInterest ProgramsOfInterest {..} = + unlines + [ showProgramVersion "cabal" cabalVersion + , showProgramVersion "stack" stackVersion + , showProgramVersion "ghc" ghcVersion + ] + +showProgramVersion :: String -> Maybe Version -> String +showProgramVersion name version = + pad 16 (name ++ ":") ++ showVersionWithDefault version + where + showVersionWithDefault = maybe "Not found" showVersion + pad n s = s ++ replicate (n - length s) ' ' + +findProgramVersions :: IO ProgramsOfInterest +findProgramVersions = ProgramsOfInterest + <$> findVersionOf "cabal" + <*> findVersionOf "stack" + <*> findVersionOf "ghc" + +-- | Find the version of the given program. +-- Assumes the program accepts the cli argument "--numeric-version". +-- If the invocation has a non-zero exit-code, we return 'Nothing' +findVersionOf :: FilePath -> IO (Maybe Version) +findVersionOf tool = + findExecutable tool >>= \case + Nothing -> pure Nothing + Just path -> + readProcessWithExitCode path ["--numeric-version"] "" >>= \case + (ExitSuccess, sout, _) -> pure $ mkVersion sout + _ -> pure Nothing + +mkVersion :: String -> Maybe Version +mkVersion = consumeParser myVersionParser + where + myVersionParser = do + skipSpaces + version <- parseVersion + skipSpaces + pure version + + consumeParser :: ReadP a -> String -> Maybe a + consumeParser p input = + listToMaybe $ map fst . filter (null . 
snd) $ readP_to_S p input diff --git a/stack-8.10.1.yaml b/stack-8.10.1.yaml deleted file mode 100644 index c28931ef18..0000000000 --- a/stack-8.10.1.yaml +++ /dev/null @@ -1,37 +0,0 @@ -resolver: nightly-2020-06-29 - -packages: -- . -- ./ghcide/ - -extra-deps: -- Cabal-3.0.2.0 -- hie-bios-0.6.1 -- cabal-plan-0.7.0.0 -- clock-0.7.2 -- floskell-0.10.3 -- ghc-exactprint-0.6.3 -- lens-4.19.1 -- lsp-test-0.11.0.2 -- monad-dijkstra-0.1.1.2 -- optics-core-0.3 -- ormolu-0.1.2.0 -- stylish-haskell-0.11.0.0 -- semigroups-0.18.5 -- temporary-1.2.1.1 -- these-1.1 - -flags: - haskell-language-server: - pedantic: true - # We want to let agpl be the default value in .cabal (True) - # but brittany is not usable with ghc-8.10.1 - # see https://github.com/lspitzner/brittany/issues/269 - agpl: false - -# allow-newer: true - -nix: - packages: [ icu libcxx zlib ] - -concurrent-tests: false diff --git a/stack-8.6.4.yaml b/stack-8.6.4.yaml deleted file mode 100644 index 56eb41a9ad..0000000000 --- a/stack-8.6.4.yaml +++ /dev/null @@ -1,75 +0,0 @@ -resolver: lts-13.19 # Last 8.6.4 - -packages: -- . -- ./ghcide/ - -extra-deps: -- aeson-1.4.3.0 -- brittany-0.12.1.1 -- butcher-1.3.3.1 -- bytestring-trie-0.2.5.0 -- Cabal-3.0.2.0 -- cabal-doctest-1.0.8 -- cabal-plan-0.5.0.0 -- constrained-dynamic-0.1.0.0 -- deque-0.4.3 -# - ghcide-0.1.0 -- extra-1.7.3 -- floskell-0.10.3 -- fuzzy-0.1.0.0 -- ghc-check-0.5.0.1 -- ghc-exactprint-0.6.2 # for HaRe -- ghc-lib-parser-8.10.1.20200523 -- ghc-lib-parser-ex-8.10.0.4 -- ghc-paths-0.1.0.12 -- haddock-api-2.22.0@rev:1 -- haddock-library-1.8.0 -- happy-1.19.12 -- haskell-lsp-0.22.0.0 -- haskell-lsp-types-0.22.0.0 -- haskell-src-exts-1.21.1 -- hie-bios-0.6.1 -- hlint-2.2.8 -- hoogle-5.0.17.11 -- hsimport-0.11.0@rev:2 -- HsYAML-0.2.1.0@rev:1 -- HsYAML-aeson-0.2.0.0@rev:1 -- lens-4.18 -- lsp-test-0.11.0.2 -- microlens-th-0.4.2.3@rev:1 -- monad-dijkstra-0.1.1.2 -- monad-memo-0.4.1 -- multistate-0.8.0.1 -- ormolu-0.1.2.0 -- opentelemetry-0.4.2 -- parser-combinators-1.2.1 -- regex-base-0.94.0.0 -- regex-tdfa-1.3.1.0 -- rope-utf16-splay-0.3.1.0 -- shake-0.19.1 -- strict-list-0.1.5 -- stylish-haskell-0.11.0.0 -- syz-0.2.0.0 -- tasty-rerun-1.1.17 -- temporary-1.2.1.1 -- th-abstraction-0.3.1.0 -- type-equality-1 -- unix-compat-0.5.2 -- unordered-containers-0.2.10.0 -- yaml-0.11.1.2 -# To make build work in windows 7 -- unix-time-0.4.7 - - -flags: - haskell-language-server: - pedantic: true - - -# allow-newer: true - -nix: - packages: [ icu libcxx zlib ] - -concurrent-tests: false diff --git a/stack-8.6.5.yaml b/stack-8.6.5.yaml deleted file mode 100644 index 62db328d8c..0000000000 --- a/stack-8.6.5.yaml +++ /dev/null @@ -1,59 +0,0 @@ -resolver: lts-14.27 # Last 8.6.5 - -packages: -- . 
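The mkVersion/consumeParser helpers added to Ide.Version above are a thin wrapper over base's ReadP version parser: run the parser and keep only a parse that consumed the whole (whitespace-trimmed) input, which is what "--numeric-version" output looks like. A self-contained sketch of the same pattern (parseNumericVersion is an illustrative name, not the HLS function):

-- Sketch of the consumeParser/mkVersion pattern above, using only base.
import Data.Maybe (listToMaybe)
import Data.Version (Version, parseVersion)
import Text.ParserCombinators.ReadP (ReadP, readP_to_S, skipSpaces)

parseNumericVersion :: String -> Maybe Version
parseNumericVersion = consume (skipSpaces *> parseVersion <* skipSpaces)
  where
    -- Keep only a parse with no leftover input, as in consumeParser above.
    consume :: ReadP a -> String -> Maybe a
    consume p = listToMaybe . map fst . filter (null . snd) . readP_to_S p

main :: IO ()
main = print (parseNumericVersion "9.8.4\n")
-- prints Just (Version {versionBranch = [9,8,4], versionTags = []})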
-- ./ghcide/ - -extra-deps: -- ansi-terminal-0.10.2 -- base-compat-0.11.0 -- brittany-0.12.1.1@rev:2 -- butcher-1.3.3.1 -- Cabal-3.0.2.0 -- cabal-plan-0.6.2.0 -- clock-0.7.2 -- extra-1.7.3 -- floskell-0.10.3 -- fuzzy-0.1.0.0 -# - ghcide-0.1.0 -- ghc-check-0.5.0.1 -- ghc-lib-parser-8.10.1.20200523 -- ghc-lib-parser-ex-8.10.0.4 -- haddock-api-2.22.0@rev:1 -- haddock-library-1.8.0 -- haskell-lsp-0.22.0.0 -- haskell-lsp-types-0.22.0.0 -- hie-bios-0.6.1 -- HsYAML-0.2.1.0@rev:1 -- HsYAML-aeson-0.2.0.0@rev:1 -- indexed-profunctors-0.1 -- lens-4.18 -- lsp-test-0.11.0.2 -- monad-dijkstra-0.1.1.2 -- opentelemetry-0.4.2 -- optics-core-0.2 -- optparse-applicative-0.15.1.0 -- ormolu-0.1.2.0 -- parser-combinators-1.2.1 -- regex-base-0.94.0.0 -- regex-pcre-builtin-0.95.1.1.8.43 -- regex-tdfa-1.3.1.0 -- semialign-1.1 -# - github: wz1000/shake -# commit: fb3859dca2e54d1bbb2c873e68ed225fa179fbef -- stylish-haskell-0.11.0.0 -- tasty-rerun-1.1.17 -- temporary-1.2.1.1 -- type-equality-1 -- topograph-1 - -flags: - haskell-language-server: - pedantic: true - -# allow-newer: true - -nix: - packages: [ icu libcxx zlib ] - -concurrent-tests: false diff --git a/stack-8.8.2.yaml b/stack-8.8.2.yaml deleted file mode 100644 index ad95d9ea5b..0000000000 --- a/stack-8.8.2.yaml +++ /dev/null @@ -1,50 +0,0 @@ -resolver: lts-15.3 # Last 8.8.2 - -packages: -- . -- ./ghcide/ - -extra-deps: -- apply-refact-0.7.0.0 -- brittany-0.12.1.1 -- butcher-1.3.3.2 -- bytestring-trie-0.2.5.0 -- clock-0.7.2 -- constrained-dynamic-0.1.0.0 -- extra-1.7.3 -- floskell-0.10.3 -# - ghcide-0.1.0 -- ghc-check-0.5.0.1 -- ghc-lib-parser-8.10.1.20200523 -- ghc-lib-parser-ex-8.10.0.4 -- haddock-library-1.8.0 -- haskell-lsp-0.22.0.0 -- haskell-lsp-types-0.22.0.0 -- haskell-src-exts-1.21.1 -- hie-bios-0.6.1 -- hlint-2.2.8 -- hoogle-5.0.17.11 -- hsimport-0.11.0 -- HsYAML-0.2.1.0@rev:1 -- HsYAML-aeson-0.2.0.0@rev:1 -- ilist-0.3.1.0 -- lsp-test-0.11.0.2 -- monad-dijkstra-0.1.1.2 -- opentelemetry-0.4.2 -- ormolu-0.1.2.0 -- semigroups-0.18.5 -# - github: wz1000/shake -# commit: fb3859dca2e54d1bbb2c873e68ed225fa179fbef -- stylish-haskell-0.11.0.0 -- temporary-1.2.1.1 - -flags: - haskell-language-server: - pedantic: true - -# allow-newer: true - -nix: - packages: [ icu libcxx zlib ] - -concurrent-tests: false diff --git a/stack-8.8.3.yaml b/stack-8.8.3.yaml deleted file mode 100644 index bba9679ccc..0000000000 --- a/stack-8.8.3.yaml +++ /dev/null @@ -1,38 +0,0 @@ -resolver: lts-16.5 - -packages: -- . -- ./ghcide/ - -extra-deps: -- apply-refact-0.7.0.0 -- bytestring-trie-0.2.5.0 -- cabal-plan-0.6.2.0 -- clock-0.7.2 -- constrained-dynamic-0.1.0.0 -- extra-1.7.3 -- floskell-0.10.3 -# - ghcide-0.1.0 -- haskell-src-exts-1.21.1 -- hie-bios-0.6.1 -- hlint-2.2.8 -- hoogle-5.0.17.11 -- hsimport-0.11.0 -- ilist-0.3.1.0 -- lsp-test-0.11.0.2 -- monad-dijkstra-0.1.1.2 -- semigroups-0.18.5 -# - github: wz1000/shake -# commit: fb3859dca2e54d1bbb2c873e68ed225fa179fbef -- temporary-1.2.1.1 - -flags: - haskell-language-server: - pedantic: true - -# allow-newer: true - -nix: - packages: [ icu libcxx zlib ] - -concurrent-tests: false diff --git a/stack-8.8.4.yaml b/stack-8.8.4.yaml deleted file mode 100644 index ca3370ee98..0000000000 --- a/stack-8.8.4.yaml +++ /dev/null @@ -1,40 +0,0 @@ -resolver: lts-16.5 -compiler: ghc-8.8.4 - -packages: -- . 
-- ./ghcide/ - -extra-deps: -- apply-refact-0.7.0.0 -- bytestring-trie-0.2.5.0 -- cabal-helper-1.1.0.0 -- cabal-plan-0.6.2.0 -- clock-0.7.2 -- constrained-dynamic-0.1.0.0 -- extra-1.7.3 -- floskell-0.10.3 -# - ghcide-0.1.0 -- haskell-src-exts-1.21.1 -- hie-bios-0.6.1 -- hlint-2.2.8 -- hoogle-5.0.17.11 -- hsimport-0.11.0 -- ilist-0.3.1.0 -- lsp-test-0.11.0.2 -- monad-dijkstra-0.1.1.2 -- semigroups-0.18.5 -# - github: wz1000/shake -# commit: fb3859dca2e54d1bbb2c873e68ed225fa179fbef -- temporary-1.2.1.1 - -flags: - haskell-language-server: - pedantic: true - -# allow-newer: true - -nix: - packages: [ icu libcxx zlib ] - -concurrent-tests: false diff --git a/stack-lts22.yaml b/stack-lts22.yaml new file mode 100644 index 0000000000..429125333a --- /dev/null +++ b/stack-lts22.yaml @@ -0,0 +1,66 @@ +resolver: lts-22.43 # ghc-9.6.6 + +packages: + - . + - ./hls-graph + - ./ghcide/ + - ./hls-plugin-api + - ./hls-test-utils + +ghc-options: + "$everything": -haddock + +allow-newer: true +allow-newer-deps: + - extensions + # stan dependencies + - directory-ospath-streaming + +extra-deps: + - Diff-0.5 + - floskell-0.11.1 + - hiedb-0.7.0.0 + - hie-bios-0.17.0 + - hie-compat-0.3.1.2 + - implicit-hie-0.1.4.0 + - lsp-2.7.0.0 + - lsp-test-0.17.1.0 + - lsp-types-2.3.0.0 + - monad-dijkstra-0.1.1.4 # 5 + - retrie-1.2.3 + + # stan and friends + - stan-0.2.1.0 + - dir-traverse-0.2.3.0 + - extensions-0.1.0.1 + - tomland-1.3.3.2 + - trial-0.0.0.0 + - trial-optparse-applicative-0.0.0.0 + - trial-tomland-0.0.0.0 + - validation-selective-0.2.0.0 + - cabal-add-0.2 + - cabal-install-parsers-0.6.1.1 + - directory-ospath-streaming-0.2.2 + + +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + +flags: + haskell-language-server: + pedantic: true + stylish-haskell: + ghc-lib: true + retrie: + BuildExecutable: false + # stan dependencies + directory-ospath-streaming: + os-string: false + +nix: + packages: [icu libcxx zlib] + +concurrent-tests: false diff --git a/stack.yaml b/stack.yaml index 1ff400f465..43cb239b34 100644 --- a/stack.yaml +++ b/stack.yaml @@ -1,57 +1,60 @@ -resolver: lts-14.27 # Last 8.6.5 +resolver: lts-23.18 # ghc-9.8.4 packages: -- . -- ./ghcide/ + - . 
+ - ./hls-graph + - ./ghcide/ + - ./hls-plugin-api + - ./hls-test-utils + +ghc-options: + "$everything": -haddock + +allow-newer: true +allow-newer-deps: + - extensions + - hw-fingertree + - retrie + # stan dependencies + - directory-ospath-streaming extra-deps: -- ansi-terminal-0.10.2 -- base-compat-0.11.0 -- brittany-0.12.1.1@rev:2 -- butcher-1.3.3.1 -- Cabal-3.0.2.0 -- cabal-plan-0.6.2.0 -- clock-0.7.2 -- extra-1.7.3 -- floskell-0.10.3 -- fuzzy-0.1.0.0 -# - ghcide-0.1.0 -- ghc-check-0.5.0.1 -- ghc-lib-parser-8.10.1.20200523 -- ghc-lib-parser-ex-8.10.0.4 -- haddock-api-2.22.0@rev:1 -- haddock-library-1.8.0 -- haskell-lsp-0.22.0.0 -- haskell-lsp-types-0.22.0.0 -- hie-bios-0.6.1 -- HsYAML-0.2.1.0@rev:1 -- HsYAML-aeson-0.2.0.0@rev:1 -- indexed-profunctors-0.1 -- lens-4.18 -- lsp-test-0.11.0.2 -- monad-dijkstra-0.1.1.2 -- opentelemetry-0.4.2 -- optics-core-0.2 -- optparse-applicative-0.15.1.0 -- ormolu-0.1.2.0 -- parser-combinators-1.2.1 -- regex-base-0.94.0.0 -- regex-pcre-builtin-0.95.1.1.8.43 -- regex-tdfa-1.3.1.0 -- semialign-1.1 -- stylish-haskell-0.11.0.0 -- tasty-rerun-1.1.17 -- temporary-1.2.1.1 -- type-equality-1 -- topograph-1 + - floskell-0.11.1 + - hiedb-0.7.0.0 + - hie-compat-0.3.1.2 + - implicit-hie-0.1.4.0 + - hie-bios-0.17.0 + - hw-fingertree-0.1.2.1 + - monad-dijkstra-0.1.1.5 + - retrie-1.2.3 + + # stan dependencies not found in the stackage snapshot + - stan-0.2.1.0 + - dir-traverse-0.2.3.0 + - extensions-0.1.0.1 + - trial-0.0.0.0 + - trial-optparse-applicative-0.0.0.0 + - trial-tomland-0.0.0.0 + - directory-ospath-streaming-0.2.2 + - cabal-add-0.2 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci flags: haskell-language-server: pedantic: true - -# allow-newer: true + stylish-haskell: + ghc-lib: true + retrie: + BuildExecutable: false + # stan dependencies + directory-ospath-streaming: + os-string: false nix: - packages: [ icu libcxx zlib ] + packages: [icu libcxx zlib] concurrent-tests: false diff --git a/test/README.md b/test/README.md new file mode 100644 index 0000000000..2cab8fbce7 --- /dev/null +++ b/test/README.md @@ -0,0 +1,27 @@ +# The `func-test` test suite. + +This is the integration test suite for cross-plugin and cross-package features. + +Add integration tests to `func-test` only if they satisfy one or more of the following conditions: + +* It tests the interaction between more than one plugin. + * For example, plugin A provides a Diagnostic that plugin B requires to provide a CodeAction. + * However, it is also valid, and often preferable, to depend on the required plugin directly in plugin B's test suite. +* It tests HLS specific LSP code. + * For example, we test that config changes are appropriately propagated. + * Note, this is slightly debatable, since the test could also be part of `ghcide`. + * Non HLS specific LSP code may exist in HLS temporarily, but any LSP extensions should be upstreamed to `lsp`. +* It tests features of the `haskell-language-server-wrapper` executable. + * For example, argument parsing. +* It tests features of the `haskell-language-server` executable. + * For example, argument parsing. +* It tests features provided by `hls-plugin-api` that require an integration test (i.e. a unit test doesn't suffice). + * Example: Testing the Logger setup. + +If you think that a test that currently lives in `func-test` does not meet the conditions above, open a ticket for discussion or try to move the test to a better location. + +Note: `func-test` is a historical test suite. 
It was originally written for Haskell IDE Engine, which was merged with the `ghcide` project. +The integration test-suite `func-test` (back then `unit-test` existed as well) was used to test all kinds of features provided by Haskell IDE Engine (HIE). +When `ghcide` and HIE merged together, the integration test suite was vastly copied. +HLS moved to a plugin-based architecture, which mainly entails that plugin tests are isolated in the respective plugin's test suite. +Over time, `func-test` started to bit rot and wasn't maintained properly any more, since all new tests were added to the plugin or `ghcide` test suites. diff --git a/test/functional/Command.hs b/test/functional/Command.hs deleted file mode 100644 index aaf91175ff..0000000000 --- a/test/functional/Command.hs +++ /dev/null @@ -1,38 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module Command (tests) where - -import Control.Lens hiding (List) -import Control.Monad.IO.Class -import qualified Data.Text as T -import Data.Char -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types as LSP -import Language.Haskell.LSP.Types.Lens as LSP -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.HUnit -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Hspec.Expectations - - ---TODO : Response Message no longer has 4 inputs -tests :: TestTree -tests = testGroup "commands" [ - testCase "are prefixed" $ - runSession hieCommand fullCaps "test/testdata/" $ do - ResponseMessage _ _ (Right res) <- initializeResponse - let List cmds = res ^. LSP.capabilities . executeCommandProvider . _Just . commands - f x = (T.length (T.takeWhile isNumber x) >= 1) && (T.count ":" x >= 2) - liftIO $ do - cmds `shouldSatisfy` all f - cmds `shouldNotSatisfy` null - , ignoreTestBecause "Broken: Plugin package doesn't exist" $ - testCase "get de-prefixed" $ - runSession hieCommand fullCaps "test/testdata/" $ do - ResponseMessage _ _ (Left err) <- request - WorkspaceExecuteCommand - (ExecuteCommandParams "1234:package:add" (Just (List [])) Nothing) :: Session ExecuteCommandResponse - let ResponseError _ msg _ = err - -- We expect an error message about the dud arguments, but should pickup "add" and "package" - liftIO $ msg `shouldSatisfy` T.isInfixOf "while parsing args for add in plugin package" - ] diff --git a/test/functional/Completion.hs b/test/functional/Completion.hs deleted file mode 100644 index ca1a2d801f..0000000000 --- a/test/functional/Completion.hs +++ /dev/null @@ -1,396 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -module Completion(tests) where - -import Control.Applicative.Combinators -import Control.Monad.IO.Class -import Control.Lens hiding ((.=)) --- import Data.Aeson -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.Types.Lens hiding (applyEdit) -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - ---TODO: Fix tests, some structural changed hav been made - -tests :: TestTree -tests = testGroup "completions" [ --- testCase "works" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "put" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 9) --- let item = head $ filter ((== "putStrLn") . (^. 
label)) compls --- liftIO $ do --- item ^. label `shouldBe` "putStrLn" --- item ^. kind `shouldBe` Just CiFunction --- item ^. detail `shouldBe` Just "Prelude" --- resolvedRes <- request CompletionItemResolve item --- let Just (resolved :: CompletionItem) = resolvedRes ^. result --- liftIO $ do --- resolved ^. label `shouldBe` "putStrLn" --- resolved ^. kind `shouldBe` Just CiFunction --- resolved ^. detail `shouldBe` Just "String -> IO ()\nPrelude" --- resolved ^. insertTextFormat `shouldBe` Just Snippet --- resolved ^. insertText `shouldBe` Just "putStrLn ${1:String}" - --- , testCase "completes imports" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 1 17) (Position 1 26)) "Data.M" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 1 22) --- let item = head $ filter ((== "Maybe") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "Maybe" --- item ^. detail `shouldBe` Just "Data.Maybe" --- item ^. kind `shouldBe` Just CiModule - --- , testCase "completes qualified imports" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 2 17) (Position 1 25)) "Dat" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 1 19) --- let item = head $ filter ((== "Data.List") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "Data.List" --- item ^. detail `shouldBe` Just "Data.List" --- item ^. kind `shouldBe` Just CiModule - --- , testCase "completes language extensions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 0 24) (Position 0 31)) "" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 0 24) --- let item = head $ filter ((== "OverloadedStrings") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "OverloadedStrings" --- item ^. kind `shouldBe` Just CiKeyword - --- , testCase "completes pragmas" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 0 4) (Position 0 34)) "" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 0 4) --- let item = head $ filter ((== "LANGUAGE") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "LANGUAGE" --- item ^. kind `shouldBe` Just CiKeyword --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "LANGUAGE ${1:extension} #-}" - --- , testCase "completes pragmas no close" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 0 4) (Position 0 24)) "" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 0 4) --- let item = head $ filter ((== "LANGUAGE") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "LANGUAGE" --- item ^. kind `shouldBe` Just CiKeyword --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. 
insertText `shouldBe` Just "LANGUAGE ${1:extension}" - --- , testCase "completes options pragma" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 0 4) (Position 0 34)) "OPTIONS" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 0 4) --- let item = head $ filter ((== "OPTIONS_GHC") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "OPTIONS_GHC" --- item ^. kind `shouldBe` Just CiKeyword --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "OPTIONS_GHC -${1:option} #-}" - --- -- ----------------------------------- - --- , testCase "completes ghc options pragma values" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" - --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 0 0) (Position 0 0)) "{-# OPTIONS_GHC -Wno-red #-}\n" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 0 24) --- let item = head $ filter ((== "Wno-redundant-constraints") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "Wno-redundant-constraints" --- item ^. kind `shouldBe` Just CiKeyword --- item ^. insertTextFormat `shouldBe` Nothing --- item ^. insertText `shouldBe` Nothing - --- -- ----------------------------------- - --- , testCase "completes with no prefix" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics --- compls <- getCompletions doc (Position 5 7) --- liftIO $ filter ((== "!!") . (^. label)) compls `shouldNotSatisfy` null - --- -- See https://github.com/haskell/haskell-ide-engine/issues/903 --- , testCase "strips compiler generated stuff from completions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "DupRecFields.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 0) (Position 5 2)) "acc" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 4) --- let item = head $ filter (\c -> c^.label == "accessor") compls --- liftIO $ do --- item ^. label `shouldBe` "accessor" --- item ^. kind `shouldBe` Just CiFunction --- item ^. detail `shouldBe` Just "Two -> Int\nDupRecFields" --- item ^. insertText `shouldBe` Just "accessor ${1:Two}" - --- , testCase "have implicit foralls on basic polymorphic types" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics --- let te = TextEdit (Range (Position 5 7) (Position 5 9)) "id" --- _ <- applyEdit doc te --- compls <- getCompletions doc (Position 5 9) --- let item = head $ filter ((== "id") . (^. label)) compls --- resolvedRes <- request CompletionItemResolve item --- let Just (resolved :: CompletionItem) = resolvedRes ^. result --- liftIO $ --- resolved ^. 
detail `shouldBe` Just "a -> a\nPrelude" - --- , testCase "have implicit foralls with multiple type variables" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "flip" --- _ <- applyEdit doc te --- compls <- getCompletions doc (Position 5 11) --- let item = head $ filter ((== "flip") . (^. label)) compls --- resolvedRes <- request CompletionItemResolve item --- let Just (resolved :: CompletionItem) = resolvedRes ^. result --- liftIO $ --- resolved ^. detail `shouldBe` Just "(a -> b -> c) -> b -> a -> c\nPrelude" - - contextTests --- , snippetTests - ] - --- snippetTests :: TestTree --- snippetTests = testGroup "snippets" [ --- testCase "work for argumentless constructors" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "Nothing" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 14) --- let item = head $ filter ((== "Nothing") . (^. label)) compls --- liftIO $ do --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "Nothing" - --- , testCase "work for polymorphic types" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "fold" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 11) --- let item = head $ filter ((== "foldl") . (^. label)) compls --- resolvedRes <- request CompletionItemResolve item --- let Just (resolved :: CompletionItem) = resolvedRes ^. result --- liftIO $ do --- resolved ^. label `shouldBe` "foldl" --- resolved ^. kind `shouldBe` Just CiFunction --- resolved ^. insertTextFormat `shouldBe` Just Snippet --- resolved ^. insertText `shouldBe` Just "foldl ${1:b -> a -> b} ${2:b} ${3:t a}" - --- , testCase "work for complex types" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "mapM" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 11) --- let item = head $ filter ((== "mapM") . (^. label)) compls --- resolvedRes <- request CompletionItemResolve item --- let Just (resolved :: CompletionItem) = resolvedRes ^. result --- liftIO $ do --- resolved ^. label `shouldBe` "mapM" --- resolved ^. kind `shouldBe` Just CiFunction --- resolved ^. insertTextFormat `shouldBe` Just Snippet --- resolved ^. insertText `shouldBe` Just "mapM ${1:a -> m b} ${2:t a}" - --- , testCase "work for infix functions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "even `filte" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 18) --- let item = head $ filter ((== "filter") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "filter" --- item ^. kind `shouldBe` Just CiFunction --- item ^. 
insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "filter`" - --- , testCase "work for infix functions in backticks" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "even `filte`" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 18) --- let item = head $ filter ((== "filter") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "filter" --- item ^. kind `shouldBe` Just CiFunction --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "filter" - --- , testCase "work for qualified infix functions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "\"\" `Data.List.interspe" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 29) --- let item = head $ filter ((== "intersperse") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "intersperse" --- item ^. kind `shouldBe` Just CiFunction --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "intersperse`" - --- , testCase "work for qualified infix functions in backticks" $ runSession hieCommand fullCaps "test/testdata/completion" $ do --- doc <- openDoc "Completion.hs" "haskell" --- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - --- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "\"\" `Data.List.interspe`" --- _ <- applyEdit doc te - --- compls <- getCompletions doc (Position 5 29) --- let item = head $ filter ((== "intersperse") . (^. label)) compls --- liftIO $ do --- item ^. label `shouldBe` "intersperse" --- item ^. kind `shouldBe` Just CiFunction --- item ^. insertTextFormat `shouldBe` Just Snippet --- item ^. insertText `shouldBe` Just "intersperse" - - -- -- TODO : Fix compile issue in the test "Variable not in scope: object" - -- , testCase "respects lsp configuration" $ runSession hieCommand fullCaps "test/testdata/completion" $ do - -- doc <- openDoc "Completion.hs" "haskell" - -- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - - -- let config = object [ "languageServerHaskell" .= (object ["completionSnippetsOn" .= False])] - - -- sendNotification WorkspaceDidChangeConfiguration - -- (DidChangeConfigurationParams config) - - -- checkNoSnippets doc - - -- , testCase "respects client capabilities" $ runSession hieCommand noSnippetsCaps "test/testdata/completion" $ do - -- doc <- openDoc "Completion.hs" "haskell" - -- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - - -- checkNoSnippets doc - -- ] - -- where - -- checkNoSnippets doc = do - -- let te = TextEdit (Range (Position 5 7) (Position 5 24)) "fold" - -- _ <- applyEdit doc te - - -- compls <- getCompletions doc (Position 5 11) - -- let item = head $ filter ((== "foldl") . (^. label)) compls - -- liftIO $ do - -- item ^. label `shouldBe` "foldl" - -- item ^. kind `shouldBe` Just CiFunction - -- item ^. insertTextFormat `shouldBe` Just PlainText - -- item ^. insertText `shouldBe` Nothing - - -- resolvedRes <- request CompletionItemResolve item - -- let Just (resolved :: CompletionItem) = resolvedRes ^. result - -- liftIO $ do - -- resolved ^. 
label `shouldBe` "foldl" - -- resolved ^. kind `shouldBe` Just CiFunction - -- resolved ^. insertTextFormat `shouldBe` Just PlainText - -- resolved ^. insertText `shouldBe` Nothing - - -- noSnippetsCaps = - -- ( textDocument - -- . _Just - -- . completion - -- . _Just - -- . completionItem - -- . _Just - -- . snippetSupport - -- ?~ False - -- ) - -- fullCaps - -contextTests :: TestTree -contextTests = testGroup "contexts" [ - ignoreTestBecause "Broken: Timed out waiting to receive a message from the server" $ - testCase "only provides type suggestions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do - doc <- openDoc "Context.hs" "haskell" - _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - compls <- getCompletions doc (Position 2 17) - liftIO $ do - compls `shouldContainCompl` "Integer" - compls `shouldNotContainCompl` "interact" - - , ignoreTestBecause "Broken: Timed out waiting to receive a message from the server" $ - testCase "only provides type suggestions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do - doc <- openDoc "Context.hs" "haskell" - _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - compls <- getCompletions doc (Position 3 9) - liftIO $ do - compls `shouldContainCompl` "abs" - compls `shouldNotContainCompl` "Applicative" - - -- This currently fails if , testCase takes too long to typecheck the module - -- , testCase "completes qualified type suggestions" $ runSession hieCommand fullCaps "test/testdata/completion" $ do - -- doc <- openDoc "Context.hs" "haskell" - -- _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - -- let te = TextEdit (Range (Position 2 17) (Position 2 17)) " -> Conc." - -- _ <- applyEdit doc te - -- compls <- getCompletions doc (Position 2 26) - -- liftIO $ do - -- compls `shouldNotContainCompl` "forkOn" - -- compls `shouldContainCompl` "MVar" - -- compls `shouldContainCompl` "Chan" - ] - where - compls `shouldContainCompl` x = - filter ((== x) . (^. label)) compls `shouldNotSatisfy` null - compls `shouldNotContainCompl` x = - filter ((== x) . (^. 
label)) compls `shouldSatisfy` null \ No newline at end of file diff --git a/test/functional/Config.hs b/test/functional/Config.hs new file mode 100644 index 0000000000..874792784f --- /dev/null +++ b/test/functional/Config.hs @@ -0,0 +1,114 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} +module Config (tests) where + +import Control.DeepSeq +import Control.Monad +import Data.Hashable +import qualified Data.HashMap.Strict as HM +import qualified Data.Map as Map +import Development.IDE (RuleResult, action, define, + getFilesOfInterestUntracked, + getPluginConfigAction, ideErrorText, + uses_) +import Development.IDE.Test (ExpectedDiagnostic, expectDiagnostics) +import GHC.Generics +import Ide.Plugin.Config +import Ide.Types +import Language.LSP.Test as Test +import System.FilePath (()) +import Test.Hls + +{-# ANN module ("HLint: ignore Reduce duplication"::String) #-} + +tests :: TestTree +tests = testGroup "plugin config" [ + -- Note: there are more comprehensive tests over config in hls-hlint-plugin + -- TODO: Add generic tests over some example plugin + genericConfigTests + ] + +genericConfigTests :: TestTree +genericConfigTests = testGroup "generic plugin config" + [ + testCase "custom defaults" $ runConfigSession "diagnostics" $ do + _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" + -- getting only the standard diagnostics means the plugin wasn't enabled + expectDiagnostics standardDiagnostics + , testCase "custom defaults and user config on some other plugin" $ runConfigSession "diagnostics" $ do + _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" + -- test that the user config doesn't accidentally override the initial config + setHlsConfig $ changeConfig "someplugin" def{plcHoverOn = False} + -- getting only the expected diagnostics means the plugin wasn't enabled + expectDiagnostics standardDiagnostics + -- TODO: Partial config is not supported + , testCase "custom defaults and non overlapping user config" $ runConfigSession "diagnostics" $ do + _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" + -- test that the user config doesn't accidentally override the initial config + setHlsConfig $ changeConfig testPluginId def{plcHoverOn = False} + -- getting only the expected diagnostics means the plugin wasn't enabled + expectDiagnosticsFail + (BrokenIdeal standardDiagnostics) + (BrokenCurrent testPluginDiagnostics) + , testCase "custom defaults and overlapping user plugin config" $ runConfigSession "diagnostics" $ do + _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" + -- test that the user config overrides the default initial config + setHlsConfig $ changeConfig testPluginId def{plcGlobalOn = True} + -- getting only the expected diagnostics means the plugin wasn't enabled + expectDiagnostics testPluginDiagnostics + , testCase "custom defaults and non plugin user config" $ runConfigSession "diagnostics" $ do + _doc <- createDoc "Foo.hs" "haskell" "module Foo where\nfoo = False" + -- test that the user config doesn't accidentally override the initial config + setHlsConfig $ def {formattingProvider = "foo"} + -- getting only the expected diagnostics means the plugin wasn't enabled + expectDiagnostics standardDiagnostics + ] + where + standardDiagnostics = [("Foo.hs", [(DiagnosticSeverity_Warning, (1,0), "Top-level binding", Nothing)])] + testPluginDiagnostics = [("Foo.hs", [(DiagnosticSeverity_Error, (0,0), "testplugin", Nothing)])] + + runConfigSession subdir 
session = do + failIfSessionTimeout $ + runSessionWithTestConfig def + { testConfigSession=def {ignoreConfigurationRequests=False}, testShiftRoot=True + , testPluginDescriptor=plugin, testDirLocation=Left ("test/testdata" subdir) } + (const session) + + testPluginId = "testplugin" + -- A disabled-by-default plugin that creates diagnostics + plugin = mkPluginTestDescriptor' @() pd testPluginId + pd plId = (defaultPluginDescriptor plId "") + { + pluginConfigDescriptor = configDisabled + , pluginRules = do + action $ do + plc <- getPluginConfigAction testPluginId + when (plcGlobalOn plc && plcDiagnosticsOn plc) $ do + files <- getFilesOfInterestUntracked + void $ uses_ GetTestDiagnostics $ HM.keys files + define mempty $ \GetTestDiagnostics file -> do + let diags = [ideErrorText file "testplugin"] + return (diags,Nothing) + } + -- A config that disables the plugin initially + configDisabled = defaultConfigDescriptor{ + configInitialGenericConfig = def{plcGlobalOn = False, plcDiagnosticsOn = False} + } + changeConfig :: PluginId -> PluginConfig -> Config + changeConfig plugin conf = + def{plugins = Map.insert plugin conf (plugins def)} + + +data GetTestDiagnostics = GetTestDiagnostics + deriving (Eq, Show, Generic) +instance Hashable GetTestDiagnostics +instance NFData GetTestDiagnostics +type instance RuleResult GetTestDiagnostics = () + +expectDiagnosticsFail + :: HasCallStack + => ExpectBroken 'Ideal [(FilePath, [ExpectedDiagnostic])] + -> ExpectBroken 'Current [(FilePath, [ExpectedDiagnostic])] + -> Session () +expectDiagnosticsFail _ = expectDiagnostics . unCurrent diff --git a/test/functional/ConfigSchema.hs b/test/functional/ConfigSchema.hs new file mode 100644 index 0000000000..2ece6972e9 --- /dev/null +++ b/test/functional/ConfigSchema.hs @@ -0,0 +1,58 @@ +module ConfigSchema where + + +import qualified Data.ByteString.Lazy.Char8 as BS +import Data.Char (toLower) +import System.FilePath (()) +import System.Process.Extra +import Test.Hls +import Test.Hls.Command + +-- | Integration test to capture changes to the generated default config and the vscode schema. +-- +-- Changes to the vscode schema need to be communicated to vscode-haskell plugin maintainers, +-- otherwise users can't make use of the new configurations. +-- +-- In general, changes to the schema need to be done consciously when new plugin or features are added. +-- To fix a failing of these tests, review the change. If it is expected, accept the change via: +-- +-- @ +-- TASTY_PATTERN="generate schema" cabal test func-test --test-options=--accept +-- @ +-- +-- As changes need to be applied for all GHC version specific configs, you either need to run this command for each +-- GHC version that is affected by the config change, or manually add the change to all other golden config files. +-- Likely, the easiest way is to run CI and apply the generated diffs manually. 
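-- An editorial aside, not part of this change: a minimal sketch of how one more
-- golden case could sit alongside the three below, reusing only helpers this
-- module already imports (goldenGitDiff, hlsExeCommand, readProcess, BS.pack).
-- "my-new-flag" and its golden file name are hypothetical placeholders, not
-- flags that the haskell-language-server executable actually supports.
exampleGoldenTest :: TestTree
exampleGoldenTest =
  goldenGitDiff "my-new-flag" ("test" </> "testdata" </> "schema" </> "my-new-flag.golden.txt") $ do
    -- Run the executable and diff its stdout against the checked-in golden file;
    -- re-running the suite with --test-options=--accept rewrites the golden file.
    stdout <- readProcess hlsExeCommand ["my-new-flag"] ""
    pure $ BS.pack stdout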
+tests :: TestTree +tests = testGroup "generate schema" + [ goldenGitDiff "vscode-extension-schema" (vscodeSchemaFp ghcVersion) $ do + stdout <- readProcess hlsExeCommand ["vscode-extension-schema"] "" + pure $ BS.pack stdout + , goldenGitDiff "generate-default-config" (defaultConfigFp ghcVersion) $ do + stdout <- readProcess hlsExeCommand ["generate-default-config"] "" + pure $ BS.pack stdout + , goldenGitDiff "plugins-custom-config-markdown-reference" (markdownReferenceFp ghcVersion) $ do + stdout <- readProcess hlsExeCommand ["plugins-custom-config-markdown-reference"] "" + pure $ BS.pack stdout + ] + +vscodeSchemaFp :: GhcVersion -> FilePath +vscodeSchemaFp ghcVer = "test" "testdata" "schema" prettyGhcVersion ghcVer vscodeSchemaJson + +defaultConfigFp :: GhcVersion -> FilePath +defaultConfigFp ghcVer = "test" "testdata" "schema" prettyGhcVersion ghcVer generateDefaultConfigJson + +markdownReferenceFp :: GhcVersion -> FilePath +markdownReferenceFp ghcVer = "test" "testdata" "schema" prettyGhcVersion ghcVer markdownReferenceMd + +vscodeSchemaJson :: FilePath +vscodeSchemaJson = "vscode-extension-schema.golden.json" + +generateDefaultConfigJson :: FilePath +generateDefaultConfigJson = "default-config.golden.json" + +markdownReferenceMd :: FilePath +markdownReferenceMd = "markdown-reference.md" + +prettyGhcVersion :: GhcVersion -> String +prettyGhcVersion ghcVer = map toLower (show ghcVer) diff --git a/test/functional/Deferred.hs b/test/functional/Deferred.hs deleted file mode 100644 index 4bcebda277..0000000000 --- a/test/functional/Deferred.hs +++ /dev/null @@ -1,178 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DuplicateRecordFields #-} -{-# LANGUAGE OverloadedStrings #-} -module Deferred(tests) where - -import Control.Applicative.Combinators -import Control.Monad.IO.Class -import Control.Lens hiding (List) --- import Control.Monad --- import Data.Maybe -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.Types.Lens hiding (id, message) --- import qualified Language.Haskell.LSP.Types.Lens as LSP -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - - -tests :: TestTree -tests = testGroup "deferred responses" [ - - --TODO: DOes not compile - -- testCase "do not affect hover requests" $ runSession hieCommand fullCaps "test/testdata" $ do - -- doc <- openDoc "FuncTest.hs" "haskell" - - -- id1 <- sendRequest TextDocumentHover (TextDocumentPositionParams doc (Position 4 2) Nothing) - - -- skipMany anyNotification - -- hoverRsp <- message :: Session HoverResponse - -- liftIO $ hoverRsp ^? result . _Just . _Just . contents `shouldBe` Nothing - -- liftIO $ hoverRsp ^. LSP.id `shouldBe` responseId id1 - - -- id2 <- sendRequest TextDocumentDocumentSymbol (DocumentSymbolParams doc Nothing) - -- symbolsRsp <- skipManyTill anyNotification message :: Session DocumentSymbolsResponse - -- liftIO $ symbolsRsp ^. LSP.id `shouldBe` responseId id2 - - -- id3 <- sendRequest TextDocumentHover (TextDocumentPositionParams doc (Position 4 2) Nothing) - -- hoverRsp2 <- skipManyTill anyNotification message :: Session HoverResponse - -- liftIO $ hoverRsp2 ^. LSP.id `shouldBe` responseId id3 - - -- let contents2 = hoverRsp2 ^? result . _Just . _Just . 
contents - -- liftIO $ contents2 `shouldNotSatisfy` null - - -- -- Now that we have cache the following request should be instant - -- let highlightParams = TextDocumentPositionParams doc (Position 7 0) Nothing - -- highlightRsp <- request TextDocumentDocumentHighlight highlightParams - -- let (Just (List locations)) = highlightRsp ^. result - -- liftIO $ locations `shouldBe` [ DocumentHighlight - -- { _range = Range - -- { _start = Position {_line = 7, _character = 0} - -- , _end = Position {_line = 7, _character = 2} - -- } - -- , _kind = Just HkWrite - -- } - -- , DocumentHighlight - -- { _range = Range - -- { _start = Position {_line = 7, _character = 0} - -- , _end = Position {_line = 7, _character = 2} - -- } - -- , _kind = Just HkWrite - -- } - -- , DocumentHighlight - -- { _range = Range - -- { _start = Position {_line = 5, _character = 6} - -- , _end = Position {_line = 5, _character = 8} - -- } - -- , _kind = Just HkRead - -- } - -- , DocumentHighlight - -- { _range = Range - -- { _start = Position {_line = 7, _character = 0} - -- , _end = Position {_line = 7, _character = 2} - -- } - -- , _kind = Just HkWrite - -- } - -- , DocumentHighlight - -- { _range = Range - -- { _start = Position {_line = 7, _character = 0} - -- , _end = Position {_line = 7, _character = 2} - -- } - -- , _kind = Just HkWrite - -- } - -- , DocumentHighlight - -- { _range = Range - -- { _start = Position {_line = 5, _character = 6} - -- , _end = Position {_line = 5, _character = 8} - -- } - -- , _kind = Just HkRead - -- } - -- ] - - testCase "instantly respond to failed modules with no cache" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "FuncTestFail.hs" "haskell" - defs <- getDefinitions doc (Position 1 11) - liftIO $ defs `shouldBe` [] - - -- TODO: the benefits of caching parsed modules is doubted. - -- TODO: add issue link - -- , testCase "respond to untypecheckable modules with parsed module cache" $ - -- runSession hieCommand fullCaps "test/testdata" $ do - -- doc <- openDoc "FuncTestFail.hs" "haskell" - -- (Left (sym:_)) <- getDocumentSymbols doc - -- liftIO $ sym ^. name `shouldBe` "main" - - -- TODO does not compile - -- , testCase "returns hints as diagnostics" $ runSession hieCommand fullCaps "test/testdata" $ do - -- _ <- openDoc "FuncTest.hs" "haskell" - - -- cwd <- liftIO getCurrentDirectory - -- let testUri = filePathToUri $ cwd "test/testdata/FuncTest.hs" - - -- diags <- skipManyTill loggingNotification publishDiagnosticsNotification - -- liftIO $ diags ^? params `shouldBe` (Just $ PublishDiagnosticsParams - -- { _uri = testUri - -- , _diagnostics = List - -- [ Diagnostic - -- (Range (Position 9 6) (Position 10 18)) - -- (Just DsInfo) - -- (Just (StringValue "Redundant do")) - -- (Just "hlint") - -- "Redundant do\nFound:\n do putStrLn \"hello\"\nWhy not:\n putStrLn \"hello\"\n" - -- Nothing - -- ] - -- } - -- ) - -- let args' = H.fromList [("pos", toJSON (Position 7 0)), ("file", toJSON testUri)] - -- args = List [Object args'] - -- - -- executeRsp <- request WorkspaceExecuteCommand (ExecuteCommandParams "hare:demote" (Just args) Nothing) - -- liftIO $ executeRsp ^. result `shouldBe` Just (Object H.empty) - - -- editReq <- message :: Session ApplyWorkspaceEditRequest - -- let expectedTextEdits = List [TextEdit (Range (Position 6 0) (Position 7 6)) " where\n bb = 5"] - -- expectedTextDocEdits = List [TextDocumentEdit (VersionedTextDocumentIdentifier testUri (Just 0)) expectedTextEdits] - -- liftIO $ editReq ^. params . 
edit `shouldBe` WorkspaceEdit - -- Nothing - -- (Just expectedTextDocEdits) - -- , multiServerTests - , multiMainTests - ] - ---TODO: Does not compile --- multiServerTests :: TestTree --- multiServerTests = testGroup "multi-server setup" [ --- testCase "doesn't have clashing commands on two servers" $ do --- let getCommands = runSession hieCommand fullCaps "test/testdata" $ do --- rsp <- initializeResponse --- let uuids = rsp ^? result . _Just . capabilities . executeCommandProvider . _Just . commands --- return $ fromJust uuids --- List uuids1 <- getCommands --- List uuids2 <- getCommands --- liftIO $ forM_ (zip uuids1 uuids2) (uncurry shouldNotBe) --- ] - -multiMainTests :: TestTree -multiMainTests = testGroup "multiple main modules" [ - ignoreTestBecause "Broken: Unexpected ConduitParser.empty" $ - testCase "Can load one file at a time, when more than one Main module exists" - -- $ runSession hieCommand fullCaps "test/testdata" $ do - $ runSession hieCommand fullCaps "test/testdata" $ do - _doc <- openDoc "ApplyRefact2.hs" "haskell" - _diagsRspHlint <- skipManyTill anyNotification message :: Session PublishDiagnosticsNotification - diagsRspGhc <- skipManyTill anyNotification message :: Session PublishDiagnosticsNotification - let (List diags) = diagsRspGhc ^. params . diagnostics - - liftIO $ length diags `shouldBe` 2 - - _doc2 <- openDoc "HaReRename.hs" "haskell" - _diagsRspHlint2 <- skipManyTill anyNotification message :: Session PublishDiagnosticsNotification - -- errMsg <- skipManyTill anyNotification notification :: Session ShowMessageNotification - diagsRsp2 <- skipManyTill anyNotification message :: Session PublishDiagnosticsNotification - let (List diags2) = diagsRsp2 ^. params . diagnostics - - liftIO $ show diags2 `shouldBe` "[]" - ] \ No newline at end of file diff --git a/test/functional/Definition.hs b/test/functional/Definition.hs deleted file mode 100644 index 1bfdb1da1b..0000000000 --- a/test/functional/Definition.hs +++ /dev/null @@ -1,70 +0,0 @@ -module Definition (tests) where - -import Control.Lens -import Control.Monad.IO.Class -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.Types.Lens -import System.Directory -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -tests :: TestTree -tests = testGroup "definitions" [ - - ignoreTestBecause "Broken: file:///Users/jwindsor/src/haskell-language-server/test/testdata/References.hs" $ - testCase "goto's symbols" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "References.hs" "haskell" - defs <- getDefinitions doc (Position 7 8) - let expRange = Range (Position 4 0) (Position 4 3) - liftIO $ defs `shouldBe` [Location (doc ^. 
uri) expRange] - - -- ----------------------------------- - - , ignoreTestBecause "Broken: file:///Users/jwindsor/src/haskell-language-server/test/testdata/Bar.hs" $ - testCase "goto's imported modules" $ runSession hieCommand fullCaps "test/testdata/definition" $ do - doc <- openDoc "Foo.hs" "haskell" - defs <- getDefinitions doc (Position 2 8) - liftIO $ do - fp <- canonicalizePath "test/testdata/definition/Bar.hs" - defs `shouldBe` [Location (filePathToUri fp) zeroRange] - - , ignoreTestBecause "Broken: file:///Users/jwindsor/src/haskell-language-server/test/testdata/Bar.hs" $ - testCase "goto's exported modules" $ runSession hieCommand fullCaps "test/testdata/definition" $ do - doc <- openDoc "Foo.hs" "haskell" - defs <- getDefinitions doc (Position 0 15) - liftIO $ do - fp <- canonicalizePath "test/testdata/definition/Bar.hs" - defs `shouldBe` [Location (filePathToUri fp) zeroRange] - - , ignoreTestBecause "Broken: file:///Users/jwindsor/src/haskell-language-server/test/testdata/Bar.hs" $ - testCase "goto's imported modules that are loaded" $ runSession hieCommand fullCaps "test/testdata/definition" $ do - doc <- openDoc "Foo.hs" "haskell" - _ <- openDoc "Bar.hs" "haskell" - defs <- getDefinitions doc (Position 2 8) - liftIO $ do - fp <- canonicalizePath "test/testdata/definition/Bar.hs" - defs `shouldBe` [Location (filePathToUri fp) zeroRange] - - , ignoreTestBecause "Broken: file:///Users/jwindsor/src/haskell-language-server/test/testdata/Bar.hs" $ - testCase "goto's imported modules that are loaded, and then closed" $ - runSession hieCommand fullCaps "test/testdata/definition" $ do - doc <- openDoc "Foo.hs" "haskell" - otherDoc <- openDoc "Bar.hs" "haskell" - closeDoc otherDoc - defs <- getDefinitions doc (Position 2 8) - _ <- waitForDiagnostics - liftIO $ putStrLn "D" - liftIO $ do - fp <- canonicalizePath "test/testdata/definition/Bar.hs" - defs `shouldBe` [Location (filePathToUri fp) zeroRange] - liftIO $ putStrLn "E" -- AZ - - noDiagnostics - ] - -zeroRange :: Range -zeroRange = Range (Position 0 0) (Position 0 0) diff --git a/test/functional/Diagnostic.hs b/test/functional/Diagnostic.hs deleted file mode 100644 index fc67bf324d..0000000000 --- a/test/functional/Diagnostic.hs +++ /dev/null @@ -1,104 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -module Diagnostic (tests) where - -import Control.Applicative.Combinators -import Control.Lens hiding (List) -import Control.Monad.IO.Class -import Data.Aeson (toJSON) -import qualified Data.Text as T -import qualified Data.Default -import Ide.Logger -import Ide.Plugin.Config -import Language.Haskell.LSP.Test hiding (message) -import Language.Haskell.LSP.Types -import qualified Language.Haskell.LSP.Types.Lens as LSP -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - --- --------------------------------------------------------------------- - -tests :: TestTree -tests = testGroup "diagnostics providers" [ - saveTests - , triggerTests - , errorTests - , warningTests - ] - - -triggerTests :: TestTree -triggerTests = testGroup "diagnostics triggers" [ - ignoreTestBecause "Broken" $ - ignoreTestBecause "Broken" $ testCase "runs diagnostics on save" $ - runSession hieCommandExamplePlugin codeActionSupportCaps "test/testdata" $ do - logm "starting DiagnosticSpec.runs diagnostic on save" - doc <- openDoc "ApplyRefact2.hs" "haskell" - - diags@(reduceDiag:_) <- waitForDiagnostics - - liftIO $ do - length diags `shouldBe` 2 - reduceDiag ^. 
LSP.range `shouldBe` Range (Position 1 0) (Position 1 12) - reduceDiag ^. LSP.severity `shouldBe` Just DsInfo - reduceDiag ^. LSP.code `shouldBe` Just (StringValue "Eta reduce") - reduceDiag ^. LSP.source `shouldBe` Just "hlint" - - diags2a <- waitForDiagnostics - - liftIO $ length diags2a `shouldBe` 2 - - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - - diags3@(d:_) <- waitForDiagnosticsSource "eg2" - - liftIO $ do - length diags3 `shouldBe` 1 - d ^. LSP.range `shouldBe` Range (Position 0 0) (Position 1 0) - d ^. LSP.severity `shouldBe` Nothing - d ^. LSP.code `shouldBe` Nothing - d ^. LSP.message `shouldBe` T.pack "Example plugin diagnostic, triggered byDiagnosticOnSave" - ] - -errorTests :: TestTree -errorTests = testGroup "typed hole errors" [ - ignoreTestBecause "Broken" $ testCase "is deferred" $ - runSession hieCommand fullCaps "test/testdata" $ do - _ <- openDoc "TypedHoles.hs" "haskell" - [diag] <- waitForDiagnosticsSource "bios" - liftIO $ diag ^. LSP.severity `shouldBe` Just DsWarning - ] - -warningTests :: TestTree -warningTests = testGroup "Warnings are warnings" [ - ignoreTestBecause "Broken" $ testCase "Overrides -Werror" $ - runSession hieCommand fullCaps "test/testdata/wErrorTest" $ do - _ <- openDoc "src/WError.hs" "haskell" - [diag] <- waitForDiagnosticsSource "bios" - liftIO $ diag ^. LSP.severity `shouldBe` Just DsWarning - ] - -saveTests :: TestTree -saveTests = testGroup "only diagnostics on save" [ - ignoreTestBecause "Broken" $ testCase "Respects diagnosticsOnChange setting" $ - runSession hieCommandExamplePlugin codeActionSupportCaps "test/testdata" $ do - let config = Data.Default.def { diagnosticsOnChange = False } :: Config - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON config)) - doc <- openDoc "Hover.hs" "haskell" - diags <- waitForDiagnostics - - liftIO $ do - length diags `shouldBe` 0 - - let te = TextEdit (Range (Position 0 0) (Position 0 13)) "" - _ <- applyEdit doc te - skipManyTill loggingNotification noDiagnostics - - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - diags2 <- waitForDiagnostics - liftIO $ - length diags2 `shouldBe` 1 - ] diff --git a/test/functional/Eval.hs b/test/functional/Eval.hs deleted file mode 100644 index 4f4cc91691..0000000000 --- a/test/functional/Eval.hs +++ /dev/null @@ -1,69 +0,0 @@ -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} - -module Eval (tests) where - -import Control.Applicative.Combinators (skipManyTill) -import Control.Monad.IO.Class (MonadIO (liftIO)) -import qualified Data.Text.IO as T -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types (ApplyWorkspaceEditRequest, - CodeLens (CodeLens, _command, _range), - Command (_title), - Position (..), Range (..)) -import System.FilePath -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.HUnit - -tests :: TestTree -tests = - testGroup - "eval" - [ testCase "Produces Evaluate code lenses" $ do - runSession hieCommand fullCaps evalPath $ do - doc <- openDoc "T1.hs" "haskell" - lenses <- getCodeLenses doc - liftIO $ map (fmap _title . _command) lenses @?= [Just "Evaluate..."], - testCase "Produces Refresh code lenses" $ do - runSession hieCommand fullCaps evalPath $ do - doc <- openDoc "T2.hs" "haskell" - lenses <- getCodeLenses doc - liftIO $ map (fmap _title . 
_command) lenses @?= [Just "Refresh..."], - testCase "Code lenses have ranges" $ do - runSession hieCommand fullCaps evalPath $ do - doc <- openDoc "T1.hs" "haskell" - lenses <- getCodeLenses doc - liftIO $ map _range lenses @?= [Range (Position 4 0) (Position 4 15)], - testCase "Multi-line expressions have a multi-line range" $ do - runSession hieCommand fullCaps evalPath $ do - doc <- openDoc "T3.hs" "haskell" - lenses <- getCodeLenses doc - liftIO $ map _range lenses @?= [Range (Position 3 0) (Position 4 15)], - testCase "Executed expressions range covers only the expression" $ do - runSession hieCommand fullCaps evalPath $ do - doc <- openDoc "T2.hs" "haskell" - lenses <- getCodeLenses doc - liftIO $ map _range lenses @?= [Range (Position 4 0) (Position 4 15)], - testCase "Evaluation of expressions" $ goldenTest "T1.hs", - testCase "Reevaluation of expressions" $ goldenTest "T2.hs", - testCase "Evaluation of expressions w/ imports" $ goldenTest "T3.hs", - testCase "Evaluation of expressions w/ lets" $ goldenTest "T4.hs", - testCase "Refresh an evaluation" $ goldenTest "T5.hs", - testCase "Refresh an evaluation w/ lets" $ goldenTest "T6.hs", - testCase "Refresh a multiline evaluation" $ goldenTest "T7.hs" - ] - -goldenTest :: FilePath -> IO () -goldenTest input = runSession hieCommand fullCaps evalPath $ do - doc <- openDoc input "haskell" - [CodeLens {_command = Just c}] <- getCodeLenses doc - executeCommand c - _resp :: ApplyWorkspaceEditRequest <- skipManyTill anyMessage message - edited <- documentContents doc - expected <- liftIO $ T.readFile $ evalPath input <.> "expected" - liftIO $ edited @?= expected - -evalPath :: FilePath -evalPath = "test/testdata/eval" diff --git a/test/functional/Format.hs b/test/functional/Format.hs index bffaa596a5..a8fe534e9d 100644 --- a/test/functional/Format.hs +++ b/test/functional/Format.hs @@ -1,179 +1,62 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} module Format (tests) where -import Control.Monad.IO.Class -import Data.Aeson -import qualified Data.ByteString.Lazy as BS -import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.Golden -import Test.Tasty.HUnit -import Test.Hspec.Expectations +import Control.Lens ((^.)) +import Control.Monad.IO.Class +import Data.Functor (void) +import qualified Data.Text as T +import qualified Data.Text.IO as T +import Ide.Types +import qualified Language.LSP.Protocol.Lens as L +import Language.LSP.Protocol.Types +import Language.LSP.Test +import Test.Hls +import Test.Hls.Command +import Test.Hls.Flags (requiresFloskellPlugin, + requiresOrmoluPlugin) tests :: TestTree -tests = testGroup "format document" [ - goldenVsStringDiff "works" goldenGitDiff "test/testdata/Format.formatted_document.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Format.hs" "haskell" - formatDoc doc (FormattingOptions 2 True) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - , goldenVsStringDiff "works with custom tab size" goldenGitDiff "test/testdata/Format.formatted_document_with_tabsize.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Format.hs" "haskell" - formatDoc doc (FormattingOptions 5 True) - BS.fromStrict . 
T.encodeUtf8 <$> documentContents doc - , rangeTests - , providerTests - , stylishHaskellTests - , brittanyTests - , ormoluTests - ] - -rangeTests :: TestTree -rangeTests = testGroup "format range" [ - goldenVsStringDiff "works" goldenGitDiff "test/testdata/Format.formatted_range.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Format.hs" "haskell" - formatRange doc (FormattingOptions 2 True) (Range (Position 1 0) (Position 3 10)) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - , goldenVsStringDiff "works with custom tab size" goldenGitDiff "test/testdata/Format.formatted_range_with_tabsize.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Format.hs" "haskell" - formatRange doc (FormattingOptions 5 True) (Range (Position 4 0) (Position 7 19)) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc +tests = testGroup "format document" + [ providerTests ] providerTests :: TestTree -providerTests = testGroup "formatting provider" [ - testCase "respects none" $ runSessionWithConfig (formatConfig "none") hieCommand fullCaps "test/testdata" $ do +providerTests = testGroup "lsp formatting provider" + [ testCase "respects none" $ runSessionWithConfig (formatConfig "none") hlsLspCommand fullLatestClientCaps "test/testdata/format" $ do + void configurationRequest doc <- openDoc "Format.hs" "haskell" - orig <- documentContents doc + resp <- request SMethod_TextDocumentFormatting $ DocumentFormattingParams Nothing doc (FormattingOptions 2 True Nothing Nothing Nothing) + liftIO $ case resp ^. L.result of + result@(Left (TResponseError reason message Nothing)) -> case reason of + (InR ErrorCodes_MethodNotFound) -> pure () -- No formatter + (InR ErrorCodes_InvalidRequest) | "No plugin" `T.isPrefixOf` message -> pure () + _ -> assertFailure $ "strange response from formatting provider:" ++ show result + result -> assertFailure $ "strange response from formatting provider:" ++ show result + + , requiresOrmoluPlugin . requiresFloskellPlugin $ testCase "can change on the fly" $ runSessionWithConfig (formatConfig "none") hlsLspCommand fullLatestClientCaps "test/testdata/format" $ do + void configurationRequest + formattedOrmolu <- liftIO $ T.readFile "test/testdata/format/Format.ormolu.formatted.hs" + formattedFloskell <- liftIO $ T.readFile "test/testdata/format/Format.floskell.formatted.hs" + formattedOrmoluPostFloskell <- liftIO $ T.readFile "test/testdata/format/Format.ormolu_post_floskell.formatted.hs" - formatDoc doc (FormattingOptions 2 True) - documentContents doc >>= liftIO . (`shouldBe` orig) - - formatRange doc (FormattingOptions 2 True) (Range (Position 1 0) (Position 3 10)) - documentContents doc >>= liftIO . (`shouldBe` orig) - - , ignoreTestBecause "Broken" $ testCase "can change on the fly" $ runSession hieCommand fullCaps "test/testdata" $ do doc <- openDoc "Format.hs" "haskell" - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (formatLspConfig "brittany")) - formatDoc doc (FormattingOptions 2 True) - documentContents doc >>= liftIO . (`shouldBe` formattedBrittany) + setHlsConfig (formatLspConfig "ormolu") + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + documentContents doc >>= liftIO . (@?= formattedOrmolu) - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (formatLspConfig "floskell")) - formatDoc doc (FormattingOptions 2 True) - documentContents doc >>= liftIO . 
(`shouldBe` formattedFloskell) + setHlsConfig (formatLspConfig "floskell") + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + documentContents doc >>= liftIO . (@?= formattedFloskell) - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (formatLspConfig "brittany")) - formatDoc doc (FormattingOptions 2 True) - documentContents doc >>= liftIO . (`shouldBe` formattedBrittanyPostFloskell) + setHlsConfig (formatLspConfig "ormolu") + formatDoc doc (FormattingOptions 2 True Nothing Nothing Nothing) + documentContents doc >>= liftIO . (@?= formattedOrmoluPostFloskell) ] -stylishHaskellTests :: TestTree -stylishHaskellTests = testGroup "stylish-haskell" [ - goldenVsStringDiff "formats a document" goldenGitDiff "test/testdata/StylishHaksell.formatted_document.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (formatLspConfig "stylish-haskell")) - doc <- openDoc "StylishHaskell.hs" "haskell" - formatDoc doc (FormattingOptions 2 True) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - , goldenVsStringDiff "formats a range" goldenGitDiff "test/testdata/StylishHaksell.formatted_range.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (formatLspConfig "stylish-haskell")) - doc <- openDoc "StylishHaskell.hs" "haskell" - formatRange doc (FormattingOptions 2 True) (Range (Position 0 0) (Position 2 21)) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - ] - -brittanyTests :: TestTree -brittanyTests = testGroup "brittany" [ - goldenVsStringDiff "formats a document with LF endings" goldenGitDiff "test/testdata/BrittanyLF.formatted_document.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "BrittanyLF.hs" "haskell" - formatDoc doc (FormattingOptions 4 True) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - - , goldenVsStringDiff "formats a document with CRLF endings" goldenGitDiff "test/testdata/BrittanyCRLF.formatted_document.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "BrittanyCRLF.hs" "haskell" - formatDoc doc (FormattingOptions 4 True) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - - , goldenVsStringDiff "formats a range with LF endings" goldenGitDiff "test/testdata/BrittanyLF.formatted_range.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "BrittanyLF.hs" "haskell" - let range = Range (Position 1 0) (Position 2 22) - formatRange doc (FormattingOptions 4 True) range - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - - , goldenVsStringDiff "formats a range with CRLF endings" goldenGitDiff "test/testdata/BrittanyCRLF.formatted_range.hs" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "BrittanyCRLF.hs" "haskell" - let range = Range (Position 1 0) (Position 2 22) - formatRange doc (FormattingOptions 4 True) range - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - ] - -ormoluTests :: TestTree -ormoluTests = testGroup "ormolu" [ - goldenVsStringDiff "formats correctly" goldenGitDiff ("test/testdata/Format.ormolu." 
++ ormoluGoldenSuffix ++ ".hs") $ runSession hieCommand fullCaps "test/testdata" $ do - let formatLspConfig provider = - object [ "languageServerHaskell" .= object ["formattingProvider" .= (provider :: Value)] ] - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (formatLspConfig "ormolu")) - doc <- openDoc "Format.hs" "haskell" - formatDoc doc (FormattingOptions 2 True) - BS.fromStrict . T.encodeUtf8 <$> documentContents doc - ] - where - ormoluGoldenSuffix = case ghcVersion of - GHC88 -> "formatted" - GHC86 -> "formatted" - _ -> "unchanged" - - -formatLspConfig :: Value -> Value -formatLspConfig provider = object [ "languageServerHaskell" .= object ["formattingProvider" .= (provider :: Value)] ] - -formatConfig :: Value -> SessionConfig -formatConfig provider = defaultConfig { lspConfig = Just (formatLspConfig provider) } - -goldenGitDiff :: FilePath -> FilePath -> [String] -goldenGitDiff fRef fNew = ["git", "diff", "--no-index", "--text", "--exit-code", fRef, fNew] - - -formattedBrittany :: T.Text -formattedBrittany = - "module Format where\n\ - \foo :: Int -> Int\n\ - \foo 3 = 2\n\ - \foo x = x\n\ - \bar :: String -> IO String\n\ - \bar s = do\n\ - \ x <- return \"hello\"\n\ - \ return \"asdf\"\n\n\ - \data Baz = Baz { a :: Int, b :: String }\n\n" - -formattedFloskell :: T.Text -formattedFloskell = - "module Format where\n\ - \\n\ - \foo :: Int -> Int\n\ - \foo 3 = 2\n\ - \foo x = x\n\ - \\n\ - \bar :: String -> IO String\n\ - \bar s = do\n\ - \ x <- return \"hello\"\n\ - \ return \"asdf\"\n\n\ - \data Baz = Baz { a :: Int, b :: String }\n\n" +formatLspConfig :: T.Text -> Config +formatLspConfig provider = def { formattingProvider = provider } -formattedBrittanyPostFloskell :: T.Text -formattedBrittanyPostFloskell = - "module Format where\n\ - \\n\ - \foo :: Int -> Int\n\ - \foo 3 = 2\n\ - \foo x = x\n\ - \\n\ - \bar :: String -> IO String\n\ - \bar s = do\n\ - \ x <- return \"hello\"\n\ - \ return \"asdf\"\n\n\ - \data Baz = Baz { a :: Int, b :: String }\n\n" +formatConfig :: T.Text -> SessionConfig +formatConfig provider = defaultConfig { lspConfig = hlsConfigToClientConfig (formatLspConfig provider), ignoreConfigurationRequests = False } diff --git a/test/functional/FunctionalBadProject.hs b/test/functional/FunctionalBadProject.hs index e51ee00cf0..150f9cdb04 100644 --- a/test/functional/FunctionalBadProject.hs +++ b/test/functional/FunctionalBadProject.hs @@ -2,41 +2,27 @@ module FunctionalBadProject (tests) where --- import Control.Lens hiding (List) --- import Control.Monad.IO.Class --- import qualified Data.Text as T --- import Language.Haskell.LSP.Test hiding (message) --- import Language.Haskell.LSP.Types as LSP --- import Language.Haskell.LSP.Types.Lens as LSP hiding (contents, error ) --- import Test.Hls.Util -import Test.Tasty -import Test.Tasty.HUnit -import Test.Hspec.Expectations +import Control.Lens +import qualified Data.Text as T +import qualified Language.LSP.Protocol.Lens as L +import Test.Hls +import Test.Hls.Command + --- --------------------------------------------------------------------- --- TODO: Currently this can not succeed, since such an error is thrown in "runActionWithContext" which --- can produce diagnostics at the moment. 
Needs more investigation --- TODO: @fendor: Add issue link here --- tests :: TestTree -tests = testGroup "behaviour on malformed projects" [ - testCase "no test executed" $ True `shouldBe` True +tests = testGroup "behaviour on malformed projects" + [ testCase "Missing module diagnostic" $ do + runSession hlsLspCommand fullLatestClientCaps "test/testdata/missingModuleTest/missingModule/" $ do + doc <- openDoc "src/MyLib.hs" "haskell" + [diag] <- waitForDiagnosticsFrom doc + liftIO $ assertBool "missing module name" $ "MyLib" `T.isInfixOf` (diag ^. L.message) + liftIO $ assertBool "module missing context" $ "may not be listed" `T.isInfixOf` (diag ^. L.message) + , testCase "Missing module diagnostic - no matching prefix" $ do + runSession hlsLspCommand fullLatestClientCaps "test/testdata/missingModuleTest/noPrefixMatch/" $ do + doc <- openDoc "app/Other.hs" "haskell" + [diag] <- waitForDiagnosticsFrom doc + liftIO $ assertBool "missing module name" $ + "Other" `T.isInfixOf` (diag ^. L.message) + liftIO $ assertBool "hie-bios message" $ + "Cabal" `T.isInfixOf` (diag ^. L.message) ] - - -- testCase "deals with cabal file with unsatisfiable dependency" $ - -- runSession hieCommandExamplePlugin codeActionSupportCaps "test/testdata/badProjects/cabal" $ do - -- -- runSessionWithConfig logConfig hieCommandExamplePlugin codeActionSupportCaps "test/testdata" $ do - -- _doc <- openDoc "Foo.hs" "haskell" - - -- diags@(d:_) <- waitForDiagnosticsSource "bios" - -- -- liftIO $ show diags `shouldBe` "" - -- -- liftIO $ putStrLn $ show diags - -- -- liftIO $ putStrLn "a" - -- liftIO $ do - -- length diags `shouldBe` 1 - -- d ^. range `shouldBe` Range (Position 0 0) (Position 1 0) - -- d ^. severity `shouldBe` (Just DsError) - -- d ^. code `shouldBe` Nothing - -- d ^. source `shouldBe` Just "bios" - -- d ^. 
message `shouldBe` - -- (T.pack "readCreateProcess: stack \"build\" \"--only-configure\" \".\" (exit 1): failed\n") diff --git a/test/functional/FunctionalCodeAction.hs b/test/functional/FunctionalCodeAction.hs deleted file mode 100644 index 1acf37216f..0000000000 --- a/test/functional/FunctionalCodeAction.hs +++ /dev/null @@ -1,493 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE OverloadedStrings #-} - -module FunctionalCodeAction (tests) where - -import Control.Applicative.Combinators -import Control.Lens hiding (List) -import Control.Monad -import Control.Monad.IO.Class -import Data.Aeson -import Data.Default -import qualified Data.HashMap.Strict as HM -import Data.Maybe -#if __GLASGOW_HASKELL__ < 808 -import Data.Monoid ((<>)) -#endif -import qualified Data.Text as T -import Ide.Plugin.Config -import Language.Haskell.LSP.Test as Test -import Language.Haskell.LSP.Types -import qualified Language.Haskell.LSP.Types.Lens as L -import qualified Language.Haskell.LSP.Types.Capabilities as C -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -{-# ANN module ("HLint: ignore Reduce duplication"::String) #-} - -tests :: TestTree -tests = testGroup "code actions" [ - hlintTests - , importTests - , missingPragmaTests - , packageTests - , redundantImportTests - , renameTests - , signatureTests - , typedHoleTests - , unusedTermTests - ] - - -hlintTests :: TestTree -hlintTests = testGroup "hlint suggestions" [ - ignoreTestBecause "Broken" $ testCase "provides 3.8 code actions" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "ApplyRefact2.hs" "haskell" - diags@(reduceDiag:_) <- waitForDiagnostics - - liftIO $ do - length diags `shouldBe` 2 - reduceDiag ^. L.range `shouldBe` Range (Position 1 0) (Position 1 12) - reduceDiag ^. L.severity `shouldBe` Just DsInfo - reduceDiag ^. L.code `shouldBe` Just (StringValue "Eta reduce") - reduceDiag ^. L.source `shouldBe` Just "hlint" - - (CACodeAction ca:_) <- getAllCodeActions doc - - -- Evaluate became redundant id in later hlint versions - liftIO $ ["Apply hint:Redundant id", "Apply hint:Evaluate"] `shouldContain` [ca ^. L.title] - - executeCodeAction ca - - contents <- getDocumentEdit doc - liftIO $ contents `shouldBe` "main = undefined\nfoo x = x\n" - - noDiagnostics - - , ignoreTestBecause "Broken" $ testCase "falls back to pre 3.8 code actions" $ runSession hieCommand noLiteralCaps "test/testdata" $ do - doc <- openDoc "ApplyRefact2.hs" "haskell" - - _ <- waitForDiagnostics - - (CACommand cmd:_) <- getAllCodeActions doc - - -- Evaluate became redundant id in later hlint versions - liftIO $ ["Apply hint:Redundant id", "Apply hint:Evaluate"] `shouldContain` [cmd ^. L.title ] - - executeCommand cmd - - contents <- skipManyTill publishDiagnosticsNotification $ getDocumentEdit doc - liftIO $ contents `shouldBe` "main = undefined\nfoo x = x\n" - - noDiagnostics - - , ignoreTestBecause "Broken" $ testCase "runs diagnostics on save" $ runSession hieCommand fullCaps "test/testdata" $ do - let config = def { diagnosticsOnChange = False } - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON config)) - - doc <- openDoc "ApplyRefact2.hs" "haskell" - diags@(reduceDiag:_) <- waitForDiagnostics - - liftIO $ do - length diags `shouldBe` 2 - reduceDiag ^. L.range `shouldBe` Range (Position 1 0) (Position 1 12) - reduceDiag ^. L.severity `shouldBe` Just DsInfo - reduceDiag ^. 
L.code `shouldBe` Just (StringValue "Eta reduce") - reduceDiag ^. L.source `shouldBe` Just "hlint" - - (CACodeAction ca:_) <- getAllCodeActions doc - - -- Evaluate became redundant id in later hlint versions - liftIO $ ["Apply hint:Redundant id", "Apply hint:Evaluate"] `shouldContain` [ca ^. L.title] - - executeCodeAction ca - - contents <- getDocumentEdit doc - liftIO $ contents `shouldBe` "main = undefined\nfoo x = x\n" - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - - noDiagnostics - ] - -renameTests :: TestTree -renameTests = testGroup "rename suggestions" [ - ignoreTestBecause "Broken" $ testCase "works" $ runSession hieCommand noLiteralCaps "test/testdata" $ do - doc <- openDoc "CodeActionRename.hs" "haskell" - - _ <- waitForDiagnosticsSource "bios" - - CACommand cmd:_ <- getAllCodeActions doc - executeCommand cmd - - x:_ <- T.lines <$> documentContents doc - liftIO $ x `shouldBe` "main = putStrLn \"hello\"" - - , ignoreTestBecause "Broken" $ testCase "doesn't give both documentChanges and changes" - $ runSession hieCommand noLiteralCaps "test/testdata" $ do - doc <- openDoc "CodeActionRename.hs" "haskell" - - _ <- waitForDiagnosticsSource "bios" - - CACommand cmd <- (!! 2) <$> getAllCodeActions doc - let Just (List [Object args]) = cmd ^. L.arguments - Object editParams = args HM.! "fallbackWorkspaceEdit" - liftIO $ do - editParams `shouldSatisfy` HM.member "changes" - editParams `shouldNotSatisfy` HM.member "documentChanges" - - executeCommand cmd - - _:x:_ <- T.lines <$> documentContents doc - liftIO $ x `shouldBe` "foo = putStrLn \"world\"" - ] - -importTests :: TestTree -importTests = testGroup "import suggestions" [ - ignoreTestBecause "Broken" $ testCase "works with 3.8 code action kinds" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "CodeActionImport.hs" "haskell" - -- No Formatting: - let config = def { formattingProvider = "none" } - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON config)) - - -- ignore the first empty hlint diagnostic publish - [_,diag:_] <- count 2 waitForDiagnostics - liftIO $ diag ^. L.message `shouldBe` "Variable not in scope: when :: Bool -> IO () -> IO ()" - - actionsOrCommands <- getAllCodeActions doc - let actns = map fromAction actionsOrCommands - - liftIO $ do - head actns ^. L.title `shouldBe` "Import module Control.Monad" - head (tail actns) ^. L.title `shouldBe` "Import module Control.Monad (when)" - forM_ actns $ \a -> do - a ^. L.kind `shouldBe` Just CodeActionQuickFix - a ^. L.command `shouldSatisfy` isJust - a ^. L.edit `shouldBe` Nothing - let hasOneDiag (Just (List [_])) = True - hasOneDiag _ = False - a ^. 
L.diagnostics `shouldSatisfy` hasOneDiag - length actns `shouldBe` 10 - - executeCodeAction (head actns) - - contents <- getDocumentEdit doc - liftIO $ contents `shouldBe` "import Control.Monad\nmain :: IO ()\nmain = when True $ putStrLn \"hello\"" - ] - -packageTests :: TestTree -packageTests = testGroup "add package suggestions" [ - ignoreTestBecause "Broken" $ testCase "adds to .cabal files" $ do - flushStackEnvironment - runSession hieCommand fullCaps "test/testdata/addPackageTest/cabal-exe" $ do - doc <- openDoc "AddPackage.hs" "haskell" - - -- ignore the first empty hlint diagnostic publish - [_,diag:_] <- count 2 waitForDiagnostics - - let prefixes = [ "Could not load module `Data.Text'" -- Windows && GHC >= 8.6 - , "Could not find module `Data.Text'" -- Windows - , "Could not load module ‘Data.Text’" -- GHC >= 8.6 - , "Could not find module ‘Data.Text’" - ] - in liftIO $ diag ^. L.message `shouldSatisfy` \m -> any (`T.isPrefixOf` m) prefixes - - acts <- getAllCodeActions doc - let (CACodeAction action:_) = acts - - liftIO $ do - action ^. L.title `shouldBe` "Add text as a dependency" - action ^. L.kind `shouldBe` Just CodeActionQuickFix - action ^. L.command . _Just . L.command `shouldSatisfy` T.isSuffixOf "package:add" - - executeCodeAction action - - contents <- getDocumentEdit . TextDocumentIdentifier =<< getDocUri "add-package-test.cabal" - liftIO $ - T.lines contents `shouldSatisfy` \x -> - any (\l -> "text -any" `T.isSuffixOf` l || "text : {} -any" `T.isSuffixOf` l) x - - , ignoreTestBecause "Broken" $ testCase "adds to hpack package.yaml files" $ - runSession hieCommand fullCaps "test/testdata/addPackageTest/hpack-exe" $ do - doc <- openDoc "app/Asdf.hs" "haskell" - - -- ignore the first empty hlint diagnostic publish - [_,_:diag:_] <- count 2 waitForDiagnostics - - let prefixes = [ "Could not load module `Codec.Compression.GZip'" -- Windows && GHC >= 8.6 - , "Could not find module `Codec.Compression.GZip'" -- Windows - , "Could not load module ‘Codec.Compression.GZip’" -- GHC >= 8.6 - , "Could not find module ‘Codec.Compression.GZip’" - ] - in liftIO $ diag ^. L.message `shouldSatisfy` \m -> any (`T.isPrefixOf` m) prefixes - - mActions <- getAllCodeActions doc - let allActions = map fromAction mActions - action = head allActions - - liftIO $ do - action ^. L.title `shouldBe` "Add zlib as a dependency" - forM_ allActions $ \a -> a ^. L.kind `shouldBe` Just CodeActionQuickFix - forM_ allActions $ \a -> a ^. L.command . _Just . L.command `shouldSatisfy` T.isSuffixOf "package:add" - - executeCodeAction action - - contents <- getDocumentEdit . TextDocumentIdentifier =<< getDocUri "package.yaml" - liftIO $ do - T.lines contents !! 3 `shouldSatisfy` T.isSuffixOf "zlib" - T.lines contents !! 21 `shouldNotSatisfy` T.isSuffixOf "zlib" - ] - -redundantImportTests :: TestTree -redundantImportTests = testGroup "redundant import code actions" [ - ignoreTestBecause "Broken" $ testCase "remove solitary redundant imports" $ - runSession hieCommand fullCaps "test/testdata/redundantImportTest/" $ do - doc <- openDoc "src/CodeActionRedundant.hs" "haskell" - - -- ignore the first empty hlint diagnostic publish - [_,diag:_] <- count 2 waitForDiagnostics - - let prefixes = [ "The import of `Data.List' is redundant" -- Windows - , "The import of ‘Data.List’ is redundant" - ] - in liftIO $ diag ^. 
L.message `shouldSatisfy` \m -> any (`T.isPrefixOf` m) prefixes - - mActions <- getAllCodeActions doc - - let allActions@[removeAction, changeAction] = map fromAction mActions - - liftIO $ do - removeAction ^. L.title `shouldBe` "Remove redundant import" - changeAction ^. L.title `shouldBe` "Import instances" - forM_ allActions $ \a -> a ^. L.kind `shouldBe` Just CodeActionQuickFix - forM_ allActions $ \a -> a ^. L.command `shouldBe` Nothing - forM_ allActions $ \a -> a ^. L.edit `shouldSatisfy` isJust - - executeCodeAction removeAction - - -- No command/applyworkspaceedit should be here, since action - -- provides workspace edit property which skips round trip to - -- the server - contents <- documentContents doc - liftIO $ contents `shouldBe` "module CodeActionRedundant where\nmain :: IO ()\nmain = putStrLn \"hello\"" - - , ignoreTestBecause "Broken" $ testCase "doesn't touch other imports" $ runSession hieCommand noLiteralCaps "test/testdata/redundantImportTest/" $ do - doc <- openDoc "src/MultipleImports.hs" "haskell" - _ <- count 2 waitForDiagnostics - [CACommand cmd, _] <- getAllCodeActions doc - executeCommand cmd - contents <- documentContents doc - liftIO $ (T.lines contents) `shouldBe` - [ "module MultipleImports where" - , "import Data.Maybe" - , "foo :: Int" - , "foo = fromJust (Just 3)" - ] - ] - -typedHoleTests :: TestTree -typedHoleTests = testGroup "typed hole code actions" [ - ignoreTestBecause "Broken" $ testCase "works" $ - runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "TypedHoles.hs" "haskell" - _ <- waitForDiagnosticsSource "bios" - cas <- map (\(CACodeAction x)-> x) <$> getAllCodeActions doc - - suggestion <- - case ghcVersion of - GHC88 -> do - liftIO $ map (^. L.title) cas `shouldMatchList` - [ "Substitute hole (Int) with x ([Int])" - , "Substitute hole (Int) with foo ([Int] -> Int Valid hole fits include)" - , "Substitute hole (Int) with maxBound (forall a. Bounded a => a with maxBound @Int)" - , "Substitute hole (Int) with minBound (forall a. Bounded a => a with minBound @Int)" - ] - return "x" - GHC86 -> do - liftIO $ map (^. L.title) cas `shouldMatchList` - [ "Substitute hole (Int) with x ([Int])" - , "Substitute hole (Int) with foo ([Int] -> Int Valid hole fits include)" - , "Substitute hole (Int) with maxBound (forall a. Bounded a => a with maxBound @Int)" - , "Substitute hole (Int) with minBound (forall a. Bounded a => a with minBound @Int)" - ] - return "x" - GHC84 -> do - liftIO $ map (^. L.title) cas `shouldMatchList` - [ "Substitute hole (Int) with maxBound (forall a. Bounded a => a)" - , "Substitute hole (Int) with minBound (forall a. Bounded a => a)" - , "Substitute hole (Int) with undefined (forall (a :: TYPE r). GHC.Stack.Types.HasCallStack => a)" - ] - return "maxBound" - - executeCodeAction $ head cas - - contents <- documentContents doc - - liftIO $ contents `shouldBe` T.concat - [ "module TypedHoles where\n" - , "foo :: [Int] -> Int\n" - , "foo x = " <> suggestion - ] - - , ignoreTestBecause "Broken" $ testCase "shows more suggestions" $ - runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "TypedHoles2.hs" "haskell" - _ <- waitForDiagnosticsSource "bios" - cas <- map fromAction <$> getAllCodeActions doc - - suggestion <- - case ghcVersion of - GHC88 -> do - liftIO $ map (^. L.title) cas `shouldMatchList` - [ "Substitute hole (A) with stuff (A -> A)" - , "Substitute hole (A) with x ([A])" - , "Substitute hole (A) with foo2 ([A] -> A)" - ] - return "stuff" - GHC86 -> do - liftIO $ map (^. 
L.title) cas `shouldMatchList` - [ "Substitute hole (A) with stuff (A -> A)" - , "Substitute hole (A) with x ([A])" - , "Substitute hole (A) with foo2 ([A] -> A)" - ] - return "stuff" - GHC84 -> do - liftIO $ map (^. L.title) cas `shouldMatchList` - [ "Substitute hole (A) with undefined (forall (a :: TYPE r). GHC.Stack.Types.HasCallStack => a)" - , "Substitute hole (A) with stuff (A -> A)" - , "Substitute hole (A) with x ([A])" - , "Substitute hole (A) with foo2 ([A] -> A)" - ] - return "undefined" - - executeCodeAction $ head cas - - contents <- documentContents doc - - liftIO $ (T.lines contents) `shouldBe` - [ "module TypedHoles2 (foo2) where" - , "newtype A = A Int" - , "foo2 :: [A] -> A" - , "foo2 x = " <> suggestion <> "" - , " where" - , " stuff (A a) = A (a + 1)" - ] - ] - -signatureTests :: TestTree -signatureTests = testGroup "missing top level signature code actions" [ - ignoreTestBecause "Broken" $ testCase "Adds top level signature" $ - runSession hieCommand fullCaps "test/testdata/" $ do - doc <- openDoc "TopLevelSignature.hs" "haskell" - - _ <- waitForDiagnosticsSource "bios" - cas <- map fromAction <$> getAllCodeActions doc - - liftIO $ map (^. L.title) cas `shouldContain` [ "Add signature: main :: IO ()"] - - executeCodeAction $ head cas - - contents <- documentContents doc - - let expected = [ "{-# OPTIONS_GHC -Wall #-}" - , "module TopLevelSignature where" - , "main :: IO ()" - , "main = do" - , " putStrLn \"Hello\"" - , " return ()" - ] - - liftIO $ (T.lines contents) `shouldBe` expected - ] - -missingPragmaTests :: TestTree -missingPragmaTests = testGroup "missing pragma warning code actions" [ - ignoreTestBecause "Broken" $ testCase "Adds TypeSynonymInstances pragma" $ - runSession hieCommand fullCaps "test/testdata/addPragmas" $ do - doc <- openDoc "NeedsPragmas.hs" "haskell" - - _ <- waitForDiagnosticsSource "bios" - cas <- map fromAction <$> getAllCodeActions doc - - liftIO $ map (^. L.title) cas `shouldContain` [ "Add \"TypeSynonymInstances\""] - liftIO $ map (^. L.title) cas `shouldContain` [ "Add \"FlexibleInstances\""] - - executeCodeAction $ head cas - - contents <- getDocumentEdit doc - - let expected = [ "{-# LANGUAGE TypeSynonymInstances #-}" - , "" - , "import GHC.Generics" - , "" - , "main = putStrLn \"hello\"" - , "" - , "type Foo = Int" - , "" - , "instance Show Foo where" - , " show x = undefined" - , "" - , "instance Show (Int,String) where" - , " show = undefined" - , "" - , "data FFF a = FFF Int String a" - , " deriving (Generic,Functor,Traversable)" - ] - - liftIO $ (T.lines contents) `shouldBe` expected - ] - -unusedTermTests :: TestTree -unusedTermTests = testGroup "unused term code actions" [ - -- ignoreTestBecause "Broken" $ testCase "Prefixes with '_'" $ pendingWith "removed because of HaRe" - -- runSession hieCommand fullCaps "test/testdata/" $ do - -- doc <- openDoc "UnusedTerm.hs" "haskell" - -- - -- _ <- waitForDiagnosticsSource "bios" - -- cas <- map fromAction <$> getAllCodeActions doc - -- - -- liftIO $ map (^. 
L.title) cas `shouldContain` [ "Prefix imUnused with _"] - -- - -- executeCodeAction $ head cas - -- - -- edit <- getDocumentEdit doc - -- - -- let expected = [ "{-# OPTIONS_GHC -Wall #-}" - -- , "module UnusedTerm () where" - -- , "_imUnused :: Int -> Int" - -- , "_imUnused 1 = 1" - -- , "_imUnused 2 = 2" - -- , "_imUnused _ = 3" - -- ] - -- - -- liftIO $ edit `shouldBe` T.unlines expected - - -- See https://microsoft.github.io/language-server-protocol/specifications/specification-3-15/#textDocument_codeAction - -- `CodeActionContext` - ignoreTestBecause "Broken" $ testCase "respect 'only' parameter" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "CodeActionOnly.hs" "haskell" - _ <- count 2 waitForDiagnostics -- need to wait for both hlint and ghcmod - diags <- getCurrentDiagnostics doc - let params = CodeActionParams doc (Range (Position 2 10) (Position 4 0)) caContext Nothing - caContext = CodeActionContext (List diags) (Just (List [CodeActionRefactorInline])) - ResponseMessage _ _ (Right (List res)) <- request TextDocumentCodeAction params - let cas = map fromAction res - kinds = map (^. L.kind) cas - liftIO $ do - -- TODO: When HaRe is back this should be uncommented - -- kinds `shouldNotSatisfy` null - kinds `shouldNotSatisfy` any (Just CodeActionRefactorInline /=) - kinds `shouldSatisfy` all (Just CodeActionRefactorInline ==) - ] - -fromAction :: CAResult -> CodeAction -fromAction (CACodeAction action) = action -fromAction _ = error "Not a code action" - -noLiteralCaps :: C.ClientCapabilities -noLiteralCaps = def { C._textDocument = Just textDocumentCaps } - where - textDocumentCaps = def { C._codeAction = Just codeActionCaps } - codeActionCaps = C.CodeActionClientCapabilities (Just True) Nothing diff --git a/test/functional/FunctionalLiquid.hs b/test/functional/FunctionalLiquid.hs deleted file mode 100644 index 7cd8bb6557..0000000000 --- a/test/functional/FunctionalLiquid.hs +++ /dev/null @@ -1,101 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -module FunctionalLiquid (tests) where - -import Control.Lens hiding (List) -import Control.Monad.IO.Class -import Data.Aeson -import Data.Default -import qualified Data.Text as T -import Language.Haskell.LSP.Test hiding (message) -import Language.Haskell.LSP.Types as LSP -import Language.Haskell.LSP.Types.Lens as LSP hiding (contents) -import Ide.Plugin.Config -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - --- --------------------------------------------------------------------- - -tests :: TestTree -tests = testGroup "liquid haskell diagnostics" [ - ignoreTestBecause "Broken" $ testCase "runs diagnostics on save, no liquid" $ - runSession hieCommandExamplePlugin codeActionSupportCaps "test/testdata" $ do - doc <- openDoc "liquid/Evens.hs" "haskell" - - diags@(reduceDiag:_) <- waitForDiagnostics - - liftIO $ do - length diags `shouldBe` 2 - reduceDiag ^. range `shouldBe` Range (Position 5 18) (Position 5 22) - reduceDiag ^. severity `shouldBe` Just DsHint - reduceDiag ^. code `shouldBe` Just (StringValue "Use negate") - reduceDiag ^. source `shouldBe` Just "hlint" - - diags2hlint <- waitForDiagnostics - - liftIO $ length diags2hlint `shouldBe` 2 - - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - - diags3@(d:_) <- waitForDiagnosticsSource "eg2" - - liftIO $ do - length diags3 `shouldBe` 1 - d ^. LSP.range `shouldBe` Range (Position 0 0) (Position 1 0) - d ^. 
LSP.severity `shouldBe` Nothing - d ^. LSP.code `shouldBe` Nothing - d ^. LSP.message `shouldBe` T.pack "Example plugin diagnostic, triggered byDiagnosticOnSave" - - -- --------------------------------- - - , ignoreTestBecause "Broken" $ testCase "runs diagnostics on save, with liquid haskell" $ - runSession hieCommand codeActionSupportCaps "test/testdata" $ do - -- runSessionWithConfig logConfig hieCommand codeActionSupportCaps "test/testdata" $ do - doc <- openDoc "liquid/Evens.hs" "haskell" - - diags@(reduceDiag:_) <- waitForDiagnostics - - -- liftIO $ show diags `shouldBe` "" - - liftIO $ do - length diags `shouldBe` 2 - reduceDiag ^. range `shouldBe` Range (Position 5 18) (Position 5 22) - reduceDiag ^. severity `shouldBe` Just DsHint - reduceDiag ^. code `shouldBe` Just (StringValue "Use negate") - reduceDiag ^. source `shouldBe` Just "hlint" - - -- Enable liquid haskell plugin and disable hlint - let config = def { liquidOn = True, hlintOn = False } - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON config)) - - -- docItem <- getDocItem file languageId - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - -- TODO: what does that test? - -- TODO: whether hlint is really disbabled? - -- TODO: @fendor, document or remove - -- diags2hlint <- waitForDiagnostics - -- -- liftIO $ show diags2hlint `shouldBe` "" - - -- -- We turned hlint diagnostics off - -- liftIO $ length diags2hlint `shouldBe` 0 - -- diags2liquid <- waitForDiagnostics - -- liftIO $ length diags2liquid `shouldBe` 0 - -- liftIO $ show diags2liquid `shouldBe` "" - diags3@(d:_) <- waitForDiagnosticsSource "liquid" - -- liftIO $ show diags3 `shouldBe` "" - liftIO $ do - length diags3 `shouldBe` 1 - d ^. range `shouldBe` Range (Position 8 0) (Position 8 11) - d ^. severity `shouldBe` Just DsError - d ^. code `shouldBe` Nothing - d ^. source `shouldBe` Just "liquid" - d ^. 
message `shouldSatisfy` T.isPrefixOf ("Error: Liquid Type Mismatch\n" <> - " Inferred type\n" <> - " VV : {v : GHC.Types.Int | v == 7}\n" <> - " \n" <> - " not a subtype of Required type\n" <> - " VV : {VV : GHC.Types.Int | VV mod 2 == 0}\n ") - ] \ No newline at end of file diff --git a/test/functional/HieBios.hs b/test/functional/HieBios.hs index f1c58f1928..5a06026b53 100644 --- a/test/functional/HieBios.hs +++ b/test/functional/HieBios.hs @@ -1,36 +1,19 @@ {-# LANGUAGE OverloadedStrings #-} module HieBios (tests) where -import Control.Applicative.Combinators -import qualified Data.Text as T -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.Messages -import System.FilePath (()) -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit +import Control.Lens ((^.)) +import Control.Monad.IO.Class +import qualified Data.Text as T +import qualified Language.LSP.Protocol.Lens as L +import Test.Hls +import Test.Hls.Command tests :: TestTree -tests = testGroup "hie-bios" [ - ignoreTestBecause "Broken" $ testCase "loads modules inside main-is" $ do - writeFile (hieBiosErrorPath "hie.yaml") "" - runSession hieCommand fullCaps "test/testdata/hieBiosMainIs" $ do - _ <- openDoc "Main.hs" "haskell" - _ <- count 2 waitForDiagnostics - return () - - , ignoreTestBecause "Broken" $ testCase "reports errors in hie.yaml" $ do - writeFile (hieBiosErrorPath "hie.yaml") "" - runSession hieCommand fullCaps hieBiosErrorPath $ do - _ <- openDoc "Foo.hs" "haskell" - _ <- skipManyTill loggingNotification (satisfy isMessage) - return () - ] - where - hieBiosErrorPath = "test/testdata/hieBiosError" - - isMessage (NotShowMessage (NotificationMessage _ _ (ShowMessageParams MtError s))) = - "Couldn't parse hie.yaml" `T.isInfixOf` s - isMessage _ = False +tests = testGroup "hie-bios" + [ testCase "loads main-is module" $ do + runSession hlsLspCommand fullLatestClientCaps "test/testdata/hieBiosMainIs" $ do + _ <- openDoc "Main.hs" "haskell" + (diag:_) <- waitForDiagnostics + liftIO $ "Top-level binding with no type signature:" `T.isInfixOf` (diag ^. L.message) + @? 
"Expected missing top-level binding diagnostic" + ] diff --git a/test/functional/Highlight.hs b/test/functional/Highlight.hs deleted file mode 100644 index 07031785c9..0000000000 --- a/test/functional/Highlight.hs +++ /dev/null @@ -1,29 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module Highlight (tests) where - -import Control.Applicative.Combinators -import Control.Monad.IO.Class -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -tests :: TestTree -tests = testGroup "highlight" [ - ignoreTestBecause "Broken" $ testCase "works" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Highlight.hs" "haskell" - _ <- count 2 $ skipManyTill loggingNotification noDiagnostics - highlights <- getHighlights doc (Position 2 2) - liftIO $ do - let hls = - [ DocumentHighlight (mkRange 2 0 2 3) (Just HkWrite) - , DocumentHighlight (mkRange 4 22 4 25) (Just HkRead) - , DocumentHighlight (mkRange 3 6 3 9) (Just HkRead) - , DocumentHighlight (mkRange 1 0 1 3) (Just HkRead)] - mapM_ (\x -> highlights `shouldContain` [x]) hls - ] - where - mkRange sl sc el ec = Range (Position sl sc) (Position el ec) diff --git a/test/functional/Main.hs b/test/functional/Main.hs index 328a0e502f..daa342f694 100644 --- a/test/functional/Main.hs +++ b/test/functional/Main.hs @@ -1,52 +1,19 @@ module Main where -import Test.Tasty -import Test.Tasty.Runners (listingTests, consoleTestReporter) -import Test.Tasty.Ingredients.Rerun -import Test.Tasty.Runners.AntXML - -import Command -import Completion -import Deferred -import Definition -import Diagnostic -import Eval -import Format -import FunctionalBadProject -import FunctionalCodeAction -import FunctionalLiquid -import HieBios -import Highlight -import Progress -import Reference -import Rename -import Symbol -import TypeDefinition +import Config +import ConfigSchema +import Format +import FunctionalBadProject +import HieBios +import Progress +import Test.Hls main :: IO () -main = - -- ingredient: xml runner writes json file of test results (https://github.com/ocharles/tasty-ant-xml/blob/master/Test/Tasty/Runners/AntXML.hs) - -- rerunningTests allow rerun of failed tests (https://github.com/ocharles/tasty-rerun/blob/master/src/Test/Tasty/Ingredients/Rerun.hs) - defaultMainWithIngredients [ - antXMLRunner - , rerunningTests [ listingTests, consoleTestReporter ] - ] - $ testGroup "haskell-language-server" [ - Command.tests - , Completion.tests - , Deferred.tests - , Definition.tests - , Diagnostic.tests - , Eval.tests - , Format.tests - , FunctionalBadProject.tests - , FunctionalCodeAction.tests - , FunctionalLiquid.tests - , HieBios.tests - , Highlight.tests - , Progress.tests - , Reference.tests - , Rename.tests - , Symbol.tests - , TypeDefinition.tests - ] +main = defaultTestRunner $ testGroup "haskell-language-server" + [ Config.tests + , ConfigSchema.tests + , ignoreInEnv [HostOS Windows] "Tests gets stuck in ci" Format.tests + , FunctionalBadProject.tests + , HieBios.tests + , ignoreInEnv [HostOS Windows] "Tests gets stuck in ci" Progress.tests + ] diff --git a/test/functional/Progress.hs b/test/functional/Progress.hs index 82daa4e429..ed82a02350 100644 --- a/test/functional/Progress.hs +++ b/test/functional/Progress.hs @@ -1,119 +1,172 @@ -{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} 
+{-# LANGUAGE GADTs #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} module Progress (tests) where -import Control.Applicative.Combinators -import Control.Lens -import Control.Monad.IO.Class -import Data.Aeson -import Data.Default -import Ide.Plugin.Config -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Messages -- TODO: Move this into haskell-lsp-types -import Language.Haskell.LSP.Types -import qualified Language.Haskell.LSP.Types.Lens as L -import Language.Haskell.LSP.Types.Capabilities -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations +import Control.Exception (throw) +import Control.Lens hiding ((.=)) +import Data.Aeson (decode, encode) +import Data.Functor (void) +import Data.List (delete) +import Data.Maybe (fromJust) +import Data.Text (Text, pack) +import Ide.Types +import Language.LSP.Protocol.Capabilities +import qualified Language.LSP.Protocol.Lens as L +import Test.Hls +import Test.Hls.Command +import Test.Hls.Flags -tests :: TestTree -tests = testGroup "window/workDoneProgress" [ - ignoreTestBecause "Broken" $ testCase "sends indefinite progress notifications" $ - -- Testing that ghc-mod sends progress notifications - runSession hieCommand progressCaps "test/testdata" $ do - doc <- openDoc "ApplyRefact2.hs" "haskell" - - skipMany loggingNotification - - createRequest <- message :: Session WorkDoneProgressCreateRequest - liftIO $ do - createRequest ^. L.params `shouldBe` WorkDoneProgressCreateParams (ProgressNumericToken 0) - - startNotification <- message :: Session WorkDoneProgressBeginNotification - liftIO $ do - -- Expect a stack cradle, since the given `hie.yaml` is expected - -- to contain a multi-stack cradle. - startNotification ^. L.params . L.value . L.title `shouldBe` "Initializing Stack project" - startNotification ^. L.params . L.token `shouldBe` (ProgressNumericToken 0) - - reportNotification <- message :: Session WorkDoneProgressReportNotification - liftIO $ do - reportNotification ^. L.params . L.value . L.message `shouldBe` Just "Main" - reportNotification ^. L.params . L.token `shouldBe` (ProgressNumericToken 0) - - -- may produce diagnostics - skipMany publishDiagnosticsNotification - - doneNotification <- message :: Session WorkDoneProgressEndNotification - liftIO $ doneNotification ^. L.params . L.token `shouldBe` (ProgressNumericToken 0) - - -- Initial hlint notifications - _ <- publishDiagnosticsNotification - - -- Test incrementing ids - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - - createRequest' <- skipManyTill loggingNotification (message :: Session WorkDoneProgressCreateRequest) - liftIO $ do - createRequest' ^. L.params `shouldBe` WorkDoneProgressCreateParams (ProgressNumericToken 1) - - startNotification' <- message :: Session WorkDoneProgressBeginNotification - liftIO $ do - startNotification' ^. L.params . L.value . L.title `shouldBe` "loading" - startNotification' ^. L.params . L.token `shouldBe` (ProgressNumericToken 1) - - reportNotification' <- message :: Session WorkDoneProgressReportNotification - liftIO $ do - reportNotification' ^. L.params . L.value . L.message `shouldBe` Just "Main" - reportNotification' ^. L.params . L.token `shouldBe` (ProgressNumericToken 1) - doneNotification' <- message :: Session WorkDoneProgressEndNotification - liftIO $ doneNotification' ^. L.params . 
L.token `shouldBe` (ProgressNumericToken 1) - - -- Initial hlint notifications - _ <- publishDiagnosticsNotification - return () - - , ignoreTestBecause "Broken" $ testCase "sends indefinite progress notifications with liquid" $ - -- Testing that Liquid Haskell sends progress notifications - runSession hieCommand progressCaps "test/testdata" $ do - doc <- openDoc "liquid/Evens.hs" "haskell" - - skipMany loggingNotification - - _ <- message :: Session WorkDoneProgressCreateRequest - _ <- message :: Session WorkDoneProgressBeginNotification - _ <- message :: Session WorkDoneProgressReportNotification - _ <- message :: Session WorkDoneProgressEndNotification - - -- the hie-bios diagnostics - _ <- skipManyTill loggingNotification publishDiagnosticsNotification - - -- Enable liquid haskell plugin - let config = def { liquidOn = True, hlintOn = False } - sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON config)) - - -- Test liquid - sendNotification TextDocumentDidSave (DidSaveTextDocumentParams doc) - - -- hlint notifications - -- TODO: potential race between typechecking, e.g. context intialisation - -- TODO: and disabling hlint notifications - -- _ <- skipManyTill loggingNotification publishDiagnosticsNotification - - let startPred (NotWorkDoneProgressBegin m) = - m ^. L.params . L.value . L.title == "Running Liquid Haskell on Evens.hs" - startPred _ = False - - let donePred (NotWorkDoneProgressEnd _) = True - donePred _ = False - - _ <- skipManyTill anyMessage $ between (satisfy startPred) (satisfy donePred) $ - many (satisfy (\x -> not (startPred x || donePred x))) - return () - ] +tests :: TestTree +tests = + testGroup + "window/workDoneProgress" + [ testCase "sends indefinite progress notifications" $ + runSession hlsLspCommand progressCaps "test/testdata/diagnostics" $ do + let path = "Foo.hs" + _ <- openDoc path "haskell" + expectProgressMessages [pack ("Setting up diagnostics (for " ++ path ++ ")"), "Processing", "Indexing"] [] [] + , requiresEvalPlugin $ testCase "eval plugin sends progress reports" $ + runSession hlsLspCommand progressCaps "plugins/hls-eval-plugin/test/testdata" $ do + doc <- openDoc "TIO.hs" "haskell" + lspId <- sendRequest SMethod_TextDocumentCodeLens (CodeLensParams Nothing Nothing doc) + + (codeLensResponse, createdProgressTokens, activeProgressTokens) <- expectProgressMessagesTill + (responseForId SMethod_TextDocumentCodeLens lspId) + ["Setting up testdata (for TIO.hs)", "Processing"] + [] + [] + + -- this is a test so exceptions result in fails + let response = getMessageResult codeLensResponse + case response of + InL [evalLens] -> do + let command = evalLens ^?! L.command . _Just + + _ <- sendRequest SMethod_WorkspaceExecuteCommand $ + ExecuteCommandParams + Nothing + (command ^. L.command) + (decode $ encode $ fromJust $ command ^. 
L.arguments) + + expectProgressMessages ["Evaluating"] createdProgressTokens activeProgressTokens + _ -> error $ "Unexpected response result: " ++ show response + , requiresOrmoluPlugin $ testCase "ormolu plugin sends progress notifications" $ do + runSessionWithConfig (def { ignoreConfigurationRequests = False }) hlsLspCommand progressCaps "test/testdata/format" $ do + void configurationRequest + setHlsConfig (formatLspConfig "ormolu") + doc <- openDoc "Format.hs" "haskell" + expectProgressMessages ["Setting up format (for Format.hs)", "Processing", "Indexing"] [] [] + _ <- sendRequest SMethod_TextDocumentFormatting $ DocumentFormattingParams Nothing doc (FormattingOptions 2 True Nothing Nothing Nothing) + expectProgressMessages ["Formatting Format.hs"] [] [] + , requiresFourmoluPlugin $ testCase "fourmolu plugin sends progress notifications" $ do + runSessionWithConfig (def { ignoreConfigurationRequests = False }) hlsLspCommand progressCaps "test/testdata/format" $ do + void configurationRequest + setHlsConfig (formatLspConfig "fourmolu") + doc <- openDoc "Format.hs" "haskell" + expectProgressMessages ["Setting up format (for Format.hs)", "Processing", "Indexing"] [] [] + _ <- sendRequest SMethod_TextDocumentFormatting $ DocumentFormattingParams Nothing doc (FormattingOptions 2 True Nothing Nothing Nothing) + expectProgressMessages ["Formatting Format.hs"] [] [] + ] + +formatLspConfig :: Text -> Config +formatLspConfig provider = def { formattingProvider = provider } progressCaps :: ClientCapabilities -progressCaps = fullCaps { _window = Just (WindowClientCapabilities (Just True)) } +progressCaps = fullLatestClientCaps{_window = Just (WindowClientCapabilities (Just True) Nothing Nothing)} + +data ProgressMessage + = ProgressCreate WorkDoneProgressCreateParams + | ProgressBegin ProgressToken WorkDoneProgressBegin + | ProgressReport ProgressToken WorkDoneProgressReport + | ProgressEnd ProgressToken WorkDoneProgressEnd + +data InterestingMessage a + = InterestingMessage a + | ProgressMessage ProgressMessage + +progressMessage :: Session ProgressMessage +progressMessage = + progressCreate <|> progressBegin <|> progressReport <|> progressEnd + where + progressCreate = ProgressCreate . 
view L.params <$> message SMethod_WindowWorkDoneProgressCreate + progressBegin :: Session ProgressMessage + progressBegin = satisfyMaybe (\case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams t (preview _workDoneProgressBegin -> Just params))) -> + Just (ProgressBegin t params) + _ -> Nothing) + progressReport :: Session ProgressMessage + progressReport = satisfyMaybe (\case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams t (preview _workDoneProgressReport -> Just params))) -> + Just (ProgressReport t params) + _ -> Nothing) + progressEnd :: Session ProgressMessage + progressEnd = satisfyMaybe (\case + FromServerMess SMethod_Progress (TNotificationMessage _ _ (ProgressParams t (preview _workDoneProgressEnd -> Just params))) + -> Just (ProgressEnd t params) + _ -> Nothing) + +interestingMessage :: Session a -> Session (InterestingMessage a) +interestingMessage theMessage = + fmap InterestingMessage theMessage <|> fmap ProgressMessage progressMessage + +expectProgressMessagesTill :: Session a -> [Text] -> [ProgressToken] -> [ProgressToken] -> Session (a, [ProgressToken], [ProgressToken]) +expectProgressMessagesTill stopMessage expectedTitles createdProgressTokens activeProgressTokens = do + message <- skipManyTill anyMessage (interestingMessage stopMessage) + case message of + InterestingMessage a -> do + liftIO $ null expectedTitles @? "Expected titles not empty " <> show expectedTitles + pure (a, createdProgressTokens, activeProgressTokens) + ProgressMessage progressMessage -> + updateExpectProgressStateAndRecurseWith + (expectProgressMessagesTill stopMessage) + progressMessage + expectedTitles + createdProgressTokens + activeProgressTokens + +{- | Test that the server is correctly producing a sequence of progress related + messages. Creates can be dangling, but should be paired with a corresponding begin and end, + optionally with some progress in between. Tokens must match. The begin + messages have titles describing the work that is in-progress, we check that + the titles we see are those we expect. +-} +expectProgressMessages :: [Text] -> [ProgressToken] -> [ProgressToken] -> Session () +expectProgressMessages [] _ [] = pure () +expectProgressMessages expectedTitles createdProgressTokens activeProgressTokens = do + message <- skipManyTill anyMessage progressMessage + updateExpectProgressStateAndRecurseWith expectProgressMessages message expectedTitles createdProgressTokens activeProgressTokens + +updateExpectProgressStateAndRecurseWith :: ([Text] -> [ProgressToken] -> [ProgressToken] -> Session a) + -> ProgressMessage + -> [Text] + -> [ProgressToken] + -> [ProgressToken] + -> Session a +updateExpectProgressStateAndRecurseWith f progressMessage expectedTitles createdProgressTokens activeProgressTokens = do + case progressMessage of + ProgressCreate params -> do + f expectedTitles ((params ^. L.token): createdProgressTokens) activeProgressTokens + ProgressBegin token params -> do + liftIO $ token `expectedIn` createdProgressTokens + f (delete (params ^. 
L.title) expectedTitles) (delete token createdProgressTokens) (token:activeProgressTokens) + ProgressReport token _ -> do + liftIO $ token `expectedIn` activeProgressTokens + f expectedTitles createdProgressTokens activeProgressTokens + ProgressEnd token _ -> do + liftIO $ token `expectedIn` activeProgressTokens + f expectedTitles createdProgressTokens (delete token activeProgressTokens) + + +expectedIn :: (Foldable t, Eq a, Show a) => a -> t a -> Assertion +expectedIn a as = a `elem` as @? "Unexpected " ++ show a + +getMessageResult :: Show (ErrorData m) => TResponseMessage m -> MessageResult m +getMessageResult rsp = + case rsp ^. L.result of + Right x -> x + Left err -> throw $ UnexpectedResponseError (fromJust $ rsp ^. L.id) err diff --git a/test/functional/Reference.hs b/test/functional/Reference.hs deleted file mode 100644 index 173e42515b..0000000000 --- a/test/functional/Reference.hs +++ /dev/null @@ -1,35 +0,0 @@ -module Reference (tests) where - -import Control.Lens -import Control.Monad.IO.Class -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.Types.Lens -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -tests :: TestTree -tests = testGroup "references" [ - ignoreTestBecause "Broken" $ testCase "works with definitions" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "References.hs" "haskell" - let pos = Position 2 7 -- foo = bar <-- - refs <- getReferences doc pos True - liftIO $ refs `shouldContain` map (Location (doc ^. uri)) [ - mkRange 4 0 4 3 - , mkRange 8 11 8 14 - , mkRange 7 7 7 10 - , mkRange 4 14 4 17 - , mkRange 4 0 4 3 - , mkRange 2 6 2 9 - ] - -- TODO: Respect withDeclaration parameter - -- ignoreTestBecause "Broken" $ testCase "works without definitions" $ runSession hieCommand fullCaps "test/testdata" $ do - -- doc <- openDoc "References.hs" "haskell" - -- let pos = Position 2 7 -- foo = bar <-- - -- refs <- getReferences doc pos False - -- liftIO $ refs `shouldNotContain` [Location (doc ^. uri) (mkRange 4 0 4 3)] - ] - where mkRange sl sc el ec = Range (Position sl sc) (Position el ec) diff --git a/test/functional/Rename.hs b/test/functional/Rename.hs deleted file mode 100644 index 0cecd1c73d..0000000000 --- a/test/functional/Rename.hs +++ /dev/null @@ -1,28 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module Rename (tests) where - --- import Control.Monad.IO.Class --- import Language.Haskell.LSP.Test --- import Language.Haskell.LSP.Types --- import Test.Hls.Util -import Test.Tasty -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -tests :: TestTree -tests = testGroup "rename" [ - testCase "works" $ True `shouldBe` True - -- pendingWith "removed because of HaRe" - -- runSession hieCommand fullCaps "test/testdata" $ do - -- doc <- openDoc "Rename.hs" "haskell" - -- rename doc (Position 3 1) "baz" -- foo :: Int -> Int - -- documentContents doc >>= liftIO . flip shouldBe expected - -- where - -- expected = - -- "main = do\n\ - -- \ x <- return $ baz 42\n\ - -- \ return (baz x)\n\ - -- \baz :: Int -> Int\n\ - -- \baz x = x + 1\n\ - -- \bar = (+ 1) . 
baz\n" - ] \ No newline at end of file diff --git a/test/functional/Symbol.hs b/test/functional/Symbol.hs deleted file mode 100644 index 066b87b71c..0000000000 --- a/test/functional/Symbol.hs +++ /dev/null @@ -1,114 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module Symbol (tests) where - -import Control.Monad.IO.Class -import Language.Haskell.LSP.Test as Test -import Language.Haskell.LSP.Types -import Language.Haskell.LSP.Types.Capabilities -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -tests :: TestTree -tests = testGroup "document symbols" [ - pre310Tests - , v310Tests - ] - -v310Tests :: TestTree -v310Tests = testGroup "3.10 hierarchical document symbols" [ - ignoreTestBecause "Broken" $ testCase "provides nested data types and constructors" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Symbols.hs" "haskell" - Left symbs <- getDocumentSymbols doc - - let myData = DocumentSymbol "MyData" (Just "") SkClass Nothing myDataR myDataSR (Just (List [a, b])) - a = DocumentSymbol "A" (Just "") SkConstructor Nothing aR aSR (Just mempty) - b = DocumentSymbol "B" (Just "") SkConstructor Nothing bR bSR (Just mempty) - - liftIO $ symbs `shouldContain` [myData] - - ,ignoreTestBecause "Broken" $ testCase "provides nested where functions" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Symbols.hs" "haskell" - Left symbs <- getDocumentSymbols doc - - let foo = DocumentSymbol "foo" (Just "") SkFunction Nothing fooR fooSR (Just (List [bar])) - bar = DocumentSymbol "bar" (Just "") SkFunction Nothing barR barSR (Just (List [dog, cat])) - dog = DocumentSymbol "dog" (Just "") SkVariable Nothing dogR dogSR (Just mempty) - cat = DocumentSymbol "cat" (Just "") SkVariable Nothing catR catSR (Just mempty) - - liftIO $ symbs `shouldContain` [foo] - - , ignoreTestBecause "Broken" $ testCase "provides pattern synonyms" $ runSession hieCommand fullCaps "test/testdata" $ do - doc <- openDoc "Symbols.hs" "haskell" - Left symbs <- getDocumentSymbols doc - - let testPattern = DocumentSymbol "TestPattern" - (Just "") SkFunction Nothing testPatternR testPatternSR (Just mempty) - - liftIO $ symbs `shouldContain` [testPattern] - ] - --- TODO: Test module, imports - -pre310Tests :: TestTree -pre310Tests = testGroup "pre 3.10 symbol information" [ - ignoreTestBecause "Broken" $ testCase "provides nested data types and constructors" $ runSession hieCommand oldCaps "test/testdata" $ do - doc@(TextDocumentIdentifier testUri) <- openDoc "Symbols.hs" "haskell" - Right symbs <- getDocumentSymbols doc - - let myData = SymbolInformation "MyData" SkClass Nothing (Location testUri myDataR) Nothing - a = SymbolInformation "A" SkConstructor Nothing (Location testUri aR) (Just "MyData") - b = SymbolInformation "B" SkConstructor Nothing (Location testUri bR) (Just "MyData") - - liftIO $ symbs `shouldContain` [myData, a, b] - - ,ignoreTestBecause "Broken" $ testCase "provides nested where functions" $ runSession hieCommand oldCaps "test/testdata" $ do - doc@(TextDocumentIdentifier testUri) <- openDoc "Symbols.hs" "haskell" - Right symbs <- getDocumentSymbols doc - - let foo = SymbolInformation "foo" SkFunction Nothing (Location testUri fooR) Nothing - bar = SymbolInformation "bar" SkFunction Nothing (Location testUri barR) (Just "foo") - dog = SymbolInformation "dog" SkVariable Nothing (Location testUri dogR) (Just "bar") - cat = SymbolInformation "cat" SkVariable Nothing (Location 
testUri catR) (Just "bar") - - -- Order is important! - liftIO $ symbs `shouldContain` [foo, bar, dog, cat] - ] - -oldCaps :: ClientCapabilities -oldCaps = capsForVersion (LSPVersion 3 9) --- Some common ranges and selection ranges in Symbols.hs -fooSR :: Range -fooSR = Range (Position 5 0) (Position 5 3) -fooR :: Range -fooR = Range (Position 5 0) (Position 7 43) -barSR :: Range -barSR = Range (Position 6 8) (Position 6 11) -barR :: Range -barR = Range (Position 6 8) (Position 7 43) -dogSR :: Range -dogSR = Range (Position 7 17) (Position 7 20) -dogR :: Range -dogR = Range (Position 7 16) (Position 7 43) -catSR :: Range -catSR = Range (Position 7 22) (Position 7 25) -catR :: Range -catR = Range (Position 7 16) (Position 7 43) -myDataSR :: Range -myDataSR = Range (Position 9 5) (Position 9 11) -myDataR :: Range -myDataR = Range (Position 9 0) (Position 10 22) -aSR :: Range -aSR = Range (Position 9 14) (Position 9 15) -aR :: Range -aR = Range (Position 9 14) (Position 9 19) -bSR :: Range -bSR = Range (Position 10 14) (Position 10 15) -bR :: Range -bR = Range (Position 10 14) (Position 10 22) -testPatternSR :: Range -testPatternSR = Range (Position 13 8) (Position 13 19) -testPatternR :: Range -testPatternR = Range (Position 13 0) (Position 13 27) \ No newline at end of file diff --git a/test/functional/TypeDefinition.hs b/test/functional/TypeDefinition.hs deleted file mode 100644 index 893448d36d..0000000000 --- a/test/functional/TypeDefinition.hs +++ /dev/null @@ -1,108 +0,0 @@ -module TypeDefinition (tests) where - -import Control.Monad.IO.Class -import Language.Haskell.LSP.Test -import Language.Haskell.LSP.Types -import System.Directory -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.ExpectedFailure (ignoreTestBecause) -import Test.Tasty.HUnit -import Test.Hspec.Expectations - -tests :: TestTree -tests = testGroup "type definitions" [ - ignoreTestBecause "Broken" $ testCase "finds local definition of record variable" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (11, 23)) - liftIO $ do - fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - defs - `shouldBe` [ Location (filePathToUri fp) - (Range (toPos (8, 1)) (toPos (8, 29))) - ] - , ignoreTestBecause "Broken" $ testCase "finds local definition of newtype variable" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (16, 21)) - liftIO $ do - fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - defs - `shouldBe` [ Location (filePathToUri fp) - (Range (toPos (13, 1)) (toPos (13, 30))) - ] - , ignoreTestBecause "Broken" $ testCase "finds local definition of sum type variable" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (21, 13)) - liftIO $ do - fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - defs - `shouldBe` [ Location (filePathToUri fp) - (Range (toPos (18, 1)) (toPos (18, 26))) - ] - , ignoreTestBecause "Broken" $ testCase "finds local definition of sum type contructor" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (24, 7)) - liftIO $ do - fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - defs - `shouldBe` [ Location (filePathToUri fp) - (Range (toPos (18, 1)) (toPos (18, 26))) - ] - , 
ignoreTestBecause "Broken" $ testCase "can not find non-local definition of type def" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (30, 17)) - liftIO $ defs `shouldBe` [] - - , ignoreTestBecause "Broken" $ testCase "find local definition of type def" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (35, 16)) - liftIO $ do - fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - defs - `shouldBe` [ Location (filePathToUri fp) - (Range (toPos (18, 1)) (toPos (18, 26))) - ] - - -- TODO Implement - -- , ignoreTestBecause "Broken" $ testCase "find type-definition of type def in component" - -- $ pendingWith "Finding symbols cross module is currently not supported" - -- $ runSession hieCommand fullCaps "test/testdata/gototest" - -- $ do - -- doc <- openDoc "src/Lib2.hs" "haskell" - -- otherDoc <- openDoc "src/Lib.hs" "haskell" - -- closeDoc otherDoc - -- defs <- getTypeDefinitions doc (toPos (13, 20)) - -- liftIO $ do - -- fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - -- defs - -- `shouldBe` [ Location (filePathToUri fp) - -- (Range (toPos (8, 1)) (toPos (8, 29))) - -- ] - , ignoreTestBecause "Broken" $ testCase "find definition of parameterized data type" - $ runSession hieCommand fullCaps "test/testdata/gototest" - $ do - doc <- openDoc "src/Lib.hs" "haskell" - defs <- getTypeDefinitions doc (toPos (40, 19)) - liftIO $ do - fp <- canonicalizePath "test/testdata/gototest/src/Lib.hs" - defs - `shouldBe` [ Location (filePathToUri fp) - (Range (toPos (37, 1)) (toPos (37, 31))) - ] - ] - ---NOTE: copied from Haskell.Ide.Engine.ArtifactMap -toPos :: (Int,Int) -> Position -toPos (l,c) = Position (l-1) (c-1) \ No newline at end of file diff --git a/test/testdata/ApplyRefact.hs b/test/testdata/ApplyRefact.hs deleted file mode 100644 index 984656fbcc..0000000000 --- a/test/testdata/ApplyRefact.hs +++ /dev/null @@ -1,4 +0,0 @@ - -main = (putStrLn "hello") - -foo x = (x + 1) diff --git a/test/testdata/ApplyRefactError.hs b/test/testdata/ApplyRefactError.hs deleted file mode 100644 index 89ad34d323..0000000000 --- a/test/testdata/ApplyRefactError.hs +++ /dev/null @@ -1,2 +0,0 @@ -foo :: forall a. 
(a -> a) -> a -> a -foo f x = f $ x diff --git a/test/testdata/BrittanyCRLF.formatted_document.hs b/test/testdata/BrittanyCRLF.formatted_document.hs deleted file mode 100644 index 13250a383e..0000000000 --- a/test/testdata/BrittanyCRLF.formatted_document.hs +++ /dev/null @@ -1,4 +0,0 @@ -foo :: Int -> String -> IO () -foo x y = do - print x - return 42 \ No newline at end of file diff --git a/test/testdata/BrittanyCRLF.formatted_range.hs b/test/testdata/BrittanyCRLF.formatted_range.hs deleted file mode 100644 index 13250a383e..0000000000 --- a/test/testdata/BrittanyCRLF.formatted_range.hs +++ /dev/null @@ -1,4 +0,0 @@ -foo :: Int -> String -> IO () -foo x y = do - print x - return 42 \ No newline at end of file diff --git a/test/testdata/BrittanyCRLF.hs b/test/testdata/BrittanyCRLF.hs deleted file mode 100644 index 2ed3293b3d..0000000000 --- a/test/testdata/BrittanyCRLF.hs +++ /dev/null @@ -1,3 +0,0 @@ -foo :: Int -> String-> IO () -foo x y = do print x - return 42 \ No newline at end of file diff --git a/test/testdata/BrittanyLF.formatted_document.hs b/test/testdata/BrittanyLF.formatted_document.hs deleted file mode 100644 index 13250a383e..0000000000 --- a/test/testdata/BrittanyLF.formatted_document.hs +++ /dev/null @@ -1,4 +0,0 @@ -foo :: Int -> String -> IO () -foo x y = do - print x - return 42 \ No newline at end of file diff --git a/test/testdata/BrittanyLF.formatted_range.hs b/test/testdata/BrittanyLF.formatted_range.hs deleted file mode 100644 index 13250a383e..0000000000 --- a/test/testdata/BrittanyLF.formatted_range.hs +++ /dev/null @@ -1,4 +0,0 @@ -foo :: Int -> String -> IO () -foo x y = do - print x - return 42 \ No newline at end of file diff --git a/test/testdata/BrittanyLF.hs b/test/testdata/BrittanyLF.hs deleted file mode 100644 index 4662d9b5a8..0000000000 --- a/test/testdata/BrittanyLF.hs +++ /dev/null @@ -1,3 +0,0 @@ -foo :: Int -> String-> IO () -foo x y = do print x - return 42 \ No newline at end of file diff --git a/test/testdata/CodeActionImport.hs b/test/testdata/CodeActionImport.hs deleted file mode 100644 index 95520bbd2f..0000000000 --- a/test/testdata/CodeActionImport.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO () -main = when True $ putStrLn "hello" \ No newline at end of file diff --git a/test/testdata/CodeActionImportBrittany.hs b/test/testdata/CodeActionImportBrittany.hs deleted file mode 100644 index af9cb0d2d4..0000000000 --- a/test/testdata/CodeActionImportBrittany.hs +++ /dev/null @@ -1,3 +0,0 @@ -import qualified Data.Maybe -main :: IO () -main = when True $ putStrLn "hello" \ No newline at end of file diff --git a/test/testdata/CodeActionImportList.hs b/test/testdata/CodeActionImportList.hs deleted file mode 100644 index 1a0d3ee3e8..0000000000 --- a/test/testdata/CodeActionImportList.hs +++ /dev/null @@ -1,6 +0,0 @@ --- | Main entry point to the program -main :: IO () -main = - when True - $ hPutStrLn stdout - $ fromMaybe "Good night, World!" (Just "Hello, World!") \ No newline at end of file diff --git a/test/testdata/CodeActionImportListElaborate.hs b/test/testdata/CodeActionImportListElaborate.hs deleted file mode 100644 index 63f9056982..0000000000 --- a/test/testdata/CodeActionImportListElaborate.hs +++ /dev/null @@ -1,9 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -import System.IO (IO) -import Data.List (find, head, last, tail, init, union, (\\), null, length, cons, uncons) --- | Main entry point to the program -main :: IO () -main = - when True - $ hPutStrLn stderr - $ fromMaybe "Good night, World!" 
(Just "Hello, World!") \ No newline at end of file diff --git a/test/testdata/CodeActionOnly.hs b/test/testdata/CodeActionOnly.hs deleted file mode 100644 index 1f8a403c8a..0000000000 --- a/test/testdata/CodeActionOnly.hs +++ /dev/null @@ -1,3 +0,0 @@ -module CodeActionOnly where -foo = bar - where bar = id Nothing \ No newline at end of file diff --git a/test/testdata/CodeActionRename.hs b/test/testdata/CodeActionRename.hs deleted file mode 100644 index 457d983b88..0000000000 --- a/test/testdata/CodeActionRename.hs +++ /dev/null @@ -1,2 +0,0 @@ -main = butStrLn "hello" -foo = putStrn "world" diff --git a/test/testdata/FileWithWarning.hs b/test/testdata/FileWithWarning.hs deleted file mode 100644 index 226e659d9b..0000000000 --- a/test/testdata/FileWithWarning.hs +++ /dev/null @@ -1,7 +0,0 @@ - -main = putStrLn "hello" - -foo = x - -bar x = do - return (3 + x) diff --git a/test/testdata/Format.formatted_range.hs b/test/testdata/Format.formatted_range.hs deleted file mode 100644 index 393584a9e4..0000000000 --- a/test/testdata/Format.formatted_range.hs +++ /dev/null @@ -1,12 +0,0 @@ -module Format where - -foo :: Int -> Int -foo 3 = 2 -foo x = x -bar :: String -> IO String -bar s = do - x <- return "hello" - return "asdf" - -data Baz = Baz { a :: Int, b :: String } - diff --git a/test/testdata/Format.formatted_range_with_tabsize.hs b/test/testdata/Format.formatted_range_with_tabsize.hs deleted file mode 100644 index 0a98f42e8f..0000000000 --- a/test/testdata/Format.formatted_range_with_tabsize.hs +++ /dev/null @@ -1,12 +0,0 @@ -module Format where -foo :: Int -> Int -foo 3 = 2 -foo x = x -bar :: String -> IO String -bar s = do - x <- return "hello" - return "asdf" - - -data Baz = Baz { a :: Int, b :: String } - diff --git a/test/testdata/Format.ormolu.unchanged.hs b/test/testdata/Format.ormolu.unchanged.hs deleted file mode 100644 index d4682acaa2..0000000000 --- a/test/testdata/Format.ormolu.unchanged.hs +++ /dev/null @@ -1,11 +0,0 @@ -module Format where -foo :: Int -> Int -foo 3 = 2 -foo x = x -bar :: String -> IO String -bar s = do - x <- return "hello" - return "asdf" - -data Baz = Baz { a :: Int, b :: String } - diff --git a/test/testdata/FuncTest.hs b/test/testdata/FuncTest.hs deleted file mode 100644 index 99ee963164..0000000000 --- a/test/testdata/FuncTest.hs +++ /dev/null @@ -1,13 +0,0 @@ -module Main where - -main = putStrLn "hello" - -foo :: Int -foo = bb - -bb = 5 - -baz = do - putStrLn "hello" - -f x = x+1 \ No newline at end of file diff --git a/test/testdata/FuncTestError.hs b/test/testdata/FuncTestError.hs deleted file mode 100644 index 48b47a22b6..0000000000 --- a/test/testdata/FuncTestError.hs +++ /dev/null @@ -1,15 +0,0 @@ -module Main where - -main = putStrLn "hello" - -foo :: Int -foo = bb - -bb = 5 - -bug -- no hlint returned because of this, despite redundant do below - -baz = do - putStrLn "hello" - -f x = x+1 diff --git a/test/testdata/FuncTestFail.hs b/test/testdata/FuncTestFail.hs deleted file mode 100644 index ac61d11137..0000000000 --- a/test/testdata/FuncTestFail.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO Int -main = return "yow diff --git a/test/testdata/GhcModCaseSplit.hs b/test/testdata/GhcModCaseSplit.hs deleted file mode 100644 index ad1ee0dd33..0000000000 --- a/test/testdata/GhcModCaseSplit.hs +++ /dev/null @@ -1,5 +0,0 @@ - -main = putStrLn "hello" - -foo :: Maybe Int -> () -foo x = () diff --git a/test/testdata/HaReCase.hs b/test/testdata/HaReCase.hs deleted file mode 100644 index 259cd8a597..0000000000 --- a/test/testdata/HaReCase.hs +++ /dev/null 
@@ -1,10 +0,0 @@ - -main = putStrLn "hello" - -foo :: Int -> Int -foo x = if odd x - then - x + 3 - else - x - diff --git a/test/testdata/HaReDemote.hs b/test/testdata/HaReDemote.hs deleted file mode 100644 index 0b6b8a85d7..0000000000 --- a/test/testdata/HaReDemote.hs +++ /dev/null @@ -1,6 +0,0 @@ - -main = putStrLn "hello" - -foo x = y + 3 - -y = 7 diff --git a/test/testdata/HaReGA1/HaReGA1.cabal b/test/testdata/HaReGA1/HaReGA1.cabal deleted file mode 100644 index add265b777..0000000000 --- a/test/testdata/HaReGA1/HaReGA1.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: HaReGA1 -version: 0.1.0.0 -cabal-version: >=2.0 -build-type: Simple - -executable harega - build-depends: base, parsec - main-is: HaReGA1.hs - default-language: Haskell2010 - diff --git a/test/testdata/HaReGA1/HaReGA1.hs b/test/testdata/HaReGA1/HaReGA1.hs deleted file mode 100644 index 4a2b2a57c6..0000000000 --- a/test/testdata/HaReGA1/HaReGA1.hs +++ /dev/null @@ -1,11 +0,0 @@ -module Main where -import Text.ParserCombinators.Parsec - -parseStr :: CharParser () String -parseStr = do - char '"' - str <- many1 (noneOf "\"") - char '"' - return str - -main = putStrLn "hello" diff --git a/test/testdata/HaReLift.hs b/test/testdata/HaReLift.hs deleted file mode 100644 index bc22926de8..0000000000 --- a/test/testdata/HaReLift.hs +++ /dev/null @@ -1,3 +0,0 @@ -module HaReLift where -foo = bar - where bar = "hello" \ No newline at end of file diff --git a/test/testdata/HaReMoveDef.hs b/test/testdata/HaReMoveDef.hs deleted file mode 100644 index f60053a6b1..0000000000 --- a/test/testdata/HaReMoveDef.hs +++ /dev/null @@ -1,14 +0,0 @@ - -main = putStrLn "hello" - -lifting x = x + y - where - y = 4 - -liftToTop x = x + y - where - y = z + 4 - where - z = 7 - - diff --git a/test/testdata/HaReRename.hs b/test/testdata/HaReRename.hs deleted file mode 100644 index 8183da35e7..0000000000 --- a/test/testdata/HaReRename.hs +++ /dev/null @@ -1,6 +0,0 @@ - -main = putStrLn "hello" - -foo :: Int -> Int -foo x = x + 3 - diff --git a/test/testdata/Highlight.hs b/test/testdata/Highlight.hs deleted file mode 100644 index 8d92d18779..0000000000 --- a/test/testdata/Highlight.hs +++ /dev/null @@ -1,5 +0,0 @@ -module Highlight where -foo :: Int -foo = 3 -bar = foo - where baz = let x = foo in x diff --git a/test/testdata/HlintNoRefactorings.hs b/test/testdata/HlintNoRefactorings.hs deleted file mode 100644 index 6721feb768..0000000000 --- a/test/testdata/HlintNoRefactorings.hs +++ /dev/null @@ -1,4 +0,0 @@ -main = putStrLn "hello" - -foo x = putStrLn x -bar y = id 42 \ No newline at end of file diff --git a/test/testdata/HlintParseFail.hs b/test/testdata/HlintParseFail.hs deleted file mode 100644 index 6730e7e601..0000000000 --- a/test/testdata/HlintParseFail.hs +++ /dev/null @@ -1,13 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE TypeOperators #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE TypeInType #-} -module Test where - -import Data.Singletons.Prelude -import Data.Singletons.TypeLits -import Data.Type.Equality ((:~:) (..), (:~~:) (..)) - -data instance Sing (z :: (a :~: b)) where - SRefl :: Sing Refl + diff --git a/test/testdata/HlintPragma.hs b/test/testdata/HlintPragma.hs deleted file mode 100644 index d308479ed1..0000000000 --- a/test/testdata/HlintPragma.hs +++ /dev/null @@ -1,4 +0,0 @@ -{-# ANN module ("hlint: ignore Redundant do" :: String) #-} - -main = do - putStrLn ("hello") diff --git a/test/testdata/Hover.hs b/test/testdata/Hover.hs deleted file mode 100644 index 977816c68f..0000000000 --- 
a/test/testdata/Hover.hs +++ /dev/null @@ -1,2 +0,0 @@ -main :: IO Int -main = return $ sum [1,2,3] diff --git a/test/testdata/References.hs b/test/testdata/References.hs deleted file mode 100644 index 34eb8c4e25..0000000000 --- a/test/testdata/References.hs +++ /dev/null @@ -1,9 +0,0 @@ -main = return () - -foo = bar - -bar = let x = bar 42 in const "hello" - -baz = do - x <- bar 23 - return $ bar 14 diff --git a/test/testdata/Rename.hs b/test/testdata/Rename.hs deleted file mode 100644 index 19f566795f..0000000000 --- a/test/testdata/Rename.hs +++ /dev/null @@ -1,6 +0,0 @@ -main = do - x <- return $ foo 42 - return (foo x) -foo :: Int -> Int -foo x = x + 1 -bar = (+ 1) . foo diff --git a/test/testdata/StylishHaksell.formatted_document.hs b/test/testdata/StylishHaksell.formatted_document.hs deleted file mode 100644 index c695ddcb59..0000000000 --- a/test/testdata/StylishHaksell.formatted_document.hs +++ /dev/null @@ -1,8 +0,0 @@ -import Data.Char -import qualified Data.List -import Data.String - -bar :: Maybe (Either String Integer) -> Integer -bar Nothing = 0 -bar (Just (Left _)) = 0 -bar (Just (Right x)) = x diff --git a/test/testdata/StylishHaksell.formatted_range.hs b/test/testdata/StylishHaksell.formatted_range.hs deleted file mode 100644 index 18f1fe7a0b..0000000000 --- a/test/testdata/StylishHaksell.formatted_range.hs +++ /dev/null @@ -1,8 +0,0 @@ -import Data.Char -import qualified Data.List -import Data.String - -bar :: Maybe (Either String Integer) -> Integer -bar Nothing = 0 -bar (Just (Left _)) = 0 -bar (Just (Right x)) = x diff --git a/test/testdata/Symbols.hs b/test/testdata/Symbols.hs deleted file mode 100644 index 4b36275306..0000000000 --- a/test/testdata/Symbols.hs +++ /dev/null @@ -1,14 +0,0 @@ -{-# LANGUAGE PatternSynonyms #-} -module Symbols where - -import Data.Maybe - -foo = bar - where bar = 42 + dog - where (dog, cat) = (1234, "meow") - -data MyData = A Int - | B String - -pattern TestPattern :: Int -> MyData -pattern TestPattern x = A x diff --git a/test/testdata/TopLevelSignature.hs b/test/testdata/TopLevelSignature.hs deleted file mode 100644 index 71322f2edc..0000000000 --- a/test/testdata/TopLevelSignature.hs +++ /dev/null @@ -1,5 +0,0 @@ -{-# OPTIONS_GHC -Wall #-} -module TopLevelSignature where -main = do - putStrLn "Hello" - return () diff --git a/test/testdata/TypedHoles.hs b/test/testdata/TypedHoles.hs deleted file mode 100644 index a471d611b3..0000000000 --- a/test/testdata/TypedHoles.hs +++ /dev/null @@ -1,3 +0,0 @@ -module TypedHoles where -foo :: [Int] -> Int -foo x = _ \ No newline at end of file diff --git a/test/testdata/TypedHoles2.hs b/test/testdata/TypedHoles2.hs deleted file mode 100644 index cc10d249cf..0000000000 --- a/test/testdata/TypedHoles2.hs +++ /dev/null @@ -1,6 +0,0 @@ -module TypedHoles2 (foo2) where -newtype A = A Int -foo2 :: [A] -> A -foo2 x = _ - where - stuff (A a) = A (a + 1) diff --git a/test/testdata/Types.hs b/test/testdata/Types.hs deleted file mode 100644 index 8d6b4338bb..0000000000 --- a/test/testdata/Types.hs +++ /dev/null @@ -1,33 +0,0 @@ -module Types where - -import Control.Applicative - -foo :: Maybe Int -> Int -foo (Just x) = x -foo Nothing = 0 - -bar :: Maybe Int -> Int -bar x = case x of - Just y -> y + 1 - Nothing -> 0 - -maybeMonad :: Maybe Int -> Maybe Int -maybeMonad x = do - y <- x - let z = return (y + 10) - b <- z - return (b + y) - -funcTest :: (a -> a) -> a -> a -funcTest f a = f a - -compTest :: (b -> c) -> (a -> b) -> a -> c -compTest f g = let h = f . 
g in h - -monadStuff :: (a -> b) -> IO a -> IO b -monadStuff f action = f <$> action - -data Test - = TestC Int - | TestM String - deriving (Show, Eq, Ord) \ No newline at end of file diff --git a/test/testdata/UnusedTerm.hs b/test/testdata/UnusedTerm.hs deleted file mode 100644 index e49c2e8d07..0000000000 --- a/test/testdata/UnusedTerm.hs +++ /dev/null @@ -1,6 +0,0 @@ -{-# OPTIONS_GHC -Wall #-} -module UnusedTerm () where -imUnused :: Int -> Int -imUnused 1 = 1 -imUnused 2 = 2 -imUnused _ = 3 diff --git a/test/testdata/addPackageTest/cabal-exe/AddPackage.hs b/test/testdata/addPackageTest/cabal-exe/AddPackage.hs deleted file mode 100644 index e1bbc6678d..0000000000 --- a/test/testdata/addPackageTest/cabal-exe/AddPackage.hs +++ /dev/null @@ -1,3 +0,0 @@ -import Data.Text -foo = pack "I'm a Text" -main = putStrLn "hello" diff --git a/test/testdata/addPackageTest/cabal-exe/add-package-test.cabal b/test/testdata/addPackageTest/cabal-exe/add-package-test.cabal deleted file mode 100644 index edd2a92a70..0000000000 --- a/test/testdata/addPackageTest/cabal-exe/add-package-test.cabal +++ /dev/null @@ -1,14 +0,0 @@ -name: add-package-test -version: 0.1.0.0 -license: BSD3 -author: Luke Lau -maintainer: luke_lau@icloud.com -build-type: Simple -extra-source-files: ChangeLog.md -cabal-version: >=1.10 - -executable AddPackage - exposed-modules: ./. - main-is: AddPackage.hs - build-depends: base >=4.7 && <5 - default-language: Haskell2010 \ No newline at end of file diff --git a/test/testdata/addPackageTest/cabal-lib/AddPackage.hs b/test/testdata/addPackageTest/cabal-lib/AddPackage.hs deleted file mode 100644 index 24015b598e..0000000000 --- a/test/testdata/addPackageTest/cabal-lib/AddPackage.hs +++ /dev/null @@ -1,4 +0,0 @@ -module AddPackage where - -import Data.Text -foo = pack "I'm a Text" \ No newline at end of file diff --git a/test/testdata/addPackageTest/cabal-lib/add-package-test.cabal b/test/testdata/addPackageTest/cabal-lib/add-package-test.cabal deleted file mode 100644 index f979fe1f64..0000000000 --- a/test/testdata/addPackageTest/cabal-lib/add-package-test.cabal +++ /dev/null @@ -1,14 +0,0 @@ -name: add-package-test -version: 0.1.0.0 -license: BSD3 -author: Luke Lau -maintainer: luke_lau@icloud.com -build-type: Simple -extra-source-files: ChangeLog.md -cabal-version: >=1.10 - -library - exposed-modules: AddPackage - build-depends: base >=4.7 && <5 - -- hs-source-dirs: - default-language: Haskell2010 diff --git a/test/testdata/addPackageTest/hpack-exe/app/Asdf.hs b/test/testdata/addPackageTest/hpack-exe/app/Asdf.hs deleted file mode 100644 index fdd639ffe3..0000000000 --- a/test/testdata/addPackageTest/hpack-exe/app/Asdf.hs +++ /dev/null @@ -1,5 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -import Codec.Compression.GZip - -main = return $ compress "hello" \ No newline at end of file diff --git a/test/testdata/addPackageTest/hpack-exe/asdf.cabal b/test/testdata/addPackageTest/hpack-exe/asdf.cabal deleted file mode 100644 index e39c61d39c..0000000000 --- a/test/testdata/addPackageTest/hpack-exe/asdf.cabal +++ /dev/null @@ -1,37 +0,0 @@ -cabal-version: 1.12 - --- This file has been generated from package.yaml by hpack version 0.32.0. 
--- --- see: https://github.com/sol/hpack --- --- hash: 69241e1f4f912f034502d225d2017f035c38062080733108c11cd3d111cb9007 - -name: asdf -version: 0.1.0.0 -description: Please see the README on GitHub at -homepage: https://github.com/githubuser/asdf#readme -bug-reports: https://github.com/githubuser/asdf/issues -author: Author name here -maintainer: example@example.com -copyright: 2018 Author name here -license: BSD3 -build-type: Simple -extra-source-files: - README.md - ChangeLog.md - -source-repository head - type: git - location: https://github.com/githubuser/asdf - -executable asdf-exe - main-is: Main.hs - other-modules: - Asdf - Paths_asdf - hs-source-dirs: - app - ghc-options: -threaded -rtsopts -with-rtsopts=-N - build-depends: - base >=4.7 && <5 - default-language: Haskell2010 diff --git a/test/testdata/addPackageTest/hpack-lib/app/Asdf.hs b/test/testdata/addPackageTest/hpack-lib/app/Asdf.hs deleted file mode 100644 index ec4b229117..0000000000 --- a/test/testdata/addPackageTest/hpack-lib/app/Asdf.hs +++ /dev/null @@ -1,7 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -module Asdf where - -import Codec.Compression.GZip - -main = return $ compress "hello" \ No newline at end of file diff --git a/test/testdata/addPackageTest/invalid/AddPackage.hs b/test/testdata/addPackageTest/invalid/AddPackage.hs deleted file mode 100644 index 963020508b..0000000000 --- a/test/testdata/addPackageTest/invalid/AddPackage.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Data.Text -foo = pack "I'm a Text" \ No newline at end of file diff --git a/test/testdata/addPragmas/test.cabal b/test/testdata/addPragmas/test.cabal deleted file mode 100644 index 68ab327aec..0000000000 --- a/test/testdata/addPragmas/test.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: test -version: 0.1.0.0 --- synopsis: --- description: -license: BSD3 -author: Author name here -maintainer: example@example.com -copyright: 2017 Author name here -category: Web -build-type: Simple -cabal-version: >=1.10 - -executable p - main-is: NeedsPragmas.hs - hs-source-dirs: . 
- build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - ghc-options: -Wall \ No newline at end of file diff --git a/test/testdata/badProjects/cabal/Foo.hs b/test/testdata/badProjects/cabal/Foo.hs deleted file mode 100644 index d2c06e960d..0000000000 --- a/test/testdata/badProjects/cabal/Foo.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Foo where - -foo :: Int -foo = 3 diff --git a/test/testdata/badProjects/cabal/bad-cabal.cabal b/test/testdata/badProjects/cabal/bad-cabal.cabal deleted file mode 100644 index 28414e8314..0000000000 --- a/test/testdata/badProjects/cabal/bad-cabal.cabal +++ /dev/null @@ -1,16 +0,0 @@ -name: bad-cabal -version: 0.1.0.0 -license: BSD3 -author: Alan Zimmerman -maintainer: alan.zimm@gmail.com -build-type: Simple -extra-source-files: ChangeLog.md -cabal-version: >=1.10 - -library - exposed-modules: Foo - build-depends: base >=4.7 && <5 - -- missing dependency - , does-not-exist - -- hs-source-dirs: - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/implicit-exe/Setup.hs b/test/testdata/cabal-helper/implicit-exe/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/implicit-exe/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/implicit-exe/cabal.project b/test/testdata/cabal-helper/implicit-exe/cabal.project deleted file mode 100644 index bfe6289656..0000000000 --- a/test/testdata/cabal-helper/implicit-exe/cabal.project +++ /dev/null @@ -1 +0,0 @@ -packages: ./ \ No newline at end of file diff --git a/test/testdata/cabal-helper/implicit-exe/implicit-exe.cabal b/test/testdata/cabal-helper/implicit-exe/implicit-exe.cabal deleted file mode 100644 index 3aca1b42fa..0000000000 --- a/test/testdata/cabal-helper/implicit-exe/implicit-exe.cabal +++ /dev/null @@ -1,17 +0,0 @@ -cabal-version: >=1.10 -name: implicit-exe -version: 0.1.0.0 -license-file: LICENSE -build-type: Simple - -library - exposed-modules: Lib - hs-source-dirs: src - build-depends: base - default-language: Haskell2010 - - -executable implicit-exe - main-is: src/Exe.hs - build-depends: base, implicit-exe - default-language: Haskell2010 \ No newline at end of file diff --git a/test/testdata/cabal-helper/implicit-exe/src/Exe.hs b/test/testdata/cabal-helper/implicit-exe/src/Exe.hs deleted file mode 100644 index ed41929e78..0000000000 --- a/test/testdata/cabal-helper/implicit-exe/src/Exe.hs +++ /dev/null @@ -1,4 +0,0 @@ - -import Lib (someFunc) - -main = someFunc \ No newline at end of file diff --git a/test/testdata/cabal-helper/implicit-exe/src/Lib.hs b/test/testdata/cabal-helper/implicit-exe/src/Lib.hs deleted file mode 100644 index f51af83e20..0000000000 --- a/test/testdata/cabal-helper/implicit-exe/src/Lib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Lib (someFunc) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/testdata/cabal-helper/mono-repo/A/A.cabal b/test/testdata/cabal-helper/mono-repo/A/A.cabal deleted file mode 100644 index e70b43fc1d..0000000000 --- a/test/testdata/cabal-helper/mono-repo/A/A.cabal +++ /dev/null @@ -1,15 +0,0 @@ -cabal-version: >=1.10 -name: A -version: 0.1.0.0 -build-type: Simple - -library - exposed-modules: MyLib - build-depends: base - default-language: Haskell2010 - -executable A - main-is: Main.hs - other-modules: MyLib - build-depends: base, A - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/mono-repo/A/Main.hs b/test/testdata/cabal-helper/mono-repo/A/Main.hs deleted file mode 100644 
index 60d904e8c1..0000000000 --- a/test/testdata/cabal-helper/mono-repo/A/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Main where - -import qualified MyLib (someFunc) - -main :: IO () -main = do - putStrLn "Hello, Haskell!" - MyLib.someFunc diff --git a/test/testdata/cabal-helper/mono-repo/A/Setup.hs b/test/testdata/cabal-helper/mono-repo/A/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/mono-repo/A/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/mono-repo/B/B.cabal b/test/testdata/cabal-helper/mono-repo/B/B.cabal deleted file mode 100644 index 4093e1d0f6..0000000000 --- a/test/testdata/cabal-helper/mono-repo/B/B.cabal +++ /dev/null @@ -1,15 +0,0 @@ -cabal-version: >=1.10 -name: B -version: 0.1.0.0 -build-type: Simple - -library - exposed-modules: MyLib - build-depends: base - default-language: Haskell2010 - -executable B - main-is: Main.hs - other-modules: MyLib - build-depends: base, B - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/mono-repo/B/Main.hs b/test/testdata/cabal-helper/mono-repo/B/Main.hs deleted file mode 100644 index 60d904e8c1..0000000000 --- a/test/testdata/cabal-helper/mono-repo/B/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Main where - -import qualified MyLib (someFunc) - -main :: IO () -main = do - putStrLn "Hello, Haskell!" - MyLib.someFunc diff --git a/test/testdata/cabal-helper/mono-repo/B/Setup.hs b/test/testdata/cabal-helper/mono-repo/B/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/mono-repo/B/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/mono-repo/C/C.cabal b/test/testdata/cabal-helper/mono-repo/C/C.cabal deleted file mode 100644 index db5e380f49..0000000000 --- a/test/testdata/cabal-helper/mono-repo/C/C.cabal +++ /dev/null @@ -1,9 +0,0 @@ -cabal-version: >=1.10 -name: C -version: 0.1.0.0 -build-type: Simple - -library - exposed-modules: MyLib - build-depends: base - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/mono-repo/C/MyLib.hs b/test/testdata/cabal-helper/mono-repo/C/MyLib.hs deleted file mode 100644 index e657c4403f..0000000000 --- a/test/testdata/cabal-helper/mono-repo/C/MyLib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module MyLib (someFunc) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/testdata/cabal-helper/mono-repo/C/Setup.hs b/test/testdata/cabal-helper/mono-repo/C/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/mono-repo/C/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/mono-repo/cabal.project b/test/testdata/cabal-helper/mono-repo/cabal.project deleted file mode 100644 index cf2eab3e10..0000000000 --- a/test/testdata/cabal-helper/mono-repo/cabal.project +++ /dev/null @@ -1,4 +0,0 @@ -packages: - ./A/ - ./B/ - ./C/ \ No newline at end of file diff --git a/test/testdata/cabal-helper/multi-source-dirs/Setup.hs b/test/testdata/cabal-helper/multi-source-dirs/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/multi-source-dirs/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/multi-source-dirs/multi-source-dirs.cabal 
b/test/testdata/cabal-helper/multi-source-dirs/multi-source-dirs.cabal deleted file mode 100644 index 58568683dd..0000000000 --- a/test/testdata/cabal-helper/multi-source-dirs/multi-source-dirs.cabal +++ /dev/null @@ -1,11 +0,0 @@ -cabal-version: >=1.10 -name: multi-source-dirs -version: 0.1.0.0 -license-file: LICENSE -build-type: Simple - -library - exposed-modules: Lib, BetterLib - hs-source-dirs: src, src/input - build-depends: base - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/multi-source-dirs/src/BetterLib.hs b/test/testdata/cabal-helper/multi-source-dirs/src/BetterLib.hs deleted file mode 100644 index 0784c76d48..0000000000 --- a/test/testdata/cabal-helper/multi-source-dirs/src/BetterLib.hs +++ /dev/null @@ -1,5 +0,0 @@ -module BetterLib where - - -foo = 3 -bar = "String" \ No newline at end of file diff --git a/test/testdata/cabal-helper/multi-source-dirs/src/input/Lib.hs b/test/testdata/cabal-helper/multi-source-dirs/src/input/Lib.hs deleted file mode 100644 index 6c37234910..0000000000 --- a/test/testdata/cabal-helper/multi-source-dirs/src/input/Lib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module Lib where - -foobar = 15 - -fizbuzz :: Int -> String -fizbuzz n = "Fizz" \ No newline at end of file diff --git a/test/testdata/cabal-helper/simple-cabal/MyLib.hs b/test/testdata/cabal-helper/simple-cabal/MyLib.hs deleted file mode 100644 index e657c4403f..0000000000 --- a/test/testdata/cabal-helper/simple-cabal/MyLib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module MyLib (someFunc) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/testdata/cabal-helper/simple-cabal/Setup.hs b/test/testdata/cabal-helper/simple-cabal/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/simple-cabal/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/simple-cabal/simple-cabal-test.cabal b/test/testdata/cabal-helper/simple-cabal/simple-cabal-test.cabal deleted file mode 100644 index 3c8be5d868..0000000000 --- a/test/testdata/cabal-helper/simple-cabal/simple-cabal-test.cabal +++ /dev/null @@ -1,10 +0,0 @@ -cabal-version: >=1.10 -name: simple-cabal-test -version: 0.1.0.0 -license-file: LICENSE -build-type: Simple - -library - exposed-modules: MyLib - build-depends: base - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/simple-stack/MyLib.hs b/test/testdata/cabal-helper/simple-stack/MyLib.hs deleted file mode 100644 index e657c4403f..0000000000 --- a/test/testdata/cabal-helper/simple-stack/MyLib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module MyLib (someFunc) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/testdata/cabal-helper/simple-stack/Setup.hs b/test/testdata/cabal-helper/simple-stack/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/simple-stack/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/simple-stack/simple-stack-test.cabal b/test/testdata/cabal-helper/simple-stack/simple-stack-test.cabal deleted file mode 100644 index 264baebfd1..0000000000 --- a/test/testdata/cabal-helper/simple-stack/simple-stack-test.cabal +++ /dev/null @@ -1,10 +0,0 @@ -cabal-version: >=1.10 -name: simple-stack-test -version: 0.1.0.0 -license-file: LICENSE -build-type: Simple - -library - exposed-modules: MyLib - build-depends: base - default-language: Haskell2010 diff --git 
a/test/testdata/cabal-helper/sub-package/Setup.hs b/test/testdata/cabal-helper/sub-package/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/sub-package/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/sub-package/app/Main.hs b/test/testdata/cabal-helper/sub-package/app/Main.hs deleted file mode 100644 index 60d904e8c1..0000000000 --- a/test/testdata/cabal-helper/sub-package/app/Main.hs +++ /dev/null @@ -1,8 +0,0 @@ -module Main where - -import qualified MyLib (someFunc) - -main :: IO () -main = do - putStrLn "Hello, Haskell!" - MyLib.someFunc diff --git a/test/testdata/cabal-helper/sub-package/plugins-api/PluginLib.hs b/test/testdata/cabal-helper/sub-package/plugins-api/PluginLib.hs deleted file mode 100644 index 55a7098c23..0000000000 --- a/test/testdata/cabal-helper/sub-package/plugins-api/PluginLib.hs +++ /dev/null @@ -1,4 +0,0 @@ -module PluginLib (someFunc) where - -someFunc :: IO () -someFunc = putStrLn "someFunc" diff --git a/test/testdata/cabal-helper/sub-package/plugins-api/Setup.hs b/test/testdata/cabal-helper/sub-package/plugins-api/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/cabal-helper/sub-package/plugins-api/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/cabal-helper/sub-package/plugins-api/plugins-api.cabal b/test/testdata/cabal-helper/sub-package/plugins-api/plugins-api.cabal deleted file mode 100644 index 223fa73b95..0000000000 --- a/test/testdata/cabal-helper/sub-package/plugins-api/plugins-api.cabal +++ /dev/null @@ -1,10 +0,0 @@ -cabal-version: >=1.10 -name: plugins-api -version: 0.1.0.0 -license-file: LICENSE -build-type: Simple - -library - exposed-modules: PluginLib - build-depends: base - default-language: Haskell2010 diff --git a/test/testdata/cabal-helper/sub-package/src/MyLib.hs b/test/testdata/cabal-helper/sub-package/src/MyLib.hs deleted file mode 100644 index 53ea5c6332..0000000000 --- a/test/testdata/cabal-helper/sub-package/src/MyLib.hs +++ /dev/null @@ -1,6 +0,0 @@ -module MyLib (someFunc) where - -import qualified PluginLib as L - -someFunc :: IO () -someFunc = L.someFunc diff --git a/test/testdata/cabal-helper/sub-package/sub-package.cabal b/test/testdata/cabal-helper/sub-package/sub-package.cabal deleted file mode 100644 index ba36f1b4d1..0000000000 --- a/test/testdata/cabal-helper/sub-package/sub-package.cabal +++ /dev/null @@ -1,17 +0,0 @@ -cabal-version: >=1.10 -name: sub-package -version: 0.1.0.0 -license-file: LICENSE -build-type: Simple - -library - exposed-modules: MyLib - build-depends: base, plugins-api - hs-source-dirs: src - default-language: Haskell2010 - -executable sub-package - main-is: Main.hs - build-depends: base, sub-package - hs-source-dirs: app - default-language: Haskell2010 diff --git a/test/testdata/completion/Completion.hs b/test/testdata/completion/Completion.hs index d6480903b6..9427f3dc03 100644 --- a/test/testdata/completion/Completion.hs +++ b/test/testdata/completion/Completion.hs @@ -6,4 +6,4 @@ main :: IO () main = putStrLn "hello" foo :: Either a b -> Either a b -foo = id \ No newline at end of file +foo = id diff --git a/test/testdata/completion/Context.hs b/test/testdata/completion/Context.hs deleted file mode 100644 index 45c5befb10..0000000000 --- a/test/testdata/completion/Context.hs +++ /dev/null @@ -1,4 +0,0 @@ -module Context where -import Control.Concurrent as Conc -foo :: Int -> Int 
-foo x = abs 42 \ No newline at end of file diff --git a/test/testdata/completion/DupRecFields.hs b/test/testdata/completion/DupRecFields.hs deleted file mode 100644 index 8ba3148d3a..0000000000 --- a/test/testdata/completion/DupRecFields.hs +++ /dev/null @@ -1,5 +0,0 @@ -{-# LANGUAGE DuplicateRecordFields #-} -module DupRecFields where - -newtype One = One { accessor :: Int } -newtype Two = Two { accessor :: Int } diff --git a/test/testdata/completion/FieldsSharingSignature.hs b/test/testdata/completion/FieldsSharingSignature.hs new file mode 100644 index 0000000000..f5523a2788 --- /dev/null +++ b/test/testdata/completion/FieldsSharingSignature.hs @@ -0,0 +1 @@ +data Foo = MkFoo { arg1, arg2, arg3 :: Int, arg4 :: Int, arg5 :: Double } diff --git a/test/testdata/completion/completions.cabal b/test/testdata/completion/completions.cabal deleted file mode 100644 index d2c23bd86e..0000000000 --- a/test/testdata/completion/completions.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: completions -version: 0.1.0.0 -cabal-version: >= 2.0 -build-type: Simple - -executable compl-exe - other-modules: DupRecFields, Context - main-is: Completion.hs - default-language: Haskell2010 - build-depends: base diff --git a/test/testdata/completion/hie.yaml b/test/testdata/completion/hie.yaml new file mode 100644 index 0000000000..8f2eee1478 --- /dev/null +++ b/test/testdata/completion/hie.yaml @@ -0,0 +1,5 @@ +cradle: + direct: + arguments: + - "Completion" + - "FieldsSharingSignature" diff --git a/test/testdata/context/ExampleContext.hs b/test/testdata/context/ExampleContext.hs deleted file mode 100644 index 324d055282..0000000000 --- a/test/testdata/context/ExampleContext.hs +++ /dev/null @@ -1,20 +0,0 @@ -module ExampleContext (foo) where - -import Data.List (find) -import Control.Monad hiding (fix) - -foo :: Int -> Int -foo xs = bar xs + 1 - where - bar :: Int -> Int - bar x = x + 2 - -data Foo a = Foo a - deriving (Show) - -class Bar a where - bar :: a -> Integer - -instance Integral a => Bar (Foo a) where - bar (Foo a) = toInteger a - diff --git a/test/testdata/context/Foo/Bar.hs b/test/testdata/context/Foo/Bar.hs deleted file mode 100644 index 0d6044ee85..0000000000 --- a/test/testdata/context/Foo/Bar.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo.Bar where - - diff --git a/test/testdata/definition/Bar.hs b/test/testdata/definition/Bar.hs deleted file mode 100644 index 02a244cd4d..0000000000 --- a/test/testdata/definition/Bar.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Bar where - -a = 42 diff --git a/test/testdata/definition/Foo.hs b/test/testdata/definition/Foo.hs deleted file mode 100644 index 6dfb3ba2e6..0000000000 --- a/test/testdata/definition/Foo.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo (module Bar) where - -import Bar diff --git a/test/testdata/definition/definitions.cabal b/test/testdata/definition/definitions.cabal deleted file mode 100644 index 3ddc941472..0000000000 --- a/test/testdata/definition/definitions.cabal +++ /dev/null @@ -1,10 +0,0 @@ -name: definitions -version: 0.1.0.0 -cabal-version: >= 2.0 -build-type: Simple - -library - exposed-modules: Foo - other-modules: Bar - default-language: Haskell2010 - build-depends: base diff --git a/test/testdata/diagnostics/Foo.hs b/test/testdata/diagnostics/Foo.hs new file mode 100644 index 0000000000..d83992f387 --- /dev/null +++ b/test/testdata/diagnostics/Foo.hs @@ -0,0 +1,2 @@ +main = undefined +foo x = id x diff --git a/test/testdata/diagnostics/hie.yaml b/test/testdata/diagnostics/hie.yaml new file mode 100644 index 0000000000..dd3a73237e --- 
/dev/null +++ b/test/testdata/diagnostics/hie.yaml @@ -0,0 +1,5 @@ +cradle: + direct: + arguments: + - Foo + - -Wmissing-signatures diff --git a/test/testdata/eval/T4.hs b/test/testdata/eval/T4.hs deleted file mode 100644 index 72c88ed1d4..0000000000 --- a/test/testdata/eval/T4.hs +++ /dev/null @@ -1,8 +0,0 @@ -module T4 where - -import Data.List (unwords) - --- >>> let evaluation = " evaluation" --- >>> unwords example ++ evaluation -example :: [String] -example = ["This","is","an","example","of"] diff --git a/test/testdata/eval/T4.hs.expected b/test/testdata/eval/T4.hs.expected deleted file mode 100644 index 4b56dbf392..0000000000 --- a/test/testdata/eval/T4.hs.expected +++ /dev/null @@ -1,9 +0,0 @@ -module T4 where - -import Data.List (unwords) - --- >>> let evaluation = " evaluation" --- >>> unwords example ++ evaluation --- "This is an example of evaluation" -example :: [String] -example = ["This","is","an","example","of"] diff --git a/test/testdata/eval/hie.yaml b/test/testdata/eval/hie.yaml deleted file mode 100644 index a2e9ed5148..0000000000 --- a/test/testdata/eval/hie.yaml +++ /dev/null @@ -1 +0,0 @@ -cradle: {direct: {arguments: ["T1", "T2", "T3", "T4"]}} diff --git a/test/testdata/eval/test.cabal b/test/testdata/eval/test.cabal deleted file mode 100644 index fbc943a651..0000000000 --- a/test/testdata/eval/test.cabal +++ /dev/null @@ -1,17 +0,0 @@ -name: test -version: 0.1.0.0 --- synopsis: --- description: -license: BSD3 -author: Author name here -maintainer: example@example.com -copyright: 2017 Author name here -category: Web -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: T1, T2, T3, T4 - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - ghc-options: -Wall -fwarn-unused-imports diff --git a/test/testdata/format/Format.floskell.formatted.hs b/test/testdata/format/Format.floskell.formatted.hs new file mode 100644 index 0000000000..99879567c0 --- /dev/null +++ b/test/testdata/format/Format.floskell.formatted.hs @@ -0,0 +1,16 @@ +module Format where + +import Data.Int +import Data.List +import Prelude + +foo :: Int -> Int +foo 3 = 2 +foo x = x + +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } diff --git a/test/testdata/Format.formatted_document.hs b/test/testdata/format/Format.formatted_document.hs similarity index 78% rename from test/testdata/Format.formatted_document.hs rename to test/testdata/format/Format.formatted_document.hs index ec1ce57379..ac43b2d285 100644 --- a/test/testdata/Format.formatted_document.hs +++ b/test/testdata/format/Format.formatted_document.hs @@ -1,12 +1,16 @@ module Format where +import Data.Int +import Data.List +import Prelude + foo :: Int -> Int foo 3 = 2 foo x = x + bar :: String -> IO String bar s = do x <- return "hello" return "asdf" data Baz = Baz {a :: Int, b :: String} - diff --git a/test/testdata/Format.formatted_document_with_tabsize.hs b/test/testdata/format/Format.formatted_document_with_tabsize.hs similarity index 78% rename from test/testdata/Format.formatted_document_with_tabsize.hs rename to test/testdata/format/Format.formatted_document_with_tabsize.hs index ec1ce57379..ac43b2d285 100644 --- a/test/testdata/Format.formatted_document_with_tabsize.hs +++ b/test/testdata/format/Format.formatted_document_with_tabsize.hs @@ -1,12 +1,16 @@ module Format where +import Data.Int +import Data.List +import Prelude + foo :: Int -> Int foo 3 = 2 foo x = x + bar :: String -> IO String bar s = do x <- return "hello" return "asdf" 
data Baz = Baz {a :: Int, b :: String} - diff --git a/test/testdata/format/Format.formatted_range.hs b/test/testdata/format/Format.formatted_range.hs new file mode 100644 index 0000000000..920a07916e --- /dev/null +++ b/test/testdata/format/Format.formatted_range.hs @@ -0,0 +1,15 @@ +module Format where +import Data.List + +import Prelude +import Data.Int +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/test/testdata/format/Format.formatted_range_with_tabsize.hs b/test/testdata/format/Format.formatted_range_with_tabsize.hs new file mode 100644 index 0000000000..33a942e43d --- /dev/null +++ b/test/testdata/format/Format.formatted_range_with_tabsize.hs @@ -0,0 +1,15 @@ +module Format where +import Data.List + +import Prelude +import Data.Int +foo :: Int -> Int +foo 3 = 2 +foo x = x +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz { a :: Int, b :: String } + diff --git a/test/testdata/Format.hs b/test/testdata/format/Format.hs similarity index 80% rename from test/testdata/Format.hs rename to test/testdata/format/Format.hs index d4682acaa2..b8bb374e2e 100644 --- a/test/testdata/Format.hs +++ b/test/testdata/format/Format.hs @@ -1,4 +1,8 @@ module Format where +import Data.List + +import Prelude +import Data.Int foo :: Int -> Int foo 3 = 2 foo x = x diff --git a/test/testdata/Format.ormolu.formatted.hs b/test/testdata/format/Format.ormolu.formatted.hs similarity index 78% rename from test/testdata/Format.ormolu.formatted.hs rename to test/testdata/format/Format.ormolu.formatted.hs index ec1ce57379..ac43b2d285 100644 --- a/test/testdata/Format.ormolu.formatted.hs +++ b/test/testdata/format/Format.ormolu.formatted.hs @@ -1,12 +1,16 @@ module Format where +import Data.Int +import Data.List +import Prelude + foo :: Int -> Int foo 3 = 2 foo x = x + bar :: String -> IO String bar s = do x <- return "hello" return "asdf" data Baz = Baz {a :: Int, b :: String} - diff --git a/test/testdata/format/Format.ormolu_post_floskell.formatted.hs b/test/testdata/format/Format.ormolu_post_floskell.formatted.hs new file mode 100644 index 0000000000..ac43b2d285 --- /dev/null +++ b/test/testdata/format/Format.ormolu_post_floskell.formatted.hs @@ -0,0 +1,16 @@ +module Format where + +import Data.Int +import Data.List +import Prelude + +foo :: Int -> Int +foo 3 = 2 +foo x = x + +bar :: String -> IO String +bar s = do + x <- return "hello" + return "asdf" + +data Baz = Baz {a :: Int, b :: String} diff --git a/test/testdata/format/hie.yaml b/test/testdata/format/hie.yaml new file mode 100644 index 0000000000..24eab13a43 --- /dev/null +++ b/test/testdata/format/hie.yaml @@ -0,0 +1,4 @@ +cradle: + direct: + arguments: + - Format diff --git a/test/testdata/gototest/Setup.hs b/test/testdata/gototest/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/gototest/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/gototest/app/Main.hs b/test/testdata/gototest/app/Main.hs deleted file mode 100644 index 2c951ca59d..0000000000 --- a/test/testdata/gototest/app/Main.hs +++ /dev/null @@ -1,7 +0,0 @@ -module Main where - -import Lib -import Lib2 - -main :: IO () -main = someFunc >> g diff --git a/test/testdata/gototest/cabal.project b/test/testdata/gototest/cabal.project deleted file mode 100644 index 258ca2fe22..0000000000 --- a/test/testdata/gototest/cabal.project +++ 
/dev/null @@ -1,3 +0,0 @@ -packages: . - -write-ghc-environment-files: never diff --git a/test/testdata/gototest/gototest.cabal b/test/testdata/gototest/gototest.cabal deleted file mode 100644 index 5cac1ffefd..0000000000 --- a/test/testdata/gototest/gototest.cabal +++ /dev/null @@ -1,24 +0,0 @@ -name: gototest -version: 0.1.0.0 --- synopsis: --- description: -license: BSD3 -author: Author name here -maintainer: example@example.com -copyright: 2017 Author name here -category: Web -build-type: Simple -cabal-version: >=1.10 - -executable gototest-exec - hs-source-dirs: app - main-is: Main.hs - other-modules: - build-depends: base >= 4.7 && < 5, gototest - default-language: Haskell2010 - -library - hs-source-dirs: src - exposed-modules: Lib, Lib2 - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 diff --git a/test/testdata/gototest/src/Lib.hs b/test/testdata/gototest/src/Lib.hs deleted file mode 100644 index 2603a7474c..0000000000 --- a/test/testdata/gototest/src/Lib.hs +++ /dev/null @@ -1,40 +0,0 @@ -module Lib - - where - -someFunc :: IO () -someFunc = putStrLn "someFunc" - -data DataType = DataType Int - -dataTypeId :: DataType -> DataType -dataTypeId dataType = dataType - -newtype NewType = NewType Int - -newTypeId :: NewType -> NewType -newTypeId newType = newType - -data Enu = First | Second - -enuId :: Enu -> Enu -enuId enu = enu - -toNum :: Enu -> Int -toNum First = 1 -toNum Second = 2 - -type MyInt = Int - -myIntId :: MyInt -> MyInt -myIntId myInt = myInt - -type TypEnu = Enu - -typEnuId :: TypEnu -> TypEnu -typEnuId enu = enu - -data Parameter a = Parameter a - -parameterId :: Parameter a -> Parameter a -parameterId pid = pid \ No newline at end of file diff --git a/test/testdata/gototest/src/Lib2.hs b/test/testdata/gototest/src/Lib2.hs deleted file mode 100644 index c0ef7d46b0..0000000000 --- a/test/testdata/gototest/src/Lib2.hs +++ /dev/null @@ -1,13 +0,0 @@ -module Lib2 where - -import Lib - -g = do - someFunc - print x - where z = 1+2 - y = z+z - x = y*z - -otherId :: DataType -> DataType -otherId dataType = dataType \ No newline at end of file diff --git a/test/testdata/hieBiosError/Foo.hs b/test/testdata/hieBiosError/Foo.hs deleted file mode 100644 index e495355ec9..0000000000 --- a/test/testdata/hieBiosError/Foo.hs +++ /dev/null @@ -1 +0,0 @@ -main = putStrLn "hey" diff --git a/test/testdata/hieBiosMainIs/Main.hs b/test/testdata/hieBiosMainIs/Main.hs index 65ae4a05d5..c3c579788f 100644 --- a/test/testdata/hieBiosMainIs/Main.hs +++ b/test/testdata/hieBiosMainIs/Main.hs @@ -1,4 +1,6 @@ -module Main where +module Main (main) where main :: IO () -main = putStrLn "Hello, Haskell!" +main = print foo + +foo = 5 :: Int diff --git a/test/testdata/hieBiosMainIs/cabal.project b/test/testdata/hieBiosMainIs/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/test/testdata/hieBiosMainIs/cabal.project @@ -0,0 +1 @@ +packages: . 
diff --git a/test/testdata/hieBiosMainIs/hie.yaml b/test/testdata/hieBiosMainIs/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/test/testdata/hieBiosMainIs/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/test/testdata/hieBiosMainIs/hieBiosMainIs.cabal b/test/testdata/hieBiosMainIs/hieBiosMainIs.cabal index d7efa971e0..87c5dc421a 100644 --- a/test/testdata/hieBiosMainIs/hieBiosMainIs.cabal +++ b/test/testdata/hieBiosMainIs/hieBiosMainIs.cabal @@ -4,5 +4,6 @@ version: 0.1.0.0 build-type: Simple executable hieBiosMainIs main-is: Main.hs - build-depends: base >=4.12 && <4.13 + build-depends: base default-language: Haskell2010 + ghc-options: -Wall diff --git a/test/testdata/liquid/Evens.hs b/test/testdata/liquid/Evens.hs deleted file mode 100644 index 38ac14b2be..0000000000 --- a/test/testdata/liquid/Evens.hs +++ /dev/null @@ -1,41 +0,0 @@ -module Main where - -{-@ type Even = {v:Int | v mod 2 = 0} @-} - -{-@ weAreEven :: [Even] @-} -weAreEven = [(0-10), (0-4), 0, 2, 666] - -{-@ notEven :: Even @-} -notEven = 7 - -{-@ isEven :: n:Nat -> {v:Bool | (v <=> (n mod 2 == 0))} @-} -isEven :: Int -> Bool -isEven 0 = True -isEven 1 = False -isEven n = not (isEven (n-1)) - -{-@ evens :: n:Nat -> [Even] @-} -evens n = [i | i <- range 0 n, isEven i] - -{-@ range :: lo:Int -> hi:Int -> [{v:Int | (lo <= v && v < hi)}] / [hi -lo] @-} -range lo hi - | lo < hi = lo : range (lo+1) hi - | otherwise = [] - -{-@ shift :: [Even] -> Even -> [Even] @-} -shift xs k = [x + k | x <- xs] - -{-@ double :: [Nat] -> [Even] @-} -double xs = [x + x | x <- xs] - - - ---- - -notEven :: Int -weAreEven :: [Int] -shift :: [Int] -> Int -> [Int] -double :: [Int] -> [Int] -range :: Int -> Int -> [Int] - -main = putStrLn "hello" diff --git a/test/testdata/missingModuleTest/missingModule/cabal.project b/test/testdata/missingModuleTest/missingModule/cabal.project new file mode 100644 index 0000000000..6f920794c8 --- /dev/null +++ b/test/testdata/missingModuleTest/missingModule/cabal.project @@ -0,0 +1 @@ +packages: ./ diff --git a/test/testdata/missingModuleTest/missingModule/hie.yaml b/test/testdata/missingModuleTest/missingModule/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/test/testdata/missingModuleTest/missingModule/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/test/testdata/missingModuleTest/missingModule/missingModule.cabal b/test/testdata/missingModuleTest/missingModule/missingModule.cabal new file mode 100644 index 0000000000..1f3e0a1d8c --- /dev/null +++ b/test/testdata/missingModuleTest/missingModule/missingModule.cabal @@ -0,0 +1,10 @@ +cabal-version: 3.4 +name: missingModule +version: 0.1.0.0 +build-type: Simple + +library + hs-source-dirs: ./src/ + exposed-modules: + build-depends: base + default-language: Haskell2010 diff --git a/test/testdata/missingModuleTest/missingModule/src/MyLib.hs b/test/testdata/missingModuleTest/missingModule/src/MyLib.hs new file mode 100644 index 0000000000..3ced4fb33c --- /dev/null +++ b/test/testdata/missingModuleTest/missingModule/src/MyLib.hs @@ -0,0 +1,5 @@ +module MyLib where + +someFunc :: IO () +someFunc = do + putStrLn "someFunc" diff --git a/test/testdata/missingModuleTest/noPrefixMatch/app/Main.hs b/test/testdata/missingModuleTest/noPrefixMatch/app/Main.hs new file mode 100644 index 0000000000..da579930ec --- /dev/null +++ b/test/testdata/missingModuleTest/noPrefixMatch/app/Main.hs @@ -0,0 +1,4 @@ + +main :: IO () +main = do + putStrLn "someFunc" diff --git 
a/test/testdata/missingModuleTest/noPrefixMatch/app/Other.hs b/test/testdata/missingModuleTest/noPrefixMatch/app/Other.hs new file mode 100644 index 0000000000..159221bd25 --- /dev/null +++ b/test/testdata/missingModuleTest/noPrefixMatch/app/Other.hs @@ -0,0 +1 @@ +module Other where diff --git a/test/testdata/missingModuleTest/noPrefixMatch/cabal.project b/test/testdata/missingModuleTest/noPrefixMatch/cabal.project new file mode 100644 index 0000000000..6f920794c8 --- /dev/null +++ b/test/testdata/missingModuleTest/noPrefixMatch/cabal.project @@ -0,0 +1 @@ +packages: ./ diff --git a/test/testdata/missingModuleTest/noPrefixMatch/hie.yaml b/test/testdata/missingModuleTest/noPrefixMatch/hie.yaml new file mode 100644 index 0000000000..c9100beb9a --- /dev/null +++ b/test/testdata/missingModuleTest/noPrefixMatch/hie.yaml @@ -0,0 +1,4 @@ +cradle: + cabal: + - path: ./app/Main.hs + component: exe:testExe diff --git a/test/testdata/missingModuleTest/noPrefixMatch/noPrefixMatch.cabal b/test/testdata/missingModuleTest/noPrefixMatch/noPrefixMatch.cabal new file mode 100644 index 0000000000..491144c41d --- /dev/null +++ b/test/testdata/missingModuleTest/noPrefixMatch/noPrefixMatch.cabal @@ -0,0 +1,9 @@ +cabal-version: 3.4 +name: noPrefixMatch +version: 0.1.0.0 +build-type: Simple + +executable testExe + main-is: Main.hs + hs-source-dirs: app + build-depends: base diff --git a/test/testdata/redundantImportTest/src/CodeActionRedundant.hs b/test/testdata/redundantImportTest/src/CodeActionRedundant.hs deleted file mode 100644 index 870fc5b16a..0000000000 --- a/test/testdata/redundantImportTest/src/CodeActionRedundant.hs +++ /dev/null @@ -1,4 +0,0 @@ -module CodeActionRedundant where -import Data.List -main :: IO () -main = putStrLn "hello" \ No newline at end of file diff --git a/test/testdata/redundantImportTest/src/MultipleImports.hs b/test/testdata/redundantImportTest/src/MultipleImports.hs deleted file mode 100644 index 4bc5508b61..0000000000 --- a/test/testdata/redundantImportTest/src/MultipleImports.hs +++ /dev/null @@ -1,5 +0,0 @@ -module MultipleImports where -import Data.Foldable -import Data.Maybe -foo :: Int -foo = fromJust (Just 3) diff --git a/test/testdata/redundantImportTest/test.cabal b/test/testdata/redundantImportTest/test.cabal deleted file mode 100644 index d185920d5b..0000000000 --- a/test/testdata/redundantImportTest/test.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: test -version: 0.1.0.0 --- synopsis: --- description: -license: BSD3 -author: Author name here -maintainer: example@example.com -copyright: 2017 Author name here -category: Web -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: CodeActionRedundant, MultipleImports - hs-source-dirs: src - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - ghc-options: -Wall -fwarn-unused-imports \ No newline at end of file diff --git a/test/testdata/schema/ghc910/default-config.golden.json b/test/testdata/schema/ghc910/default-config.golden.json new file mode 100644 index 0000000000..3b4e687ef9 --- /dev/null +++ b/test/testdata/schema/ghc910/default-config.golden.json @@ -0,0 +1,158 @@ +{ + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { + "alternateNumberFormat": { + "globalOn": true + }, + "cabal": { + "codeActionsOn": true, + "completionOn": true, + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true + }, + "cabal-fmt": { + "config": { + "path": "cabal-fmt" + } + }, + 
"cabal-gild": { + "config": { + "path": "cabal-gild" + } + }, + "cabalHaskellIntegration": { + "globalOn": true + }, + "callHierarchy": { + "globalOn": true + }, + "changeTypeSignature": { + "globalOn": true + }, + "class": { + "codeActionsOn": true, + "codeLensOn": true + }, + "eval": { + "codeActionsOn": true, + "codeLensOn": true, + "config": { + "diff": true, + "exception": false + } + }, + "explicit-fields": { + "codeActionsOn": true, + "inlayHintsOn": true + }, + "explicit-fixity": { + "globalOn": true + }, + "fourmolu": { + "config": { + "external": false, + "path": "fourmolu" + } + }, + "gadt": { + "globalOn": true + }, + "ghcide-code-actions-bindings": { + "globalOn": true + }, + "ghcide-code-actions-fill-holes": { + "globalOn": true + }, + "ghcide-code-actions-imports-exports": { + "globalOn": true + }, + "ghcide-code-actions-type-signatures": { + "globalOn": true + }, + "ghcide-completions": { + "config": { + "autoExtendOn": true, + "snippetsOn": true + }, + "globalOn": true + }, + "ghcide-hover-and-symbols": { + "hoverOn": true, + "symbolsOn": true + }, + "ghcide-type-lenses": { + "config": { + "mode": "always" + }, + "globalOn": true + }, + "hlint": { + "codeActionsOn": true, + "config": { + "flags": [] + }, + "diagnosticsOn": true + }, + "importLens": { + "codeActionsOn": true, + "codeLensOn": true, + "inlayHintsOn": true + }, + "moduleName": { + "globalOn": true + }, + "ormolu": { + "config": { + "external": false + } + }, + "overloaded-record-dot": { + "globalOn": true + }, + "pragmas-completion": { + "globalOn": true + }, + "pragmas-disable": { + "globalOn": true + }, + "pragmas-suggest": { + "globalOn": true + }, + "qualifyImportedNames": { + "globalOn": true + }, + "rename": { + "config": { + "crossModule": false + }, + "globalOn": true + }, + "semanticTokens": { + "config": { + "classMethodToken": "method", + "classToken": "class", + "dataConstructorToken": "enumMember", + "functionToken": "function", + "moduleToken": "namespace", + "operatorToken": "operator", + "patternSynonymToken": "macro", + "recordFieldToken": "property", + "typeConstructorToken": "enum", + "typeFamilyToken": "interface", + "typeSynonymToken": "type", + "typeVariableToken": "typeParameter", + "variableToken": "variable" + }, + "globalOn": false + }, + "stan": { + "globalOn": false + } + }, + "sessionLoading": "singleComponent" +} diff --git a/test/testdata/schema/ghc910/markdown-reference.md b/test/testdata/schema/ghc910/markdown-reference.md new file mode 100644 index 0000000000..668323ce66 --- /dev/null +++ b/test/testdata/schema/ghc910/markdown-reference.md @@ -0,0 +1,66 @@ +## hlint +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `flags` | Flags used by hlint | `TODO: Array values` |   | + +## cabal-fmt +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` |   | + +## ghcide-completions +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` |   | +| `snippetsOn` | Inserts snippets when using code completions | `True` |   | + +## eval +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. 
| `False` |   | +| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` |   | + +## ghcide-type-lenses +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `mode` | Control how type lenses are shown | `Always` |
    • Always
    • Exported
    • Diagnostics
    | + +## ormolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` |   | + +## rename +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `crossModule` | Enable experimental cross-module renaming | `False` |   | + +## semanticTokens +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | + +## fourmolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` |   | +| `path` | Set path to executable (for "external" mode). | `"fourmolu"` |   | + +## cabal-gild +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` |   | + + diff --git a/test/testdata/schema/ghc910/vscode-extension-schema.golden.json b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json new file mode 100644 index 0000000000..4ca08f296c --- /dev/null +++ b/test/testdata/schema/ghc910/vscode-extension-schema.golden.json @@ -0,0 +1,1046 @@ +{ + "haskell.plugin.alternateNumberFormat.globalOn": { + "default": true, + "description": "Enables alternateNumberFormat plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal-fmt.config.path": { + "default": "cabal-fmt", + "markdownDescription": "Set path to 'cabal-fmt' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal-gild.config.path": { + "default": "cabal-gild", + "markdownDescription": "Set path to 'cabal-gild' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal.codeActionsOn": { + "default": true, + "description": "Enables cabal code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.completionOn": { + "default": true, + "description": "Enables cabal completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.diagnosticsOn": { + "default": true, + "description": "Enables cabal diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.callHierarchy.globalOn": { + "default": true, + "description": "Enables callHierarchy plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.changeTypeSignature.globalOn": { + "default": true, + "description": "Enables changeTypeSignature plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeActionsOn": { + "default": true, + "description": "Enables class code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeLensOn": { + "default": true, + "description": "Enables class code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.diff": { + "default": true, + "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.exception": { + "default": false, + "markdownDescription": "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi.", + 
"scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.codeActionsOn": { + "default": true, + "description": "Enables explicit-fields code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.inlayHintsOn": { + "default": true, + "description": "Enables explicit-fields inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fixity.globalOn": { + "default": true, + "description": "Enables explicit-fixity plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"fourmolu\" executable, rather than using the bundled library.", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.path": { + "default": "fourmolu", + "markdownDescription": "Set path to executable (for \"external\" mode).", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.gadt.globalOn": { + "default": true, + "description": "Enables gadt plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-bindings.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-bindings plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-fill-holes.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-fill-holes plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-imports-exports.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-imports-exports plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-type-signatures.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-type-signatures plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.autoExtendOn": { + "default": true, + "markdownDescription": "Extends the import list automatically when completing a out-of-scope identifier", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.snippetsOn": { + "default": true, + "markdownDescription": "Inserts snippets when using code completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.globalOn": { + "default": true, + "description": "Enables ghcide-completions plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.hoverOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.symbolsOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-type-lenses.config.mode": { + "default": "always", + "description": "Control how type lenses are shown", + "enum": [ + "always", + "exported", + "diagnostics" + ], + "enumDescriptions": [ + "Always displays type lenses of global bindings", + "Only display type lenses of exported global bindings", + "Follows error messages produced by GHC about missing signatures" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.ghcide-type-lenses.globalOn": { + "default": true, + "description": "Enables ghcide-type-lenses plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.codeActionsOn": { + 
"default": true, + "description": "Enables hlint code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.config.flags": { + "default": [], + "markdownDescription": "Flags used by hlint", + "scope": "resource", + "type": "array" + }, + "haskell.plugin.hlint.diagnosticsOn": { + "default": true, + "description": "Enables hlint diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeActionsOn": { + "default": true, + "description": "Enables importLens code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeLensOn": { + "default": true, + "description": "Enables importLens code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.moduleName.globalOn": { + "default": true, + "description": "Enables moduleName plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ormolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"ormolu\" executable, rather than using the bundled library", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.overloaded-record-dot.globalOn": { + "default": true, + "description": "Enables overloaded-record-dot plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-completion.globalOn": { + "default": true, + "description": "Enables pragmas-completion plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-disable.globalOn": { + "default": true, + "description": "Enables pragmas-disable plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-suggest.globalOn": { + "default": true, + "description": "Enables pragmas-suggest plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.qualifyImportedNames.globalOn": { + "default": true, + "description": "Enables qualifyImportedNames plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.config.crossModule": { + "default": false, + "markdownDescription": "Enable experimental cross-module renaming", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.globalOn": { + "default": true, + "description": "Enables rename plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.semanticTokens.config.classMethodToken": { + "default": "method", + "description": "LSP semantic token type to use for typeclass methods", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + 
"LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.classToken": { + "default": "class", + "description": "LSP semantic token type to use for typeclasses", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.dataConstructorToken": { + "default": "enumMember", + "description": "LSP semantic token type to use for data constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.functionToken": { + "default": "function", + "description": "LSP semantic token type to use for functions", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + 
"method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.operatorToken": { + "default": "operator", + "description": "LSP semantic token type to use for operators", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token 
Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.patternSynonymToken": { + "default": "macro", + "description": "LSP semantic token type to use for pattern synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.recordFieldToken": { + "default": "property", + "description": "LSP semantic token type to use for record fields", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeConstructorToken": { + "default": "enum", + "description": "LSP semantic token type to use for type constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + 
"event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeFamilyToken": { + "default": "interface", + "description": "LSP semantic token type to use for type families", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeSynonymToken": { + "default": "type", + "description": "LSP semantic token type to use for type synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic 
Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeVariableToken": { + "default": "typeParameter", + "description": "LSP semantic token type to use for type variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.variableToken": { + "default": "variable", + "description": "LSP semantic token type to use for variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.globalOn": { + "default": false, + "description": "Enables semanticTokens plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.stan.globalOn": { + "default": false, + "description": "Enables stan plugin", + "scope": "resource", 
+ "type": "boolean" + } +} diff --git a/test/testdata/schema/ghc912/default-config.golden.json b/test/testdata/schema/ghc912/default-config.golden.json new file mode 100644 index 0000000000..0dfbd39df2 --- /dev/null +++ b/test/testdata/schema/ghc912/default-config.golden.json @@ -0,0 +1,155 @@ +{ + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { + "alternateNumberFormat": { + "globalOn": true + }, + "cabal": { + "codeActionsOn": true, + "completionOn": true, + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true + }, + "cabal-fmt": { + "config": { + "path": "cabal-fmt" + } + }, + "cabal-gild": { + "config": { + "path": "cabal-gild" + } + }, + "cabalHaskellIntegration": { + "globalOn": true + }, + "callHierarchy": { + "globalOn": true + }, + "changeTypeSignature": { + "globalOn": true + }, + "class": { + "codeActionsOn": true, + "codeLensOn": true + }, + "eval": { + "codeActionsOn": true, + "codeLensOn": true, + "config": { + "diff": true, + "exception": false + } + }, + "explicit-fields": { + "codeActionsOn": true, + "inlayHintsOn": true + }, + "explicit-fixity": { + "globalOn": true + }, + "fourmolu": { + "config": { + "external": false, + "path": "fourmolu" + } + }, + "gadt": { + "globalOn": true + }, + "ghcide-code-actions-bindings": { + "globalOn": true + }, + "ghcide-code-actions-fill-holes": { + "globalOn": true + }, + "ghcide-code-actions-imports-exports": { + "globalOn": true + }, + "ghcide-code-actions-type-signatures": { + "globalOn": true + }, + "ghcide-completions": { + "config": { + "autoExtendOn": true, + "snippetsOn": true + }, + "globalOn": true + }, + "ghcide-hover-and-symbols": { + "hoverOn": true, + "symbolsOn": true + }, + "ghcide-type-lenses": { + "config": { + "mode": "always" + }, + "globalOn": true + }, + "hlint": { + "codeActionsOn": true, + "config": { + "flags": [] + }, + "diagnosticsOn": true + }, + "importLens": { + "codeActionsOn": true, + "codeLensOn": true, + "inlayHintsOn": true + }, + "moduleName": { + "globalOn": true + }, + "ormolu": { + "config": { + "external": false + } + }, + "overloaded-record-dot": { + "globalOn": true + }, + "pragmas-completion": { + "globalOn": true + }, + "pragmas-disable": { + "globalOn": true + }, + "pragmas-suggest": { + "globalOn": true + }, + "qualifyImportedNames": { + "globalOn": true + }, + "rename": { + "config": { + "crossModule": false + }, + "globalOn": true + }, + "semanticTokens": { + "config": { + "classMethodToken": "method", + "classToken": "class", + "dataConstructorToken": "enumMember", + "functionToken": "function", + "moduleToken": "namespace", + "operatorToken": "operator", + "patternSynonymToken": "macro", + "recordFieldToken": "property", + "typeConstructorToken": "enum", + "typeFamilyToken": "interface", + "typeSynonymToken": "type", + "typeVariableToken": "typeParameter", + "variableToken": "variable" + }, + "globalOn": false + } + }, + "sessionLoading": "singleComponent" +} diff --git a/test/testdata/schema/ghc912/markdown-reference.md b/test/testdata/schema/ghc912/markdown-reference.md new file mode 100644 index 0000000000..668323ce66 --- /dev/null +++ b/test/testdata/schema/ghc912/markdown-reference.md @@ -0,0 +1,66 @@ +## hlint +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `flags` | Flags used by hlint | `TODO: Array values` |   | + +## cabal-fmt +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| 
`path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` |   | + +## ghcide-completions +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` |   | +| `snippetsOn` | Inserts snippets when using code completions | `True` |   | + +## eval +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` |   | +| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` |   | + +## ghcide-type-lenses +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `mode` | Control how type lenses are shown | `Always` |
    • Always
    • Exported
    • Diagnostics
    | + +## ormolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` |   | + +## rename +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `crossModule` | Enable experimental cross-module renaming | `False` |   | + +## semanticTokens +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | + +## fourmolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` |   | +| `path` | Set path to executable (for "external" mode). | `"fourmolu"` |   | + +## cabal-gild +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` |   | + + diff --git a/test/testdata/schema/ghc912/vscode-extension-schema.golden.json b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json new file mode 100644 index 0000000000..77d398438e --- /dev/null +++ b/test/testdata/schema/ghc912/vscode-extension-schema.golden.json @@ -0,0 +1,1040 @@ +{ + "haskell.plugin.alternateNumberFormat.globalOn": { + "default": true, + "description": "Enables alternateNumberFormat plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal-fmt.config.path": { + "default": "cabal-fmt", + "markdownDescription": "Set path to 'cabal-fmt' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal-gild.config.path": { + "default": "cabal-gild", + "markdownDescription": "Set path to 'cabal-gild' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal.codeActionsOn": { + "default": true, + "description": "Enables cabal code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.completionOn": { + "default": true, + "description": "Enables cabal completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.diagnosticsOn": { + "default": true, + "description": "Enables cabal diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.callHierarchy.globalOn": { + "default": true, + "description": "Enables callHierarchy plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.changeTypeSignature.globalOn": { + "default": true, + "description": "Enables changeTypeSignature plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeActionsOn": { + "default": true, + "description": "Enables class code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeLensOn": { + "default": true, + "description": "Enables class code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.diff": { + "default": true, + "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.exception": { + "default": false, + "markdownDescription": "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi.", + 
"scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.codeActionsOn": { + "default": true, + "description": "Enables explicit-fields code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.inlayHintsOn": { + "default": true, + "description": "Enables explicit-fields inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fixity.globalOn": { + "default": true, + "description": "Enables explicit-fixity plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"fourmolu\" executable, rather than using the bundled library.", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.path": { + "default": "fourmolu", + "markdownDescription": "Set path to executable (for \"external\" mode).", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.gadt.globalOn": { + "default": true, + "description": "Enables gadt plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-bindings.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-bindings plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-fill-holes.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-fill-holes plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-imports-exports.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-imports-exports plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-type-signatures.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-type-signatures plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.autoExtendOn": { + "default": true, + "markdownDescription": "Extends the import list automatically when completing a out-of-scope identifier", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.snippetsOn": { + "default": true, + "markdownDescription": "Inserts snippets when using code completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.globalOn": { + "default": true, + "description": "Enables ghcide-completions plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.hoverOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.symbolsOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-type-lenses.config.mode": { + "default": "always", + "description": "Control how type lenses are shown", + "enum": [ + "always", + "exported", + "diagnostics" + ], + "enumDescriptions": [ + "Always displays type lenses of global bindings", + "Only display type lenses of exported global bindings", + "Follows error messages produced by GHC about missing signatures" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.ghcide-type-lenses.globalOn": { + "default": true, + "description": "Enables ghcide-type-lenses plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.codeActionsOn": { + 
"default": true, + "description": "Enables hlint code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.config.flags": { + "default": [], + "markdownDescription": "Flags used by hlint", + "scope": "resource", + "type": "array" + }, + "haskell.plugin.hlint.diagnosticsOn": { + "default": true, + "description": "Enables hlint diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeActionsOn": { + "default": true, + "description": "Enables importLens code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeLensOn": { + "default": true, + "description": "Enables importLens code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.moduleName.globalOn": { + "default": true, + "description": "Enables moduleName plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ormolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"ormolu\" executable, rather than using the bundled library", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.overloaded-record-dot.globalOn": { + "default": true, + "description": "Enables overloaded-record-dot plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-completion.globalOn": { + "default": true, + "description": "Enables pragmas-completion plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-disable.globalOn": { + "default": true, + "description": "Enables pragmas-disable plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-suggest.globalOn": { + "default": true, + "description": "Enables pragmas-suggest plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.qualifyImportedNames.globalOn": { + "default": true, + "description": "Enables qualifyImportedNames plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.config.crossModule": { + "default": false, + "markdownDescription": "Enable experimental cross-module renaming", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.globalOn": { + "default": true, + "description": "Enables rename plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.semanticTokens.config.classMethodToken": { + "default": "method", + "description": "LSP semantic token type to use for typeclass methods", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + 
"LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.classToken": { + "default": "class", + "description": "LSP semantic token type to use for typeclasses", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.dataConstructorToken": { + "default": "enumMember", + "description": "LSP semantic token type to use for data constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.functionToken": { + "default": "function", + "description": "LSP semantic token type to use for functions", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + 
"method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.operatorToken": { + "default": "operator", + "description": "LSP semantic token type to use for operators", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token 
Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.patternSynonymToken": { + "default": "macro", + "description": "LSP semantic token type to use for pattern synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.recordFieldToken": { + "default": "property", + "description": "LSP semantic token type to use for record fields", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeConstructorToken": { + "default": "enum", + "description": "LSP semantic token type to use for type constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + 
"event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeFamilyToken": { + "default": "interface", + "description": "LSP semantic token type to use for type families", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeSynonymToken": { + "default": "type", + "description": "LSP semantic token type to use for type synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic 
Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeVariableToken": { + "default": "typeParameter", + "description": "LSP semantic token type to use for type variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.variableToken": { + "default": "variable", + "description": "LSP semantic token type to use for variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.globalOn": { + "default": false, + "description": "Enables semanticTokens plugin", + "scope": "resource", + "type": "boolean" + } +} diff --git a/test/testdata/schema/ghc96/default-config.golden.json 
b/test/testdata/schema/ghc96/default-config.golden.json new file mode 100644 index 0000000000..8467b451f1 --- /dev/null +++ b/test/testdata/schema/ghc96/default-config.golden.json @@ -0,0 +1,164 @@ +{ + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { + "alternateNumberFormat": { + "globalOn": true + }, + "cabal": { + "codeActionsOn": true, + "completionOn": true, + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true + }, + "cabal-fmt": { + "config": { + "path": "cabal-fmt" + } + }, + "cabal-gild": { + "config": { + "path": "cabal-gild" + } + }, + "cabalHaskellIntegration": { + "globalOn": true + }, + "callHierarchy": { + "globalOn": true + }, + "changeTypeSignature": { + "globalOn": true + }, + "class": { + "codeActionsOn": true, + "codeLensOn": true + }, + "eval": { + "codeActionsOn": true, + "codeLensOn": true, + "config": { + "diff": true, + "exception": false + } + }, + "explicit-fields": { + "codeActionsOn": true, + "inlayHintsOn": true + }, + "explicit-fixity": { + "globalOn": true + }, + "fourmolu": { + "config": { + "external": false, + "path": "fourmolu" + } + }, + "gadt": { + "globalOn": true + }, + "ghcide-code-actions-bindings": { + "globalOn": true + }, + "ghcide-code-actions-fill-holes": { + "globalOn": true + }, + "ghcide-code-actions-imports-exports": { + "globalOn": true + }, + "ghcide-code-actions-type-signatures": { + "globalOn": true + }, + "ghcide-completions": { + "config": { + "autoExtendOn": true, + "snippetsOn": true + }, + "globalOn": true + }, + "ghcide-hover-and-symbols": { + "hoverOn": true, + "symbolsOn": true + }, + "ghcide-type-lenses": { + "config": { + "mode": "always" + }, + "globalOn": true + }, + "hlint": { + "codeActionsOn": true, + "config": { + "flags": [] + }, + "diagnosticsOn": true + }, + "importLens": { + "codeActionsOn": true, + "codeLensOn": true, + "inlayHintsOn": true + }, + "moduleName": { + "globalOn": true + }, + "ormolu": { + "config": { + "external": false + } + }, + "overloaded-record-dot": { + "globalOn": true + }, + "pragmas-completion": { + "globalOn": true + }, + "pragmas-disable": { + "globalOn": true + }, + "pragmas-suggest": { + "globalOn": true + }, + "qualifyImportedNames": { + "globalOn": true + }, + "rename": { + "config": { + "crossModule": false + }, + "globalOn": true + }, + "retrie": { + "globalOn": true + }, + "semanticTokens": { + "config": { + "classMethodToken": "method", + "classToken": "class", + "dataConstructorToken": "enumMember", + "functionToken": "function", + "moduleToken": "namespace", + "operatorToken": "operator", + "patternSynonymToken": "macro", + "recordFieldToken": "property", + "typeConstructorToken": "enum", + "typeFamilyToken": "interface", + "typeSynonymToken": "type", + "typeVariableToken": "typeParameter", + "variableToken": "variable" + }, + "globalOn": false + }, + "splice": { + "globalOn": true + }, + "stan": { + "globalOn": false + } + }, + "sessionLoading": "singleComponent" +} diff --git a/test/testdata/schema/ghc96/markdown-reference.md b/test/testdata/schema/ghc96/markdown-reference.md new file mode 100644 index 0000000000..668323ce66 --- /dev/null +++ b/test/testdata/schema/ghc96/markdown-reference.md @@ -0,0 +1,66 @@ +## hlint +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `flags` | Flags used by hlint | `TODO: Array values` |   | + +## cabal-fmt +| Property | Description | Default | Allowed values | +| --- | --- | --- 
| --- | +| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` |   | + +## ghcide-completions +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` |   | +| `snippetsOn` | Inserts snippets when using code completions | `True` |   | + +## eval +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` |   | +| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` |   | + +## ghcide-type-lenses +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `mode` | Control how type lenses are shown | `Always` |
    • Always
    • Exported
    • Diagnostics
    | + +## ormolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` |   | + +## rename +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `crossModule` | Enable experimental cross-module renaming | `False` |   | + +## semanticTokens +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | + +## fourmolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` |   | +| `path` | Set path to executable (for "external" mode). | `"fourmolu"` |   | + +## cabal-gild +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` |   | + + diff --git a/test/testdata/schema/ghc96/vscode-extension-schema.golden.json b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json new file mode 100644 index 0000000000..1c0b19eb27 --- /dev/null +++ b/test/testdata/schema/ghc96/vscode-extension-schema.golden.json @@ -0,0 +1,1058 @@ +{ + "haskell.plugin.alternateNumberFormat.globalOn": { + "default": true, + "description": "Enables alternateNumberFormat plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal-fmt.config.path": { + "default": "cabal-fmt", + "markdownDescription": "Set path to 'cabal-fmt' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal-gild.config.path": { + "default": "cabal-gild", + "markdownDescription": "Set path to 'cabal-gild' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal.codeActionsOn": { + "default": true, + "description": "Enables cabal code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.completionOn": { + "default": true, + "description": "Enables cabal completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.diagnosticsOn": { + "default": true, + "description": "Enables cabal diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.callHierarchy.globalOn": { + "default": true, + "description": "Enables callHierarchy plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.changeTypeSignature.globalOn": { + "default": true, + "description": "Enables changeTypeSignature plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeActionsOn": { + "default": true, + "description": "Enables class code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeLensOn": { + "default": true, + "description": "Enables class code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.diff": { + "default": true, + "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.exception": { + "default": false, + "markdownDescription": "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi.", + 
"scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.codeActionsOn": { + "default": true, + "description": "Enables explicit-fields code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.inlayHintsOn": { + "default": true, + "description": "Enables explicit-fields inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fixity.globalOn": { + "default": true, + "description": "Enables explicit-fixity plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"fourmolu\" executable, rather than using the bundled library.", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.path": { + "default": "fourmolu", + "markdownDescription": "Set path to executable (for \"external\" mode).", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.gadt.globalOn": { + "default": true, + "description": "Enables gadt plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-bindings.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-bindings plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-fill-holes.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-fill-holes plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-imports-exports.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-imports-exports plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-type-signatures.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-type-signatures plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.autoExtendOn": { + "default": true, + "markdownDescription": "Extends the import list automatically when completing a out-of-scope identifier", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.snippetsOn": { + "default": true, + "markdownDescription": "Inserts snippets when using code completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.globalOn": { + "default": true, + "description": "Enables ghcide-completions plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.hoverOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.symbolsOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-type-lenses.config.mode": { + "default": "always", + "description": "Control how type lenses are shown", + "enum": [ + "always", + "exported", + "diagnostics" + ], + "enumDescriptions": [ + "Always displays type lenses of global bindings", + "Only display type lenses of exported global bindings", + "Follows error messages produced by GHC about missing signatures" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.ghcide-type-lenses.globalOn": { + "default": true, + "description": "Enables ghcide-type-lenses plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.codeActionsOn": { + 
"default": true, + "description": "Enables hlint code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.config.flags": { + "default": [], + "markdownDescription": "Flags used by hlint", + "scope": "resource", + "type": "array" + }, + "haskell.plugin.hlint.diagnosticsOn": { + "default": true, + "description": "Enables hlint diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeActionsOn": { + "default": true, + "description": "Enables importLens code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeLensOn": { + "default": true, + "description": "Enables importLens code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.moduleName.globalOn": { + "default": true, + "description": "Enables moduleName plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ormolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"ormolu\" executable, rather than using the bundled library", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.overloaded-record-dot.globalOn": { + "default": true, + "description": "Enables overloaded-record-dot plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-completion.globalOn": { + "default": true, + "description": "Enables pragmas-completion plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-disable.globalOn": { + "default": true, + "description": "Enables pragmas-disable plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-suggest.globalOn": { + "default": true, + "description": "Enables pragmas-suggest plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.qualifyImportedNames.globalOn": { + "default": true, + "description": "Enables qualifyImportedNames plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.config.crossModule": { + "default": false, + "markdownDescription": "Enable experimental cross-module renaming", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.globalOn": { + "default": true, + "description": "Enables rename plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.retrie.globalOn": { + "default": true, + "description": "Enables retrie plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.semanticTokens.config.classMethodToken": { + "default": "method", + "description": "LSP semantic token type to use for typeclass methods", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + 
"LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.classToken": { + "default": "class", + "description": "LSP semantic token type to use for typeclasses", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.dataConstructorToken": { + "default": "enumMember", + "description": "LSP semantic token type to use for data constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.functionToken": { + "default": "function", + "description": "LSP semantic token type to use for functions", + "enum": [ + "namespace", + "type", + "class", 
+ "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.operatorToken": { + "default": "operator", + "description": "LSP semantic token type to use for operators", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: 
enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.patternSynonymToken": { + "default": "macro", + "description": "LSP semantic token type to use for pattern synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.recordFieldToken": { + "default": "property", + "description": "LSP semantic token type to use for record fields", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeConstructorToken": { + "default": "enum", + "description": "LSP semantic token type to use for type constructors", + "enum": [ + 
"namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeFamilyToken": { + "default": "interface", + "description": "LSP semantic token type to use for type families", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeSynonymToken": { + "default": "type", + "description": "LSP semantic token type to use for type synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token 
Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeVariableToken": { + "default": "typeParameter", + "description": "LSP semantic token type to use for type variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.variableToken": { + "default": "variable", + "description": "LSP semantic token type to use for variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.globalOn": { + "default": false, + "description": "Enables semanticTokens plugin", + "scope": 
"resource", + "type": "boolean" + }, + "haskell.plugin.splice.globalOn": { + "default": true, + "description": "Enables splice plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.stan.globalOn": { + "default": false, + "description": "Enables stan plugin", + "scope": "resource", + "type": "boolean" + } +} diff --git a/test/testdata/schema/ghc98/default-config.golden.json b/test/testdata/schema/ghc98/default-config.golden.json new file mode 100644 index 0000000000..8467b451f1 --- /dev/null +++ b/test/testdata/schema/ghc98/default-config.golden.json @@ -0,0 +1,164 @@ +{ + "cabalFormattingProvider": "cabal-gild", + "checkParents": "CheckOnSave", + "checkProject": true, + "formattingProvider": "ormolu", + "maxCompletions": 40, + "plugin": { + "alternateNumberFormat": { + "globalOn": true + }, + "cabal": { + "codeActionsOn": true, + "completionOn": true, + "diagnosticsOn": true, + "hoverOn": true, + "symbolsOn": true + }, + "cabal-fmt": { + "config": { + "path": "cabal-fmt" + } + }, + "cabal-gild": { + "config": { + "path": "cabal-gild" + } + }, + "cabalHaskellIntegration": { + "globalOn": true + }, + "callHierarchy": { + "globalOn": true + }, + "changeTypeSignature": { + "globalOn": true + }, + "class": { + "codeActionsOn": true, + "codeLensOn": true + }, + "eval": { + "codeActionsOn": true, + "codeLensOn": true, + "config": { + "diff": true, + "exception": false + } + }, + "explicit-fields": { + "codeActionsOn": true, + "inlayHintsOn": true + }, + "explicit-fixity": { + "globalOn": true + }, + "fourmolu": { + "config": { + "external": false, + "path": "fourmolu" + } + }, + "gadt": { + "globalOn": true + }, + "ghcide-code-actions-bindings": { + "globalOn": true + }, + "ghcide-code-actions-fill-holes": { + "globalOn": true + }, + "ghcide-code-actions-imports-exports": { + "globalOn": true + }, + "ghcide-code-actions-type-signatures": { + "globalOn": true + }, + "ghcide-completions": { + "config": { + "autoExtendOn": true, + "snippetsOn": true + }, + "globalOn": true + }, + "ghcide-hover-and-symbols": { + "hoverOn": true, + "symbolsOn": true + }, + "ghcide-type-lenses": { + "config": { + "mode": "always" + }, + "globalOn": true + }, + "hlint": { + "codeActionsOn": true, + "config": { + "flags": [] + }, + "diagnosticsOn": true + }, + "importLens": { + "codeActionsOn": true, + "codeLensOn": true, + "inlayHintsOn": true + }, + "moduleName": { + "globalOn": true + }, + "ormolu": { + "config": { + "external": false + } + }, + "overloaded-record-dot": { + "globalOn": true + }, + "pragmas-completion": { + "globalOn": true + }, + "pragmas-disable": { + "globalOn": true + }, + "pragmas-suggest": { + "globalOn": true + }, + "qualifyImportedNames": { + "globalOn": true + }, + "rename": { + "config": { + "crossModule": false + }, + "globalOn": true + }, + "retrie": { + "globalOn": true + }, + "semanticTokens": { + "config": { + "classMethodToken": "method", + "classToken": "class", + "dataConstructorToken": "enumMember", + "functionToken": "function", + "moduleToken": "namespace", + "operatorToken": "operator", + "patternSynonymToken": "macro", + "recordFieldToken": "property", + "typeConstructorToken": "enum", + "typeFamilyToken": "interface", + "typeSynonymToken": "type", + "typeVariableToken": "typeParameter", + "variableToken": "variable" + }, + "globalOn": false + }, + "splice": { + "globalOn": true + }, + "stan": { + "globalOn": false + } + }, + "sessionLoading": "singleComponent" +} diff --git a/test/testdata/schema/ghc98/markdown-reference.md 
b/test/testdata/schema/ghc98/markdown-reference.md new file mode 100644 index 0000000000..668323ce66 --- /dev/null +++ b/test/testdata/schema/ghc98/markdown-reference.md @@ -0,0 +1,66 @@ +## hlint +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `flags` | Flags used by hlint | `TODO: Array values` |   | + +## cabal-fmt +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `path` | Set path to 'cabal-fmt' executable | `"cabal-fmt"` |   | + +## ghcide-completions +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `autoExtendOn` | Extends the import list automatically when completing a out-of-scope identifier | `True` |   | +| `snippetsOn` | Inserts snippets when using code completions | `True` |   | + +## eval +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `exception` | Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi. | `False` |   | +| `diff` | Enable the diff output (WAS/NOW) of eval lenses | `True` |   | + +## ghcide-type-lenses +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `mode` | Control how type lenses are shown | `Always` |
    • Always
    • Exported
    • Diagnostics
    | + +## ormolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "ormolu" executable, rather than using the bundled library | `False` |   | + +## rename +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `crossModule` | Enable experimental cross-module renaming | `False` |   | + +## semanticTokens +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `variableToken` | LSP semantic token type to use for variables | `SemanticTokenTypes_Variable` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `functionToken` | LSP semantic token type to use for functions | `SemanticTokenTypes_Function` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `dataConstructorToken` | LSP semantic token type to use for data constructors | `SemanticTokenTypes_EnumMember` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeVariableToken` | LSP semantic token type to use for type variables | `SemanticTokenTypes_TypeParameter` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classMethodToken` | LSP semantic token type to use for typeclass methods | `SemanticTokenTypes_Method` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `patternSynonymToken` | LSP semantic token type to use for pattern synonyms | `SemanticTokenTypes_Macro` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeConstructorToken` | LSP semantic token type to use for type constructors | `SemanticTokenTypes_Enum` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `classToken` | LSP semantic token type to use for typeclasses | `SemanticTokenTypes_Class` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeSynonymToken` | LSP semantic token type to use for type synonyms | `SemanticTokenTypes_Type` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `typeFamilyToken` | LSP semantic token type to use for type families | `SemanticTokenTypes_Interface` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `recordFieldToken` | LSP semantic token type to use for record fields | `SemanticTokenTypes_Property` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `operatorToken` | LSP semantic token type to use for operators | `SemanticTokenTypes_Operator` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | +| `moduleToken` | LSP semantic token type to use for modules | `SemanticTokenTypes_Namespace` |
    • SemanticTokenTypes_Namespace
    • SemanticTokenTypes_Type
    • SemanticTokenTypes_Class
    • SemanticTokenTypes_Enum
    • SemanticTokenTypes_Interface
    • SemanticTokenTypes_Struct
    • SemanticTokenTypes_TypeParameter
    • SemanticTokenTypes_Parameter
    • SemanticTokenTypes_Variable
    • SemanticTokenTypes_Property
    • SemanticTokenTypes_EnumMember
    • SemanticTokenTypes_Event
    • SemanticTokenTypes_Function
    • SemanticTokenTypes_Method
    • SemanticTokenTypes_Macro
    • SemanticTokenTypes_Keyword
    • SemanticTokenTypes_Modifier
    • SemanticTokenTypes_Comment
    • SemanticTokenTypes_String
    • SemanticTokenTypes_Number
    • SemanticTokenTypes_Regexp
    • SemanticTokenTypes_Operator
    • SemanticTokenTypes_Decorator
    | + +## fourmolu +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `external` | Call out to an external "fourmolu" executable, rather than using the bundled library. | `False` |   | +| `path` | Set path to executable (for "external" mode). | `"fourmolu"` |   | + +## cabal-gild +| Property | Description | Default | Allowed values | +| --- | --- | --- | --- | +| `path` | Set path to 'cabal-gild' executable | `"cabal-gild"` |   | + + diff --git a/test/testdata/schema/ghc98/vscode-extension-schema.golden.json b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json new file mode 100644 index 0000000000..1c0b19eb27 --- /dev/null +++ b/test/testdata/schema/ghc98/vscode-extension-schema.golden.json @@ -0,0 +1,1058 @@ +{ + "haskell.plugin.alternateNumberFormat.globalOn": { + "default": true, + "description": "Enables alternateNumberFormat plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal-fmt.config.path": { + "default": "cabal-fmt", + "markdownDescription": "Set path to 'cabal-fmt' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal-gild.config.path": { + "default": "cabal-gild", + "markdownDescription": "Set path to 'cabal-gild' executable", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.cabal.codeActionsOn": { + "default": true, + "description": "Enables cabal code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.completionOn": { + "default": true, + "description": "Enables cabal completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.diagnosticsOn": { + "default": true, + "description": "Enables cabal diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.hoverOn": { + "default": true, + "description": "Enables cabal hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabal.symbolsOn": { + "default": true, + "description": "Enables cabal symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.cabalHaskellIntegration.globalOn": { + "default": true, + "description": "Enables cabalHaskellIntegration plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.callHierarchy.globalOn": { + "default": true, + "description": "Enables callHierarchy plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.changeTypeSignature.globalOn": { + "default": true, + "description": "Enables changeTypeSignature plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeActionsOn": { + "default": true, + "description": "Enables class code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.class.codeLensOn": { + "default": true, + "description": "Enables class code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeActionsOn": { + "default": true, + "description": "Enables eval code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.codeLensOn": { + "default": true, + "description": "Enables eval code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.diff": { + "default": true, + "markdownDescription": "Enable the diff output (WAS/NOW) of eval lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.eval.config.exception": { + "default": false, + "markdownDescription": "Enable marking exceptions with `*** Exception:` similarly to doctest and GHCi.", + 
"scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.codeActionsOn": { + "default": true, + "description": "Enables explicit-fields code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fields.inlayHintsOn": { + "default": true, + "description": "Enables explicit-fields inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.explicit-fixity.globalOn": { + "default": true, + "description": "Enables explicit-fixity plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"fourmolu\" executable, rather than using the bundled library.", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.fourmolu.config.path": { + "default": "fourmolu", + "markdownDescription": "Set path to executable (for \"external\" mode).", + "scope": "resource", + "type": "string" + }, + "haskell.plugin.gadt.globalOn": { + "default": true, + "description": "Enables gadt plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-bindings.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-bindings plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-fill-holes.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-fill-holes plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-imports-exports.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-imports-exports plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-code-actions-type-signatures.globalOn": { + "default": true, + "description": "Enables ghcide-code-actions-type-signatures plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.autoExtendOn": { + "default": true, + "markdownDescription": "Extends the import list automatically when completing a out-of-scope identifier", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.config.snippetsOn": { + "default": true, + "markdownDescription": "Inserts snippets when using code completions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-completions.globalOn": { + "default": true, + "description": "Enables ghcide-completions plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.hoverOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols hover", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-hover-and-symbols.symbolsOn": { + "default": true, + "description": "Enables ghcide-hover-and-symbols symbols", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ghcide-type-lenses.config.mode": { + "default": "always", + "description": "Control how type lenses are shown", + "enum": [ + "always", + "exported", + "diagnostics" + ], + "enumDescriptions": [ + "Always displays type lenses of global bindings", + "Only display type lenses of exported global bindings", + "Follows error messages produced by GHC about missing signatures" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.ghcide-type-lenses.globalOn": { + "default": true, + "description": "Enables ghcide-type-lenses plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.codeActionsOn": { + 
"default": true, + "description": "Enables hlint code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.hlint.config.flags": { + "default": [], + "markdownDescription": "Flags used by hlint", + "scope": "resource", + "type": "array" + }, + "haskell.plugin.hlint.diagnosticsOn": { + "default": true, + "description": "Enables hlint diagnostics", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeActionsOn": { + "default": true, + "description": "Enables importLens code actions", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.codeLensOn": { + "default": true, + "description": "Enables importLens code lenses", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.importLens.inlayHintsOn": { + "default": true, + "description": "Enables importLens inlay hints", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.moduleName.globalOn": { + "default": true, + "description": "Enables moduleName plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.ormolu.config.external": { + "default": false, + "markdownDescription": "Call out to an external \"ormolu\" executable, rather than using the bundled library", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.overloaded-record-dot.globalOn": { + "default": true, + "description": "Enables overloaded-record-dot plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-completion.globalOn": { + "default": true, + "description": "Enables pragmas-completion plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-disable.globalOn": { + "default": true, + "description": "Enables pragmas-disable plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.pragmas-suggest.globalOn": { + "default": true, + "description": "Enables pragmas-suggest plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.qualifyImportedNames.globalOn": { + "default": true, + "description": "Enables qualifyImportedNames plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.config.crossModule": { + "default": false, + "markdownDescription": "Enable experimental cross-module renaming", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.rename.globalOn": { + "default": true, + "description": "Enables rename plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.retrie.globalOn": { + "default": true, + "description": "Enables retrie plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.semanticTokens.config.classMethodToken": { + "default": "method", + "description": "LSP semantic token type to use for typeclass methods", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + 
"LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.classToken": { + "default": "class", + "description": "LSP semantic token type to use for typeclasses", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.dataConstructorToken": { + "default": "enumMember", + "description": "LSP semantic token type to use for data constructors", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.functionToken": { + "default": "function", + "description": "LSP semantic token type to use for functions", + "enum": [ + "namespace", + "type", + "class", 
+ "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.moduleToken": { + "default": "namespace", + "description": "LSP semantic token type to use for modules", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.operatorToken": { + "default": "operator", + "description": "LSP semantic token type to use for operators", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: 
enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.patternSynonymToken": { + "default": "macro", + "description": "LSP semantic token type to use for pattern synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.recordFieldToken": { + "default": "property", + "description": "LSP semantic token type to use for record fields", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeConstructorToken": { + "default": "enum", + "description": "LSP semantic token type to use for type constructors", + "enum": [ + 
"namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeFamilyToken": { + "default": "interface", + "description": "LSP semantic token type to use for type families", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeSynonymToken": { + "default": "type", + "description": "LSP semantic token type to use for type synonyms", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token 
Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.typeVariableToken": { + "default": "typeParameter", + "description": "LSP semantic token type to use for type variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.config.variableToken": { + "default": "variable", + "description": "LSP semantic token type to use for variables", + "enum": [ + "namespace", + "type", + "class", + "enum", + "interface", + "struct", + "typeParameter", + "parameter", + "variable", + "property", + "enumMember", + "event", + "function", + "method", + "macro", + "keyword", + "modifier", + "comment", + "string", + "number", + "regexp", + "operator", + "decorator" + ], + "enumDescriptions": [ + "LSP Semantic Token Type: namespace", + "LSP Semantic Token Type: type", + "LSP Semantic Token Type: class", + "LSP Semantic Token Type: enum", + "LSP Semantic Token Type: interface", + "LSP Semantic Token Type: struct", + "LSP Semantic Token Type: typeParameter", + "LSP Semantic Token Type: parameter", + "LSP Semantic Token Type: variable", + "LSP Semantic Token Type: property", + "LSP Semantic Token Type: enumMember", + "LSP Semantic Token Type: event", + "LSP Semantic Token Type: function", + "LSP Semantic Token Type: method", + "LSP Semantic Token Type: macro", + "LSP Semantic Token Type: keyword", + "LSP Semantic Token Type: modifier", + "LSP Semantic Token Type: comment", + "LSP Semantic Token Type: string", + "LSP Semantic Token Type: number", + "LSP Semantic Token Type: regexp", + "LSP Semantic Token Type: operator", + "LSP Semantic Token Type: decorator" + ], + "scope": "resource", + "type": "string" + }, + "haskell.plugin.semanticTokens.globalOn": { + "default": false, + "description": "Enables semanticTokens plugin", + "scope": 
"resource", + "type": "boolean" + }, + "haskell.plugin.splice.globalOn": { + "default": true, + "description": "Enables splice plugin", + "scope": "resource", + "type": "boolean" + }, + "haskell.plugin.stan.globalOn": { + "default": false, + "description": "Enables stan plugin", + "scope": "resource", + "type": "boolean" + } +} diff --git a/test/testdata/testdata.cabal b/test/testdata/testdata.cabal deleted file mode 100644 index c191bbd1f1..0000000000 --- a/test/testdata/testdata.cabal +++ /dev/null @@ -1,82 +0,0 @@ -name: testdata -version: 0.1.0.0 -cabal-version: >=2.0 -build-type: Simple - -executable applyrefact - build-depends: base - main-is: ApplyRefact.hs - default-language: Haskell2010 - -executable applyrefact2 - build-depends: base - main-is: ApplyRefact2.hs - default-language: Haskell2010 - -executable codeactionrename - build-depends: base - main-is: CodeActionRename.hs - default-language: Haskell2010 - -executable hover - build-depends: base - main-is: Hover.hs - default-language: Haskell2010 - -executable symbols - build-depends: base - main-is: Symbols.hs - default-language: Haskell2010 - - -executable applyrefact2 - build-depends: base - main-is: ApplyRefact2.hs - default-language: Haskell2010 - -executable hlintpragma - build-depends: base - main-is: HlintPragma.hs - default-language: Haskell2010 - -executable harecase - build-depends: base - main-is: HaReCase.hs - default-language: Haskell2010 - -executable haredemote - build-depends: base - main-is: HaReDemote.hs - default-language: Haskell2010 - -executable haremovedef - build-depends: base - main-is: HaReMoveDef.hs - default-language: Haskell2010 - -executable harerename - build-depends: base - main-is: HaReRename.hs - default-language: Haskell2010 - -executable haregenapplicative - build-depends: base - , parsec - main-is: HaReGA1.hs - default-language: Haskell2010 - -executable functests - build-depends: base - main-is: FuncTest.hs - default-language: Haskell2010 - -executable evens - build-depends: base - main-is: Evens.hs - hs-source-dirs: liquid - default-language: Haskell2010 - -executable filewithwarning - build-depends: base - main-is: FileWithWarning.hs - default-language: Haskell2010 diff --git a/test/testdata/typedHoleDiag.txt b/test/testdata/typedHoleDiag.txt deleted file mode 100644 index 3ca81f900c..0000000000 --- a/test/testdata/typedHoleDiag.txt +++ /dev/null @@ -1,26 +0,0 @@ -• Found hole: _ :: Maybe T.Text -• In the expression: _ - In an equation for ‘extractHoles’: - extractHoles diag - | "Found hole:" `T.isInfixOf` diag = _ - | otherwise = Nothing -• Relevant bindings include - diag :: T.Text - (bound at /private/var/folders/zh/fqwj2cq95b7gbzs49fsq5drw0000gn/T/ghc-mod48138/GhcMod48135-416.hs:482:14) - extractHoles :: T.Text -> Maybe T.Text - (bound at /private/var/folders/zh/fqwj2cq95b7gbzs49fsq5drw0000gn/T/ghc-mod48138/GhcMod48135-416.hs:482:1) - Valid substitutions include - Nothing :: forall a. Maybe a - (imported from ‘Data.Maybe’ at /private/var/folders/zh/fqwj2cq95b7gbzs49fsq5drw0000gn/T/ghc-mod48138/GhcMod48135-416.hs:24:1-27 - (and originally defined in ‘GHC.Base’)) - mempty :: forall a. Monoid a => a - (imported from ‘Prelude’ at /private/var/folders/zh/fqwj2cq95b7gbzs49fsq5drw0000gn/T/ghc-mod48138/GhcMod48135-416.hs:8:8-39 - (and originally defined in ‘GHC.Base’)) - undefined :: forall (a :: TYPE r). 
- GHC.Stack.Types.HasCallStack => - a - (imported from ‘Prelude’ at /private/var/folders/zh/fqwj2cq95b7gbzs49fsq5drw0000gn/T/ghc-mod48138/GhcMod48135-416.hs:8:8-39 - (and originally defined in ‘GHC.Err’)) - GM.mzero :: forall (m :: * -> *). GM.MonadPlus m => forall a. m a - (imported qualified from ‘GhcMod.Error’ at /private/var/folders/zh/fqwj2cq95b7gbzs49fsq5drw0000gn/T/ghc-mod48138/GhcMod48135-416.hs:37:1-56 - (and originally defined in ‘GHC.Base’)) \ No newline at end of file diff --git a/test/testdata/typedHoleDiag2.txt b/test/testdata/typedHoleDiag2.txt deleted file mode 100644 index 032d18bacc..0000000000 --- a/test/testdata/typedHoleDiag2.txt +++ /dev/null @@ -1,17 +0,0 @@ -• Found hole: _ :: A -• In the expression: _ - In an equation for ‘foo2’: - foo2 x - = _ - where - stuff (A a) = A (a + 1) -• Relevant bindings include - stuff :: A -> A (bound at test/testdata/TypedHoles2.hs:6:5) - x :: [A] (bound at test/testdata/TypedHoles2.hs:4:6) - foo2 :: [A] -> A (bound at test/testdata/TypedHoles2.hs:4:1) - Valid substitutions include - undefined :: forall (a :: TYPE r). - GHC.Stack.Types.HasCallStack => - a - (imported from ‘Prelude’ at test/testdata/TypedHoles2.hs:1:8-18 - (and originally defined in ‘GHC.Err’)) diff --git a/test/testdata/typedHoleDiag3.txt b/test/testdata/typedHoleDiag3.txt deleted file mode 100644 index ffe520ffaa..0000000000 --- a/test/testdata/typedHoleDiag3.txt +++ /dev/null @@ -1,37 +0,0 @@ -• Found hole: _ :: t -> FilePath - Where: ‘t’ is a rigid type variable bound by - the inferred type of - lintDockerfile :: [IgnoreRule] - -> t - -> IO (Either Language.Docker.Parser.Error [Rules.RuleCheck]) - at app/Main.hs:(229,5)-(235,47) -• In the expression: _ - In the first argument of ‘Docker.parseFile’, namely - ‘(_ dockerFile)’ - In a stmt of a 'do' block: ast <- Docker.parseFile (_ dockerFile) -• Relevant bindings include - processedFile :: Either Language.Docker.Parser.Error Dockerfile - -> Either Language.Docker.Parser.Error [Rules.RuleCheck] - (bound at app/Main.hs:233:9) - processRules :: Dockerfile -> [Rules.RuleCheck] - (bound at app/Main.hs:234:9) - ignoredRules :: Rules.RuleCheck -> Bool - (bound at app/Main.hs:235:9) - dockerFile :: t (bound at app/Main.hs:229:32) - ignoreRules :: [IgnoreRule] (bound at app/Main.hs:229:20) - lintDockerfile :: [IgnoreRule] - -> t -> IO (Either Language.Docker.Parser.Error [Rules.RuleCheck]) - (bound at app/Main.hs:229:5) - (Some bindings suppressed; use -fmax-relevant-binds=N or -fno-max-relevant-binds) - Valid substitutions include - mempty :: forall a. Monoid a => a - (imported from ‘Prelude’ at app/Main.hs:5:8-11 - (and originally defined in ‘GHC.Base’)) - undefined :: forall (a :: TYPE r). - GHC.Stack.Types.HasCallStack => - a - (imported from ‘Prelude’ at app/Main.hs:5:8-11 - (and originally defined in ‘GHC.Err’)) - idm :: forall m. 
Monoid m => m - (imported from ‘Options.Applicative’ at app/Main.hs:21:1-46 - (and originally defined in ‘Options.Applicative.Builder’)) diff --git a/test/testdata/wErrorTest/src/WError.hs b/test/testdata/wErrorTest/src/WError.hs deleted file mode 100644 index 86e0ad2a3d..0000000000 --- a/test/testdata/wErrorTest/src/WError.hs +++ /dev/null @@ -1,2 +0,0 @@ -module WError where -main = undefined diff --git a/test/testdata/wErrorTest/test.cabal b/test/testdata/wErrorTest/test.cabal deleted file mode 100644 index 4ce7fc3b9a..0000000000 --- a/test/testdata/wErrorTest/test.cabal +++ /dev/null @@ -1,18 +0,0 @@ -name: test -version: 0.1.0.0 --- synopsis: --- description: -license: BSD3 -author: Author name here -maintainer: example@example.com -copyright: 2017 Author name here -category: Web -build-type: Simple -cabal-version: >=1.10 - -library - exposed-modules: WError - hs-source-dirs: src - build-depends: base >= 4.7 && < 5 - default-language: Haskell2010 - ghc-options: -Wall -Werror diff --git a/test/testdata/wrapper/8.8.1/Setup.hs b/test/testdata/wrapper/8.8.1/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/wrapper/8.8.1/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/wrapper/8.8.1/cabal1.cabal b/test/testdata/wrapper/8.8.1/cabal1.cabal deleted file mode 100644 index f599b3df0c..0000000000 --- a/test/testdata/wrapper/8.8.1/cabal1.cabal +++ /dev/null @@ -1,25 +0,0 @@ --- Initial cabal1.cabal generated by cabal init. For further --- documentation, see http://haskell.org/cabal/users-guide/ - -name: cabal1 -version: 0.1.0.0 --- synopsis: --- description: -license: PublicDomain --- license-file: LICENSE -author: Alan Zimmerman -maintainer: alan.zimm@gmail.com --- copyright: --- category: -build-type: Simple --- extra-source-files: --- cabal-helper for cabal 2.2/GHC 8.4 needs a cabal version >= 2 -cabal-version: >=2.0 - -executable cabal1 - main-is: main.hs - -- other-modules: - -- other-extensions: - build-depends: base >=4.6 && <5 - hs-source-dirs: src - default-language: Haskell2010 \ No newline at end of file diff --git a/test/testdata/wrapper/8.8.1/src/Foo/Bar.hs b/test/testdata/wrapper/8.8.1/src/Foo/Bar.hs deleted file mode 100644 index ceb08691b1..0000000000 --- a/test/testdata/wrapper/8.8.1/src/Foo/Bar.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo.Bar where - -baz = 6 diff --git a/test/testdata/wrapper/8.8.1/src/main.hs b/test/testdata/wrapper/8.8.1/src/main.hs deleted file mode 100644 index 839d104293..0000000000 --- a/test/testdata/wrapper/8.8.1/src/main.hs +++ /dev/null @@ -1,7 +0,0 @@ --- | Testing that HaRe can find source files from a cabal file - -import qualified Foo.Bar as B - -main = putStrLn "foo" - -baz = 3 + B.baz diff --git a/test/testdata/wrapper/ghc/dummy b/test/testdata/wrapper/ghc/dummy deleted file mode 100644 index 9c7ffe8ee9..0000000000 --- a/test/testdata/wrapper/ghc/dummy +++ /dev/null @@ -1 +0,0 @@ -Needed or else git won't track the directory \ No newline at end of file diff --git a/test/testdata/wrapper/lts-14.18/Setup.hs b/test/testdata/wrapper/lts-14.18/Setup.hs deleted file mode 100644 index 9a994af677..0000000000 --- a/test/testdata/wrapper/lts-14.18/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/test/testdata/wrapper/lts-14.18/cabal1.cabal b/test/testdata/wrapper/lts-14.18/cabal1.cabal deleted file mode 100644 index f599b3df0c..0000000000 --- 
a/test/testdata/wrapper/lts-14.18/cabal1.cabal +++ /dev/null @@ -1,25 +0,0 @@ --- Initial cabal1.cabal generated by cabal init. For further --- documentation, see http://haskell.org/cabal/users-guide/ - -name: cabal1 -version: 0.1.0.0 --- synopsis: --- description: -license: PublicDomain --- license-file: LICENSE -author: Alan Zimmerman -maintainer: alan.zimm@gmail.com --- copyright: --- category: -build-type: Simple --- extra-source-files: --- cabal-helper for cabal 2.2/GHC 8.4 needs a cabal version >= 2 -cabal-version: >=2.0 - -executable cabal1 - main-is: main.hs - -- other-modules: - -- other-extensions: - build-depends: base >=4.6 && <5 - hs-source-dirs: src - default-language: Haskell2010 \ No newline at end of file diff --git a/test/testdata/wrapper/lts-14.18/src/Foo/Bar.hs b/test/testdata/wrapper/lts-14.18/src/Foo/Bar.hs deleted file mode 100644 index ceb08691b1..0000000000 --- a/test/testdata/wrapper/lts-14.18/src/Foo/Bar.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Foo.Bar where - -baz = 6 diff --git a/test/testdata/wrapper/lts-14.18/src/main.hs b/test/testdata/wrapper/lts-14.18/src/main.hs deleted file mode 100644 index 839d104293..0000000000 --- a/test/testdata/wrapper/lts-14.18/src/main.hs +++ /dev/null @@ -1,7 +0,0 @@ --- | Testing that HaRe can find source files from a cabal file - -import qualified Foo.Bar as B - -main = putStrLn "foo" - -baz = 3 + B.baz diff --git a/test/utils/Test/Hls/Command.hs b/test/utils/Test/Hls/Command.hs new file mode 100644 index 0000000000..b0e0febc3c --- /dev/null +++ b/test/utils/Test/Hls/Command.hs @@ -0,0 +1,34 @@ +module Test.Hls.Command + ( hlsExeCommand + , hlsLspCommand + , hlsWrapperLspCommand + , hlsWrapperExeCommand + ) +where + +import Data.Maybe (fromMaybe) +import System.Environment (lookupEnv) +import System.IO.Unsafe (unsafePerformIO) + +-- | The command to execute the version of hls for the current compiler. +-- +-- Both @stack test@ and @cabal new-test@ setup the environment so @hls@ is +-- on PATH. Cabal seems to respond to @build-tool-depends@ specifically while +-- stack just puts all project executables on PATH. 
+hlsExeCommand :: String +{-# NOINLINE hlsExeCommand #-} +hlsExeCommand = unsafePerformIO $ do + testExe <- fromMaybe "haskell-language-server" <$> lookupEnv "HLS_TEST_EXE" + pure testExe + +hlsLspCommand :: String +hlsLspCommand = hlsExeCommand ++ " --lsp --test -d -j4" + +hlsWrapperLspCommand :: String +hlsWrapperLspCommand = hlsWrapperExeCommand ++ " --lsp --test -d -j4" + +hlsWrapperExeCommand :: String +{-# NOINLINE hlsWrapperExeCommand #-} +hlsWrapperExeCommand = unsafePerformIO $ do + testExe <- fromMaybe "haskell-language-server-wrapper" <$> lookupEnv "HLS_WRAPPER_TEST_EXE" + pure testExe diff --git a/test/utils/Test/Hls/Flags.hs b/test/utils/Test/Hls/Flags.hs new file mode 100644 index 0000000000..8e60ebb93e --- /dev/null +++ b/test/utils/Test/Hls/Flags.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} +-- | Module for disabling tests if their plugins are disabled +-- DEPRECATED: To be removed when all plugin tests are in their own packages +module Test.Hls.Flags where + +import Test.Hls (TestTree, ignoreTestBecause) + +-- * Plugin dependent tests + +-- | Disable test unless the eval flag is set +requiresEvalPlugin :: TestTree -> TestTree +#if hls_eval +requiresEvalPlugin = id +#else +requiresEvalPlugin = ignoreTestBecause "Eval plugin disabled" +#endif + +-- * Formatters +-- | Disable test unless the floskell flag is set +requiresFloskellPlugin :: TestTree -> TestTree +#if hls_floskell +requiresFloskellPlugin = id +#else +requiresFloskellPlugin = ignoreTestBecause "Floskell plugin disabled" +#endif + +-- | Disable test unless the fourmolu flag is set +requiresFourmoluPlugin :: TestTree -> TestTree +#if hls_fourmolu +requiresFourmoluPlugin = id +#else +requiresFourmoluPlugin = ignoreTestBecause "Fourmolu plugin disabled" +#endif + +-- | Disable test unless the ormolu flag is set +requiresOrmoluPlugin :: TestTree -> TestTree +#if hls_ormolu +requiresOrmoluPlugin = id +#else +requiresOrmoluPlugin = ignoreTestBecause "Ormolu plugin disabled" +#endif diff --git a/test/utils/Test/Hls/Util.hs b/test/utils/Test/Hls/Util.hs deleted file mode 100644 index c6b14a6ea1..0000000000 --- a/test/utils/Test/Hls/Util.hs +++ /dev/null @@ -1,366 +0,0 @@ -{-# LANGUAGE CPP, OverloadedStrings, NamedFieldPuns #-} -module Test.Hls.Util - ( - codeActionSupportCaps - , dummyLspFuncs - , flushStackEnvironment - , getHspecFormattedConfig - , ghcVersion, GhcVersion(..) 
- , hieCommand - , hieCommandExamplePlugin - , hieCommandVomit - , logConfig - , logFilePath - , noLogConfig - , setupBuildToolFiles - , withFileLogging - , findExe - , withCurrentDirectoryInTmp - -- , makeRequest - -- , runIGM - -- , runIGM' - -- , runSingle - -- , runSingle' - -- , runSingleReq - -- , testCommand - -- , testOptions - ) -where - -import Control.Applicative --- import Control.Concurrent.STM -import Control.Monad -import Control.Monad.Trans.Maybe -import Data.Default -import Data.List (intercalate) --- import Data.Typeable --- import qualified Data.Map as Map -import Data.Maybe -import Language.Haskell.LSP.Core -import Language.Haskell.LSP.Types -import qualified Language.Haskell.LSP.Test as T -import qualified Language.Haskell.LSP.Types.Capabilities as C --- import Haskell.Ide.Engine.MonadTypes hiding (withProgress, withIndefiniteProgress) --- import qualified Ide.Cradle as Bios --- import qualified Ide.Engine.Config as Config -import System.Directory -import System.Environment -import System.FilePath -import qualified System.Log.Logger as L -import System.IO.Temp --- import Test.Hspec -import Test.Hspec.Runner -import Test.Hspec.Core.Formatters -import Text.Blaze.Renderer.String (renderMarkup) -import Text.Blaze.Internal --- import qualified Haskell.Ide.Engine.PluginApi as HIE (BiosOptions, defaultOptions) --- import HIE.Bios.Types - --- testOptions :: HIE.BiosOptions --- testOptions = HIE.defaultOptions { cradleOptsVerbosity = Verbose } - --- --------------------------------------------------------------------- - - --- testCommand :: (ToJSON a, Typeable b, ToJSON b, Show b, Eq b) --- => IdePlugins -> FilePath -> IdeGhcM (IdeResult b) -> PluginId -> CommandId -> a -> IdeResult b -> IO () --- testCommand testPlugins fp act plugin cmd arg res = do --- flushStackEnvironment --- (newApiRes, oldApiRes) <- runIGM testPlugins fp $ do --- new <- act --- old <- makeRequest plugin cmd arg --- return (new, old) --- newApiRes `shouldBe` res --- fmap fromDynJSON oldApiRes `shouldBe` fmap Just res - --- runSingle :: IdePlugins -> FilePath -> IdeGhcM (IdeResult b) -> IO (IdeResult b) --- runSingle = runSingle' id - --- runSingle' :: (Config.Config -> Config.Config) -> IdePlugins -> FilePath -> IdeGhcM (IdeResult b) -> IO (IdeResult b) --- runSingle' modifyConfig testPlugins fp act = runIGM' modifyConfig testPlugins fp act - --- runSingleReq :: ToJSON a --- => IdePlugins -> FilePath -> PluginId -> CommandId -> a -> IO (IdeResult DynamicJSON) --- runSingleReq testPlugins fp plugin com arg = runIGM testPlugins fp (makeRequest plugin com arg) - --- makeRequest :: ToJSON a => PluginId -> CommandId -> a -> IdeGhcM (IdeResult DynamicJSON) --- makeRequest plugin com arg = runPluginCommand plugin com (toJSON arg) - --- runIGM :: IdePlugins -> FilePath -> IdeGhcM a -> IO a --- runIGM = runIGM' id - --- runIGM' :: (Config.Config -> Config.Config) -> IdePlugins -> FilePath -> IdeGhcM a -> IO a --- runIGM' modifyConfig testPlugins fp f = do --- stateVar <- newTVarIO $ IdeState emptyModuleCache Map.empty Map.empty Nothing --- crdl <- Bios.findLocalCradle fp --- mlibdir <- Bios.getProjectGhcLibDir crdl --- let tmpFuncs :: LspFuncs Config.Config --- tmpFuncs = dummyLspFuncs --- lspFuncs :: LspFuncs Config.Config --- lspFuncs = tmpFuncs { config = (fmap . 
fmap) modifyConfig (config tmpFuncs)} --- runIdeGhcM mlibdir testPlugins lspFuncs stateVar f - -noLogConfig :: T.SessionConfig -noLogConfig = T.defaultConfig { T.logMessages = False } - -logConfig :: T.SessionConfig -logConfig = T.defaultConfig { T.logMessages = True } - -codeActionSupportCaps :: C.ClientCapabilities -codeActionSupportCaps = def { C._textDocument = Just textDocumentCaps } - where - textDocumentCaps = def { C._codeAction = Just codeActionCaps } - codeActionCaps = C.CodeActionClientCapabilities (Just True) (Just literalSupport) - literalSupport = C.CodeActionLiteralSupport def - -withFileLogging :: FilePath -> IO a -> IO a -withFileLogging logFile f = do - let logDir = "./test-logs" - logPath = logDir logFile - - dirExists <- doesDirectoryExist logDir - unless dirExists $ createDirectory logDir - - exists <- doesFileExist logPath - when exists $ removeFile logPath - - setupLogger (Just logPath) ["hie"] L.DEBUG - - f - --- --------------------------------------------------------------------- - -setupBuildToolFiles :: IO () -setupBuildToolFiles = do - forM_ files setupDirectFilesIn - -setupDirectFilesIn :: FilePath -> IO () -setupDirectFilesIn f = - writeFile (f ++ "hie.yaml") hieYamlCradleDirectContents - - --- --------------------------------------------------------------------- - -files :: [FilePath] -files = - [ "./test/testdata/" - -- , "./test/testdata/addPackageTest/cabal-exe/" - -- , "./test/testdata/addPackageTest/hpack-exe/" - -- , "./test/testdata/addPackageTest/cabal-lib/" - -- , "./test/testdata/addPackageTest/hpack-lib/" - -- , "./test/testdata/addPragmas/" - -- , "./test/testdata/badProjects/cabal/" - -- , "./test/testdata/completion/" - -- , "./test/testdata/definition/" - -- , "./test/testdata/gototest/" - -- , "./test/testdata/redundantImportTest/" - -- , "./test/testdata/wErrorTest/" - ] - -data GhcVersion - = GHC88 - | GHC86 - | GHC84 - deriving (Eq,Show) - -ghcVersion :: GhcVersion -#if (defined(MIN_VERSION_GLASGOW_HASKELL) && (MIN_VERSION_GLASGOW_HASKELL(8,8,0,0))) -ghcVersion = GHC88 -#elif (defined(MIN_VERSION_GLASGOW_HASKELL) && (MIN_VERSION_GLASGOW_HASKELL(8,6,0,0))) -ghcVersion = GHC86 -#elif (defined(MIN_VERSION_GLASGOW_HASKELL) && (MIN_VERSION_GLASGOW_HASKELL(8,4,0,0))) -ghcVersion = GHC84 -#endif - -logFilePath :: String -logFilePath = "hie-" ++ show ghcVersion ++ ".log" - --- | The command to execute the version of hie for the current compiler. --- --- Both @stack test@ and @cabal new-test@ setup the environment so @hie@ is --- on PATH. Cabal seems to respond to @build-tool-depends@ specifically while --- stack just puts all project executables on PATH. -hieCommand :: String --- hieCommand = "hie --lsp --bios-verbose -d -l test-logs/" ++ logFilePath --- hieCommand = "haskell-language-server --lsp" --- hieCommand = "haskell-language-server --lsp --test --shake-profiling=test-logs/" ++ logFilePath -hieCommand = "haskell-language-server --lsp -d -l test-logs/" ++ logFilePath - -hieCommandVomit :: String -hieCommandVomit = hieCommand ++ " --vomit" - -hieCommandExamplePlugin :: String -hieCommandExamplePlugin = hieCommand ++ " --example" - --- --------------------------------------------------------------------- - -hieYamlCradleDirectContents :: String -hieYamlCradleDirectContents = unlines - [ "# WARNING: THIS FILE IS AUTOGENERATED IN test/utils/TestUtils.hs. IT WILL BE OVERWRITTEN ON EVERY TEST RUN" - , "cradle:" - , " direct:" - , " arguments:" - , " - -i." 
- ] - - --- --------------------------------------------------------------------- - -getHspecFormattedConfig :: String -> IO Config -getHspecFormattedConfig name = do - -- https://circleci.com/docs/2.0/env-vars/#built-in-environment-variables - isCI <- isJust <$> lookupEnv "CI" - - -- Only use the xml formatter on CI since it hides console output - if isCI - then do - let subdir = "test-results" name - createDirectoryIfMissing True subdir - - return $ defaultConfig { configFormatter = Just xmlFormatter - , configOutputFile = Right $ subdir "results.xml" - } - else return defaultConfig - --- | A Hspec formatter for CircleCI. --- Originally from https://github.com/LeastAuthority/hspec-jenkins -xmlFormatter :: Formatter -xmlFormatter = silent { - headerFormatter = do - writeLine "" - writeLine "" - , exampleSucceeded - , exampleFailed - , examplePending - , footerFormatter = writeLine "" - } - where - -#if MIN_VERSION_hspec(2,5,0) - exampleSucceeded path _ = -#else - exampleSucceeded path = -#endif - writeLine $ renderMarkup $ testcase path "" - -#if MIN_VERSION_hspec(2,5,0) - exampleFailed path _ err = -#else - exampleFailed path (Left err) = - writeLine $ renderMarkup $ testcase path $ - failure ! message (show err) $ "" - exampleFailed path (Right err) = -#endif - writeLine $ renderMarkup $ testcase path $ - failure ! message (reasonAsString err) $ "" - -#if MIN_VERSION_hspec(2,5,0) - examplePending path _ reason = -#else - examplePending path reason = -#endif - writeLine $ renderMarkup $ testcase path $ - case reason of - Just desc -> skipped ! message desc $ "" - Nothing -> skipped "" - - failure, skipped :: Markup -> Markup - failure = customParent "failure" - skipped = customParent "skipped" - - name, className, message :: String -> Attribute - name = customAttribute "name" . stringValue - className = customAttribute "classname" . stringValue - message = customAttribute "message" . stringValue - - testcase :: Path -> Markup -> Markup - testcase (xs,x) = customParent "testcase" ! name x ! className (intercalate "." 
xs) - - reasonAsString :: FailureReason -> String - reasonAsString NoReason = "no reason given" - reasonAsString (Reason x) = x - reasonAsString (ExpectedButGot Nothing expected got) = "Expected " ++ expected ++ " but got " ++ got - reasonAsString (ExpectedButGot (Just src) expected got) = src ++ " expected " ++ expected ++ " but got " ++ got -#if MIN_VERSION_hspec(2,5,0) - reasonAsString (Error Nothing err ) = show err - reasonAsString (Error (Just s) err) = s ++ show err -#endif - --- --------------------------------------------------------------------- - -flushStackEnvironment :: IO () -flushStackEnvironment = do - -- We need to clear these environment variables to prevent - -- collisions with stack usages - -- See https://github.com/commercialhaskell/stack/issues/4875 - unsetEnv "GHC_PACKAGE_PATH" - unsetEnv "GHC_ENVIRONMENT" - unsetEnv "HASKELL_PACKAGE_SANDBOX" - unsetEnv "HASKELL_PACKAGE_SANDBOXES" - --- --------------------------------------------------------------------- - -dummyLspFuncs :: Default a => LspFuncs a -dummyLspFuncs = LspFuncs { clientCapabilities = def - , config = return (Just def) - , sendFunc = const (return ()) - , getVirtualFileFunc = const (return Nothing) - , persistVirtualFileFunc = \uri -> return (uriToFilePath (fromNormalizedUri uri)) - , reverseFileMapFunc = return id - , publishDiagnosticsFunc = mempty - , flushDiagnosticsBySourceFunc = mempty - , getNextReqId = pure (IdInt 0) - , rootPath = Nothing - , getWorkspaceFolders = return Nothing - , withProgress = \_ _ f -> f (const (return ())) - , withIndefiniteProgress = \_ _ f -> f - } - -findExeRecursive :: FilePath -> FilePath -> IO (Maybe FilePath) -findExeRecursive exe dir = do - me <- listToMaybe <$> findExecutablesInDirectories [dir] exe - case me of - Just e -> return (Just e) - Nothing -> do - subdirs <- (fmap (dir )) <$> listDirectory dir >>= filterM doesDirectoryExist - foldM (\acc subdir -> case acc of - Just y -> pure $ Just y - Nothing -> findExeRecursive exe subdir) - Nothing - subdirs - --- | So we can find an executable with cabal run --- since it doesnt put build tools on the path (only cabal test) -findExe :: String -> IO FilePath -findExe name = do - fp <- fmap fromJust $ runMaybeT $ - MaybeT (findExecutable name) <|> - MaybeT (findExeRecursive name "dist-newstyle") - makeAbsolute fp - --- | Like 'withCurrentDirectory', but will copy the directory over to the system --- temporary directory first to avoid haskell-language-server's source tree from --- interfering with the cradle -withCurrentDirectoryInTmp :: FilePath -> IO a -> IO a -withCurrentDirectoryInTmp dir f = - withTempCopy dir $ \newDir -> - withCurrentDirectory newDir f - -withTempCopy :: FilePath -> (FilePath -> IO a) -> IO a -withTempCopy srcDir f = do - withSystemTempDirectory "hls-test" $ \newDir -> do - copyDir srcDir newDir - f newDir - -copyDir :: FilePath -> FilePath -> IO () -copyDir src dst = do - cnts <- listDirectory src - forM_ cnts $ \file -> do - unless (file `elem` ignored) $ do - let srcFp = src file - dstFp = dst file - isDir <- doesDirectoryExist srcFp - if isDir - then createDirectory dstFp >> copyDir srcFp dstFp - else copyFile srcFp dstFp - where ignored = ["dist", "dist-newstyle", ".stack-work"] diff --git a/test/wrapper/Main.hs b/test/wrapper/Main.hs index 6f2795a579..0fbfa76b7a 100644 --- a/test/wrapper/Main.hs +++ b/test/wrapper/Main.hs @@ -1,33 +1,50 @@ -import Data.List -import Data.Char -import Test.Hls.Util -import Test.Tasty -import Test.Tasty.HUnit -import System.Process 
+import Data.List.Extra (isInfixOf, trimEnd) +import Data.Maybe +import System.Environment +import System.Process +import Test.Hls main :: IO () -main = do - flushStackEnvironment - defaultMain $ - testGroup "haskell-language-server-wrapper" [projectGhcVersionTests] +main = defaultTestRunner $ testGroup "haskell-language-server-wrapper" [projectGhcVersionTests] projectGhcVersionTests :: TestTree projectGhcVersionTests = testGroup "--project-ghc-version" - [ testCase "stack with ghc 8.10.1" $ - testDir "test/wrapper/testdata/stack-8.10.1" "8.10.1" - , testCase "stack with ghc 8.8.3" $ - testDir "test/wrapper/testdata/stack-8.8.3" "8.8.3" + [ testCase "stack with global ghc" $ do + ghcVer <- ghcNumericVersion + let writeStackYaml = writeFile "stack.yaml" $ + -- Use system-ghc and install-ghc to avoid stack downloading ghc in CI + -- (and use ghcup-managed ghc instead) + "{resolver: ghc-" ++ ghcVer ++ ", system-ghc: true, install-ghc: false}" + testDir writeStackYaml "test/wrapper/testdata/stack-specific-ghc" ghcVer , testCase "cabal with global ghc" $ do - ghcVer <- trim <$> readProcess "ghc" ["--numeric-version"] "" - testDir "test/wrapper/testdata/cabal-cur-ver" ghcVer + ghcVer <- ghcNumericVersion + testDir (pure ()) "test/wrapper/testdata/cabal-cur-ver" ghcVer + , testCase "stack with existing cabal build artifact" $ do + -- Should report cabal as existing build artifacts are more important than + -- the existence of 'stack.yaml' + testProjectType "test/wrapper/testdata/stack-with-dist-newstyle" + ("cradleOptsProg = CradleAction: Cabal" `isInfixOf`) ] + where + ghcNumericVersion = trimEnd <$> readProcess "ghc" ["--numeric-version"] "" -testDir :: FilePath -> String -> Assertion -testDir dir expectedVer = do - wrapper <- findExe "haskell-language-server-wrapper" +testDir :: IO () -> FilePath -> String -> Assertion +testDir extraSetup dir expectedVer = withCurrentDirectoryInTmp dir $ do - actualVer <- trim <$> readProcess wrapper ["--project-ghc-version"] "" + extraSetup + testExe <- fromMaybe "haskell-language-server-wrapper" + <$> lookupEnv "HLS_WRAPPER_TEST_EXE" + actualVer <- trimEnd <$> readProcess testExe ["--project-ghc-version"] "" actualVer @?= expectedVer -trim :: String -> String -trim = dropWhileEnd isSpace +testProjectType :: FilePath -> (String -> Bool) -> Assertion +testProjectType dir matcher = + withCurrentDirectoryInTmp' [".stack-work", "dist"] dir $ do + wrapperTestExe <- fromMaybe "haskell-language-server-wrapper" + <$> lookupEnv "HLS_WRAPPER_TEST_EXE" + hlsTestExe <- fromMaybe "haskell-language-server" + <$> lookupEnv "HLS_TEST_EXE" + actualWrapperCradle <- trimEnd <$> readProcess wrapperTestExe ["--print-cradle"] "" + actualHlsCradle <- trimEnd <$> readProcess hlsTestExe ["--print-cradle"] "" + matcher actualWrapperCradle @? "Wrapper reported wrong project type: " ++ actualWrapperCradle + matcher actualHlsCradle @? 
"HLS reported wrong project type: " ++ actualHlsCradle diff --git a/test/wrapper/testdata/stack-8.10.1/stack.yaml b/test/wrapper/testdata/stack-8.10.1/stack.yaml deleted file mode 100644 index 409e7fe489..0000000000 --- a/test/wrapper/testdata/stack-8.10.1/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.10.1 diff --git a/test/wrapper/testdata/stack-8.8.3/Lib.hs b/test/wrapper/testdata/stack-8.8.3/Lib.hs deleted file mode 100644 index 30bf1ec6b8..0000000000 --- a/test/wrapper/testdata/stack-8.8.3/Lib.hs +++ /dev/null @@ -1,2 +0,0 @@ -module Lib where -foo = 42 diff --git a/test/wrapper/testdata/stack-8.8.3/foo.cabal b/test/wrapper/testdata/stack-8.8.3/foo.cabal deleted file mode 100644 index affc654cad..0000000000 --- a/test/wrapper/testdata/stack-8.8.3/foo.cabal +++ /dev/null @@ -1,7 +0,0 @@ -cabal-version: 2.4 -name: foo -version: 0.1.0.0 -library - exposed-modules: Lib - build-depends: base - default-language: Haskell2010 diff --git a/test/wrapper/testdata/stack-8.8.3/stack.yaml b/test/wrapper/testdata/stack-8.8.3/stack.yaml deleted file mode 100644 index fc8cd8cd8f..0000000000 --- a/test/wrapper/testdata/stack-8.8.3/stack.yaml +++ /dev/null @@ -1 +0,0 @@ -resolver: ghc-8.8.3 diff --git a/test/wrapper/testdata/stack-8.10.1/Lib.hs b/test/wrapper/testdata/stack-specific-ghc/Lib.hs similarity index 100% rename from test/wrapper/testdata/stack-8.10.1/Lib.hs rename to test/wrapper/testdata/stack-specific-ghc/Lib.hs diff --git a/test/wrapper/testdata/stack-8.10.1/foo.cabal b/test/wrapper/testdata/stack-specific-ghc/foo.cabal similarity index 100% rename from test/wrapper/testdata/stack-8.10.1/foo.cabal rename to test/wrapper/testdata/stack-specific-ghc/foo.cabal diff --git a/test/wrapper/testdata/stack-with-dist-newstyle/dist-newstyle/.gitkeep b/test/wrapper/testdata/stack-with-dist-newstyle/dist-newstyle/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/wrapper/testdata/stack-with-dist-newstyle/stack-with-dist-newstyle.cabal b/test/wrapper/testdata/stack-with-dist-newstyle/stack-with-dist-newstyle.cabal new file mode 100644 index 0000000000..ed06c519c8 --- /dev/null +++ b/test/wrapper/testdata/stack-with-dist-newstyle/stack-with-dist-newstyle.cabal @@ -0,0 +1,6 @@ +cabal-version: 2.4 +name: stack-with-dist-newstyle +version: 0.1.0.0 + +library + default-language: Haskell2010 diff --git a/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml b/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml new file mode 100644 index 0000000000..d95c1a7a03 --- /dev/null +++ b/test/wrapper/testdata/stack-with-dist-newstyle/stack.yaml @@ -0,0 +1,2 @@ +# specific version does not matter +resolver: ghc-9.6.5