diff --git a/.github/workflows/bump-version.js b/.github/workflows/bump-version.js
index cc070d32d..fe6e8edfc 100644
--- a/.github/workflows/bump-version.js
+++ b/.github/workflows/bump-version.js
@@ -40,7 +40,7 @@ else if (semver.minor(release) - semver.minor(latestPublish) === 1) {
// minor higher than the release version.
else {
throw new Error(
- "Version number minors are more than off by one, check package.json and (pre-)published versions manually."
+ "Version number minors are more than off by one, check package.json and (pre-)published versions manually.",
);
}
@@ -50,5 +50,5 @@ if (!semver.valid(newVersion)) {
fs.appendFileSync(
process.env.GITHUB_OUTPUT,
- `new_version=${newVersion}${os.EOL}`
+ `new_version=${newVersion}${os.EOL}`,
);
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1d4af6f39..5ccfed8e0 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,16 +2,16 @@ name: CI
on:
push:
- branches: [ master ]
+ branches: [master]
tags: "*.*.*"
pull_request:
- branches: [ master ]
+ branches: [master]
-concurrency:
+concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
-jobs:
+jobs:
build:
strategy:
fail-fast: false
@@ -50,68 +50,74 @@ jobs:
runs-on: ${{matrix.os}}
steps:
- # needed for Windows testing
- - name: Set git to use LF
- run: |
- git config --global core.autocrlf false
- git config --global core.eol lf
-
- - uses: actions/checkout@v4
-
- - name: Cache OCaml's opam
- uses: actions/cache@v4
- with:
- path: ~/.opam
- key: ${{matrix.os}}-rescript-vscode-v5
-
- - name: Use OCaml
- uses: ocaml/setup-ocaml@v3
- with:
- ocaml-compiler: ${{matrix.ocaml-compiler}}
- opam-pin: false
-
- - name: Use Node.js
- uses: actions/setup-node@v4
- with:
- node-version: 20
- registry-url: 'https://registry.npmjs.org'
-
- - run: npm ci
- - run: opam install dune cppo
- - run: npm run compile
- - run: npm run bundle
-
- # These 2 runs (or just the second?) are for when you have opam dependencies. We don't.
- # Don't add deps. But if you ever do, un-comment these and add an .opam file.
- # - run: opam pin add rescript-editor-analysis.dev . --no-action
- # - run: opam install . --deps-only --with-doc --with-test
-
- - name: Set dune profile according to platform
- run: echo "DUNE_PROFILE=${{ matrix.dune-profile }}" >> $GITHUB_ENV
-
- - name: Build
- run: opam exec -- make
-
- - name: Test
- run: opam exec -- make test
-
- - name: Strip binaries
- if: matrix.os != 'windows-latest'
- run: strip rescript-editor-analysis.exe rescript-tools.exe
-
- # Also avoids artifacts upload permission loss:
- # https://github.com/actions/upload-artifact/tree/ee69f02b3dfdecd58bb31b4d133da38ba6fe3700#permission-loss
- - name: Compress files
- run: |
- mkdir ${{matrix.artifact-folder}}
- mv rescript-editor-analysis.exe ${{matrix.artifact-folder}}
- mv rescript-tools.exe ${{matrix.artifact-folder}}
- tar -cvf binary.tar ${{matrix.artifact-folder}}
-
- - uses: actions/upload-artifact@v4
- with:
- name: ${{matrix.artifact-folder}}
- path: binary.tar
+ # needed for Windows testing
+ - name: Set git to use LF
+ run: |
+ git config --global core.autocrlf false
+ git config --global core.eol lf
+
+ - uses: actions/checkout@v4
+
+ - name: Cache OCaml's opam
+ uses: actions/cache@v4
+ with:
+ path: ~/.opam
+ key: ${{matrix.os}}-rescript-vscode-v5
+
+ - name: Use OCaml
+ uses: ocaml/setup-ocaml@v3
+ with:
+ ocaml-compiler: ${{matrix.ocaml-compiler}}
+ opam-pin: false
+
+ - name: Use Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ registry-url: "https://registry.npmjs.org"
+
+ - run: npm ci
+ - run: opam install dune cppo
+ - run: npm run compile
+ - run: npm run bundle
+
+ # These 2 runs (or just the second?) are for when you have opam dependencies. We don't.
+ # Don't add deps. But if you ever do, un-comment these and add an .opam file.
+ # - run: opam pin add rescript-editor-analysis.dev . --no-action
+ # - run: opam install . --deps-only --with-doc --with-test
+
+ - name: Set dune profile according to platform
+ run: echo "DUNE_PROFILE=${{ matrix.dune-profile }}" >> $GITHUB_ENV
+
+ - name: Install ocamlformat
+ run: opam install ocamlformat.0.27.0
+
+ - name: Format check
+ run: opam exec -- make checkformat
+
+ - name: Build
+ run: opam exec -- make
+
+ - name: Test
+ run: opam exec -- make test
+
+ - name: Strip binaries
+ if: matrix.os != 'windows-latest'
+ run: strip rescript-editor-analysis.exe rescript-tools.exe
+
+ # Also avoids artifacts upload permission loss:
+ # https://github.com/actions/upload-artifact/tree/ee69f02b3dfdecd58bb31b4d133da38ba6fe3700#permission-loss
+ - name: Compress files
+ run: |
+ mkdir ${{matrix.artifact-folder}}
+ mv rescript-editor-analysis.exe ${{matrix.artifact-folder}}
+ mv rescript-tools.exe ${{matrix.artifact-folder}}
+ tar -cvf binary.tar ${{matrix.artifact-folder}}
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: ${{matrix.artifact-folder}}
+ path: binary.tar
package:
needs:
@@ -119,191 +125,191 @@ jobs:
runs-on: ubuntu-24.04
steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js
- uses: actions/setup-node@v4
- with:
- node-version: 20
- registry-url: 'https://registry.npmjs.org'
-
- - run: npm ci
- - run: npm run compile
-
- - name: Download MacOS binaries
- uses: actions/download-artifact@v4
- with:
- name: darwin
- path: binaries
- - run: tar -xvf binary.tar
- working-directory: binaries
-
- - name: Download MacOS ARM binaries
- uses: actions/download-artifact@v4
- with:
- name: darwinarm64
- path: binaries
- - run: tar -xvf binary.tar
- working-directory: binaries
-
- - name: Download Linux binaries
- uses: actions/download-artifact@v4
- with:
- name: linux
- path: binaries
- - run: tar -xvf binary.tar
- working-directory: binaries
-
- - name: Download Linux ARM binaries
- uses: actions/download-artifact@v4
- with:
- name: linuxarm64
- path: binaries
- - run: tar -xvf binary.tar
- working-directory: binaries
-
- - name: Download Windows binaries
- uses: actions/download-artifact@v4
- with:
- name: win32
- path: binaries
- - run: tar -xvf binary.tar
- working-directory: binaries
-
- - name: Cleanup tar file
- run: rm binary.tar
- working-directory: binaries
-
- - name: Move binaries to folders
- run: |
- declare -a platforms=("darwin" "darwinarm64" "linux" "linuxarm64" "win32")
-
- for platform in "${platforms[@]}"; do
- mkdir server/analysis_binaries/"$platform"
- mv binaries/"$platform"/rescript-editor-analysis.exe server/analysis_binaries/"$platform"
- done
-
- for platform in "${platforms[@]}"; do
- mkdir tools/binaries/"$platform"
- mv binaries/"$platform"/rescript-tools.exe tools/binaries/"$platform"
- done
-
- - name: Store short commit SHA for filename
- id: vars
- env:
- COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
- run: echo "sha_short=${COMMIT_SHA:0:7}" >> $GITHUB_OUTPUT
-
- - name: Store tag name
- id: tag_name
- if: startsWith(github.ref, 'refs/tags/')
- run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
-
- - name: Increment pre-release version
- if: github.ref == 'refs/heads/master'
- id: increment_pre_release
- run: |
- JSON=$(npx vsce show chenglou92.rescript-vscode --json)
- NEW_VERSION=$(echo $JSON | jq -r '.versions | .[0] | .["version"]')
- node .github/workflows/bump-version.js ${NEW_VERSION}
-
- - name: Package Extension
- if: github.ref != 'refs/heads/master'
- run: npx vsce package -o rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix
-
- - name: Package Extension pre-release version
- if: github.ref == 'refs/heads/master'
- run: npx vsce package -o rescript-vscode-latest-master.vsix ${{ steps.increment_pre_release.outputs.new_version }} --no-git-tag-version
-
- - name: Package Extension release version
- if: startsWith(github.ref, 'refs/tags/')
- run: npx vsce package -o rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix ${{ steps.tag_name.outputs.tag }} --no-git-tag-version
-
- - uses: actions/upload-artifact@v4
- if: github.ref != 'refs/heads/master'
- with:
- name: rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix
- path: rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix
-
- - uses: actions/upload-artifact@v4
- if: github.ref == 'refs/heads/master'
- with:
- name: rescript-vscode-latest-master.vsix
- path: rescript-vscode-latest-master.vsix
-
- - uses: actions/upload-artifact@v4
- if: startsWith(github.ref, 'refs/tags/')
- with:
- name: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix
- path: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix
-
- - name: Move latest-master tag to current commit
- if: github.ref == 'refs/heads/master'
- run: |
- git tag -d latest-master || true
- git push origin --delete latest-master || true
- git tag latest-master
- git push origin latest-master
-
- - name: Publish latest master to GitHub
- if: github.ref == 'refs/heads/master'
- uses: softprops/action-gh-release@v2
- with:
- token: "${{ secrets.GITHUB_TOKEN }}"
- tag_name: "latest-master"
- prerelease: true
- generate_release_notes: true
- name: "Latest master"
- files: rescript-vscode-latest-master.vsix
-
- - name: Generate release notes from changelog
- if: startsWith(github.ref, 'refs/tags/')
- run: |
- sed -e "/^## ${{ steps.tag_name.outputs.tag }}/,/^## / ! d" CHANGELOG.md | head -n -2 > RELEASE.md
-
- - name: Publish release version to GitHub
- if: startsWith(github.ref, 'refs/tags/')
- uses: softprops/action-gh-release@v2
- with:
- token: "${{ secrets.GITHUB_TOKEN }}"
- prerelease: false
- body_path: RELEASE.md
- name: ${{ steps.tag_name.outputs.tag }}
- files: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix
-
- - name: Publish extension as pre-release
- if: github.ref == 'refs/heads/master' && !startsWith(github.event.head_commit.message, 'publish tools')
- run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --pre-release ${{ steps.increment_pre_release.outputs.new_version }} --no-git-tag-version
-
- - name: Publish extension as release
- if: startsWith(github.ref, 'refs/tags/')
- run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} ${{ steps.tag_name.outputs.tag }} --no-git-tag-version
-
- - name: Publish LSP as pre-release to NPM
- if: github.ref == 'refs/heads/master'
- working-directory: server
- run: |
- npm version preminor --preid next-$(git rev-parse --short HEAD)
- npm publish --access public --tag next
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-
- - name: Publish LSP to NPM
- if: startsWith(github.ref, 'refs/tags/')
- working-directory: server
- run: npm publish --access public
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-
- - name: Build @rescript/tools package
- working-directory: tools
- run: |
- npm ci
- npm run build
-
- - name: Publish @rescript/tools package
- if: ${{ startsWith(github.event.head_commit.message, 'publish tools') && (github.ref == 'refs/heads/master') }}
- working-directory: tools
- run: npm publish --access public
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+ - uses: actions/checkout@v4
+
+ - name: Use Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ registry-url: "https://registry.npmjs.org"
+
+ - run: npm ci
+ - run: npm run compile
+
+ - name: Download MacOS binaries
+ uses: actions/download-artifact@v4
+ with:
+ name: darwin
+ path: binaries
+ - run: tar -xvf binary.tar
+ working-directory: binaries
+
+ - name: Download MacOS ARM binaries
+ uses: actions/download-artifact@v4
+ with:
+ name: darwinarm64
+ path: binaries
+ - run: tar -xvf binary.tar
+ working-directory: binaries
+
+ - name: Download Linux binaries
+ uses: actions/download-artifact@v4
+ with:
+ name: linux
+ path: binaries
+ - run: tar -xvf binary.tar
+ working-directory: binaries
+
+ - name: Download Linux ARM binaries
+ uses: actions/download-artifact@v4
+ with:
+ name: linuxarm64
+ path: binaries
+ - run: tar -xvf binary.tar
+ working-directory: binaries
+
+ - name: Download Windows binaries
+ uses: actions/download-artifact@v4
+ with:
+ name: win32
+ path: binaries
+ - run: tar -xvf binary.tar
+ working-directory: binaries
+
+ - name: Cleanup tar file
+ run: rm binary.tar
+ working-directory: binaries
+
+ - name: Move binaries to folders
+ run: |
+ declare -a platforms=("darwin" "darwinarm64" "linux" "linuxarm64" "win32")
+
+ for platform in "${platforms[@]}"; do
+ mkdir server/analysis_binaries/"$platform"
+ mv binaries/"$platform"/rescript-editor-analysis.exe server/analysis_binaries/"$platform"
+ done
+
+ for platform in "${platforms[@]}"; do
+ mkdir tools/binaries/"$platform"
+ mv binaries/"$platform"/rescript-tools.exe tools/binaries/"$platform"
+ done
+
+ - name: Store short commit SHA for filename
+ id: vars
+ env:
+ COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
+ run: echo "sha_short=${COMMIT_SHA:0:7}" >> $GITHUB_OUTPUT
+
+ - name: Store tag name
+ id: tag_name
+ if: startsWith(github.ref, 'refs/tags/')
+ run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
+
+ - name: Increment pre-release version
+ if: github.ref == 'refs/heads/master'
+ id: increment_pre_release
+ run: |
+ JSON=$(npx vsce show chenglou92.rescript-vscode --json)
+ NEW_VERSION=$(echo $JSON | jq -r '.versions | .[0] | .["version"]')
+ node .github/workflows/bump-version.js ${NEW_VERSION}
+
+ - name: Package Extension
+ if: github.ref != 'refs/heads/master'
+ run: npx vsce package -o rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix
+
+ - name: Package Extension pre-release version
+ if: github.ref == 'refs/heads/master'
+ run: npx vsce package -o rescript-vscode-latest-master.vsix ${{ steps.increment_pre_release.outputs.new_version }} --no-git-tag-version
+
+ - name: Package Extension release version
+ if: startsWith(github.ref, 'refs/tags/')
+ run: npx vsce package -o rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix ${{ steps.tag_name.outputs.tag }} --no-git-tag-version
+
+ - uses: actions/upload-artifact@v4
+ if: github.ref != 'refs/heads/master'
+ with:
+ name: rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix
+ path: rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix
+
+ - uses: actions/upload-artifact@v4
+ if: github.ref == 'refs/heads/master'
+ with:
+ name: rescript-vscode-latest-master.vsix
+ path: rescript-vscode-latest-master.vsix
+
+ - uses: actions/upload-artifact@v4
+ if: startsWith(github.ref, 'refs/tags/')
+ with:
+ name: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix
+ path: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix
+
+ - name: Move latest-master tag to current commit
+ if: github.ref == 'refs/heads/master'
+ run: |
+ git tag -d latest-master || true
+ git push origin --delete latest-master || true
+ git tag latest-master
+ git push origin latest-master
+
+ - name: Publish latest master to GitHub
+ if: github.ref == 'refs/heads/master'
+ uses: softprops/action-gh-release@v2
+ with:
+ token: "${{ secrets.GITHUB_TOKEN }}"
+ tag_name: "latest-master"
+ prerelease: true
+ generate_release_notes: true
+ name: "Latest master"
+ files: rescript-vscode-latest-master.vsix
+
+ - name: Generate release notes from changelog
+ if: startsWith(github.ref, 'refs/tags/')
+ run: |
+ sed -e "/^## ${{ steps.tag_name.outputs.tag }}/,/^## / ! d" CHANGELOG.md | head -n -2 > RELEASE.md
+
+ - name: Publish release version to GitHub
+ if: startsWith(github.ref, 'refs/tags/')
+ uses: softprops/action-gh-release@v2
+ with:
+ token: "${{ secrets.GITHUB_TOKEN }}"
+ prerelease: false
+ body_path: RELEASE.md
+ name: ${{ steps.tag_name.outputs.tag }}
+ files: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix
+
+ - name: Publish extension as pre-release
+ if: github.ref == 'refs/heads/master' && !startsWith(github.event.head_commit.message, 'publish tools')
+ run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --pre-release ${{ steps.increment_pre_release.outputs.new_version }} --no-git-tag-version
+
+ - name: Publish extension as release
+ if: startsWith(github.ref, 'refs/tags/')
+ run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} ${{ steps.tag_name.outputs.tag }} --no-git-tag-version
+
+ - name: Publish LSP as pre-release to NPM
+ if: github.ref == 'refs/heads/master'
+ working-directory: server
+ run: |
+ npm version preminor --preid next-$(git rev-parse --short HEAD)
+ npm publish --access public --tag next
+ env:
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+ - name: Publish LSP to NPM
+ if: startsWith(github.ref, 'refs/tags/')
+ working-directory: server
+ run: npm publish --access public
+ env:
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+ - name: Build @rescript/tools package
+ working-directory: tools
+ run: |
+ npm ci
+ npm run build
+
+ - name: Publish @rescript/tools package
+ if: ${{ startsWith(github.event.head_commit.message, 'publish tools') && (github.ref == 'refs/heads/master') }}
+ working-directory: tools
+ run: npm publish --access public
+ env:
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
diff --git a/.ocamlformat b/.ocamlformat
index ad5b55b6f..39c1395fe 100644
--- a/.ocamlformat
+++ b/.ocamlformat
@@ -1,5 +1,5 @@
profile = default
-version = 0.26.2
+version = 0.27.0
field-space = tight-decl
break-cases = toplevel
diff --git a/.ocamlformat-ignore b/.ocamlformat-ignore
index 1bb071a2d..fb6a1886d 100644
--- a/.ocamlformat-ignore
+++ b/.ocamlformat-ignore
@@ -13,3 +13,4 @@ analysis/vendor/ext/map.cppo.ml
analysis/vendor/ext/ordered_hash_map.cppo.ml
analysis/vendor/ext/set.cppo.ml
analysis/vendor/ext/vec.cppo.ml
+**/node_modules/**
\ No newline at end of file
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 000000000..459fd3949
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,5 @@
+# Ignore artifacts:
+server/out
+analysis/examples
+analysis/reanalyze/examples
+tools/tests
\ No newline at end of file
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 000000000..0967ef424
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1 @@
+{}
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index af515502d..326dae3cd 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -1,9 +1,7 @@
{
- // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
- // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
+ // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
+ // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
- // List of extensions which should be recommended for users of this workspace.
- "recommendations": [
- "dbaeumer.vscode-eslint"
- ]
-}
\ No newline at end of file
+ // List of extensions which should be recommended for users of this workspace.
+ "recommendations": ["dbaeumer.vscode-eslint"]
+}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 8c9ceb4bc..2d3785241 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1,55 +1,44 @@
-// A launch configuration that compiles the extension and then opens it inside a new window
-{
- "version": "0.2.0",
- "configurations": [
- {
- "type": "extensionHost",
- "request": "launch",
- "name": "Launch Client",
- "runtimeExecutable": "${execPath}",
- "args": [
- "--extensionDevelopmentPath=${workspaceRoot}"
- ],
- "outFiles": [
- "${workspaceRoot}/client/out/**/*.js"
- ],
- "preLaunchTask": {
- "type": "npm",
- "script": "watch"
- }
- },
- {
- "type": "node",
- "request": "attach",
- "name": "Attach to Server",
- "port": 6009,
- "restart": true,
- "outFiles": [
- "${workspaceRoot}/server/out/**/*.js"
- ]
- },
- {
- "name": "Language Server E2E Test",
- "type": "extensionHost",
- "request": "launch",
- "runtimeExecutable": "${execPath}",
- "args": [
- "--extensionDevelopmentPath=${workspaceRoot}",
- "--extensionTestsPath=${workspaceRoot}/client/out/test/index",
- "${workspaceRoot}/client/testFixture"
- ],
- "outFiles": [
- "${workspaceRoot}/client/out/test/**/*.js"
- ]
- }
- ],
- "compounds": [
- {
- "name": "Client + Server",
- "configurations": [
- "Launch Client",
- "Attach to Server"
- ]
- }
- ]
-}
+// A launch configuration that compiles the extension and then opens it inside a new window
+{
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "type": "extensionHost",
+ "request": "launch",
+ "name": "Launch Client",
+ "runtimeExecutable": "${execPath}",
+ "args": ["--extensionDevelopmentPath=${workspaceRoot}"],
+ "outFiles": ["${workspaceRoot}/client/out/**/*.js"],
+ "preLaunchTask": {
+ "type": "npm",
+ "script": "watch"
+ }
+ },
+ {
+ "type": "node",
+ "request": "attach",
+ "name": "Attach to Server",
+ "port": 6009,
+ "restart": true,
+ "outFiles": ["${workspaceRoot}/server/out/**/*.js"]
+ },
+ {
+ "name": "Language Server E2E Test",
+ "type": "extensionHost",
+ "request": "launch",
+ "runtimeExecutable": "${execPath}",
+ "args": [
+ "--extensionDevelopmentPath=${workspaceRoot}",
+ "--extensionTestsPath=${workspaceRoot}/client/out/test/index",
+ "${workspaceRoot}/client/testFixture"
+ ],
+ "outFiles": ["${workspaceRoot}/client/out/test/**/*.js"]
+ }
+ ],
+ "compounds": [
+ {
+ "name": "Client + Server",
+ "configurations": ["Launch Client", "Attach to Server"]
+ }
+ ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 7e7f928b2..5e9c9a441 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,13 +1,13 @@
-{
- "editor.insertSpaces": false,
- "tslint.enable": true,
- "typescript.tsc.autoDetect": "off",
- "typescript.preferences.quoteStyle": "single",
- "editor.codeActionsOnSave": {
- "source.fixAll.eslint": "explicit"
- },
- "ocaml.sandbox": {
- "kind": "opam",
- "switch": "${workspaceFolder:rescript-vscode}"
- }
-}
+{
+ "editor.insertSpaces": false,
+ "tslint.enable": true,
+ "typescript.tsc.autoDetect": "off",
+ "typescript.preferences.quoteStyle": "single",
+ "editor.codeActionsOnSave": {
+ "source.fixAll.eslint": "explicit"
+ },
+ "ocaml.sandbox": {
+ "kind": "opam",
+ "switch": "${workspaceFolder:rescript-vscode}"
+ }
+}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index 070d88eb1..7f5b17100 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -1,33 +1,29 @@
-{
- "version": "2.0.0",
- "tasks": [
- {
- "type": "npm",
- "script": "compile",
- "group": "build",
- "presentation": {
- "panel": "dedicated",
- "reveal": "never"
- },
- "problemMatcher": [
- "$tsc"
- ]
- },
- {
- "type": "npm",
- "script": "watch",
- "isBackground": true,
- "group": {
- "kind": "build",
- "isDefault": true
- },
- "presentation": {
- "panel": "dedicated",
- "reveal": "never"
- },
- "problemMatcher": [
- "$tsc-watch"
- ]
- }
- ]
-}
\ No newline at end of file
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "type": "npm",
+ "script": "compile",
+ "group": "build",
+ "presentation": {
+ "panel": "dedicated",
+ "reveal": "never"
+ },
+ "problemMatcher": ["$tsc"]
+ },
+ {
+ "type": "npm",
+ "script": "watch",
+ "isBackground": true,
+ "group": {
+ "kind": "build",
+ "isDefault": true
+ },
+ "presentation": {
+ "panel": "dedicated",
+ "reveal": "never"
+ },
+ "problemMatcher": ["$tsc-watch"]
+ }
+ ]
+}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 305d3552a..65650bb1e 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -68,7 +68,6 @@ If `dune` is present, run `make build` to build the OCaml projects and copy the
If you're getting some Promise-related error alert: this is a VSCode and/or template bug.
-
- If that newly launched VSCode test instance has no project in its explorer view, drag in a random project.
- Kill all your node processes.
- Redo the launch.
diff --git a/Makefile b/Makefile
index a81885099..cbafb4321 100644
--- a/Makefile
+++ b/Makefile
@@ -17,9 +17,11 @@ clean:
format:
dune build @fmt --auto-promote
+ npx prettier --write --experimental-cli .
checkformat:
dune build @fmt
+ npx prettier --check --experimental-cli .
.DEFAULT_GOAL := build
diff --git a/analysis/README.md b/analysis/README.md
index 2ce1bf5a2..a0509cf0f 100644
--- a/analysis/README.md
+++ b/analysis/README.md
@@ -13,6 +13,7 @@ See main CONTRIBUTING.md's repo structure. Additionally, `examples/` is a conven
## Usage
At root:
+
```sh
./rescript-editor-analysis.exe --help
diff --git a/analysis/reanalyze/src/DeadCommon.ml b/analysis/reanalyze/src/DeadCommon.ml
index b9cda3afb..9dbacba7b 100644
--- a/analysis/reanalyze/src/DeadCommon.ml
+++ b/analysis/reanalyze/src/DeadCommon.ml
@@ -555,7 +555,8 @@ module Decl = struct
let refIsBelow (pos : Lexing.position) =
decl.pos.pos_fname <> pos.pos_fname
|| decl.pos.pos_cnum < pos.pos_cnum
- && (* not a function defined inside a function, e.g. not a callback *)
+ &&
+ (* not a function defined inside a function, e.g. not a callback *)
decl.posEnd.pos_cnum < pos.pos_cnum
in
refs |> PosSet.exists refIsBelow
diff --git a/analysis/reanalyze/src/Paths.ml b/analysis/reanalyze/src/Paths.ml
index 45341f104..52a965368 100644
--- a/analysis/reanalyze/src/Paths.ml
+++ b/analysis/reanalyze/src/Paths.ml
@@ -110,10 +110,8 @@ module Config = struct
| None -> ())
end
-(**
- * Handle namespaces in cmt files.
- * E.g. src/Module-Project.cmt becomes src/Module
- *)
+(** * Handle namespaces in cmt files. * E.g. src/Module-Project.cmt becomes
+ src/Module *)
let handleNamespace cmt =
let cutAfterDash s =
match String.index s '-' with
diff --git a/analysis/src/CreateInterface.ml b/analysis/src/CreateInterface.ml
index 09fa7e0f9..b382cb4d0 100644
--- a/analysis/src/CreateInterface.ml
+++ b/analysis/src/CreateInterface.ml
@@ -219,7 +219,9 @@ let printSignature ~extractor ~signature =
:: rest
when Ident.name makePropsId = Ident.name makeId ^ "Props"
&& ((* from implementation *) makePropsLoc.loc_ghost
- || (* from interface *) makePropsLoc = makeValueDesc.val_loc)
+ ||
+ (* from interface *)
+ makePropsLoc = makeValueDesc.val_loc)
&& getComponentTypeV3 makeValueDesc.val_type <> None ->
(*
{"name": string} => retType ~~> (~name:string) => retType
diff --git a/analysis/src/Loc.ml b/analysis/src/Loc.ml
index 2ab1d8fbd..34f905f0b 100644
--- a/analysis/src/Loc.ml
+++ b/analysis/src/Loc.ml
@@ -9,8 +9,9 @@ let toString (loc : t) =
let hasPos ~pos loc = start loc <= pos && pos < end_ loc
-(** Allows the character after the end to be included. Ie when the cursor is at the
- end of the word, like `someIdentifier`. Useful in some scenarios. *)
+(** Allows the character after the end to be included. Ie when the cursor is at
+ the end of the word, like `someIdentifier`. Useful in some
+ scenarios. *)
let hasPosInclusiveEnd ~pos loc = start loc <= pos && pos <= end_ loc
let mkPosition (pos : Pos.t) =
diff --git a/analysis/src/References.ml b/analysis/src/References.ml
index e047a2ba1..c74e44264 100644
--- a/analysis/src/References.ml
+++ b/analysis/src/References.ml
@@ -209,7 +209,8 @@ let definedForLoc ~file ~package locKind =
maybeLog "Yes!! got it";
Some res)))
-(** Find alternative declaration: from res in case of interface, or from resi in case of implementation *)
+(** Find alternative declaration: from res in case of interface, or from resi in
+ case of implementation *)
let alternateDeclared ~(file : File.t) ~package (declared : _ Declared.t) tip =
match Hashtbl.find_opt package.pathsForModule file.moduleName with
| None -> None
diff --git a/analysis/src/SharedTypes.ml b/analysis/src/SharedTypes.ml
index 307352728..40572a6b9 100644
--- a/analysis/src/SharedTypes.ml
+++ b/analysis/src/SharedTypes.ml
@@ -356,7 +356,8 @@ and completionType =
fields: field list;
definition:
[ `NameOnly of string
- (** When we only have the name, like when pulling the record from a declared type. *)
+ (** When we only have the name, like when pulling the record from a
+ declared type. *)
| `TypeExpr of Types.type_expr
(** When we have the full type expr from the compiler. *) ];
}
@@ -573,7 +574,8 @@ module Completable = struct
(** Additional context for nested completion where needed. *)
type nestedContext =
| RecordField of {seenFields: string list}
- (** Completing for a record field, and we already saw the following fields... *)
+ (** Completing for a record field, and we already saw the following
+ fields... *)
| CameFromRecordField of string
(** We just came from this field (we leverage use this for better
completion names etc) *)
@@ -656,11 +658,13 @@ module Completable = struct
| CdecoratorPayload of decoratorPayload
| CextensionNode of string (** e.g. %todo *)
| CnamedArg of contextPath * string * string list
- (** e.g. (..., "label", ["l1", "l2"]) for ...(...~l1...~l2...~label...) *)
+ (** e.g. (..., "label", ["l1", "l2"]) for ...(...~l1...~l2...~label...)
+ *)
| Cnone (** e.g. don't complete inside strings *)
| Cpath of contextPath
| Cjsx of string list * string * string list
- (** E.g. (["M", "Comp"], "id", ["id1", "id2"]) for List.map (fun (field : field) ->
- let startOffset = !offset in
- let argText =
- Printf.sprintf "%s%s: %s" field.fname.txt
- (if field.optional then "?" else "")
- (Shared.typeToString
- (if field.optional then
- Utils.unwrapIfOption field.typ
- else field.typ))
- in
- let endOffset =
- startOffset + String.length argText
- in
- offset := endOffset + String.length ", ";
- (argText, field, (startOffset, endOffset)))))
+ (fields
+ |> List.map (fun (field : field) ->
+ let startOffset = !offset in
+ let argText =
+ Printf.sprintf "%s%s: %s" field.fname.txt
+ (if field.optional then "?" else "")
+ (Shared.typeToString
+ (if field.optional then
+ Utils.unwrapIfOption field.typ
+ else field.typ))
+ in
+ let endOffset =
+ startOffset + String.length argText
+ in
+ offset := endOffset + String.length ", ";
+ (argText, field, (startOffset, endOffset)))))
| Args [(typ, _)] ->
Some
(`SingleArg
- ( typ |> Shared.typeToString,
- docsForLabel ~file:full.file ~package:full.package
- ~supportsMarkdownLinks typ ))
+ ( typ |> Shared.typeToString,
+ docsForLabel ~file:full.file ~package:full.package
+ ~supportsMarkdownLinks typ ))
| Args args ->
let offset = ref 0 in
Some
(`TupleArg
- (args
- |> List.map (fun (typ, _) ->
- let startOffset = !offset in
- let argText = typ |> Shared.typeToString in
- let endOffset =
- startOffset + String.length argText
- in
- offset := endOffset + String.length ", ";
- ( argText,
- docsForLabel ~file:full.file ~package:full.package
- ~supportsMarkdownLinks typ,
- (startOffset, endOffset) ))))
+ (args
+ |> List.map (fun (typ, _) ->
+ let startOffset = !offset in
+ let argText = typ |> Shared.typeToString in
+ let endOffset =
+ startOffset + String.length argText
+ in
+ offset := endOffset + String.length ", ";
+ ( argText,
+ docsForLabel ~file:full.file ~package:full.package
+ ~supportsMarkdownLinks typ,
+ (startOffset, endOffset) ))))
in
let label =
constructor.name ^ "("
diff --git a/analysis/src/TypeUtils.ml b/analysis/src/TypeUtils.ml
index 5cb35d0be..656c7226f 100644
--- a/analysis/src/TypeUtils.ml
+++ b/analysis/src/TypeUtils.ml
@@ -1183,10 +1183,11 @@ let transformCompletionToPipeCompletion ?(synthetic = false) ~env ?posOfDot
| Some posOfDot -> Some (makeAdditionalTextEditsForRemovingDot posOfDot));
}
-(** This takes a type expr and the env that type expr was found in, and produces a globally unique
- id for that specific type. The globally unique id is the full path to the type as seen from the root
- of the project. Example: type x in module SomeModule in file SomeFile would get the globally
- unique id `SomeFile.SomeModule.x`.*)
+(** This takes a type expr and the env that type expr was found in, and produces
+ a globally unique id for that specific type. The globally unique id is the
+ full path to the type as seen from the root of the project. Example: type x
+ in module SomeModule in file SomeFile would get the globally unique id
+ `SomeFile.SomeModule.x`.*)
let rec findRootTypeId ~full ~env (t : Types.type_expr) =
let debug = false in
match t.desc with
diff --git a/analysis/src/Utils.ml b/analysis/src/Utils.ml
index d136c181a..d95bdc210 100644
--- a/analysis/src/Utils.ml
+++ b/analysis/src/Utils.ml
@@ -1,7 +1,5 @@
-(**
- * `startsWith(string, prefix)`
- * true if the string starts with the prefix
- *)
+(** * `startsWith(string, prefix)` * true if the string starts with the prefix
+*)
let startsWith s prefix =
if prefix = "" then true
else
diff --git a/analysis/vendor/ext/bsb_db.mli b/analysis/vendor/ext/bsb_db.mli
index 3b54f9f1c..3171574e5 100644
--- a/analysis/vendor/ext/bsb_db.mli
+++ b/analysis/vendor/ext/bsb_db.mli
@@ -22,12 +22,10 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Store a file called [.bsbuild] that can be communicated
- between [bsb.exe] and [bsb_helper.exe].
- [bsb.exe] stores such data which would be retrieved by
- [bsb_helper.exe]. It is currently used to combine with
- ocamldep to figure out which module->file it depends on
-*)
+(** Store a file called [.bsbuild] that can be communicated between [bsb.exe]
+ and [bsb_helper.exe]. [bsb.exe] stores such data which would be retrieved by
+ [bsb_helper.exe]. It is currently used to combine with ocamldep to figure
+ out which module->file it depends on *)
type case = bool
@@ -50,11 +48,9 @@ type 'a cat = {mutable lib: 'a; mutable dev: 'a}
type t = map cat
-(** store the meta data indexed by {!Bsb_dir_index}
+(** store the meta data indexed by {!Bsb_dir_index}
{[
0 --> lib group
1 --> dev 1 group
.
-
- ]}
-*)
+ ]} *)
diff --git a/analysis/vendor/ext/bsc_warnings.ml b/analysis/vendor/ext/bsc_warnings.ml
index 976914284..f49d4738d 100644
--- a/analysis/vendor/ext/bsc_warnings.ml
+++ b/analysis/vendor/ext/bsc_warnings.ml
@@ -22,23 +22,26 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(**
- See the meanings of the warning codes here: https://caml.inria.fr/pub/docs/manual-ocaml/comp.html#sec281
+(** See the meanings of the warning codes here:
+ https://caml.inria.fr/pub/docs/manual-ocaml/comp.html#sec281
- - 30 Two labels or constructors of the same name are defined in two mutually recursive types.
- - 40 Constructor or label name used out of scope.
+ - 30 Two labels or constructors of the same name are defined in two mutually
+ recursive types.
+ - 40 Constructor or label name used out of scope.
- - 6 Label omitted in function application.
- - 7 Method overridden.
- - 9 Missing fields in a record pattern. (*Not always desired, in some cases need [@@@warning "+9"] *)
- - 27 Innocuous unused variable: unused variable that is not bound with let nor as, and doesn’t start with an underscore (_) character.
- - 29 Unescaped end-of-line in a string constant (non-portable code).
- - 32 .. 39 Unused blabla
- - 44 Open statement shadows an already defined identifier.
- - 45 Open statement shadows an already defined label or constructor.
- - 48 Implicit elimination of optional arguments. https://caml.inria.fr/mantis/view.php?id=6352
- - 101 (bsb-specific) unsafe polymorphic comparison.
-*)
+ - 6 Label omitted in function application.
+ - 7 Method overridden.
+ - 9 Missing fields in a record pattern. (*Not always desired, in some cases
+ need [@@@warning "+9"] *)
+ - 27 Innocuous unused variable: unused variable that is not bound with let
+ nor as, and doesn’t start with an underscore (_) character.
+ - 29 Unescaped end-of-line in a string constant (non-portable code).
+ - 32 .. 39 Unused blabla
+ - 44 Open statement shadows an already defined identifier.
+ - 45 Open statement shadows an already defined label or constructor.
+ - 48 Implicit elimination of optional arguments.
+ https://caml.inria.fr/mantis/view.php?id=6352
+ - 101 (bsb-specific) unsafe polymorphic comparison. *)
(*
The purpose of default warning set is to make it strict while
diff --git a/analysis/vendor/ext/encoding.md b/analysis/vendor/ext/encoding.md
index 59228c029..55edef457 100644
--- a/analysis/vendor/ext/encoding.md
+++ b/analysis/vendor/ext/encoding.md
@@ -1,9 +1,3 @@
-
-
-
-
-
-
```c
CAMLprim value caml_ml_string_length(value s)
{
@@ -15,14 +9,15 @@ CAMLprim value caml_ml_string_length(value s)
```
Like all heap blocks, strings contain a header defining the size of
-the string in machine words. The actual block contents are:
+the string in machine words. The actual block contents are:
+
- the characters of the string
- padding bytes to align the block on a word boundary.
The padding is one of
- 00
- 00 01
- 00 00 02
- 00 00 00 03
+ 00
+ 00 01
+ 00 00 02
+ 00 00 00 03
on a 32-bit machine, and up to 00 00 .... 07 on a 64-bit machine.
Thus, the string is always zero-terminated, and its length can be
@@ -34,9 +29,9 @@ The null-termination comes handy when passing a string to C, but is
not relied upon to compute the length (in Caml), allowing the string
to contain nulls.
-so, suppose
+so, suppose
"" -> `8 - 7 - 1 `
"a" -> `8 - 6 - 1`
"0123456" -> `8 - 0 - 1`
-"01234567" -> `2 * 8 - 7 - 1`
\ No newline at end of file
+"01234567" -> `2 * 8 - 7 - 1`
diff --git a/analysis/vendor/ext/ext_array.ml b/analysis/vendor/ext/ext_array.ml
index 5e0bb32d7..8e9fe73b8 100644
--- a/analysis/vendor/ext/ext_array.ml
+++ b/analysis/vendor/ext/ext_array.ml
@@ -159,18 +159,16 @@ let of_list_map a f =
in
fill 5 tl
-(**
- {[
- # rfind_with_index [|1;2;3|] (=) 2;;
- - : int = 1
- # rfind_with_index [|1;2;3|] (=) 1;;
- - : int = 0
- # rfind_with_index [|1;2;3|] (=) 3;;
- - : int = 2
- # rfind_with_index [|1;2;3|] (=) 4;;
- - : int = -1
- ]}
-*)
+(** {[
+ # rfind_with_index [|1;2;3|] (=) 2;;
+ - : int = 1
+ # rfind_with_index [|1;2;3|] (=) 1;;
+ - : int = 0
+ # rfind_with_index [|1;2;3|] (=) 3;;
+ - : int = 2
+ # rfind_with_index [|1;2;3|] (=) 4;;
+ - : int = -1
+ ]} *)
let rfind_with_index arr cmp v =
let len = Array.length arr in
let rec aux i =
diff --git a/analysis/vendor/ext/ext_buffer.ml b/analysis/vendor/ext/ext_buffer.ml
index 5dbb8396d..5433c3fab 100644
--- a/analysis/vendor/ext/ext_buffer.ml
+++ b/analysis/vendor/ext/ext_buffer.ml
@@ -152,10 +152,8 @@ let not_equal (b : t) (s : string) =
let s_len = String.length s in
b_len <> s_len || not_equal_aux b.buffer s 0 s_len
-(**
- It could be one byte, two bytes, three bytes and four bytes
- TODO: inline for better performance
-*)
+(** It could be one byte, two bytes, three bytes and four bytes TODO: inline for
+ better performance *)
let add_int_1 (b : t) (x : int) =
let c = Char.unsafe_chr (x land 0xff) in
let pos = b.position in
diff --git a/analysis/vendor/ext/ext_buffer.mli b/analysis/vendor/ext/ext_buffer.mli
index a00115b95..293ab2da8 100644
--- a/analysis/vendor/ext/ext_buffer.mli
+++ b/analysis/vendor/ext/ext_buffer.mli
@@ -13,11 +13,9 @@
(** Extensible buffers.
- This module implements buffers that automatically expand
- as necessary. It provides accumulative concatenation of strings
- in quasi-linear time (instead of quadratic time when strings are
- concatenated pairwise).
-*)
+ This module implements buffers that automatically expand as necessary. It
+ provides accumulative concatenation of strings in quasi-linear time (instead
+ of quadratic time when strings are concatenated pairwise). *)
(* ReScript customization: customized for efficient digest *)
@@ -25,22 +23,20 @@ type t
(** The abstract type of buffers. *)
val create : int -> t
-(** [create n] returns a fresh buffer, initially empty.
- The [n] parameter is the initial size of the internal byte sequence
- that holds the buffer contents. That byte sequence is automatically
- reallocated when more than [n] characters are stored in the buffer,
- but shrinks back to [n] characters when [reset] is called.
- For best performance, [n] should be of the same order of magnitude
- as the number of characters that are expected to be stored in
- the buffer (for instance, 80 for a buffer that holds one output
- line). Nothing bad will happen if the buffer grows beyond that
- limit, however. In doubt, take [n = 16] for instance.
- If [n] is not between 1 and {!Sys.max_string_length}, it will
- be clipped to that interval. *)
+(** [create n] returns a fresh buffer, initially empty. The [n] parameter is the
+ initial size of the internal byte sequence that holds the buffer contents.
+ That byte sequence is automatically reallocated when more than [n]
+ characters are stored in the buffer, but shrinks back to [n] characters when
+ [reset] is called. For best performance, [n] should be of the same order of
+ magnitude as the number of characters that are expected to be stored in the
+ buffer (for instance, 80 for a buffer that holds one output line). Nothing
+ bad will happen if the buffer grows beyond that limit, however. In doubt,
+ take [n = 16] for instance. If [n] is not between 1 and
+ {!Sys.max_string_length}, it will be clipped to that interval. *)
val contents : t -> string
-(** Return a copy of the current contents of the buffer.
- The buffer itself is unchanged. *)
+(** Return a copy of the current contents of the buffer. The buffer itself is
+ unchanged. *)
val length : t -> int
(** Return the number of characters currently contained in the buffer. *)
@@ -61,27 +57,26 @@ val add_string : t -> string -> unit
@since 4.02 *)
(* val add_substring : t -> string -> int -> int -> unit *)
-(** [add_substring b s ofs len] takes [len] characters from offset
- [ofs] in string [s] and appends them at the end of the buffer [b]. *)
+(** [add_substring b s ofs len] takes [len] characters from offset [ofs] in
+ string [s] and appends them at the end of the buffer [b]. *)
(* val add_subbytes : t -> bytes -> int -> int -> unit *)
-(** [add_substring b s ofs len] takes [len] characters from offset
- [ofs] in byte sequence [s] and appends them at the end of the buffer [b].
+(** [add_substring b s ofs len] takes [len] characters from offset [ofs] in byte
+ sequence [s] and appends them at the end of the buffer [b].
@since 4.02 *)
(* val add_buffer : t -> t -> unit *)
-(** [add_buffer b1 b2] appends the current contents of buffer [b2]
- at the end of buffer [b1]. [b2] is not modified. *)
+(** [add_buffer b1 b2] appends the current contents of buffer [b2] at the end of
+ buffer [b1]. [b2] is not modified. *)
(* val add_channel : t -> in_channel -> int -> unit *)
-(** [add_channel b ic n] reads exactly [n] character from the
- input channel [ic] and stores them at the end of buffer [b].
- Raise [End_of_file] if the channel contains fewer than [n]
- characters. *)
+(** [add_channel b ic n] reads exactly [n] character from the input channel [ic]
+ and stores them at the end of buffer [b]. Raise [End_of_file] if the channel
+ contains fewer than [n] characters. *)
val output_buffer : out_channel -> t -> unit
-(** [output_buffer oc b] writes the current contents of buffer [b]
- on the output channel [oc]. *)
+(** [output_buffer oc b] writes the current contents of buffer [b] on the output
+ channel [oc]. *)
val digest : t -> Digest.t
diff --git a/analysis/vendor/ext/ext_char.ml b/analysis/vendor/ext/ext_char.ml
index 3754665a6..5adcbf442 100644
--- a/analysis/vendor/ext/ext_char.ml
+++ b/analysis/vendor/ext/ext_char.ml
@@ -22,9 +22,8 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** {!Char.escaped} is locale sensitive in 4.02.3, fixed in the trunk,
- backport it here
-*)
+(** {!Char.escaped} is locale sensitive in 4.02.3, fixed in the trunk, backport
+ it here *)
let valid_hex x =
match x with
diff --git a/analysis/vendor/ext/ext_file_extensions.ml b/analysis/vendor/ext/ext_file_extensions.ml
index 9004b5821..0ba847cd9 100644
--- a/analysis/vendor/ext/ext_file_extensions.ml
+++ b/analysis/vendor/ext/ext_file_extensions.ml
@@ -1,9 +1,7 @@
type valid_input = Res | Resi | Intf_ast | Impl_ast | Mlmap | Cmi | Unknown
-(** This is per-file based,
- when [ocamlc] [-c -o another_dir/xx.cmi]
- it will return (another_dir/xx)
-*)
+(** This is per-file based, when [ocamlc] [-c -o another_dir/xx.cmi] it will
+ return (another_dir/xx) *)
let classify_input ext =
match () with
diff --git a/analysis/vendor/ext/ext_filename.ml b/analysis/vendor/ext/ext_filename.ml
index cb3302bac..e840df419 100644
--- a/analysis/vendor/ext/ext_filename.ml
+++ b/analysis/vendor/ext/ext_filename.ml
@@ -80,13 +80,9 @@ let new_extension name (ext : string) =
in
search_dot name (String.length name - 1) ext
-(** TODO: improve efficiency
- given a path, calcuate its module name
- Note that `ocamlc.opt -c aa.xx.mli` gives `aa.xx.cmi`
- we can not strip all extensions, otherwise
- we can not tell the difference between "x.cpp.ml"
- and "x.ml"
-*)
+(** TODO: improve efficiency given a path, calcuate its module name Note that
+ `ocamlc.opt -c aa.xx.mli` gives `aa.xx.cmi` we can not strip all extensions,
+ otherwise we can not tell the difference between "x.cpp.ml" and "x.ml" *)
let module_name name =
let rec search_dot i name =
if i < 0 then Ext_string.capitalize_ascii name
diff --git a/analysis/vendor/ext/ext_filename.mli b/analysis/vendor/ext/ext_filename.mli
index e111ee200..7022d5f98 100644
--- a/analysis/vendor/ext/ext_filename.mli
+++ b/analysis/vendor/ext/ext_filename.mli
@@ -27,9 +27,8 @@
library but rather specific to JS Module name convention.
*)
-(** An extension module to calculate relative path follow node/npm style.
- TODO : this short name will have to change upon renaming the file.
-*)
+(** An extension module to calculate relative path follow node/npm style. TODO :
+ this short name will have to change upon renaming the file. *)
val is_dir_sep : char -> bool
diff --git a/analysis/vendor/ext/ext_format.mli b/analysis/vendor/ext/ext_format.mli
index f921110f4..1c0cd4d5c 100644
--- a/analysis/vendor/ext/ext_format.mli
+++ b/analysis/vendor/ext/ext_format.mli
@@ -22,8 +22,7 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Simplified wrapper module for the standard library [Format] module.
-*)
+(** Simplified wrapper module for the standard library [Format] module. *)
type t = private Format.formatter
diff --git a/analysis/vendor/ext/ext_ident.ml b/analysis/vendor/ext/ext_ident.ml
index 8a7910ca3..c4b27d256 100644
--- a/analysis/vendor/ext/ext_ident.ml
+++ b/analysis/vendor/ext/ext_ident.ml
@@ -150,10 +150,8 @@ let name_mangle name =
done;
Ext_buffer.contents buffer
-(**
- [convert name] if [name] is a js keyword or js global, add "$$"
- otherwise do the name mangling to make sure ocaml identifier it is
- a valid js identifier
+(** [convert name] if [name] is a js keyword or js global, add "$$" otherwise do
+ the name mangling to make sure ocaml identifier it is a valid js identifier
*)
let convert (name : string) =
let name = unwrap_uppercase_exotic name in
diff --git a/analysis/vendor/ext/ext_ident.mli b/analysis/vendor/ext/ext_ident.mli
index ff21fca3c..057221c50 100644
--- a/analysis/vendor/ext/ext_ident.mli
+++ b/analysis/vendor/ext/ext_ident.mli
@@ -48,9 +48,7 @@ val is_uppercase_exotic : string -> bool
val unwrap_uppercase_exotic : string -> string
val convert : string -> string
-(**
- Invariant: if name is not converted, the reference should be equal
-*)
+(** Invariant: if name is not converted, the reference should be equal *)
val is_js_or_global : Ident.t -> bool
diff --git a/analysis/vendor/ext/ext_int.mli b/analysis/vendor/ext/ext_int.mli
index acfc7af2e..705292f53 100644
--- a/analysis/vendor/ext/ext_int.mli
+++ b/analysis/vendor/ext/ext_int.mli
@@ -29,7 +29,5 @@ val compare : t -> t -> int
val equal : t -> t -> bool
val int32_unsigned_to_int : int32 -> int
-(**
- works on 64 bit platform only
- given input as an uint32 and convert it io int64
-*)
+(** works on 64 bit platform only given input as an uint32 and convert it io
+ int64 *)
diff --git a/analysis/vendor/ext/ext_list.ml b/analysis/vendor/ext/ext_list.ml
index 7066f301b..05b91bfd1 100644
--- a/analysis/vendor/ext/ext_list.ml
+++ b/analysis/vendor/ext/ext_list.ml
@@ -459,9 +459,7 @@ let split_at_last (x : 'a list) =
let rev, last = split_at_last_aux [] rest in
(a0 :: a1 :: a2 :: a3 :: a4 :: rev, last)
-(**
- can not do loop unroll due to state combination
-*)
+(** can not do loop unroll due to state combination *)
let filter_mapi xs f =
let rec aux i xs =
match xs with
@@ -487,10 +485,8 @@ let rec rev_map_append l1 l2 f =
| [] -> l2
| a :: l -> rev_map_append l (f a :: l2) f
-(** It is not worth loop unrolling,
- it is already tail-call, and we need to be careful
- about evaluation order when unroll
-*)
+(** It is not worth loop unrolling, it is already tail-call, and we need to be
+ careful about evaluation order when unroll *)
let rec flat_map_aux f acc append lx =
match lx with
| [] -> rev_append acc append
@@ -522,9 +518,9 @@ let rec length_ge l n =
| [] -> false
else true
-(**
- {[length xs = length ys + n ]}
-*)
+(** {[
+ length xs = length ys + n
+ ]} *)
let rec length_larger_than_n xs ys n =
match (xs, ys) with
| _, [] -> length_compare xs n = `Eq
diff --git a/analysis/vendor/ext/ext_list.mli b/analysis/vendor/ext/ext_list.mli
index 61a07c7b3..a03a6be53 100644
--- a/analysis/vendor/ext/ext_list.mli
+++ b/analysis/vendor/ext/ext_list.mli
@@ -43,17 +43,11 @@ val mapi_append : 'a list -> (int -> 'a -> 'b) -> 'b list -> 'b list
val map_snd : ('a * 'b) list -> ('b -> 'c) -> ('a * 'c) list
val map_last : 'a list -> (bool -> 'a -> 'b) -> 'b list
-(** [map_last f xs ]
- will pass [true] to [f] for the last element,
- [false] otherwise.
- For empty list, it returns empty
-*)
+(** [map_last f xs ] will pass [true] to [f] for the last element, [false]
+ otherwise. For empty list, it returns empty *)
val last : 'a list -> 'a
-(** [last l]
- return the last element
- raise if the list is empty
-*)
+(** [last l] return the last element raise if the list is empty *)
val append : 'a list -> 'a list -> 'a list
@@ -82,27 +76,20 @@ val exclude : 'a list -> ('a -> bool) -> 'a list
(** [exclude p l] is the opposite of [filter p l] *)
val exclude_with_val : 'a list -> ('a -> bool) -> 'a list option
-(** [excludes p l]
- return a tuple [excluded,newl]
- where [exluded] is true indicates that at least one
- element is removed,[newl] is the new list where all [p x] for [x] is false
-
-*)
+(** [excludes p l] return a tuple [excluded,newl] where [exluded] is true
+ indicates that at least one element is removed,[newl] is the new list where
+ all [p x] for [x] is false *)
val same_length : 'a list -> 'b list -> bool
val init : int -> (int -> 'a) -> 'a list
val split_at : 'a list -> int -> 'a list * 'a list
-(** [split_at n l]
- will split [l] into two lists [a,b], [a] will be of length [n],
- otherwise, it will raise
-*)
+(** [split_at n l] will split [l] into two lists [a,b], [a] will be of length
+ [n], otherwise, it will raise *)
val split_at_last : 'a list -> 'a list * 'a
-(** [split_at_last l]
- It is equivalent to [split_at (List.length l - 1) l ]
-*)
+(** [split_at_last l] It is equivalent to [split_at (List.length l - 1) l ] *)
val filter_mapi : 'a list -> ('a -> int -> 'b option) -> 'b list
@@ -112,61 +99,45 @@ val length_compare : 'a list -> int -> [`Gt | `Eq | `Lt]
val length_ge : 'a list -> int -> bool
-(**
-
- {[length xs = length ys + n ]}
- input n should be positive
- TODO: input checking
-*)
+(** {[
+ length xs = length ys + n
+ ]}
+ input n should be positive TODO: input checking *)
val length_larger_than_n : 'a list -> 'a list -> int -> bool
val rev_map_append : 'a list -> 'b list -> ('a -> 'b) -> 'b list
-(**
- [rev_map_append f l1 l2]
- [map f l1] and reverse it to append [l2]
- This weird semantics is due to it is the most efficient operation
- we can do
-*)
+(** [rev_map_append f l1 l2] [map f l1] and reverse it to append [l2] This weird
+ semantics is due to it is the most efficient operation we can do *)
val flat_map : 'a list -> ('a -> 'b list) -> 'b list
val flat_map_append : 'a list -> 'b list -> ('a -> 'b list) -> 'b list
val stable_group : 'a list -> ('a -> 'a -> bool) -> 'a list list
-(**
- [stable_group eq lst]
- Example:
- Input:
- {[
- stable_group (=) [1;2;3;4;3]
- ]}
+(** [stable_group eq lst] Example: Input:
+ {[
+ stable_group ( = ) [1; 2; 3; 4; 3]
+ ]}
Output:
- {[
- [[1];[2];[4];[3;3]]
- ]}
- TODO: this is O(n^2) behavior
- which could be improved later
-*)
+ {[
+ [[1]; [2]; [4]; [3; 3]]
+ ]}
+ TODO: this is O(n^2) behavior which could be improved later *)
val drop : 'a list -> int -> 'a list
-(** [drop n list]
- raise when [n] is negative
- raise when list's length is less than [n]
-*)
+(** [drop n list] raise when [n] is negative raise when list's length is less
+ than [n] *)
val find_first : 'a list -> ('a -> bool) -> 'a option
val find_first_not : 'a list -> ('a -> bool) -> 'a option
-(** [find_first_not p lst ]
- if all elements in [lst] pass, return [None]
- otherwise return the first element [e] as [Some e] which
- fails the predicate
+(** [find_first_not p lst ] if all elements in [lst] pass, return [None]
+ otherwise return the first element [e] as [Some e] which fails the predicate
*)
-(** [find_opt f l] returns [None] if all return [None],
- otherwise returns the first one.
-*)
+(** [find_opt f l] returns [None] if all return [None], otherwise returns the
+ first one. *)
val find_opt : 'a list -> ('a -> 'b option) -> 'b option
@@ -181,10 +152,8 @@ val for_all : 'a list -> ('a -> bool) -> bool
val for_all_snd : ('a * 'b) list -> ('b -> bool) -> bool
val for_all2_no_exn : 'a list -> 'b list -> ('a -> 'b -> bool) -> bool
-(** [for_all2_no_exn p xs ys]
- return [true] if all satisfied,
- [false] otherwise or length not equal
-*)
+(** [for_all2_no_exn p xs ys] return [true] if all satisfied, [false] otherwise
+ or length not equal *)
val split_map : 'a list -> ('a -> 'b * 'c) -> 'b list * 'c list
(** [f] is applied follow the list order *)
@@ -197,11 +166,8 @@ val sort_via_array : 'a list -> ('a -> 'a -> int) -> 'a list
val sort_via_arrayf : 'a list -> ('a -> 'a -> int) -> ('a -> 'b) -> 'b list
val assoc_by_string : (string * 'a) list -> string -> 'a option -> 'a
-(** [assoc_by_string default key lst]
- if [key] is found in the list return that val,
- other unbox the [default],
- otherwise [assert false ]
-*)
+(** [assoc_by_string default key lst] if [key] is found in the list return that
+ val, other unbox the [default], otherwise [assert false ] *)
val assoc_by_int : (int * 'a) list -> int -> 'a option -> 'a
diff --git a/analysis/vendor/ext/ext_marshal.mli b/analysis/vendor/ext/ext_marshal.mli
index 9980f8846..8d82ce45d 100644
--- a/analysis/vendor/ext/ext_marshal.mli
+++ b/analysis/vendor/ext/ext_marshal.mli
@@ -22,7 +22,6 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Extension to the standard library [Marshall] module
-*)
+(** Extension to the standard library [Marshall] module *)
val from_string : string -> 'a
diff --git a/analysis/vendor/ext/ext_modulename.ml b/analysis/vendor/ext/ext_modulename.ml
index d2d46930f..73398731a 100644
--- a/analysis/vendor/ext/ext_modulename.ml
+++ b/analysis/vendor/ext/ext_modulename.ml
@@ -57,14 +57,13 @@ and collect_next buf s off len =
| '.' | '-' -> collect_start buf s next len
| _ -> collect_next buf s next len
-(** This is for a js exeternal module, we can change it when printing
- for example
+(** This is for a js exeternal module, we can change it when printing for
+ example
{[
var React$1 = require('react');
React$1.render(..)
]}
- Given a name, if duplicated, they should have the same id
-*)
+ Given a name, if duplicated, they should have the same id *)
let js_id_name_of_hint_name module_name =
let i = Ext_string.rindex_neg module_name '/' in
if i >= 0 then (
diff --git a/analysis/vendor/ext/ext_modulename.mli b/analysis/vendor/ext/ext_modulename.mli
index d59902439..203adebe1 100644
--- a/analysis/vendor/ext/ext_modulename.mli
+++ b/analysis/vendor/ext/ext_modulename.mli
@@ -23,6 +23,4 @@
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val js_id_name_of_hint_name : string -> string
-(** Given an JS bundle name, generate a meaningful
- bounded module name
-*)
+(** Given an JS bundle name, generate a meaningful bounded module name *)
diff --git a/analysis/vendor/ext/ext_namespace.mli b/analysis/vendor/ext/ext_namespace.mli
index fa6401694..86dd91272 100644
--- a/analysis/vendor/ext/ext_namespace.mli
+++ b/analysis/vendor/ext/ext_namespace.mli
@@ -34,8 +34,7 @@ val try_split_module_name : string -> (string * string) option
val change_ext_ns_suffix : string -> string -> string
val js_name_of_modulename : string -> Ext_js_file_kind.case -> string -> string
-(** [js_name_of_modulename ~little A-Ns]
-*)
+(** [js_name_of_modulename ~little A-Ns] *)
(* TODO handle cases like
'@angular/core'
diff --git a/analysis/vendor/ext/ext_namespace_encode.mli b/analysis/vendor/ext/ext_namespace_encode.mli
index 7042a13c1..ab4684928 100644
--- a/analysis/vendor/ext/ext_namespace_encode.mli
+++ b/analysis/vendor/ext/ext_namespace_encode.mli
@@ -23,7 +23,5 @@
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
val make : ?ns:string -> string -> string
-(** [make ~ns:"Ns" "a" ]
- A typical example would return "a-Ns"
- Note the namespace comes from the output of [namespace_of_package_name]
-*)
+(** [make ~ns:"Ns" "a" ] A typical example would return "a-Ns" Note the
+ namespace comes from the output of [namespace_of_package_name] *)
diff --git a/analysis/vendor/ext/ext_obj.ml b/analysis/vendor/ext/ext_obj.ml
index 01ec1d8f5..c81e86f23 100644
--- a/analysis/vendor/ext/ext_obj.ml
+++ b/analysis/vendor/ext/ext_obj.ml
@@ -61,8 +61,8 @@ let rec dump r =
"(" ^ String.concat ", " (Ext_list.map fields dump) ^ ")"
| x when x = Obj.lazy_tag ->
(* Note that [lazy_tag .. forward_tag] are < no_scan_tag. Not
- * clear if very large constructed values could have the same
- * tag. XXX *)
+ * clear if very large constructed values could have the same
+ * tag. XXX *)
opaque "lazy"
| x when x = Obj.closure_tag -> opaque "closure"
| x when x = Obj.object_tag ->
@@ -73,7 +73,7 @@ let rec dump r =
| _ -> assert false
in
(* No information on decoding the class (first field). So just print
- * out the ID and the slots. *)
+ * out the ID and the slots. *)
"Object #" ^ dump id ^ " ("
^ String.concat ", " (Ext_list.map slots dump)
^ ")"
diff --git a/analysis/vendor/ext/ext_path.ml b/analysis/vendor/ext/ext_path.ml
index 660d69a41..d827d5f50 100644
--- a/analysis/vendor/ext/ext_path.ml
+++ b/analysis/vendor/ext/ext_path.ml
@@ -23,8 +23,10 @@
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
(* [@@@warning "-37"] *)
-type t = (* | File of string *)
- | Dir of string [@@unboxed]
+type t =
+ (* | File of string *)
+ | Dir of string
+[@@unboxed]
let simple_convert_node_path_to_os_path =
if Sys.unix then fun x -> x
@@ -52,7 +54,6 @@ let split_by_sep_per_os : string -> string list =
The other way
{[
-
"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib/ocaml_array.ml"
"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib/external/pervasives.cmj"
]}
@@ -62,8 +63,7 @@ let split_by_sep_per_os : string -> string list =
{[
/a/b
/c/d
- ]}
-*)
+ ]} *)
let node_relative_path ~from:(file_or_dir_2 : t) (file_or_dir_1 : t) =
let relevant_dir1 =
match file_or_dir_1 with
@@ -117,28 +117,23 @@ let ( // ) x y =
else if y = Filename.current_dir_name then x
else Filename.concat x y
-(**
- {[
- split_aux "//ghosg//ghsogh/";;
- - : string * string list = ("/", ["ghosg"; "ghsogh"])
- ]}
- Note that
- {[
- Filename.dirname "/a/" = "/"
- Filename.dirname "/a/b/" = Filename.dirname "/a/b" = "/a"
- ]}
- Special case:
- {[
- basename "//" = "/"
- basename "///" = "/"
- ]}
- {[
- basename "" = "."
- basename "" = "."
- dirname "" = "."
- dirname "" = "."
- ]}
-*)
+(** {[
+ split_aux "//ghosg//ghsogh/";;
+ - : string * string list = ("/", ["ghosg"; "ghsogh"])
+ ]}
+ Note that
+ {[
+ Filename.dirname "/a/"
+ = "/" Filename.dirname "/a/b/"
+ = Filename.dirname "/a/b" = "/a"
+ ]}
+ Special case:
+ {[
+ basename "//" = "/" basename "///" = "/"
+ ]}
+ {[
+ basename "" = "." basename "" = "." dirname "" = "." dirname "" = "."
+ ]} *)
let split_aux p =
let rec go p acc =
let dir = Filename.dirname p in
@@ -154,13 +149,11 @@ let split_aux p =
go p []
-(**
- TODO: optimization
- if [from] and [to] resolve to the same path, a zero-length string is returned
+(** TODO: optimization if [from] and [to] resolve to the same path, a
+ zero-length string is returned
- This function is useed in [es6-global] and
- [amdjs-global] format and tailored for `rollup`
-*)
+ This function is useed in [es6-global] and [amdjs-global] format and
+ tailored for `rollup` *)
let rel_normalized_absolute_path ~from to_ =
let root1, paths1 = split_aux from in
let root2, paths2 = split_aux to_ in
diff --git a/analysis/vendor/ext/ext_path.mli b/analysis/vendor/ext/ext_path.mli
index 6a9905688..6cf3789e7 100644
--- a/analysis/vendor/ext/ext_path.mli
+++ b/analysis/vendor/ext/ext_path.mli
@@ -25,53 +25,39 @@
type t
val simple_convert_node_path_to_os_path : string -> string
-(** Js_output is node style, which means
- separator is only '/'
+(** Js_output is node style, which means separator is only '/'
- if the path contains 'node_modules',
- [node_relative_path] will discard its prefix and
- just treat it as a library instead
-*)
+ if the path contains 'node_modules', [node_relative_path] will discard its
+ prefix and just treat it as a library instead *)
val combine : string -> string -> string
-(**
- [combine path1 path2]
- 1. add some simplifications when concatenating
- 2. when [path2] is absolute, return [path2]
-*)
+(** [combine path1 path2] 1. add some simplifications when concatenating 2. when
+ [path2] is absolute, return [path2] *)
-(**
- {[
- get_extension "a.txt" = ".txt"
- get_extension "a" = ""
- ]}
-*)
+(** {[
+ get_extension "a.txt" = ".txt" get_extension "a" = ""
+ ]} *)
val node_rebase_file : from:string -> to_:string -> string -> string
val rel_normalized_absolute_path : from:string -> string -> string
-(**
- TODO: could be highly optimized
- if [from] and [to] resolve to the same path, a zero-length string is returned
- Given that two paths are directory
+(** TODO: could be highly optimized if [from] and [to] resolve to the same path,
+ a zero-length string is returned Given that two paths are directory
- A typical use case is
- {[
- Filename.concat
- (rel_normalized_absolute_path cwd (Filename.dirname a))
- (Filename.basename a)
- ]}
-*)
+ A typical use case is
+ {[
+ Filename.concat
+ (rel_normalized_absolute_path cwd (Filename.dirname a))
+ (Filename.basename a)
+ ]} *)
val normalize_absolute_path : string -> string
val absolute_cwd_path : string -> string
val concat : string -> string -> string
-(** [concat dirname filename]
- The same as {!Filename.concat} except a tiny optimization
- for current directory simplification
-*)
+(** [concat dirname filename] The same as {!Filename.concat} except a tiny
+ optimization for current directory simplification *)
val check_suffix_case : string -> string -> bool
diff --git a/analysis/vendor/ext/ext_pervasives.mli b/analysis/vendor/ext/ext_pervasives.mli
index a4761900b..c84e615f3 100644
--- a/analysis/vendor/ext/ext_pervasives.mli
+++ b/analysis/vendor/ext/ext_pervasives.mli
@@ -22,8 +22,7 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Extension to standard library [Pervavives] module, safe to open
-*)
+(** Extension to standard library [Pervavives] module, safe to open *)
external reraise : exn -> 'a = "%reraise"
@@ -41,9 +40,7 @@ val max_int_option : int option -> int option -> int option
(* external id : 'a -> 'a = "%identity" *)
-(** Copied from {!Btype.hash_variant}:
- need sync up and add test case
-*)
+(** Copied from {!Btype.hash_variant}: need sync up and add test case *)
(* val hash_variant : string -> int *)
(* val todo : string -> 'a *)
diff --git a/analysis/vendor/ext/ext_position.mli b/analysis/vendor/ext/ext_position.mli
index 7d0a0563c..356c34b13 100644
--- a/analysis/vendor/ext/ext_position.mli
+++ b/analysis/vendor/ext/ext_position.mli
@@ -30,12 +30,9 @@ type t = Lexing.position = {
}
val offset : t -> t -> t
-(** [offset pos newpos]
- return a new position
- here [newpos] is zero based, the use case is that
- at position [pos], we get a string and Lexing from that string,
- therefore, we get a [newpos] and we need rebase it on top of
- [pos]
+(** [offset pos newpos] return a new position here [newpos] is zero based, the
+ use case is that at position [pos], we get a string and Lexing from that
+ string, therefore, we get a [newpos] and we need rebase it on top of [pos]
*)
val lexbuf_from_channel_with_fname : in_channel -> string -> Lexing.lexbuf
diff --git a/analysis/vendor/ext/ext_pp.mli b/analysis/vendor/ext/ext_pp.mli
index aaf217621..dfdfb5edb 100644
--- a/analysis/vendor/ext/ext_pp.mli
+++ b/analysis/vendor/ext/ext_pp.mli
@@ -25,15 +25,12 @@
type t
(** A simple pretty printer
- Advantage compared with [Format],
- [P.newline] does not screw the layout, have better control when do a newline (sicne JS has ASI)
- Easy to tweak
+ Advantage compared with [Format], [P.newline] does not screw the layout,
+ have better control when do a newline (sicne JS has ASI) Easy to tweak
- {ul
- {- be a little smarter}
- {- buffer the last line, so that we can do a smart newline, when it's really safe to do so}
- }
-*)
+ - be a little smarter
+ - buffer the last line, so that we can do a smart newline, when it's really
+ safe to do so *)
val indent_length : int
@@ -44,9 +41,7 @@ val space : t -> unit
val nspace : t -> int -> unit
val group : t -> int -> (unit -> 'a) -> 'a
-(** [group] will record current indentation
- and indent futher
-*)
+(** [group] will record current indentation and indent futher *)
val vgroup : t -> int -> (unit -> 'a) -> 'a
diff --git a/analysis/vendor/ext/ext_pp_scope.ml b/analysis/vendor/ext/ext_pp_scope.ml
index f074a411f..1d92897d2 100644
--- a/analysis/vendor/ext/ext_pp_scope.ml
+++ b/analysis/vendor/ext/ext_pp_scope.ml
@@ -48,39 +48,32 @@ let add_ident ~mangled:name (stamp : int) (cxt : t) : int * t =
(v, Map_string.add cxt name (Map_int.add imap stamp v))
| Some i -> (i, cxt))
-(**
- same as {!Js_dump.ident} except it generates a string instead of doing the printing
- For fast/debug mode, we can generate the name as
- [Printf.sprintf "%s$%d" name id.stamp] which is
- not relevant to the context
+(** same as {!Js_dump.ident} except it generates a string instead of doing the
+ printing For fast/debug mode, we can generate the name as
+ [Printf.sprintf "%s$%d" name id.stamp] which is not relevant to the context
- Attention:
- - $$Array.length, due to the fact that global module is
- always printed in the begining(via imports), so you get a gurantee,
- (global modules will not be printed as [List$1])
+ Attention:
+ - $$Array.length, due to the fact that global module is always printed in
+ the begining(via imports), so you get a gurantee, (global modules will not
+ be printed as [List$1])
- However, this means we loose the ability of dynamic loading, is it a big
- deal? we can fix this by a scanning first, since we already know which
- modules are global
+ However, this means we loose the ability of dynamic loading, is it a big
+ deal? we can fix this by a scanning first, since we already know which
+ modules are global
- check [test/test_global_print.ml] for regression
- - collision
- It is obvious that for the same identifier that they
- print the same name.
+ check [test/test_global_print.ml] for regression
+ - collision It is obvious that for the same identifier that they print the
+ same name.
- It also needs to be hold that for two different identifiers,
- they print different names:
- - This happens when they escape to the same name and
- share the same stamp
- So the key has to be mangled name + stamp
- otherwise, if two identifier happens to have same mangled name,
- if we use the original name as key, they can have same id (like 0).
- then it caused a collision
+ It also needs to be hold that for two different identifiers, they print
+ different names:
+ - This happens when they escape to the same name and share the same stamp So
+ the key has to be mangled name + stamp otherwise, if two identifier
+ happens to have same mangled name, if we use the original name as key,
+ they can have same id (like 0). then it caused a collision
- Here we can guarantee that if mangled name and stamp are not all the same
- they can not have a collision
-
-*)
+ Here we can guarantee that if mangled name and stamp are not all the same
+ they can not have a collision *)
let str_of_ident (cxt : t) (id : Ident.t) : string * t =
if Ext_ident.is_js id then (* reserved by compiler *)
(id.name, cxt)
diff --git a/analysis/vendor/ext/ext_pp_scope.mli b/analysis/vendor/ext/ext_pp_scope.mli
index 460a0354d..1ebc642ee 100644
--- a/analysis/vendor/ext/ext_pp_scope.mli
+++ b/analysis/vendor/ext/ext_pp_scope.mli
@@ -22,12 +22,9 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Scope type to improve identifier name printing
- Defines scope type [t], so that the pretty printer would
- print more beautiful code:
- print [identifer] instead of [identifier$1234]
- when it can
-*)
+(** Scope type to improve identifier name printing Defines scope type [t], so
+ that the pretty printer would print more beautiful code: print [identifer]
+ instead of [identifier$1234] when it can *)
type t
diff --git a/analysis/vendor/ext/ext_ref.mli b/analysis/vendor/ext/ext_ref.mli
index 94a47dcf0..4ca61c66f 100644
--- a/analysis/vendor/ext/ext_ref.mli
+++ b/analysis/vendor/ext/ext_ref.mli
@@ -22,9 +22,7 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** [non_exn_protect ref value f] assusme [f()]
- would not raise
-*)
+(** [non_exn_protect ref value f] assusme [f()] would not raise *)
val non_exn_protect : 'a ref -> 'a -> (unit -> 'b) -> 'b
@@ -33,8 +31,6 @@ val protect : 'a ref -> 'a -> (unit -> 'b) -> 'b
val protect2 : 'a ref -> 'b ref -> 'a -> 'b -> (unit -> 'c) -> 'c
val non_exn_protect2 : 'a ref -> 'b ref -> 'a -> 'b -> (unit -> 'c) -> 'c
-(** [non_exn_protect2 refa refb va vb f ]
- assume [f ()] would not raise
-*)
+(** [non_exn_protect2 refa refb va vb f ] assume [f ()] would not raise *)
val protect_list : ('a ref * 'a) list -> (unit -> 'b) -> 'b
diff --git a/analysis/vendor/ext/ext_scc.ml b/analysis/vendor/ext/ext_scc.ml
index d640d6871..3434f013e 100644
--- a/analysis/vendor/ext/ext_scc.ml
+++ b/analysis/vendor/ext/ext_scc.ml
@@ -24,16 +24,10 @@
type node = Vec_int.t
-(**
- [int] as data for this algorithm
- Pros:
- 1. Easy to eoncode algorithm (especially given that the capacity of node is known)
- 2. Algorithms itself are much more efficient
- 3. Node comparison semantics is clear
- 4. Easy to print output
- Cons:
- 1. post processing input data
-*)
+(** [int] as data for this algorithm Pros: 1. Easy to eoncode algorithm
+ (especially given that the capacity of node is known) 2. Algorithms itself
+ are much more efficient 3. Node comparison semantics is clear 4. Easy to
+ print output Cons: 1. post processing input data *)
let min_int (x : int) y = if x < y then x else y
let graph e =
diff --git a/analysis/vendor/ext/ext_scc.mli b/analysis/vendor/ext/ext_scc.mli
index b72bc8228..8118b583e 100644
--- a/analysis/vendor/ext/ext_scc.mli
+++ b/analysis/vendor/ext/ext_scc.mli
@@ -25,18 +25,16 @@
type node = Vec_int.t
val graph : Vec_int.t array -> Int_vec_vec.t
-(** Assume input is int array with offset from 0
- Typical input
+(** Assume input is int array with offset from 0 Typical input
{[
[|
- [ 1 ; 2 ]; // 0 -> 1, 0 -> 2
- [ 1 ]; // 0 -> 1
- [ 2 ] // 0 -> 2
+ [ 1 ; 2 ]; // 0 -> 1, 0 -> 2
+ [ 1 ]; // 0 -> 1
+ [ 2 ] // 0 -> 2
|]
]}
- Note that we can tell how many nodes by calculating
- [Array.length] of the input
-*)
+ Note that we can tell how many nodes by calculating [Array.length] of the
+ input *)
val graph_check : node array -> int * int list
(** Used for unit test *)
diff --git a/analysis/vendor/ext/ext_string_array.ml b/analysis/vendor/ext/ext_string_array.ml
index 94234d6d1..0e3fb4205 100644
--- a/analysis/vendor/ext/ext_string_array.ml
+++ b/analysis/vendor/ext/ext_string_array.ml
@@ -37,8 +37,10 @@ let rec binary_search_aux (arr : string array) (lo : int) (hi : int)
let lo_val = Array.unsafe_get arr lo in
if lo_val = key then Some lo else None
else binary_search_aux arr lo mid key
- else if (* a[lo] =< a[mid] < key <= a[hi] *)
- lo = mid then
+ else if
+ (* a[lo] =< a[mid] < key <= a[hi] *)
+ lo = mid
+ then
let hi_val = Array.unsafe_get arr hi in
if hi_val = key then Some hi else None
else binary_search_aux arr mid hi key
@@ -67,8 +69,10 @@ let rec binary_search_assoc (arr : (string * _) array) (lo : int) (hi : int)
let lo_val = Array.unsafe_get arr lo in
if fst lo_val = key then Some (snd lo_val) else None
else binary_search_assoc arr lo mid key
- else if (* a[lo] =< a[mid] < key <= a[hi] *)
- lo = mid then
+ else if
+ (* a[lo] =< a[mid] < key <= a[hi] *)
+ lo = mid
+ then
let hi_val = Array.unsafe_get arr hi in
if fst hi_val = key then Some (snd hi_val) else None
else binary_search_assoc arr mid hi key
diff --git a/analysis/vendor/ext/ext_topsort.ml b/analysis/vendor/ext/ext_topsort.ml
index 7cdef010c..c7888ad6c 100644
--- a/analysis/vendor/ext/ext_topsort.ml
+++ b/analysis/vendor/ext/ext_topsort.ml
@@ -32,13 +32,12 @@ end)
type t = Edge_vec.t
-(**
- This graph is different the graph used in [scc] graph, since
- we need dynamic shrink the graph, so for each vector the first node is it self ,
- it will also change the input.
+(** This graph is different the graph used in [scc] graph, since we need dynamic
+ shrink the graph, so for each vector the first node is it self , it will
+ also change the input.
- TODO: error handling (cycle handling) and defensive bad input (missing edges etc)
-*)
+ TODO: error handling (cycle handling) and defensive bad input (missing edges
+ etc) *)
let layered_dfs (g : t) =
let queue = Queue.create () in
diff --git a/analysis/vendor/ext/ext_topsort.mli b/analysis/vendor/ext/ext_topsort.mli
index 11d634cb9..e8f906849 100644
--- a/analysis/vendor/ext/ext_topsort.mli
+++ b/analysis/vendor/ext/ext_topsort.mli
@@ -29,5 +29,4 @@ module Edge_vec : Vec_gen.S with type elt = edges
type t = Edge_vec.t
val layered_dfs : t -> Set_int.t Queue.t
-(** the input will be modified ,
-*)
+(** the input will be modified , *)
diff --git a/analysis/vendor/ext/ext_utf8.ml b/analysis/vendor/ext/ext_utf8.ml
index 18336b804..04846c1e5 100644
--- a/analysis/vendor/ext/ext_utf8.ml
+++ b/analysis/vendor/ext/ext_utf8.ml
@@ -29,19 +29,30 @@ let classify chr =
let c = int_of_char chr in
(* Classify byte according to leftmost 0 bit *)
if c land 0b1000_0000 = 0 then Single c
- else if (* c 0b0____*)
- c land 0b0100_0000 = 0 then Cont (c land 0b0011_1111)
- else if (* c 0b10___*)
- c land 0b0010_0000 = 0 then Leading (1, c land 0b0001_1111)
- else if (* c 0b110__*)
- c land 0b0001_0000 = 0 then Leading (2, c land 0b0000_1111)
- else if (* c 0b1110_ *)
- c land 0b0000_1000 = 0 then Leading (3, c land 0b0000_0111)
- else if (* c 0b1111_0___*)
- c land 0b0000_0100 = 0 then Leading (4, c land 0b0000_0011)
- else if (* c 0b1111_10__*)
- c land 0b0000_0010 = 0 then Leading (5, c land 0b0000_0001)
- (* c 0b1111_110__ *)
+ else if
+ (* c 0b0____*)
+ c land 0b0100_0000 = 0
+ then Cont (c land 0b0011_1111)
+ else if
+ (* c 0b10___*)
+ c land 0b0010_0000 = 0
+ then Leading (1, c land 0b0001_1111)
+ else if
+ (* c 0b110__*)
+ c land 0b0001_0000 = 0
+ then Leading (2, c land 0b0000_1111)
+ else if
+ (* c 0b1110_ *)
+ c land 0b0000_1000 = 0
+ then Leading (3, c land 0b0000_0111)
+ else if
+ (* c 0b1111_0___*)
+ c land 0b0000_0100 = 0
+ then Leading (4, c land 0b0000_0011)
+ else if
+ (* c 0b1111_10__*)
+ c land 0b0000_0010 = 0
+ then Leading (5, c land 0b0000_0001) (* c 0b1111_110__ *)
else Invalid
exception Invalid_utf8 of string
diff --git a/analysis/vendor/ext/ext_utf8.mli b/analysis/vendor/ext/ext_utf8.mli
index e1beadec5..fc1037fe3 100644
--- a/analysis/vendor/ext/ext_utf8.mli
+++ b/analysis/vendor/ext/ext_utf8.mli
@@ -29,9 +29,7 @@ val classify : char -> byte
val follow : string -> int -> int -> int -> int * int
val next : string -> remaining:int -> int -> int
-(**
- return [-1] if failed
-*)
+(** return [-1] if failed *)
exception Invalid_utf8 of string
diff --git a/analysis/vendor/ext/ext_util.ml b/analysis/vendor/ext/ext_util.ml
index 58b8ad2a1..0bd6a14f6 100644
--- a/analysis/vendor/ext/ext_util.ml
+++ b/analysis/vendor/ext/ext_util.ml
@@ -22,12 +22,9 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(**
- {[
- (power_2_above 16 63 = 64)
- (power_2_above 16 76 = 128)
- ]}
-*)
+(** {[
+ (power_2_above 16 63 = 64) (power_2_above 16 76 = 128)
+ ]} *)
let rec power_2_above x n =
if x >= n then x
else if x * 2 > Sys.max_array_length then x
diff --git a/analysis/vendor/ext/hash_gen.ml b/analysis/vendor/ext/hash_gen.ml
index 589639aea..777ed8743 100644
--- a/analysis/vendor/ext/hash_gen.ml
+++ b/analysis/vendor/ext/hash_gen.ml
@@ -211,11 +211,9 @@ module type S = sig
val find_opt : 'a t -> key -> 'a option
val find_key_opt : 'a t -> key -> key option
- (** return the key found in the hashtbl.
- Use case: when you find the key existed in hashtbl,
- you want to use the one stored in the hashtbl.
- (they are semantically equivlanent, but may have other information different)
- *)
+ (** return the key found in the hashtbl. Use case: when you find the key
+ existed in hashtbl, you want to use the one stored in the hashtbl. (they
+ are semantically equivlanent, but may have other information different) *)
val find_default : 'a t -> key -> 'a -> 'a
diff --git a/analysis/vendor/ext/hash_set.mli b/analysis/vendor/ext/hash_set.mli
index bd8254337..101c685a2 100644
--- a/analysis/vendor/ext/hash_set.mli
+++ b/analysis/vendor/ext/hash_set.mli
@@ -22,11 +22,9 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Ideas are based on {!Hash},
- however, {!Hash.add} does not really optimize and has a bad semantics for {!Hash_set},
- This module fixes the semantics of [add].
- [remove] is not optimized since it is not used too much
-*)
+(** Ideas are based on {!Hash}, however, {!Hash.add} does not really optimize
+ and has a bad semantics for {!Hash_set}, This module fixes the semantics of
+ [add]. [remove] is not optimized since it is not used too much *)
(** A naive t implementation on top of [hashtbl], the value is [unit]*)
module Make (H : Hashtbl.HashedType) : Hash_set_gen.S with type key = H.t
diff --git a/analysis/vendor/ext/hash_set_ident_mask.mli b/analysis/vendor/ext/hash_set_ident_mask.mli
index 1c5bb8f48..6732f7f20 100644
--- a/analysis/vendor/ext/hash_set_ident_mask.mli
+++ b/analysis/vendor/ext/hash_set_ident_mask.mli
@@ -1,5 +1,5 @@
type ident = Ident.t
-(** Based on [hash_set] specialized for mask operations *)
+(** Based on [hash_set] specialized for mask operations *)
type t
@@ -11,13 +11,10 @@ val create : int -> t
val add_unmask : t -> ident -> unit
val mask_and_check_all_hit : t -> ident -> bool
-(** [check_mask h key] if [key] exists mask it otherwise nothing
- return true if all keys are masked otherwise false
-*)
+(** [check_mask h key] if [key] exists mask it otherwise nothing return true if
+ all keys are masked otherwise false *)
val iter_and_unmask : t -> (ident -> bool -> unit) -> unit
-(** [iter_and_unmask f h] iterating the collection and mask all idents,
- dont consul the collection in function [f]
- TODO: what happens if an exception raised in the callback,
- would the hashtbl still be in consistent state?
-*)
+(** [iter_and_unmask f h] iterating the collection and mask all idents, dont
+ consul the collection in function [f] TODO: what happens if an exception
+ raised in the callback, would the hashtbl still be in consistent state? *)
diff --git a/analysis/vendor/ext/identifiable.mli b/analysis/vendor/ext/identifiable.mli
index 9dd8defd9..396369432 100644
--- a/analysis/vendor/ext/identifiable.mli
+++ b/analysis/vendor/ext/identifiable.mli
@@ -52,13 +52,13 @@ module type Map = sig
'a t ->
'a t ->
'a t
- (** [disjoint_union m1 m2] contains all bindings from [m1] and
- [m2]. If some binding is present in both and the associated
- value is not equal, a Fatal_error is raised *)
+ (** [disjoint_union m1 m2] contains all bindings from [m1] and [m2]. If some
+ binding is present in both and the associated value is not equal, a
+ Fatal_error is raised *)
val union_right : 'a t -> 'a t -> 'a t
- (** [union_right m1 m2] contains all bindings from [m1] and [m2]. If
- some binding is present in both, the one from [m2] is taken *)
+ (** [union_right m1 m2] contains all bindings from [m1] and [m2]. If some
+ binding is present in both, the one from [m2] is taken *)
val union_left : 'a t -> 'a t -> 'a t
(** [union_left m1 m2 = union_right m2 m1] *)
diff --git a/analysis/vendor/ext/map_gen.ml b/analysis/vendor/ext/map_gen.ml
index 7c8af834d..4099f309a 100644
--- a/analysis/vendor/ext/map_gen.ml
+++ b/analysis/vendor/ext/map_gen.ml
@@ -304,105 +304,89 @@ module type S = sig
val to_sorted_array_with_f : 'a t -> (key -> 'a -> 'b) -> 'b array
val add : 'a t -> key -> 'a -> 'a t
- (** [add x y m]
- If [x] was already bound in [m], its previous binding disappears. *)
+ (** [add x y m] If [x] was already bound in [m], its previous binding
+ disappears. *)
val adjust : 'a t -> key -> ('a option -> 'a) -> 'a t
- (** [adjust acc k replace ] if not exist [add (replace None ], otherwise
- [add k v (replace (Some old))]
- *)
+ (** [adjust acc k replace ] if not exist [add (replace None ], otherwise
+ [add k v (replace (Some old))] *)
val singleton : key -> 'a -> 'a t
val remove : 'a t -> key -> 'a t
- (** [remove x m] returns a map containing the same bindings as
- [m], except for [x] which is unbound in the returned map. *)
+ (** [remove x m] returns a map containing the same bindings as [m], except for
+ [x] which is unbound in the returned map. *)
(* val merge:
'a t -> 'b t ->
(key -> 'a option -> 'b option -> 'c option) -> 'c t *)
- (** [merge f m1 m2] computes a map whose keys is a subset of keys of [m1]
- and of [m2]. The presence of each such binding, and the corresponding
- value, is determined with the function [f].
- @since 3.12.0
- *)
+ (** [merge f m1 m2] computes a map whose keys is a subset of keys of [m1] and
+ of [m2]. The presence of each such binding, and the corresponding value,
+ is determined with the function [f].
+ @since 3.12.0 *)
val disjoint_merge_exn : 'a t -> 'a t -> (key -> 'a -> 'a -> exn) -> 'a t
(* merge two maps, will raise if they have the same key *)
val iter : 'a t -> (key -> 'a -> unit) -> unit
- (** [iter f m] applies [f] to all bindings in map [m].
- The bindings are passed to [f] in increasing order. *)
+ (** [iter f m] applies [f] to all bindings in map [m]. The bindings are passed
+ to [f] in increasing order. *)
val fold : 'a t -> 'b -> (key -> 'a -> 'b -> 'b) -> 'b
- (** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)],
- where [k1 ... kN] are the keys of all bindings in [m]
- (in increasing order) *)
+ (** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)], where [k1 ... kN]
+ are the keys of all bindings in [m] (in increasing order) *)
val for_all : 'a t -> (key -> 'a -> bool) -> bool
- (** [for_all p m] checks if all the bindings of the map.
- order unspecified
- *)
+ (** [for_all p m] checks if all the bindings of the map. order unspecified *)
val exists : 'a t -> (key -> 'a -> bool) -> bool
- (** [exists p m] checks if at least one binding of the map
- satisfy the predicate [p].
- order unspecified
- *)
+ (** [exists p m] checks if at least one binding of the map satisfy the
+ predicate [p]. order unspecified *)
(* val filter: 'a t -> (key -> 'a -> bool) -> 'a t *)
- (** [filter p m] returns the map with all the bindings in [m]
- that satisfy predicate [p].
- order unspecified
- *)
+ (** [filter p m] returns the map with all the bindings in [m] that satisfy
+ predicate [p]. order unspecified *)
(* val partition: 'a t -> (key -> 'a -> bool) -> 'a t * 'a t *)
- (** [partition p m] returns a pair of maps [(m1, m2)], where
- [m1] contains all the bindings of [s] that satisfy the
- predicate [p], and [m2] is the map with all the bindings of
- [s] that do not satisfy [p].
- *)
+ (** [partition p m] returns a pair of maps [(m1, m2)], where [m1] contains all
+ the bindings of [s] that satisfy the predicate [p], and [m2] is the map
+ with all the bindings of [s] that do not satisfy [p]. *)
val cardinal : 'a t -> int
(** Return the number of bindings of a map. *)
val bindings : 'a t -> (key * 'a) list
- (** Return the list of all bindings of the given map.
- The returned list is sorted in increasing order with respect
- to the ordering *)
+ (** Return the list of all bindings of the given map. The returned list is
+ sorted in increasing order with respect to the ordering *)
val keys : 'a t -> key list
(* Increasing order *)
(* val split: 'a t -> key -> 'a t * 'a option * 'a t *)
- (** [split x m] returns a triple [(l, data, r)], where
- [l] is the map with all the bindings of [m] whose key
- is strictly less than [x];
- [r] is the map with all the bindings of [m] whose key
- is strictly greater than [x];
- [data] is [None] if [m] contains no binding for [x],
- or [Some v] if [m] binds [v] to [x].
- @since 3.12.0
- *)
+ (** [split x m] returns a triple [(l, data, r)], where [l] is the map with all
+ the bindings of [m] whose key is strictly less than [x]; [r] is the map
+ with all the bindings of [m] whose key is strictly greater than [x];
+ [data] is [None] if [m] contains no binding for [x], or [Some v] if [m]
+ binds [v] to [x].
+ @since 3.12.0 *)
val find_exn : 'a t -> key -> 'a
- (** [find x m] returns the current binding of [x] in [m],
- or raises [Not_found] if no such binding exists. *)
+ (** [find x m] returns the current binding of [x] in [m], or raises
+ [Not_found] if no such binding exists. *)
val find_opt : 'a t -> key -> 'a option
val find_default : 'a t -> key -> 'a -> 'a
val map : 'a t -> ('a -> 'b) -> 'b t
- (** [map f m] returns a map with same domain as [m], where the
- associated value [a] of all bindings of [m] has been
- replaced by the result of the application of [f] to [a].
- The bindings are passed to [f] in increasing order
- with respect to the ordering over the type of the keys. *)
+ (** [map f m] returns a map with same domain as [m], where the associated
+ value [a] of all bindings of [m] has been replaced by the result of the
+ application of [f] to [a]. The bindings are passed to [f] in increasing
+ order with respect to the ordering over the type of the keys. *)
val mapi : 'a t -> (key -> 'a -> 'b) -> 'b t
- (** Same as {!Map.S.map}, but the function receives as arguments both the
- key and the associated value for each binding of the map. *)
+ (** Same as {!Map.S.map}, but the function receives as arguments both the key
+ and the associated value for each binding of the map. *)
val of_list : (key * 'a) list -> 'a t
diff --git a/analysis/vendor/ext/misc.mli b/analysis/vendor/ext/misc.mli
index 54227b1a9..43463d02c 100644
--- a/analysis/vendor/ext/misc.mli
+++ b/analysis/vendor/ext/misc.mli
@@ -58,32 +58,32 @@ module Stdlib : sig
type 'a t = 'a list
val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int
- (** The lexicographic order supported by the provided order.
- There is no constraint on the relative lengths of the lists. *)
+ (** The lexicographic order supported by the provided order. There is no
+ constraint on the relative lengths of the lists. *)
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
- (** Returns [true] iff the given lists have the same length and content
- with respect to the given equality function. *)
+ (** Returns [true] iff the given lists have the same length and content with
+ respect to the given equality function. *)
val filter_map : ('a -> 'b option) -> 'a t -> 'b t
- (** [filter_map f l] applies [f] to every element of [l], filters
- out the [None] elements and returns the list of the arguments of
- the [Some] elements. *)
+ (** [filter_map f l] applies [f] to every element of [l], filters out the
+ [None] elements and returns the list of the arguments of the [Some]
+ elements. *)
val some_if_all_elements_are_some : 'a option t -> 'a t option
- (** If all elements of the given list are [Some _] then [Some xs]
- is returned with the [xs] being the contents of those [Some]s, with
- order preserved. Otherwise return [None]. *)
+ (** If all elements of the given list are [Some _] then [Some xs] is
+ returned with the [xs] being the contents of those [Some]s, with order
+ preserved. Otherwise return [None]. *)
val map2_prefix : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t * 'b t
- (** [let r1, r2 = map2_prefix f l1 l2]
- If [l1] is of length n and [l2 = h2 @ t2] with h2 of length n,
- r1 is [List.map2 f l1 h1] and r2 is t2. *)
+ (** [let r1, r2 = map2_prefix f l1 l2] If [l1] is of length n and
+ [l2 = h2 @ t2] with h2 of length n, r1 is [List.map2 f l1 h1] and r2 is
+ t2. *)
val split_at : int -> 'a t -> 'a t * 'a t
- (** [split_at n l] returns the pair [before, after] where [before] is
- the [n] first elements of [l] and [after] the remaining ones.
- If [l] has less than [n] elements, raises Invalid_argument. *)
+ (** [split_at n l] returns the pair [before, after] where [before] is the
+ [n] first elements of [l] and [after] the remaining ones. If [l] has
+ less than [n] elements, raises Invalid_argument. *)
end
module Option : sig
@@ -229,46 +229,42 @@ module LongString : sig
end
val edit_distance : string -> string -> int -> int option
-(** [edit_distance a b cutoff] computes the edit distance between
- strings [a] and [b]. To help efficiency, it uses a cutoff: if the
- distance [d] is smaller than [cutoff], it returns [Some d], else
- [None].
-
- The distance algorithm currently used is Damerau-Levenshtein: it
- computes the number of insertion, deletion, substitution of
- letters, or swapping of adjacent letters to go from one word to the
- other. The particular algorithm may change in the future.
-*)
+(** [edit_distance a b cutoff] computes the edit distance between strings [a]
+ and [b]. To help efficiency, it uses a cutoff: if the distance [d] is
+ smaller than [cutoff], it returns [Some d], else [None].
+
+ The distance algorithm currently used is Damerau-Levenshtein: it computes
+ the number of insertion, deletion, substitution of letters, or swapping of
+ adjacent letters to go from one word to the other. The particular algorithm
+ may change in the future. *)
val spellcheck : string list -> string -> string list
-(** [spellcheck env name] takes a list of names [env] that exist in
- the current environment and an erroneous [name], and returns a
- list of suggestions taken from [env], that are close enough to
- [name] that it may be a typo for one of them. *)
+(** [spellcheck env name] takes a list of names [env] that exist in the current
+ environment and an erroneous [name], and returns a list of suggestions taken
+ from [env], that are close enough to [name] that it may be a typo for one of
+ them. *)
val did_you_mean : Format.formatter -> (unit -> string list) -> unit
-(** [did_you_mean ppf get_choices] hints that the user may have meant
- one of the option returned by calling [get_choices]. It does nothing
- if the returned list is empty.
+(** [did_you_mean ppf get_choices] hints that the user may have meant one of the
+ option returned by calling [get_choices]. It does nothing if the returned
+ list is empty.
The [unit -> ...] thunking is meant to delay any potentially-slow
- computation (typically computing edit-distance with many things
- from the current environment) to when the hint message is to be
- printed. You should print an understandable error message before
- calling [did_you_mean], so that users get a clear notification of
- the failure even if producing the hint is slow.
-*)
+ computation (typically computing edit-distance with many things from the
+ current environment) to when the hint message is to be printed. You should
+ print an understandable error message before calling [did_you_mean], so that
+ users get a clear notification of the failure even if producing the hint is
+ slow. *)
val cut_at : string -> char -> string * string
-(** [String.cut_at s c] returns a pair containing the sub-string before
- the first occurrence of [c] in [s], and the sub-string after the
- first occurrence of [c] in [s].
- [let (before, after) = String.cut_at s c in
- before ^ String.make 1 c ^ after] is the identity if [s] contains [c].
-
- Raise [Not_found] if the character does not appear in the string
- @since 4.01
-*)
+(** [String.cut_at s c] returns a pair containing the sub-string before the
+ first occurrence of [c] in [s], and the sub-string after the first
+ occurrence of [c] in [s].
+ [let (before, after) = String.cut_at s c in before ^ String.make 1 c ^
+ after] is the identity if [s] contains [c].
+
+ Raise [Not_found] if the character does not appear in the string
+ @since 4.01 *)
module StringSet : Set.S with type elt = string
module StringMap : Map.S with type key = string
@@ -308,32 +304,30 @@ end
val normalise_eol : string -> string
(** [normalise_eol s] returns a fresh copy of [s] with any '\r' characters
- removed. Intended for pre-processing text which will subsequently be printed
- on a channel which performs EOL transformations (i.e. Windows) *)
+ removed. Intended for pre-processing text which will subsequently be printed
+ on a channel which performs EOL transformations (i.e. Windows) *)
val delete_eol_spaces : string -> string
-(** [delete_eol_spaces s] returns a fresh copy of [s] with any end of
- line spaces removed. Intended to normalize the output of the
- toplevel for tests. *)
+(** [delete_eol_spaces s] returns a fresh copy of [s] with any end of line
+ spaces removed. Intended to normalize the output of the toplevel for tests.
+*)
(** {1 Hook machinery}
- Hooks machinery:
- [add_hook name f] will register a function that will be called on the
- argument of a later call to [apply_hooks]. Hooks are applied in the
- lexicographical order of their names.
-*)
+ Hooks machinery: [add_hook name f] will register a function that will be
+ called on the argument of a later call to [apply_hooks]. Hooks are applied
+ in the lexicographical order of their names. *)
type hook_info = {sourcefile: string}
exception
HookExnWrapper of {error: exn; hook_name: string; hook_info: hook_info}
-(** An exception raised by a hook will be wrapped into a
- [HookExnWrapper] constructor by the hook machinery. *)
+(** An exception raised by a hook will be wrapped into a [HookExnWrapper]
+ constructor by the hook machinery. *)
val raise_direct_hook_exn : exn -> 'a
(** A hook can use [raise_unwrapped_hook_exn] to raise an exception that will
- not be wrapped into a {!HookExnWrapper}. *)
+ not be wrapped into a {!HookExnWrapper}. *)
module type HookSig = sig
type t
diff --git a/analysis/vendor/ext/ordered_hash_map_gen.ml b/analysis/vendor/ext/ordered_hash_map_gen.ml
index b85ce6e96..f933816a3 100644
--- a/analysis/vendor/ext/ordered_hash_map_gen.ml
+++ b/analysis/vendor/ext/ordered_hash_map_gen.ml
@@ -22,9 +22,8 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** Hash based datastrucure which does not support [remove],
- so that the adding order is strict and continous
-*)
+(** Hash based datastrucure which does not support [remove], so that the adding
+ order is strict and continous *)
module type S = sig
type key
diff --git a/analysis/vendor/ext/ordered_hash_map_local_ident.mli b/analysis/vendor/ext/ordered_hash_map_local_ident.mli
index 66af1d078..79e131f1e 100644
--- a/analysis/vendor/ext/ordered_hash_map_local_ident.mli
+++ b/analysis/vendor/ext/ordered_hash_map_local_ident.mli
@@ -23,6 +23,5 @@
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
include Ordered_hash_map_gen.S with type key = Ident.t
-(** Hash algorithm only hash
- stamp, this makes sense when all identifiers are local (no global)
-*)
+(** Hash algorithm only hash stamp, this makes sense when all identifiers are
+ local (no global) *)
diff --git a/analysis/vendor/ext/set_gen.ml b/analysis/vendor/ext/set_gen.ml
index 0fd5e66f4..d1b31cd03 100644
--- a/analysis/vendor/ext/set_gen.ml
+++ b/analysis/vendor/ext/set_gen.ml
@@ -211,15 +211,10 @@ let rec add_max v = function
| Leaf x -> unsafe_two_elements x v
| Node n -> bal n.l n.v (add_max v n.r)
-(**
- Invariants:
- 1. l < v < r
- 2. l and r are balanced
+(** Invariants: 1. l < v < r 2. l and r are balanced
- Proof by induction
- The height of output will be ~~ (max (height l) (height r) + 2)
- Also use the lemma from [bal]
-*)
+ Proof by induction The height of output will be ~~ (max (height l) (height
+ r) + 2) Also use the lemma from [bal] *)
let rec internal_join l v r =
match (l, r) with
| Empty, _ -> add_min v r
diff --git a/analysis/vendor/ext/vec_gen.ml b/analysis/vendor/ext/vec_gen.ml
index ca2a4eeb2..06f65a217 100644
--- a/analysis/vendor/ext/vec_gen.ml
+++ b/analysis/vendor/ext/vec_gen.ml
@@ -51,9 +51,7 @@ module type S = sig
val of_sub_array : elt array -> int -> int -> t
val unsafe_internal_array : t -> elt array
- (** Exposed for some APIs which only take array as input,
- when exposed
- *)
+ (** Exposed for some APIs which only take array as input, when exposed *)
val reserve : t -> int -> unit
diff --git a/analysis/vendor/ext/warnings.ml b/analysis/vendor/ext/warnings.ml
index de488a907..3b2f61faf 100644
--- a/analysis/vendor/ext/warnings.ml
+++ b/analysis/vendor/ext/warnings.ml
@@ -465,7 +465,8 @@ let message = function
in
Printf.sprintf
"Ambiguous or-pattern variables under guard;\n\
- %s may match different arguments. (See manual section 8.5)" msg
+ %s may match different arguments. (See manual section 8.5)"
+ msg
| Unused_module s -> "unused module " ^ s ^ "."
| Constraint_on_gadt ->
"Type constraints do not apply to GADT cases of variant types."
diff --git a/analysis/vendor/ext/warnings.mli b/analysis/vendor/ext/warnings.mli
index 4b96f0f42..2a4a447c3 100644
--- a/analysis/vendor/ext/warnings.mli
+++ b/analysis/vendor/ext/warnings.mli
@@ -114,8 +114,8 @@ val backup : unit -> state
val restore : state -> unit
val mk_lazy : (unit -> 'a) -> 'a Lazy.t
-(** Like [Lazy.of_fun], but the function is applied with
- the warning settings at the time [mk_lazy] is called. *)
+(** Like [Lazy.of_fun], but the function is applied with the warning settings at
+ the time [mk_lazy] is called. *)
val has_warnings : bool ref
diff --git a/analysis/vendor/json/Json.ml b/analysis/vendor/json/Json.ml
index 407afb152..9af177b8c 100644
--- a/analysis/vendor/json/Json.ml
+++ b/analysis/vendor/json/Json.ml
@@ -46,82 +46,47 @@ let string_of_number f =
if s.[String.length s - 1] = '.' then String.sub s 0 (String.length s - 1)
else s
-(**
- * This module is provided for easier working with optional values.
- *)
+(** * This module is provided for easier working with optional values. *)
module Infix = struct
- (** The "force unwrap" operator
- *
- * If you're sure there's a value, you can force it.
- * ```
- * open Json.Infix;
- * let x: int = Some(10) |! "Expected this to be present";
- * Js.log(x);
- * ```
- *
- * But you gotta be sure, otherwise it will throw.
- * ```reason;raises
- * open Json.Infix;
- * let x: int = None |! "This will throw";
- * ```
- *)
+ (** The "force unwrap" operator * * If you're sure there's a value, you can
+ force it. * ``` * open Json.Infix; * let x: int = Some(10) |! "Expected
+ this to be present"; * Js.log(x); * ``` * * But you gotta be sure,
+ otherwise it will throw. * ```reason;raises * open Json.Infix; * let x:
+ int = None |! "This will throw"; * ``` *)
let ( |! ) o d =
match o with
| None -> failwith d
| Some v -> v
- (** The "upwrap with default" operator
- * ```
- * open Json.Infix;
- * let x: int = Some(10) |? 4;
- * let y: int = None |? 5;
- * Js.log2(x, y);
- * ```
- *)
+ (** The "upwrap with default" operator * ``` * open Json.Infix; * let x: int =
+ Some(10) |? 4; * let y: int = None |? 5; * Js.log2(x, y); * ``` *)
let ( |? ) o d =
match o with
| None -> d
| Some v -> v
- (** The "transform contents into new optional" operator
- * ```
- * open Json.Infix;
- * let maybeInc = x => x > 5 ? Some(x + 1) : None;
- * let x: option(int) = Some(14) |?> maybeInc;
- * let y: option(int) = None |?> maybeInc;
- * ```
- *)
+ (** The "transform contents into new optional" operator * ``` * open
+ Json.Infix; * let maybeInc = x => x > 5 ? Some(x + 1) : None; * let x:
+ option(int) = Some(14) |?> maybeInc; * let y: option(int) = None |?>
+ maybeInc; * ``` *)
let ( |?> ) o fn =
match o with
| None -> None
| Some v -> fn v
- (** The "transform contents into new value & then re-wrap" operator
- * ```
- * open Json.Infix;
- * let inc = x => x + 1;
- * let x: option(int) = Some(7) |?>> inc;
- * let y: option(int) = None |?>> inc;
- * Js.log2(x, y);
- * ```
- *)
+ (** The "transform contents into new value & then re-wrap" operator * ``` *
+ open Json.Infix; * let inc = x => x + 1; * let x: option(int) = Some(7)
+ |?>> inc; * let y: option(int) = None |?>> inc; * Js.log2(x, y); * ``` *)
let ( |?>> ) o fn =
match o with
| None -> None
| Some v -> Some (fn v)
- (** "handle the value if present, otherwise here's the default"
- *
- * It's called fold because that's what people call it :?. It's the same as "transform contents to new value" + "unwrap with default".
- *
- * ```
- * open Json.Infix;
- * let inc = x => x + 1;
- * let x: int = fold(Some(4), 10, inc);
- * let y: int = fold(None, 2, inc);
- * Js.log2(x, y);
- * ```
- *)
+ (** "handle the value if present, otherwise here's the default" * * It's
+ called fold because that's what people call it :?. It's the same as
+ "transform contents to new value" + "unwrap with default". * * ``` * open
+ Json.Infix; * let inc = x => x + 1; * let x: int = fold(Some(4), 10, inc);
+ * let y: int = fold(None, 2, inc); * Js.log2(x, y); * ``` *)
let fold o d f =
match o with
| None -> d
diff --git a/analysis/vendor/ml/ast_helper.mli b/analysis/vendor/ml/ast_helper.mli
index b9e2bf349..4c674140f 100644
--- a/analysis/vendor/ml/ast_helper.mli
+++ b/analysis/vendor/ml/ast_helper.mli
@@ -30,8 +30,8 @@ val default_loc : loc ref
(** Default value for all optional location arguments. *)
val with_default_loc : loc -> (unit -> 'a) -> 'a
-(** Set the [default_loc] within the scope of the execution
- of the provided function. *)
+(** Set the [default_loc] within the scope of the execution of the provided
+ function. *)
(** {1 Constants} *)
@@ -78,13 +78,12 @@ module Typ : sig
val force_poly : core_type -> core_type
val varify_constructors : str list -> core_type -> core_type
- (** [varify_constructors newtypes te] is type expression [te], of which
- any of nullary type constructor [tc] is replaced by type variable of
- the same name, if [tc]'s name appears in [newtypes].
- Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te]
- appears in [newtypes].
- @since 4.05
- *)
+ (** [varify_constructors newtypes te] is type expression [te], of which any of
+ nullary type constructor [tc] is replaced by type variable of the same
+ name, if [tc]'s name appears in [newtypes]. Raise
+ [Syntaxerr.Variable_in_scope] if any type variable inside [te] appears in
+ [newtypes].
+ @since 4.05 *)
end
(** Patterns *)
diff --git a/analysis/vendor/ml/ast_iterator.ml b/analysis/vendor/ml/ast_iterator.ml
index 8234560ce..fda04b4b2 100644
--- a/analysis/vendor/ml/ast_iterator.ml
+++ b/analysis/vendor/ml/ast_iterator.ml
@@ -63,10 +63,9 @@ type iterator = {
value_description: iterator -> value_description -> unit;
with_constraint: iterator -> with_constraint -> unit;
}
-(** A [iterator] record implements one "method" per syntactic category,
- using an open recursion style: each method takes as its first
- argument the iterator to be applied to children in the syntax
- tree. *)
+(** A [iterator] record implements one "method" per syntactic category, using an
+ open recursion style: each method takes as its first argument the iterator
+ to be applied to children in the syntax tree. *)
let iter_fst f (x, _) = f x
let iter_snd f (_, y) = f y
@@ -547,7 +546,9 @@ let default_iterator =
type_extension = T.iter_type_extension;
extension_constructor = T.iter_extension_constructor;
value_description =
- (fun this {pval_name; pval_type; pval_prim = _; pval_loc; pval_attributes} ->
+ (fun this
+ {pval_name; pval_type; pval_prim = _; pval_loc; pval_attributes}
+ ->
iter_loc this pval_name;
this.typ this pval_type;
this.attributes this pval_attributes;
@@ -601,7 +602,9 @@ let default_iterator =
this.location this pcd_loc;
this.attributes this pcd_attributes);
label_declaration =
- (fun this {pld_name; pld_type; pld_loc; pld_mutable = _; pld_attributes} ->
+ (fun this
+ {pld_name; pld_type; pld_loc; pld_mutable = _; pld_attributes}
+ ->
iter_loc this pld_name;
this.typ this pld_type;
this.location this pld_loc;
diff --git a/analysis/vendor/ml/ast_iterator.mli b/analysis/vendor/ml/ast_iterator.mli
index 9730f0650..c13458a2f 100644
--- a/analysis/vendor/ml/ast_iterator.mli
+++ b/analysis/vendor/ml/ast_iterator.mli
@@ -13,7 +13,7 @@
(* *)
(**************************************************************************)
-(** {!iterator} allows to implement AST inspection using open recursion. A
+(** {!iterator} allows to implement AST inspection using open recursion. A
typical mapper would be based on {!default_iterator}, a trivial iterator,
and will fall back on it for handling the syntax it does not modify. *)
@@ -61,10 +61,9 @@ type iterator = {
value_description: iterator -> value_description -> unit;
with_constraint: iterator -> with_constraint -> unit;
}
-(** A [iterator] record implements one "method" per syntactic category,
- using an open recursion style: each method takes as its first
- argument the iterator to be applied to children in the syntax
- tree. *)
+(** A [iterator] record implements one "method" per syntactic category, using an
+ open recursion style: each method takes as its first argument the iterator
+ to be applied to children in the syntax tree. *)
val default_iterator : iterator
(** A default iterator, which implements a "do not do anything" mapping. *)
diff --git a/analysis/vendor/ml/ast_mapper.mli b/analysis/vendor/ml/ast_mapper.mli
index 53bce1610..d4209efef 100644
--- a/analysis/vendor/ml/ast_mapper.mli
+++ b/analysis/vendor/ml/ast_mapper.mli
@@ -15,38 +15,38 @@
(** The interface of a -ppx rewriter
- A -ppx rewriter is a program that accepts a serialized abstract syntax
- tree and outputs another, possibly modified, abstract syntax tree.
- This module encapsulates the interface between the compiler and
- the -ppx rewriters, handling such details as the serialization format,
- forwarding of command-line flags, and storing state.
-
- {!mapper} allows to implement AST rewriting using open recursion.
- A typical mapper would be based on {!default_mapper}, a deep
- identity mapper, and will fall back on it for handling the syntax it
- does not modify. For example:
-
- {[
-open Asttypes
-open Parsetree
-open Ast_mapper
-
-let test_mapper argv =
- { default_mapper with
- expr = fun mapper expr ->
- match expr with
- | { pexp_desc = Pexp_extension ({ txt = "test" }, PStr [])} ->
- Ast_helper.Exp.constant (Const_int 42)
- | other -> default_mapper.expr mapper other; }
-
-let () =
- register "ppx_test" test_mapper]}
-
- This -ppx rewriter, which replaces [[%test]] in expressions with
- the constant [42], can be compiled using
- [ocamlc -o ppx_test -I +compiler-libs ocamlcommon.cma ppx_test.ml].
-
- *)
+ A -ppx rewriter is a program that accepts a serialized abstract syntax tree
+ and outputs another, possibly modified, abstract syntax tree. This module
+ encapsulates the interface between the compiler and the -ppx rewriters,
+ handling such details as the serialization format, forwarding of
+ command-line flags, and storing state.
+
+ {!mapper} allows to implement AST rewriting using open recursion. A typical
+ mapper would be based on {!default_mapper}, a deep identity mapper, and will
+ fall back on it for handling the syntax it does not modify. For example:
+
+ {[
+ open Asttypes
+ open Parsetree
+ open Ast_mapper
+
+ let test_mapper argv =
+ {
+ default_mapper with
+ expr =
+ (fun mapper expr ->
+ match expr with
+ | {pexp_desc = Pexp_extension ({txt = "test"}, PStr [])} ->
+ Ast_helper.Exp.constant (Const_int 42)
+ | other -> default_mapper.expr mapper other);
+ }
+
+ let () = register "ppx_test" test_mapper
+ ]}
+
+ This -ppx rewriter, which replaces [[%test]] in expressions with the
+ constant [42], can be compiled using
+ [ocamlc -o ppx_test -I +compiler-libs ocamlcommon.cma ppx_test.ml]. *)
open Parsetree
@@ -96,10 +96,9 @@ type mapper = {
value_description: mapper -> value_description -> value_description;
with_constraint: mapper -> with_constraint -> with_constraint;
}
-(** A mapper record implements one "method" per syntactic category,
- using an open recursion style: each method takes as its first
- argument the mapper to be applied to children in the syntax
- tree. *)
+(** A mapper record implements one "method" per syntactic category, using an
+ open recursion style: each method takes as its first argument the mapper to
+ be applied to children in the syntax tree. *)
val default_mapper : mapper
(** A default mapper, which implements a "deep identity" mapping. *)
@@ -107,70 +106,65 @@ val default_mapper : mapper
(** {1 Apply mappers to compilation units} *)
val tool_name : unit -> string
-(** Can be used within a ppx preprocessor to know which tool is
- calling it ["ocamlc"], ["ocamlopt"], ["ocamldoc"], ["ocamldep"],
- ["ocaml"], ... Some global variables that reflect command-line
- options are automatically synchronized between the calling tool
- and the ppx preprocessor: {!Clflags.include_dirs},
- {!Config.load_path}, {!Clflags.open_modules}, {!Clflags.for_package},
- {!Clflags.debug}. *)
+(** Can be used within a ppx preprocessor to know which tool is calling it
+ ["ocamlc"], ["ocamlopt"], ["ocamldoc"], ["ocamldep"], ["ocaml"], ... Some
+ global variables that reflect command-line options are automatically
+ synchronized between the calling tool and the ppx preprocessor:
+ {!Clflags.include_dirs}, {!Config.load_path}, {!Clflags.open_modules},
+ {!Clflags.for_package}, {!Clflags.debug}. *)
val apply : source:string -> target:string -> mapper -> unit
-(** Apply a mapper (parametrized by the unit name) to a dumped
- parsetree found in the [source] file and put the result in the
- [target] file. The [structure] or [signature] field of the mapper
- is applied to the implementation or interface. *)
+(** Apply a mapper (parametrized by the unit name) to a dumped parsetree found
+ in the [source] file and put the result in the [target] file. The
+ [structure] or [signature] field of the mapper is applied to the
+ implementation or interface. *)
val run_main : (string list -> mapper) -> unit
-(** Entry point to call to implement a standalone -ppx rewriter from a
- mapper, parametrized by the command line arguments. The current
- unit name can be obtained from {!Location.input_name}. This
- function implements proper error reporting for uncaught
- exceptions. *)
+(** Entry point to call to implement a standalone -ppx rewriter from a mapper,
+ parametrized by the command line arguments. The current unit name can be
+ obtained from {!Location.input_name}. This function implements proper error
+ reporting for uncaught exceptions. *)
(** {1 Registration API} *)
val register_function : (string -> (string list -> mapper) -> unit) ref
val register : string -> (string list -> mapper) -> unit
-(** Apply the [register_function]. The default behavior is to run the
- mapper immediately, taking arguments from the process command
- line. This is to support a scenario where a mapper is linked as a
- stand-alone executable.
-
- It is possible to overwrite the [register_function] to define
- "-ppx drivers", which combine several mappers in a single process.
- Typically, a driver starts by defining [register_function] to a
- custom implementation, then lets ppx rewriters (linked statically
- or dynamically) register themselves, and then run all or some of
- them. It is also possible to have -ppx drivers apply rewriters to
- only specific parts of an AST.
-
- The first argument to [register] is a symbolic name to be used by
- the ppx driver. *)
+(** Apply the [register_function]. The default behavior is to run the mapper
+ immediately, taking arguments from the process command line. This is to
+ support a scenario where a mapper is linked as a stand-alone executable.
+
+ It is possible to overwrite the [register_function] to define "-ppx
+ drivers", which combine several mappers in a single process. Typically, a
+ driver starts by defining [register_function] to a custom implementation,
+ then lets ppx rewriters (linked statically or dynamically) register
+ themselves, and then run all or some of them. It is also possible to have
+ -ppx drivers apply rewriters to only specific parts of an AST.
+
+ The first argument to [register] is a symbolic name to be used by the ppx
+ driver. *)
(** {1 Convenience functions to write mappers} *)
val map_opt : ('a -> 'b) -> 'a option -> 'b option
val extension_of_error : Location.error -> extension
-(** Encode an error into an 'ocaml.error' extension node which can be
- inserted in a generated Parsetree. The compiler will be
- responsible for reporting the error. *)
+(** Encode an error into an 'ocaml.error' extension node which can be inserted
+ in a generated Parsetree. The compiler will be responsible for reporting the
+ error. *)
val attribute_of_warning : Location.t -> string -> attribute
(** Encode a warning message into an 'ocaml.ppwarning' attribute which can be
- inserted in a generated Parsetree. The compiler will be
- responsible for reporting the warning. *)
+ inserted in a generated Parsetree. The compiler will be responsible for
+ reporting the warning. *)
(** {1 Helper functions to call external mappers} *)
val add_ppx_context_str :
tool_name:string -> Parsetree.structure -> Parsetree.structure
-(** Extract information from the current environment and encode it
- into an attribute which is prepended to the list of structure
- items in order to pass the information to an external
- processor. *)
+(** Extract information from the current environment and encode it into an
+ attribute which is prepended to the list of structure items in order to pass
+ the information to an external processor. *)
val add_ppx_context_sig :
tool_name:string -> Parsetree.signature -> Parsetree.signature
@@ -178,9 +172,8 @@ val add_ppx_context_sig :
val drop_ppx_context_str :
restore:bool -> Parsetree.structure -> Parsetree.structure
-(** Drop the ocaml.ppx.context attribute from a structure. If
- [restore] is true, also restore the associated data in the current
- process. *)
+(** Drop the ocaml.ppx.context attribute from a structure. If [restore] is true,
+ also restore the associated data in the current process. *)
val drop_ppx_context_sig :
restore:bool -> Parsetree.signature -> Parsetree.signature
@@ -188,9 +181,9 @@ val drop_ppx_context_sig :
(** {1 Cookies} *)
-(** Cookies are used to pass information from a ppx processor to
- a further invocation of itself, when called from the OCaml
- toplevel (or other tools that support cookies). *)
+(** Cookies are used to pass information from a ppx processor to a further
+ invocation of itself, when called from the OCaml toplevel (or other tools
+ that support cookies). *)
val set_cookie : string -> Parsetree.expression -> unit
val get_cookie : string -> Parsetree.expression option
diff --git a/analysis/vendor/ml/ast_payload.ml b/analysis/vendor/ml/ast_payload.ml
index 8a9d37854..74cdddc50 100644
--- a/analysis/vendor/ml/ast_payload.ml
+++ b/analysis/vendor/ml/ast_payload.ml
@@ -182,11 +182,14 @@ type lid = string Asttypes.loc
type label_expr = lid * Parsetree.expression
type action = lid * Parsetree.expression option
-(** None means punning is hit
- {[ { x } ]}
- otherwise it comes with a payload
- {[ { x = exp }]}
-*)
+(** None means punning is hit
+ {[
+ {x}
+ ]}
+ otherwise it comes with a payload
+ {[
+ {x = exp}
+ ]} *)
let unrecognized_config_record loc text =
Location.prerr_warning loc (Warnings.Bs_derive_warning text)
diff --git a/analysis/vendor/ml/ast_payload.mli b/analysis/vendor/ml/ast_payload.mli
index 493ad8efb..020235511 100644
--- a/analysis/vendor/ml/ast_payload.mli
+++ b/analysis/vendor/ml/ast_payload.mli
@@ -22,8 +22,8 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-(** A utility module used when destructuring parsetree attributes, used for
- compiling FFI attributes and built-in ppx *)
+(** A utility module used when destructuring parsetree attributes, used for
+ compiling FFI attributes and built-in ppx *)
type t = Parsetree.payload
@@ -62,23 +62,27 @@ val as_ident : t -> Longident.t Asttypes.loc option
(* val raw_string_payload : Location.t -> string -> t *)
val assert_strings : Location.t -> t -> string list
-(** as a record or empty
- it will accept
+(** as a record or empty it will accept
- {[ [@@@config ]]}
- or
- {[ [@@@config no_export ] ]}
- or
- {[ [@@@config { property .. } ]]}
- Note that we only
{[
- { flat_property}
+ [@@@config]
]}
- below is not allowed
+ or
{[
- {M.flat_property}
+ [@@@config no_export]
+ ]}
+ or
+ {[
+ [@@@config { property .. } ]
]}
-*)
+ Note that we only
+ {[
+ {flat_property}
+ ]}
+ below is not allowed
+ {[
+ {M.flat_property}
+ ]} *)
val ident_or_record_as_config : Location.t -> t -> action list
@@ -90,5 +94,6 @@ val table_dispatch :
(Parsetree.expression option -> 'a) Map_string.t -> action -> 'a
val unrecognized_config_record : Location.t -> string -> unit
-(** Report to the user, as a warning, that the bs-attribute parser is bailing out. (This is to allow
- external ppx, like ppx_deriving, to pick up where the builtin ppx leave off.) *)
+(** Report to the user, as a warning, that the bs-attribute parser is bailing
+ out. (This is to allow external ppx, like ppx_deriving, to pick up where the
+ builtin ppx leave off.) *)
diff --git a/analysis/vendor/ml/bigint_utils.ml b/analysis/vendor/ml/bigint_utils.ml
index 2454e0d15..286a8055e 100644
--- a/analysis/vendor/ml/bigint_utils.ml
+++ b/analysis/vendor/ml/bigint_utils.ml
@@ -89,6 +89,8 @@ let compare (p0, s0) (p1, s1) =
else if len0 > len1 then
if p0 then 1
else -1 (* A longer s0 means it's larger unless it's negative. *)
- else if (* len0 < len1 *)
- p0 then -1
+ else if
+ (* len0 < len1 *)
+ p0
+ then -1
else 1 (* A longer s1 means s0 is smaller unless s1 is negative. *)
diff --git a/analysis/vendor/ml/builtin_attributes.mli b/analysis/vendor/ml/builtin_attributes.mli
index fd898388c..5e5558603 100644
--- a/analysis/vendor/ml/builtin_attributes.mli
+++ b/analysis/vendor/ml/builtin_attributes.mli
@@ -64,24 +64,19 @@ val error_of_extension : Parsetree.extension -> Location.error
val warning_attribute : ?ppwarning:bool -> Parsetree.attribute -> unit
(** Apply warning settings from the specified attribute.
- "ocaml.warning"/"ocaml.warnerror" (and variants without the prefix)
- are processed and other attributes are ignored.
+ "ocaml.warning"/"ocaml.warnerror" (and variants without the prefix) are
+ processed and other attributes are ignored.
- Also implement ocaml.ppwarning (unless ~ppwarning:false is
- passed).
- *)
+ Also implement ocaml.ppwarning (unless ~ppwarning:false is passed). *)
val warning_scope :
?ppwarning:bool -> Parsetree.attributes -> (unit -> 'a) -> 'a
-(** Execute a function in a new scope for warning settings. This
- means that the effect of any call to [warning_attribute] during
- the execution of this function will be discarded after
- execution.
+(** Execute a function in a new scope for warning settings. This means that the
+ effect of any call to [warning_attribute] during the execution of this
+ function will be discarded after execution.
- The function also takes a list of attributes which are processed
- with [warning_attribute] in the fresh scope before the function
- is executed.
- *)
+ The function also takes a list of attributes which are processed with
+ [warning_attribute] in the fresh scope before the function is executed. *)
val warn_on_literal_pattern : Parsetree.attributes -> bool
val explicit_arity : Parsetree.attributes -> bool
diff --git a/analysis/vendor/ml/classify_function.ml b/analysis/vendor/ml/classify_function.ml
index 5565b230b..67b977c44 100644
--- a/analysis/vendor/ml/classify_function.ml
+++ b/analysis/vendor/ml/classify_function.ml
@@ -79,8 +79,7 @@ let classify_exp (prog : _ Flow_ast.Expression.t) : Js_raw_info.exp =
(** It seems we do the parse twice
- in parsing
- - in code generation
-*)
+ - in code generation *)
let classify ?(check : (Location.t * int) option) (prog : string) :
Js_raw_info.exp =
let prog, errors =
diff --git a/analysis/vendor/ml/cmt_format.mli b/analysis/vendor/ml/cmt_format.mli
index 1a84aa68d..a161c3a88 100644
--- a/analysis/vendor/ml/cmt_format.mli
+++ b/analysis/vendor/ml/cmt_format.mli
@@ -15,19 +15,17 @@
(** cmt and cmti files format. *)
-(** The layout of a cmt file is as follows:
- := \{\} \{cmt infos\} \{\}
- where is the cmi file format:
- := .
- More precisely, the optional part must be present if and only if
- the file is:
+(** The layout of a cmt file is as follows: := \{\} \{cmt
+ infos\} \{\} where is the cmi file format: := . More precisely, the optional part must be present
+ if and only if the file is:
- a cmti, or
- a cmt, for a ml file which has no corresponding mli (hence no
- corresponding cmti).
+ corresponding cmti).
- Thus, we provide a common reading function for cmi and cmt(i)
- files which returns an option for each of the three parts: cmi
- info, cmt info, source info. *)
+ Thus, we provide a common reading function for cmi and cmt(i) files which
+ returns an option for each of the three parts: cmi info, cmt info, source
+ info. *)
open Typedtree
@@ -70,13 +68,12 @@ type error = Not_a_typedtree of string
exception Error of error
val read : string -> Cmi_format.cmi_infos option * cmt_infos option
-(** [read filename] opens filename, and extract both the cmi_infos, if
- it exists, and the cmt_infos, if it exists. Thus, it can be used
- with .cmi, .cmt and .cmti files.
+(** [read filename] opens filename, and extract both the cmi_infos, if it
+ exists, and the cmt_infos, if it exists. Thus, it can be used with .cmi,
+ .cmt and .cmti files.
- .cmti files always contain a cmi_infos at the beginning. .cmt files
- only contain a cmi_infos at the beginning if there is no associated
- .cmti file.
+ .cmti files always contain a cmi_infos at the beginning. .cmt files only
+ contain a cmi_infos at the beginning if there is no associated .cmti file.
*)
val read_cmt : string -> cmt_infos
@@ -95,8 +92,8 @@ val save_cmt :
Cmi_format.cmi_infos option ->
(* if a .cmi was generated *)
unit
-(** [save_cmt filename modname binary_annots sourcefile initial_env cmi]
- writes a cmt(i) file. *)
+(** [save_cmt filename modname binary_annots sourcefile initial_env cmi] writes
+ a cmt(i) file. *)
(* Miscellaneous functions *)
diff --git a/analysis/vendor/ml/ctype.ml b/analysis/vendor/ml/ctype.ml
index 10f362d28..0471b8e47 100644
--- a/analysis/vendor/ml/ctype.ml
+++ b/analysis/vendor/ml/ctype.ml
@@ -640,8 +640,9 @@ let get_level env p =
match (Env.find_type p env).type_newtype_level with
| None -> Path.binding_time p
| Some (x, _) -> x
- with Not_found -> (* no newtypes in predef *)
- Path.binding_time p
+ with Not_found ->
+ (* no newtypes in predef *)
+ Path.binding_time p
let rec normalize_package_path env p =
let t = try (Env.find_modtype p env).mtd_type with Not_found -> None in
@@ -4671,7 +4672,8 @@ let maybe_pointer_type env typ =
true
(* This can happen due to e.g. missing -I options,
causing some .cmi files to be unavailable.
- Maybe we should emit a warning. *))
+ Maybe we should emit a warning. *)
+ )
| Tvariant row ->
let row = Btype.row_repr row in
(* if all labels are devoid of arguments, not a pointer *)
diff --git a/analysis/vendor/ml/datarepr.mli b/analysis/vendor/ml/datarepr.mli
index 47113d87e..6104cc503 100644
--- a/analysis/vendor/ml/datarepr.mli
+++ b/analysis/vendor/ml/datarepr.mli
@@ -34,11 +34,9 @@ val find_constr_by_tag :
val constructor_existentials :
constructor_arguments -> type_expr option -> type_expr list * type_expr list
-(** Takes [cd_args] and [cd_res] from a [constructor_declaration] and
- returns:
+(** Takes [cd_args] and [cd_res] from a [constructor_declaration] and returns:
- the types of the constructor's arguments
- - the existential variables introduced by the constructor
- *)
+ - the existential variables introduced by the constructor *)
(* Set the polymorphic variant row_name field *)
val set_row_name : type_declaration -> Path.t -> unit
diff --git a/analysis/vendor/ml/docstrings.mli b/analysis/vendor/ml/docstrings.mli
index 36fcbb282..802176651 100644
--- a/analysis/vendor/ml/docstrings.mli
+++ b/analysis/vendor/ml/docstrings.mli
@@ -40,8 +40,8 @@ val docstring_loc : docstring -> Location.t
(** {2 Set functions}
- These functions are used by the lexer to associate docstrings to
- the locations of tokens. *)
+ These functions are used by the lexer to associate docstrings to the
+ locations of tokens. *)
val set_pre_docstrings : Lexing.position -> docstring list -> unit
(** Docstrings immediately preceding a token *)
@@ -69,29 +69,28 @@ val empty_docs : docs
val docs_attr : docstring -> Parsetree.attribute
val add_docs_attrs : docs -> Parsetree.attributes -> Parsetree.attributes
-(** Convert item documentation to attributes and add them to an
- attribute list *)
+(** Convert item documentation to attributes and add them to an attribute list
+*)
val symbol_docs : unit -> docs
-(** Fetch the item documentation for the current symbol. This also
- marks this documentation (for ambiguity warnings). *)
+(** Fetch the item documentation for the current symbol. This also marks this
+ documentation (for ambiguity warnings). *)
val symbol_docs_lazy : unit -> docs Lazy.t
val rhs_docs : int -> int -> docs
-(** Fetch the item documentation for the symbols between two
- positions. This also marks this documentation (for ambiguity
- warnings). *)
+(** Fetch the item documentation for the symbols between two positions. This
+ also marks this documentation (for ambiguity warnings). *)
val rhs_docs_lazy : int -> int -> docs Lazy.t
val mark_symbol_docs : unit -> unit
-(** Mark the item documentation for the current symbol (for ambiguity
- warnings). *)
+(** Mark the item documentation for the current symbol (for ambiguity warnings).
+*)
val mark_rhs_docs : int -> int -> unit
-(** Mark as associated the item documentation for the symbols between
- two positions (for ambiguity warnings) *)
+(** Mark as associated the item documentation for the symbols between two
+ positions (for ambiguity warnings) *)
(** {2 Fields and constructors}
@@ -105,8 +104,7 @@ val empty_info : info
val info_attr : docstring -> Parsetree.attribute
val add_info_attrs : info -> Parsetree.attributes -> Parsetree.attributes
-(** Convert field info to attributes and add them to an
- attribute list *)
+(** Convert field info to attributes and add them to an attribute list *)
val symbol_info : unit -> info
(** Fetch the field info for the current symbol. *)
@@ -116,8 +114,8 @@ val rhs_info : int -> info
(** {2 Unattached comments}
- The {!text} type represents documentation which is not attached to
- anything. *)
+ The {!text} type represents documentation which is not attached to anything.
+*)
type text = docstring list
@@ -141,10 +139,9 @@ val rhs_text_lazy : int -> text Lazy.t
(** {2 Extra text}
- There may be additional text attached to the delimiters of a block
- (e.g. [struct] and [end]). This is fetched by the following
- functions, which are applied to the contents of the block rather
- than the delimiters. *)
+ There may be additional text attached to the delimiters of a block (e.g.
+ [struct] and [end]). This is fetched by the following functions, which are
+ applied to the contents of the block rather than the delimiters. *)
val symbol_pre_extra_text : unit -> text
(** Fetch additional text preceding the current symbol *)
diff --git a/analysis/vendor/ml/env.ml b/analysis/vendor/ml/env.ml
index 9732a5931..b162f34d4 100644
--- a/analysis/vendor/ml/env.ml
+++ b/analysis/vendor/ml/env.ml
@@ -155,9 +155,9 @@ type summary =
| Env_copy_types of summary * string list
module TycompTbl = struct
- (** This module is used to store components of types (i.e. labels
- and constructors). We keep a representation of each nested
- "open" and the set of local bindings between each of them. *)
+ (** This module is used to store components of types (i.e. labels and
+ constructors). We keep a representation of each nested "open" and the set
+ of local bindings between each of them. *)
type 'a t = {
current: 'a Ident.tbl; (** Local bindings since the last open. *)
@@ -167,13 +167,12 @@ module TycompTbl = struct
and 'a opened = {
components: (string, 'a list) Tbl.t;
- (** Components from the opened module. We keep a list of
- bindings for each name, as in comp_labels and
- comp_constrs. *)
+ (** Components from the opened module. We keep a list of bindings for
+ each name, as in comp_labels and comp_constrs. *)
using: (string -> ('a * 'a) option -> unit) option;
- (** A callback to be applied when a component is used from this
- "open". This is used to detect unused "opens". The
- arguments are used to detect shadowing. *)
+ (** A callback to be applied when a component is used from this "open".
+ This is used to detect unused "opens". The arguments are used to
+ detect shadowing. *)
next: 'a t; (** The table before opening the module. *)
}
@@ -248,10 +247,9 @@ module TycompTbl = struct
end
module IdTbl = struct
- (** This module is used to store all kinds of components except
- (labels and constructors) in environments. We keep a
- representation of each nested "open" and the set of local
- bindings between each of them. *)
+ (** This module is used to store all kinds of components except (labels and
+ constructors) in environments. We keep a representation of each nested
+ "open" and the set of local bindings between each of them. *)
type 'a t = {
current: 'a Ident.tbl; (** Local bindings since the last open *)
@@ -261,15 +259,14 @@ module IdTbl = struct
and 'a opened = {
root: Path.t;
- (** The path of the opened module, to be prefixed in front of
- its local names to produce a valid path in the current
- environment. *)
+ (** The path of the opened module, to be prefixed in front of its local
+ names to produce a valid path in the current environment. *)
components: (string, 'a * int) Tbl.t;
(** Components from the opened module. *)
using: (string -> ('a * 'a) option -> unit) option;
- (** A callback to be applied when a component is used from this
- "open". This is used to detect unused "opens". The
- arguments are used to detect shadowing. *)
+ (** A callback to be applied when a component is used from this "open".
+ This is used to detect unused "opens". The arguments are used to
+ detect shadowing. *)
next: 'a t; (** The table before opening the module. *)
}
diff --git a/analysis/vendor/ml/env.mli b/analysis/vendor/ml/env.mli
index 6ff95b65b..fda86ead7 100644
--- a/analysis/vendor/ml/env.mli
+++ b/analysis/vendor/ml/env.mli
@@ -350,6 +350,6 @@ module Persistent_signature : sig
val load : (unit_name:string -> t option) ref
(** Function used to load a persistent signature. The default is to look for
- the .cmi file in the load path. This function can be overridden to load
- it from memory, for instance to build a self-contained toplevel. *)
+ the .cmi file in the load path. This function can be overridden to load it
+ from memory, for instance to build a self-contained toplevel. *)
end
diff --git a/analysis/vendor/ml/includeclass.ml b/analysis/vendor/ml/includeclass.ml
index 9791bc488..941f182dd 100644
--- a/analysis/vendor/ml/includeclass.ml
+++ b/analysis/vendor/ml/includeclass.ml
@@ -51,8 +51,7 @@ let include_err ppf = function
Printtyp.report_unification_error ppf env ~unif:false trace
(function
| ppf -> fprintf ppf "A type parameter has type")
- (function
- | ppf -> fprintf ppf "but is expected to have type")
+ (function ppf -> fprintf ppf "but is expected to have type")
| CM_Class_type_mismatch (env, cty1, cty2) ->
Printtyp.wrap_printing_env env (fun () ->
fprintf ppf "@[The class type@;<1 2>%a@ %s@;<1 2>%a@]"
@@ -62,20 +61,17 @@ let include_err ppf = function
Printtyp.report_unification_error ppf env ~unif:false trace
(function
| ppf -> fprintf ppf "A parameter has type")
- (function
- | ppf -> fprintf ppf "but is expected to have type")
+ (function ppf -> fprintf ppf "but is expected to have type")
| CM_Val_type_mismatch (lab, env, trace) ->
Printtyp.report_unification_error ppf env ~unif:false trace
(function
| ppf -> fprintf ppf "The instance variable %s@ has type" lab)
- (function
- | ppf -> fprintf ppf "but is expected to have type")
+ (function ppf -> fprintf ppf "but is expected to have type")
| CM_Meth_type_mismatch (lab, env, trace) ->
Printtyp.report_unification_error ppf env ~unif:false trace
(function
| ppf -> fprintf ppf "The method %s@ has type" lab)
- (function
- | ppf -> fprintf ppf "but is expected to have type")
+ (function ppf -> fprintf ppf "but is expected to have type")
| CM_Non_mutable_value lab ->
fprintf ppf "@[The non-mutable instance variable %s cannot become mutable@]"
lab
diff --git a/analysis/vendor/ml/includemod.ml b/analysis/vendor/ml/includemod.ml
index d99f29373..4b1d125ce 100644
--- a/analysis/vendor/ml/includemod.ml
+++ b/analysis/vendor/ml/includemod.ml
@@ -558,8 +558,8 @@ let include_err ~env ppf = function
| Extension_constructors (id, x1, x2) ->
fprintf ppf
"@[Extension declarations do not match:@ %a@;\
- <1 -2>is not included in@ %a@]" (extension_constructor id) x1
- (extension_constructor id) x2;
+ <1 -2>is not included in@ %a@]"
+ (extension_constructor id) x1 (extension_constructor id) x2;
show_locs ppf (x1.ext_loc, x2.ext_loc)
| Module_types (mty1, mty2) ->
fprintf ppf
@@ -568,8 +568,8 @@ let include_err ~env ppf = function
| Modtype_infos (id, d1, d2) ->
fprintf ppf
"@[Module type declarations do not match:@ %a@;\
- <1 -2>does not match@ %a@]" (modtype_declaration id) d1
- (modtype_declaration id) d2
+ <1 -2>does not match@ %a@]"
+ (modtype_declaration id) d1 (modtype_declaration id) d2
| Modtype_permutation -> fprintf ppf "Illegal permutation of structure fields"
| Interface_mismatch (impl_name, intf_name) ->
fprintf ppf "@[The implementation %s@ does not match the interface %s:"
diff --git a/analysis/vendor/ml/location.mli b/analysis/vendor/ml/location.mli
index db4aa270d..68104dd5b 100644
--- a/analysis/vendor/ml/location.mli
+++ b/analysis/vendor/ml/location.mli
@@ -23,12 +23,10 @@ type t = Warnings.loc = {
loc_ghost: bool;
}
-(** Note on the use of Lexing.position in this module.
- If [pos_fname = ""], then use [!input_name] instead.
- If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
- re-parse the file to get the line and character numbers.
- Else all fields are correct.
-*)
+(** Note on the use of Lexing.position in this module. If [pos_fname = ""], then
+ use [!input_name] instead. If [pos_lnum = -1], then [pos_bol = 0]. Use
+ [pos_cnum] and re-parse the file to get the line and character numbers. Else
+ all fields are correct. *)
val none : t
(** An arbitrary value of type [t]; describes an empty ghost range. *)
@@ -37,8 +35,8 @@ val in_file : string -> t
(** Return an empty ghost range located in a given file. *)
val init : Lexing.lexbuf -> string -> unit
-(** Set the file name and line number of the [lexbuf] to be the start
- of the named file. *)
+(** Set the file name and line number of the [lexbuf] to be the start of the
+ named file. *)
val curr : Lexing.lexbuf -> t
(** Get the location of the current token from the [lexbuf]. *)
@@ -47,8 +45,8 @@ val symbol_rloc : unit -> t
val symbol_gloc : unit -> t
val rhs_loc : int -> t
-(** [rhs_loc n] returns the location of the symbol at position [n], starting
- at 1, in the current parser rule. *)
+(** [rhs_loc n] returns the location of the symbol at position [n], starting at
+ 1, in the current parser rule. *)
val input_name : string ref
val set_input_name : string -> unit
@@ -88,8 +86,8 @@ val print_filename : formatter -> string -> unit
val absolute_path : string -> string
val show_filename : string -> string
-(** In -absname mode, return the absolute path for this filename.
- Otherwise, returns the filename unchanged. *)
+(** In -absname mode, return the absolute path for this filename. Otherwise,
+ returns the filename unchanged. *)
val absname : bool ref
@@ -135,12 +133,11 @@ val error_of_printer_file : (formatter -> 'a -> unit) -> 'a -> error
val error_of_exn : exn -> [`Ok of error | `Already_displayed] option
val register_error_of_exn : (exn -> error option) -> unit
-(** Each compiler module which defines a custom type of exception
- which can surface as a user-visible error should register
- a "printer" for this exception using [register_error_of_exn].
- The result of the printer is an [error] value containing
- a location, a message, and optionally sub-messages (each of them
- being located as well). *)
+(** Each compiler module which defines a custom type of exception which can
+ surface as a user-visible error should register a "printer" for this
+ exception using [register_error_of_exn]. The result of the printer is an
+ [error] value containing a location, a message, and optionally sub-messages
+ (each of them being located as well). *)
val report_error : ?src:string option -> formatter -> error -> unit
diff --git a/analysis/vendor/ml/matching.ml b/analysis/vendor/ml/matching.ml
index 174e5ab26..05566261f 100644
--- a/analysis/vendor/ml/matching.ml
+++ b/analysis/vendor/ml/matching.ml
@@ -756,7 +756,8 @@ let insert_or_append p ps act ors no =
let _, not_e = get_equiv q rem in
if
or_ok p ps not_e
- && (* check append condition for head of O *)
+ &&
+ (* check append condition for head of O *)
List.for_all (* check insert condition for tail of O *)
(fun cl ->
match cl with
@@ -2811,7 +2812,8 @@ let check_partial is_mutable is_lazy pat_act_list = function
| Total ->
if
pat_act_list = []
- || (* allow empty case list *)
+ ||
+ (* allow empty case list *)
List.exists
(fun (pats, lam) -> is_mutable pats && (is_guarded lam || is_lazy pats))
pat_act_list
diff --git a/analysis/vendor/ml/parmatch.mli b/analysis/vendor/ml/parmatch.mli
index 1213d4783..59584e2a5 100644
--- a/analysis/vendor/ml/parmatch.mli
+++ b/analysis/vendor/ml/parmatch.mli
@@ -96,9 +96,10 @@ val check_unused :
val irrefutable : pattern -> bool
val inactive : partial:partial -> pattern -> bool
-(** An inactive pattern is a pattern, matching against which can be duplicated, erased or
- delayed without change in observable behavior of the program. Patterns containing
- (lazy _) subpatterns or reads of mutable fields are active. *)
+(** An inactive pattern is a pattern, matching against which can be duplicated,
+ erased or delayed without change in observable behavior of the program.
+ Patterns containing (lazy _) subpatterns or reads of mutable fields are
+ active. *)
(* Ambiguous bindings *)
val check_ambiguous_bindings : case list -> unit
diff --git a/analysis/vendor/ml/parsetree.ml b/analysis/vendor/ml/parsetree.ml
index c37ff8824..8ed4f74ee 100644
--- a/analysis/vendor/ml/parsetree.ml
+++ b/analysis/vendor/ml/parsetree.ml
@@ -174,8 +174,7 @@ and pattern_desc =
Other forms of interval are recognized by the parser
but rejected by the type-checker. *)
- | Ppat_tuple of pattern list
- (* (P1, ..., Pn)
+ | Ppat_tuple of pattern list (* (P1, ..., Pn)
Invariant: n >= 2
*)
@@ -218,8 +217,7 @@ and expression = {
}
and expression_desc =
- | Pexp_ident of Longident.t loc
- (* x
+ | Pexp_ident of Longident.t loc (* x
M.x
*)
| Pexp_constant of constant (* 1, 'a', "true", 1.0, 1l, 1L, 1n *)
diff --git a/analysis/vendor/ml/predef.mli b/analysis/vendor/ml/predef.mli
index 9e3da5077..c08c4d324 100644
--- a/analysis/vendor/ml/predef.mli
+++ b/analysis/vendor/ml/predef.mli
@@ -79,10 +79,9 @@ val builtin_values : (string * Ident.t) list
val builtin_idents : (string * Ident.t) list
val ident_division_by_zero : Ident.t
-(** All predefined exceptions, exposed as [Ident.t] for flambda (for
- building value approximations).
- The [Ident.t] for division by zero is also exported explicitly
- so flambda can generate code to raise it. *)
+(** All predefined exceptions, exposed as [Ident.t] for flambda (for building
+ value approximations). The [Ident.t] for division by zero is also exported
+ explicitly so flambda can generate code to raise it. *)
val all_predef_exns : Ident.t list
diff --git a/analysis/vendor/ml/rec_check.ml b/analysis/vendor/ml/rec_check.ml
index 6942a0edb..f9f8e1193 100644
--- a/analysis/vendor/ml/rec_check.ml
+++ b/analysis/vendor/ml/rec_check.ml
@@ -21,15 +21,15 @@ module Rec_context = struct
type access =
| Dereferenced
(** [Dereferenced] indicates that the value (not just the address) of a
- variable is accessed *)
+ variable is accessed *)
| Guarded
(** [Guarded] indicates that the address of a variable is used in a
- guarded context, i.e. under a constructor. A variable that is
- dereferenced within a function body or lazy context is also considered
- guarded. *)
+ guarded context, i.e. under a constructor. A variable that is
+ dereferenced within a function body or lazy context is also
+ considered guarded. *)
| Unguarded
(** [Unguarded] indicates that the address of a variable is used in an
- unguarded context, i.e. not under a constructor. *)
+ unguarded context, i.e. not under a constructor. *)
(** [guard] represents guarded contexts such as [C -] and [{l = -}] *)
let guard : access -> access = function
@@ -44,8 +44,8 @@ module Rec_context = struct
| Guarded -> Dereferenced
| Unguarded -> Dereferenced
- (** [delay] represents contexts that delay evaluation such as [fun p -> -]
- or [lazy -] *)
+ (** [delay] represents contexts that delay evaluation such as [fun p -> -] or
+ [lazy -] *)
let delay : access -> access = function
| Dereferenced -> Guarded
| Guarded -> Guarded
@@ -61,7 +61,8 @@ module Rec_context = struct
(** The address of a subexpression is not used, but may be bound *)
val inspect : t -> t
- (** The value of a subexpression is inspected with match, application, etc. *)
+ (** The value of a subexpression is inspected with match, application, etc.
+ *)
val delay : t -> t
(** An expression appears under 'fun p ->' or 'lazy' *)
@@ -73,8 +74,8 @@ module Rec_context = struct
(** Combine the access information of two expressions *)
val empty : t
- (** No variables are accessed in an expression; it might be a
- constant or a global identifier *)
+ (** No variables are accessed in an expression; it might be a constant or a
+ global identifier *)
val unguarded : t -> Ident.t list
(** The list of identifiers that are used in an unguarded context *)
diff --git a/analysis/vendor/ml/record_coercion.ml b/analysis/vendor/ml/record_coercion.ml
index 9a0c4eb74..0bb221437 100644
--- a/analysis/vendor/ml/record_coercion.ml
+++ b/analysis/vendor/ml/record_coercion.ml
@@ -12,7 +12,8 @@ let check_record_fields ?repr1 ?repr2 (fields1 : Types.label_declaration list)
with
| Some ld1 ->
if field_is_optional ld1.ld_id repr1 <> field_is_optional ld2.ld_id repr2
- then (* optional field can't be modified *)
+ then
+ (* optional field can't be modified *)
violation := true;
let get_as (({txt}, payload) : Parsetree.attribute) =
if txt = "as" then Ast_payload.is_single_string payload else None
diff --git a/analysis/vendor/ml/rescript_cpp.ml b/analysis/vendor/ml/rescript_cpp.ml
index 939e64574..52957dd1c 100644
--- a/analysis/vendor/ml/rescript_cpp.ml
+++ b/analysis/vendor/ml/rescript_cpp.ml
@@ -113,7 +113,10 @@ let () =
we want to overwrite in some cases with the
same stdlib
*)
- let version = Config.version (* so that it can be overridden*) in
+ let version =
+ Config.version
+ (* so that it can be overridden*)
+ in
replace_directive_built_in_value "OCAML_VERSION" (Dir_string version);
replace_directive_built_in_value "OS_TYPE" (Dir_string Sys.os_type)
@@ -155,16 +158,14 @@ let semantic_version_parse str start last_index =
let additional = String.sub str patch_end (last_index - patch_end + 1) in
((major, minor, patch), additional)
-(**
- {[
- semver Location.none "1.2.3" "~1.3.0" = false;;
- semver Location.none "1.2.3" "^1.3.0" = true ;;
- semver Location.none "1.2.3" ">1.3.0" = false ;;
- semver Location.none "1.2.3" ">=1.3.0" = false ;;
- semver Location.none "1.2.3" "<1.3.0" = true ;;
- semver Location.none "1.2.3" "<=1.3.0" = true ;;
- ]}
- *)
+(** {[
+ semver Location.none "1.2.3" "~1.3.0" = false;;
+ semver Location.none "1.2.3" "^1.3.0" = true;;
+ semver Location.none "1.2.3" ">1.3.0" = false;;
+ semver Location.none "1.2.3" ">=1.3.0" = false;;
+ semver Location.none "1.2.3" "<1.3.0" = true;;
+ semver Location.none "1.2.3" "<=1.3.0" = true
+ ]} *)
let semver loc lhs str =
let last_index = String.length str - 1 in
if last_index < 0 then raise (Pp_error (Illegal_semver str, loc))
@@ -321,7 +322,8 @@ let directive_parse (token_with_comments : Lexing.lexbuf -> Parser.token) lexbuf
Location.curr lexbuf )))
| e -> no e
and parse_or calc : bool = parse_or_aux calc (parse_and calc)
- and (* a || (b || (c || d))*)
+ and
+ (* a || (b || (c || d))*)
parse_or_aux calc v : bool =
(* let l = v in *)
match token () with
diff --git a/analysis/vendor/ml/tast_iterator.mli b/analysis/vendor/ml/tast_iterator.mli
index 474260920..57065d9f2 100644
--- a/analysis/vendor/ml/tast_iterator.mli
+++ b/analysis/vendor/ml/tast_iterator.mli
@@ -13,9 +13,7 @@
(* *)
(**************************************************************************)
-(**
-Allows the implementation of typed tree inspection using open recursion
-*)
+(** Allows the implementation of typed tree inspection using open recursion *)
open Asttypes
open Typedtree
diff --git a/analysis/vendor/ml/typeclass.ml b/analysis/vendor/ml/typeclass.ml
index 52b79eaef..26c84e8f2 100644
--- a/analysis/vendor/ml/typeclass.ml
+++ b/analysis/vendor/ml/typeclass.ml
@@ -1070,8 +1070,7 @@ let report_error env ppf = function
Printtyp.report_unification_error ppf env trace
(function
| ppf -> fprintf ppf "The %s %s@ has type" k m)
- (function
- | ppf -> fprintf ppf "but is expected to have type")
+ (function ppf -> fprintf ppf "but is expected to have type")
| Structure_expected clty ->
fprintf ppf
"@[This class expression is not a class structure; it has type@ %a@]"
@@ -1097,8 +1096,7 @@ let report_error env ppf = function
Printtyp.report_unification_error ppf env trace
(function
| ppf -> fprintf ppf "The expression \"new %s\" has type" c)
- (function
- | ppf -> fprintf ppf "but is used with type")
+ (function ppf -> fprintf ppf "but is used with type")
| Virtual_class (cl, imm, mets, vals) ->
let print_mets ppf mets =
List.iter
@@ -1154,7 +1152,8 @@ let report_error env ppf = function
Printtyp.reset ();
fprintf ppf
"@[@[Some type variables are unbound in this type:@;\
- <1 2>%t@]@ @[%a@]@]" printer print_reason reason
+ <1 2>%t@]@ @[%a@]@]"
+ printer print_reason reason
| Non_generalizable_class (id, clty) ->
fprintf ppf
"@[The type of this class,@ %a,@ contains type variables that cannot be \
diff --git a/analysis/vendor/ml/typecore.ml b/analysis/vendor/ml/typecore.ml
index 909d73bab..39763f393 100644
--- a/analysis/vendor/ml/typecore.ml
+++ b/analysis/vendor/ml/typecore.ml
@@ -382,7 +382,8 @@ let finalize_variant pat =
(* Force check of well-formedness WHY? *)
(* unify_pat pat.pat_env pat
(newty(Tvariant{row_fields=[]; row_more=newvar(); row_closed=false;
- row_bound=(); row_fixed=false; row_name=None})); *))
+ row_bound=(); row_fixed=false; row_name=None})); *)
+ )
| _ -> ()
let rec iter_pattern f p =
@@ -750,8 +751,7 @@ let print_expr_type_clash ?type_clash_context env trace ppf =
Printtyp.super_report_unification_error ppf env trace
(function
| ppf -> error_type_text ppf type_clash_context)
- (function
- | ppf -> error_expected_type_text ppf type_clash_context);
+ (function ppf -> error_expected_type_text ppf type_clash_context);
print_extra_type_clash_help ppf trace type_clash_context;
show_extra_help ppf env trace
@@ -4221,8 +4221,7 @@ let report_error env ppf = function
(function
| ppf ->
fprintf ppf "The record field %a@ belongs to the type" longident lid)
- (function
- | ppf -> fprintf ppf "but is mixed here with fields of type")
+ (function ppf -> fprintf ppf "but is mixed here with fields of type")
| Pattern_type_clash trace ->
(* modified *)
super_report_unification_error ppf env trace
@@ -4230,7 +4229,7 @@ let report_error env ppf = function
| ppf -> fprintf ppf "This pattern matches values of type")
(function
| ppf ->
- fprintf ppf "but a pattern was expected which matches values of type")
+ fprintf ppf "but a pattern was expected which matches values of type")
| Or_pattern_type_clash (id, trace) ->
(* modified *)
super_report_unification_error ppf env trace
@@ -4239,8 +4238,7 @@ let report_error env ppf = function
fprintf ppf
"The variable %s on the left-hand side of this or-pattern has type"
(Ident.name id))
- (function
- | ppf -> fprintf ppf "but on the right-hand side it has type")
+ (function ppf -> fprintf ppf "but on the right-hand side it has type")
| Multiply_bound_variable name ->
fprintf ppf "Variable %s is bound several times in this matching" name
| Orpat_vars (id, valid_idents) ->
@@ -4373,7 +4371,7 @@ let report_error env ppf = function
name longident lid kind)
(function
| ppf ->
- fprintf ppf "but a %s was expected belonging to the %s type" name kind)
+ fprintf ppf "but a %s was expected belonging to the %s type" name kind)
| Undefined_method (ty, me, valid_methods) -> (
reset_and_mark_loops ty;
fprintf ppf
@@ -4394,8 +4392,7 @@ let report_error env ppf = function
"This expression cannot be coerced to type@;<1 2>%a;@ it has type"
(Printtyp.type_expansion ty)
ty')
- (function
- | ppf -> fprintf ppf "but is here used with type");
+ (function ppf -> fprintf ppf "but is here used with type");
if b then
fprintf ppf ".@.@[%s@ %s@]"
"This simple coercion was not fully general."
@@ -4455,8 +4452,7 @@ let report_error env ppf = function
super_report_unification_error ppf env trace
(function
| ppf -> fprintf ppf "Recursive local constraint when unifying")
- (function
- | ppf -> fprintf ppf "with")
+ (function ppf -> fprintf ppf "with")
| Unexpected_existential -> fprintf ppf "Unexpected existential"
| Unqualified_gadt_pattern (tpath, name) ->
fprintf ppf "@[The GADT constructor %s of type %a@ %s.@]" name path tpath
diff --git a/analysis/vendor/ml/typedecl.ml b/analysis/vendor/ml/typedecl.ml
index d1e63ffcf..df1e35c31 100644
--- a/analysis/vendor/ml/typedecl.ml
+++ b/analysis/vendor/ml/typedecl.ml
@@ -1180,11 +1180,14 @@ let compute_variance_type env check (required, loc) decl tyl =
let v = get_variance ty tvl in
let tr = decl.type_private in
(* Use required variance where relevant *)
- let concr = decl.type_kind <> Type_abstract (*|| tr = Type_new*) in
+ let concr =
+ decl.type_kind <> Type_abstract
+ (*|| tr = Type_new*)
+ in
let p, n =
if tr = Private || not (Btype.is_Tvar ty) then (p, n) (* set *)
else (false, false)
- (* only check *)
+ (* only check *)
and i = concr || (i && tr = Private) in
let v = union v (make p n i) in
let v =
@@ -2161,8 +2164,7 @@ let report_error ppf = function
Printtyp.report_unification_error ppf env trace
(function
| ppf -> fprintf ppf "This type constructor expands to type")
- (function
- | ppf -> fprintf ppf "but is used here with type")
+ (function ppf -> fprintf ppf "but is used here with type")
| Null_arity_external -> fprintf ppf "External identifiers must be functions"
| Unbound_type_var (ty, decl) -> (
fprintf ppf "A type variable is unbound in this type declaration";
@@ -2204,8 +2206,7 @@ let report_error ppf = function
(function
| ppf ->
fprintf ppf "The constructor %a@ has type" Printtyp.longident lid)
- (function
- | ppf -> fprintf ppf "but was expected to be of type")
+ (function ppf -> fprintf ppf "but was expected to be of type")
| Rebind_mismatch (lid, p, p') ->
fprintf ppf "@[%s@ %a@ %s@ %s@ %s@ %s@ %s@]" "The constructor"
Printtyp.longident lid "extends type" (Path.name p)
@@ -2269,8 +2270,10 @@ let report_error ppf = function
fprintf ppf "@[GADT case syntax cannot be used in a 'nonrec' block.@]"
| Variant_runtime_representation_mismatch
(Variant_coercion.VariantError
- {is_spread_context; error = Variant_coercion.Untagged {left_is_unboxed}})
- ->
+ {
+ is_spread_context;
+ error = Variant_coercion.Untagged {left_is_unboxed};
+ }) ->
let other_variant_text =
if is_spread_context then "the variant where this is spread"
else "the other variant"
@@ -2282,7 +2285,7 @@ let report_error ppf = function
^ " is not. Both variants unboxed configuration must match")
| Variant_runtime_representation_mismatch
(Variant_coercion.VariantError
- {is_spread_context; error = Variant_coercion.TagName _}) ->
+ {is_spread_context; error = Variant_coercion.TagName _}) ->
let other_variant_text =
if is_spread_context then "the variant where this is spread"
else "the other variant"
@@ -2301,7 +2304,8 @@ let report_error ppf = function
fprintf ppf "@[Type parameters are not supported in variant type spreads.@]"
| Variant_spread_fail
(Variant_type_spread.DuplicateConstructor
- {variant_with_overlapping_constructor; overlapping_constructor_name}) ->
+ {variant_with_overlapping_constructor; overlapping_constructor_name})
+ ->
fprintf ppf
"@[Variant %s has a constructor named %s, but a constructor named %s \
already exists in the variant it's spread into.@ You cannot spread \
diff --git a/analysis/vendor/ml/typedtree.mli b/analysis/vendor/ml/typedtree.mli
index d51696122..910980a6e 100644
--- a/analysis/vendor/ml/typedtree.mli
+++ b/analysis/vendor/ml/typedtree.mli
@@ -16,9 +16,7 @@
(** Abstract syntax tree after typing *)
(** By comparison with {!Parsetree}:
- - Every {!Longindent.t} is accompanied by a resolved {!Path.t}.
-
-*)
+ - Every {!Longindent.t} is accompanied by a resolved {!Path.t}. *)
open Asttypes
open Types
@@ -69,19 +67,13 @@ and pattern_desc =
| Tpat_tuple of pattern list
(** (P1, ..., Pn)
- Invariant: n >= 2
- *)
+ Invariant: n >= 2 *)
| Tpat_construct of Longident.t loc * constructor_description * pattern list
- (** C []
- C P [P]
- C (P1, ..., Pn) [P1; ...; Pn]
- *)
+ (** C [] C P [P] C (P1, ..., Pn) [P1; ...; Pn] *)
| Tpat_variant of label * pattern option * row_desc ref
- (** `A (None)
- `A P (Some P)
+ (** `A (None) `A P (Some P)
- See {!Types.row_desc} for an explanation of the last parameter.
- *)
+ See {!Types.row_desc} for an explanation of the last parameter. *)
| Tpat_record of
(Longident.t loc * label_description * pattern) list * closed_flag
(** { l1=P1; ...; ln=Pn } (flag = Closed)
@@ -93,9 +85,8 @@ and pattern_desc =
| Tpat_or of pattern * pattern * row_desc option
(** P1 | P2
- [row_desc] = [Some _] when translating [Ppat_type _],
- [None] otherwise.
- *)
+ [row_desc] = [Some _] when translating [Ppat_type _], [None]
+ otherwise. *)
| Tpat_lazy of pattern (** lazy P *)
and expression = {
@@ -110,75 +101,57 @@ and expression = {
and exp_extra =
| Texp_constraint of core_type (** E : T *)
| Texp_coerce of core_type option * core_type
- (** E :> T [Texp_coerce (None, T)]
- E : T0 :> T [Texp_coerce (Some T0, T)]
- *)
+ (** E :> T [Texp_coerce (None, T)] E : T0 :> T [Texp_coerce (Some T0, T)]
+ *)
| Texp_open of override_flag * Path.t * Longident.t loc * Env.t
- (** let open[!] M in [Texp_open (!, P, M, env)]
- where [env] is the environment after opening [P]
- *)
+ (** let open[!] M in [Texp_open (!, P, M, env)] where [env] is the
+ environment after opening [P] *)
| Texp_poly of core_type option (** Used for method bodies. *)
- | Texp_newtype of string (** fun (type t) -> *)
+ | Texp_newtype of string (** fun (type t) -> *)
and expression_desc =
| Texp_ident of Path.t * Longident.t loc * Types.value_description
- (** x
- M.x
- *)
+ (** x M.x *)
| Texp_constant of constant (** 1, 'a', "true", 1.0, 1l, 1L, 1n *)
| Texp_let of rec_flag * value_binding list * expression
- (** let P1 = E1 and ... and Pn = EN in E (flag = Nonrecursive)
- let rec P1 = E1 and ... and Pn = EN in E (flag = Recursive)
- *)
+ (** let P1 = E1 and ... and Pn = EN in E (flag = Nonrecursive) let rec P1
+ = E1 and ... and Pn = EN in E (flag = Recursive) *)
| Texp_function of {
arg_label: arg_label;
param: Ident.t;
cases: case list;
partial: partial;
}
- (** [Pexp_fun] and [Pexp_function] both translate to [Texp_function].
- See {!Parsetree} for more details.
+ (** [Pexp_fun] and [Pexp_function] both translate to [Texp_function]. See
+ {!Parsetree} for more details.
- [param] is the identifier that is to be used to name the
- parameter of the function.
+ [param] is the identifier that is to be used to name the parameter of
+ the function.
- partial =
- [Partial] if the pattern match is partial
- [Total] otherwise.
- *)
+ partial = [Partial] if the pattern match is partial [Total] otherwise.
+ *)
| Texp_apply of expression * (arg_label * expression option) list
(** E0 ~l1:E1 ... ~ln:En
- The expression can be None if the expression is abstracted over
- this argument. It currently appears when a label is applied.
+ The expression can be None if the expression is abstracted over this
+ argument. It currently appears when a label is applied.
- For example:
- let f x ~y = x + y in
- f ~y:3
+ For example: let f x ~y = x + y in f ~y:3
- The resulting typedtree for the application is:
- Texp_apply (Texp_ident "f/1037",
- [(Nolabel, None);
- (Labelled "y", Some (Texp_constant Const_int 3))
- ])
- *)
+ The resulting typedtree for the application is: Texp_apply (Texp_ident
+ "f/1037",
+ [(Nolabel, None); (Labelled "y", Some (Texp_constant Const_int 3)) ])
+ *)
| Texp_match of expression * case list * case list * partial
- (** match E0 with
- | P1 -> E1
- | P2 -> E2
- | exception P3 -> E3
+ (** match E0 with | P1 -> E1 | P2 -> E2 | exception P3 -> E3
- [Texp_match (E0, [(P1, E1); (P2, E2)], [(P3, E3)], _)]
- *)
+ [Texp_match (E0, [(P1, E1); (P2, E2)], [(P3, E3)], _)] *)
| Texp_try of expression * case list
(** try E with P1 -> E1 | ... | PN -> EN *)
| Texp_tuple of expression list (** (E1, ..., EN) *)
| Texp_construct of
Longident.t loc * constructor_description * expression list
- (** C []
- C E [E]
- C (E1, ..., En) [E1;...;En]
- *)
+ (** C [] C E [E] C (E1, ..., En) [E1;...;En] *)
| Texp_variant of label * expression option
| Texp_record of {
fields: (Types.label_description * record_label_definition) array;
@@ -244,7 +217,8 @@ and module_expr = {
(** Annotations for [Tmod_constraint]. *)
and module_type_constraint =
| Tmodtype_implicit
- (** The module type constraint has been synthesized during typechecking. *)
+ (** The module type constraint has been synthesized during typechecking.
+ *)
| Tmodtype_explicit of module_type
(** The module type was in the source file. *)
@@ -255,9 +229,8 @@ and module_expr_desc =
| Tmod_apply of module_expr * module_expr * module_coercion
| Tmod_constraint of
module_expr * Types.module_type * module_type_constraint * module_coercion
- (** ME (constraint = Tmodtype_implicit)
- (ME : MT) (constraint = Tmodtype_explicit MT)
- *)
+ (** ME (constraint = Tmodtype_implicit) (ME : MT) (constraint =
+ Tmodtype_explicit MT) *)
| Tmod_unpack of expression * Types.module_type
and structure = {
diff --git a/analysis/vendor/ml/typeopt.ml b/analysis/vendor/ml/typeopt.ml
index a18e77be7..94d1beae8 100644
--- a/analysis/vendor/ml/typeopt.ml
+++ b/analysis/vendor/ml/typeopt.ml
@@ -35,10 +35,8 @@ let scrape_ty env ty =
let scrape env ty = (scrape_ty env ty).desc
-(** [Types.constructor_description]
- records the type at the definition type so for ['a option]
- it will always be [Tvar]
-*)
+(** [Types.constructor_description] records the type at the definition type so
+ for ['a option] it will always be [Tvar] *)
let rec type_cannot_contain_undefined (typ : Types.type_expr) (env : Env.t) =
match scrape env typ with
| Tconstr (p, _, _) -> (
@@ -145,15 +143,15 @@ let classify env ty =
| Tarrow _ | Ttuple _ | Tpackage _ | Tobject _ | Tnil | Tvariant _ -> Addr
| Tlink _ | Tsubst _ | Tpoly _ | Tfield _ -> assert false
-(** Whether a forward block is needed for a lazy thunk on a value, i.e.
- if the value can be represented as a float/forward/lazy *)
+(** Whether a forward block is needed for a lazy thunk on a value, i.e. if the
+ value can be represented as a float/forward/lazy *)
let lazy_val_requires_forward env ty =
match classify env ty with
| Any | Lazy -> true
| Float (*-> Config.flat_float_array*) | Addr | Int -> false
(** The compilation of the expression [lazy e] depends on the form of e:
- constants, floats and identifiers are optimized. The optimization must be
+ constants, floats and identifiers are optimized. The optimization must be
taken into account when determining whether a recursive binding is safe. *)
let classify_lazy_argument :
Typedtree.expression ->
diff --git a/analysis/vendor/ml/types.mli b/analysis/vendor/ml/types.mli
index e279cf6ec..90c983c40 100644
--- a/analysis/vendor/ml/types.mli
+++ b/analysis/vendor/ml/types.mli
@@ -18,8 +18,7 @@
(** [Types] defines the representation of types and declarations (that is, the
content of module signatures).
- CMI files are made of marshalled types.
-*)
+ CMI files are made of marshalled types. *)
open Asttypes
(** Asttypes exposes basic definitions shared both by Parsetree and Types. *)
@@ -31,13 +30,12 @@ type type_expr = {mutable desc: type_desc; mutable level: int; id: int}
find in OCaml. [type_expr] wraps this with some annotations.
The [level] field tracks the level of polymorphism associated to a type,
- guiding the generalization algorithm.
- Put shortly, when referring to a type in a given environment, both the type
- and the environment have a level. If the type has an higher level, then it
- can be considered fully polymorphic (type variables will be printed as
- ['a]), otherwise it'll be weakly polymorphic, or non generalized (type
- variables printed as ['_a]).
- See [http://okmij.org/ftp/ML/generalization.html] for more information.
+ guiding the generalization algorithm. Put shortly, when referring to a type
+ in a given environment, both the type and the environment have a level. If
+ the type has an higher level, then it can be considered fully polymorphic
+ (type variables will be printed as ['a]), otherwise it'll be weakly
+ polymorphic, or non generalized (type variables printed as ['_a]). See
+ [http://okmij.org/ftp/ML/generalization.html] for more information.
Note about [type_declaration]: one should not make the confusion between
[type_expr] and [type_declaration].
@@ -50,63 +48,56 @@ type type_expr = {mutable desc: type_desc; mutable level: int; id: int}
Also, as the type system of OCaml is generative, a [type_declaration] can
have the side-effect of introducing a new type constructor, different from
- all other known types.
- Whereas [type_expr] is a pure construct which allows referring to existing
- types.
+ all other known types. Whereas [type_expr] is a pure construct which allows
+ referring to existing types.
- Note on mutability: TBD.
- *)
+ Note on mutability: TBD. *)
and type_desc =
| Tvar of string option
- (** [Tvar (Some "a")] ==> ['a] or ['_a]
- [Tvar None] ==> [_] *)
+ (** [Tvar (Some "a")] ==> ['a] or ['_a] [Tvar None] ==> [_] *)
| Tarrow of arg_label * type_expr * type_expr * commutable
(** [Tarrow (Nolabel, e1, e2, c)] ==> [e1 -> e2]
- [Tarrow (Labelled "l", e1, e2, c)] ==> [l:e1 -> e2]
- [Tarrow (Optional "l", e1, e2, c)] ==> [?l:e1 -> e2]
+ [Tarrow (Labelled "l", e1, e2, c)] ==> [l:e1 -> e2]
+ [Tarrow (Optional "l", e1, e2, c)] ==> [?l:e1 -> e2]
- See [commutable] for the last argument. *)
+ See [commutable] for the last argument. *)
| Ttuple of type_expr list (** [Ttuple [t1;...;tn]] ==> [(t1 * ... * tn)] *)
| Tconstr of Path.t * type_expr list * abbrev_memo ref
- (** [Tconstr (`A.B.t', [t1;...;tn], _)] ==> [(t1,...,tn) A.B.t]
- The last parameter keep tracks of known expansions, see [abbrev_memo]. *)
+ (** [Tconstr (`A.B.t', [t1;...;tn], _)] ==> [(t1,...,tn) A.B.t] The last
+ parameter keep tracks of known expansions, see [abbrev_memo]. *)
| Tobject of type_expr * (Path.t * type_expr list) option ref
(** [Tobject (`f1:t1;...;fn: tn', `None')] ==> [< f1: t1; ...; fn: tn >]
- f1, fn are represented as a linked list of types using Tfield and Tnil
- constructors.
+ f1, fn are represented as a linked list of types using Tfield and Tnil
+ constructors.
- [Tobject (_, `Some (`A.ct', [t1;...;tn]')] ==> [(t1, ..., tn) A.ct].
- where A.ct is the type of some class.
+ [Tobject (_, `Some (`A.ct', [t1;...;tn]')] ==> [(t1, ..., tn) A.ct].
+ where A.ct is the type of some class.
- There are also special cases for so-called "class-types", cf. [Typeclass]
- and [Ctype.set_object_name]:
+ There are also special cases for so-called "class-types", cf.
+ [Typeclass] and [Ctype.set_object_name]:
- [Tobject (Tfield(_,_,...(Tfield(_,_,rv)...),
- Some(`A.#ct`, [rv;t1;...;tn])]
- ==> [(t1, ..., tn) #A.ct]
- [Tobject (_, Some(`A.#ct`, [Tnil;t1;...;tn])] ==> [(t1, ..., tn) A.ct]
+ [Tobject (Tfield(_,_,...(Tfield(_,_,rv)...), Some(`A.#ct`,
+ [rv;t1;...;tn])] ==> [(t1, ..., tn) #A.ct]
+ [Tobject (_, Some(`A.#ct`, [Tnil;t1;...;tn])] ==> [(t1, ..., tn) A.ct]
- where [rv] is the hidden row variable.
- *)
+ where [rv] is the hidden row variable. *)
| Tfield of string * field_kind * type_expr * type_expr
(** [Tfield ("foo", Fpresent, t, ts)] ==> [<...; foo : t; ts>] *)
| Tnil (** [Tnil] ==> [<...; >] *)
| Tlink of type_expr (** Indirection used by unification engine. *)
| Tsubst of type_expr (* for copying *)
(** [Tsubst] is used temporarily to store information in low-level
- functions manipulating representation of types, such as
- instantiation or copy.
- This constructor should not appear outside of these cases. *)
+ functions manipulating representation of types, such as instantiation
+ or copy. This constructor should not appear outside of these cases. *)
| Tvariant of row_desc
(** Representation of polymorphic variants, see [row_desc]. *)
| Tunivar of string option
- (** Occurrence of a type variable introduced by a
- forall quantifier / [Tpoly]. *)
+ (** Occurrence of a type variable introduced by a forall quantifier /
+ [Tpoly]. *)
| Tpoly of type_expr * type_expr list
- (** [Tpoly (ty,tyl)] ==> ['a1... 'an. ty],
- where 'a1 ... 'an are names given to types in tyl
- and occurrences of those types in ty. *)
+ (** [Tpoly (ty,tyl)] ==> ['a1... 'an. ty], where 'a1 ... 'an are names
+ given to types in tyl and occurrences of those types in ty. *)
| Tpackage of Path.t * Longident.t list * type_expr list
(** Type of a first-class module (a.k.a package). *)
@@ -157,24 +148,21 @@ and row_field =
alias. This is done for performance purposes.
For instance, when defining [type 'a pair = 'a * 'a], when one refers to an
- ['a pair], it is just a shortcut for the ['a * 'a] type.
- This expansion will be stored in the [abbrev_memo] of the corresponding
- [Tconstr] node.
+ ['a pair], it is just a shortcut for the ['a * 'a] type. This expansion will
+ be stored in the [abbrev_memo] of the corresponding [Tconstr] node.
In practice, [abbrev_memo] behaves like list of expansions with a mutable
tail.
- Note on marshalling: [abbrev_memo] must not appear in saved types.
- [Btype], with [cleanup_abbrev] and [memo], takes care of tracking and
- removing abbreviations.
-*)
+ Note on marshalling: [abbrev_memo] must not appear in saved types. [Btype],
+ with [cleanup_abbrev] and [memo], takes care of tracking and removing
+ abbreviations. *)
and abbrev_memo =
| Mnil (** No known abbreviation *)
| Mcons of private_flag * Path.t * type_expr * type_expr * abbrev_memo
- (** Found one abbreviation.
- A valid abbreviation should be at least as visible and reachable by the
- same path.
- The first expression is the abbreviation and the second the expansion. *)
+ (** Found one abbreviation. A valid abbreviation should be at least as
+ visible and reachable by the same path. The first expression is the
+ abbreviation and the second the expansion. *)
| Mlink of abbrev_memo ref
(** Abbreviations can be found after this indirection *)
@@ -182,25 +170,20 @@ and field_kind = Fvar of field_kind option ref | Fpresent | Fabsent
(** [commutable] is a flag appended to every arrow type.
- When typing an application, if the type of the functional is
- known, its type is instantiated with [Cok] arrows, otherwise as
- [Clink (ref Cunknown)].
+ When typing an application, if the type of the functional is known, its type
+ is instantiated with [Cok] arrows, otherwise as [Clink (ref Cunknown)].
- When the type is not known, the application will be used to infer
- the actual type. This is fragile in presence of labels where
- there is no principal type.
+ When the type is not known, the application will be used to infer the actual
+ type. This is fragile in presence of labels where there is no principal
+ type.
- Two incompatible applications relying on [Cunknown] arrows will
- trigger an error.
+ Two incompatible applications relying on [Cunknown] arrows will trigger an
+ error.
- let f g =
- g ~a:() ~b:();
- g ~b:() ~a:();
+ let f g = g ~a:() ~b:(); g ~b:() ~a:();
- Error: This function is applied to arguments
- in an order different from other calls.
- This is only allowed when the real type is known.
-*)
+ Error: This function is applied to arguments in an order different from
+ other calls. This is only allowed when the real type is known. *)
and commutable = Cok | Cunknown | Clink of commutable ref
module TypeOps : sig
diff --git a/analysis/vendor/ml/typetexp.ml b/analysis/vendor/ml/typetexp.ml
index e8eefc280..99bc353f2 100644
--- a/analysis/vendor/ml/typetexp.ml
+++ b/analysis/vendor/ml/typetexp.ml
@@ -967,14 +967,12 @@ let report_error env ppf = function
Printtyp.report_unification_error ppf Env.empty trace
(function
| ppf -> fprintf ppf "This type")
- (function
- | ppf -> fprintf ppf "should be an instance of type")
+ (function ppf -> fprintf ppf "should be an instance of type")
| Alias_type_mismatch trace ->
Printtyp.report_unification_error ppf Env.empty trace
(function
| ppf -> fprintf ppf "This alias is bound to type")
- (function
- | ppf -> fprintf ppf "but is used as an instance of type")
+ (function ppf -> fprintf ppf "but is used as an instance of type")
| Present_has_conjunction l ->
fprintf ppf "The present constructor %s has a conjunctive type" l
| Present_has_no_type l ->
diff --git a/analysis/vendor/ml/variant_coercion.ml b/analysis/vendor/ml/variant_coercion.ml
index 136125f57..7fbf96228 100644
--- a/analysis/vendor/ml/variant_coercion.ml
+++ b/analysis/vendor/ml/variant_coercion.ml
@@ -40,7 +40,8 @@ let variant_has_same_runtime_representation_as_target ~(target_path : Path.t)
path_same Predef.path_string
(* unboxed Number(float) :> float *)
|| path_same Predef.path_float
- || (* unboxed BigInt(bigint) :> bigint *)
+ ||
+ (* unboxed BigInt(bigint) :> bigint *)
path_same Predef.path_bigint
| Cstr_tuple [] -> (
(* Check that @as payloads match with the target path to coerce to.
diff --git a/analysis/vendor/res_syntax/reactjs_jsx_v3.ml b/analysis/vendor/res_syntax/reactjs_jsx_v3.ml
index 46de98bf4..9e4006e79 100644
--- a/analysis/vendor/res_syntax/reactjs_jsx_v3.ml
+++ b/analysis/vendor/res_syntax/reactjs_jsx_v3.ml
@@ -157,19 +157,19 @@ let get_props_attr payload =
match payload with
| Some
(PStr
- ({
- pstr_desc =
- Pstr_eval ({pexp_desc = Pexp_record (record_fields, None)}, _);
- }
- :: _rest)) ->
+ ({
+ pstr_desc =
+ Pstr_eval ({pexp_desc = Pexp_record (record_fields, None)}, _);
+ }
+ :: _rest)) ->
List.fold_left get_props_name_value default_props record_fields
| Some
(PStr
- ({
- pstr_desc =
- Pstr_eval ({pexp_desc = Pexp_ident {txt = Lident "props"}}, _);
- }
- :: _rest)) ->
+ ({
+ pstr_desc =
+ Pstr_eval ({pexp_desc = Pexp_ident {txt = Lident "props"}}, _);
+ }
+ :: _rest)) ->
{props_name = "props"}
| Some (PStr ({pstr_desc = Pstr_eval (_, _); pstr_loc} :: _rest)) ->
Jsx_common.raise_error ~loc:pstr_loc
diff --git a/analysis/vendor/res_syntax/res_comments_table.ml b/analysis/vendor/res_syntax/res_comments_table.ml
index b531fde32..7ca8c9639 100644
--- a/analysis/vendor/res_syntax/res_comments_table.ml
+++ b/analysis/vendor/res_syntax/res_comments_table.ml
@@ -606,15 +606,15 @@ and walk_list : ?prev_loc:Location.t -> node list -> t -> Comment.t list -> unit
* closing token of a "list-of-things". This routine visits the whole list,
* but returns any remaining comments that likely fall after the whole list. *)
and visit_list_but_continue_with_remaining_comments :
- 'node.
- ?prev_loc:Location.t ->
- newline_delimited:bool ->
- get_loc:('node -> Location.t) ->
- walk_node:('node -> t -> Comment.t list -> unit) ->
- 'node list ->
- t ->
- Comment.t list ->
- Comment.t list =
+ 'node.
+ ?prev_loc:Location.t ->
+ newline_delimited:bool ->
+ get_loc:('node -> Location.t) ->
+ walk_node:('node -> t -> Comment.t list -> unit) ->
+ 'node list ->
+ t ->
+ Comment.t list ->
+ Comment.t list =
fun ?prev_loc ~newline_delimited ~get_loc ~walk_node l t comments ->
let open Location in
match l with
diff --git a/analysis/vendor/res_syntax/res_core.ml b/analysis/vendor/res_syntax/res_core.ml
index 48023f378..d69b98e9b 100644
--- a/analysis/vendor/res_syntax/res_core.ml
+++ b/analysis/vendor/res_syntax/res_core.ml
@@ -319,7 +319,7 @@ let is_es6_arrow_expression ~in_ternary p =
* || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2)
* We'll arrive at the outer rparen just before the =>.
* This is not an es6 arrow.
- * *)
+ *)
false
| _ -> (
Parser.next_unsafe state;
@@ -513,12 +513,10 @@ let wrap_type_annotation ~loc newtypes core_type body =
in
(exp, typ)
-(**
- * process the occurrence of _ in the arguments of a function application
- * replace _ with a new variable, currently __x, in the arguments
- * return a wrapping function that wraps ((__x) => ...) around an expression
- * e.g. foo(_, 3) becomes (__x) => foo(__x, 3)
- *)
+(** * process the occurrence of _ in the arguments of a function application *
+ replace _ with a new variable, currently __x, in the arguments * return a
+ wrapping function that wraps ((__x) => ...) around an expression * e.g.
+ foo(_, 3) becomes (__x) => foo(__x, 3) *)
let process_underscore_application (p : Parser.t) args =
let exp_question = ref None in
let hidden_var = "__x" in
@@ -2385,7 +2383,7 @@ and parse_template_expr ?prefix p =
* }
*
* We want to give a nice error message in these cases
- * *)
+ *)
and over_parse_constrained_or_coerced_or_arrow_expression p expr =
match p.Parser.token with
| ColonGreaterThan -> parse_coerced_expr ~expr p
@@ -4236,17 +4234,17 @@ and parse_type_alias p typ =
| _ -> typ
(* type_parameter ::=
- * | type_expr
- * | ~ident: type_expr
- * | ~ident: type_expr=?
- *
- * note:
- * | attrs ~ident: type_expr -> attrs are on the arrow
- * | attrs type_expr -> attrs are here part of the type_expr
- *
- * dotted_type_parameter ::=
- * | . type_parameter
-*)
+ * | type_expr
+ * | ~ident: type_expr
+ * | ~ident: type_expr=?
+ *
+ * note:
+ * | attrs ~ident: type_expr -> attrs are on the arrow
+ * | attrs type_expr -> attrs are here part of the type_expr
+ *
+ * dotted_type_parameter ::=
+ * | . type_parameter
+ *)
and parse_type_parameter p =
let doc_attr : Parsetree.attributes =
match p.Parser.token with
diff --git a/analysis/vendor/res_syntax/res_parens.ml b/analysis/vendor/res_syntax/res_parens.ml
index bf946c315..b2db425d6 100644
--- a/analysis/vendor/res_syntax/res_parens.ml
+++ b/analysis/vendor/res_syntax/res_parens.ml
@@ -149,7 +149,8 @@ let sub_binary_expr_operand parent_operator child_operator =
|| prec_parent == prec_child
&& not
(ParsetreeViewer.flattenable_operators parent_operator child_operator)
- || (* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *)
+ ||
+ (* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *)
(parent_operator = "||" && child_operator = "&&")
let rhs_binary_expr_operand parent_operator rhs =
diff --git a/analysis/vendor/res_syntax/res_parser.ml b/analysis/vendor/res_syntax/res_parser.ml
index 424629092..5be349748 100644
--- a/analysis/vendor/res_syntax/res_parser.ml
+++ b/analysis/vendor/res_syntax/res_parser.ml
@@ -61,8 +61,8 @@ let module_comment_to_attribute_token comment =
Token.ModuleComment (loc, txt)
(* Advance to the next non-comment token and store any encountered comment
- * in the parser's state. Every comment contains the end position of its
- * previous token to facilite comment interleaving *)
+ * in the parser's state. Every comment contains the end position of its
+ * previous token to facilite comment interleaving *)
let rec next ?prev_end_pos p =
if p.token = Eof then assert false;
let prev_end_pos =
diff --git a/analysis/vendor/res_syntax/res_parsetree_viewer.ml b/analysis/vendor/res_syntax/res_parsetree_viewer.ml
index 35e02d872..37b2602a7 100644
--- a/analysis/vendor/res_syntax/res_parsetree_viewer.ml
+++ b/analysis/vendor/res_syntax/res_parsetree_viewer.ml
@@ -15,7 +15,7 @@ let arrow_type ?(arity = max_int) ct =
ptyp_attributes = [({txt = "bs"}, _)];
} ->
(* stop here, the uncurried attribute always indicates the beginning of an arrow function
- * e.g. `(. int) => (. int)` instead of `(. int, . int)` *)
+ * e.g. `(. int) => (. int)` instead of `(. int, . int)` *)
(attrs_before, List.rev acc, typ)
| {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs}
as return_type ->
diff --git a/analysis/vendor/res_syntax/res_parsetree_viewer.mli b/analysis/vendor/res_syntax/res_parsetree_viewer.mli
index d270e05e0..6df9bd80a 100644
--- a/analysis/vendor/res_syntax/res_parsetree_viewer.mli
+++ b/analysis/vendor/res_syntax/res_parsetree_viewer.mli
@@ -1,6 +1,6 @@
(* Restructures a nested tree of arrow types into its args & returnType
- * The parsetree contains: a => b => c => d, for printing purposes
- * we restructure the tree into (a, b, c) and its returnType d *)
+ * The parsetree contains: a => b => c => d, for printing purposes
+ * we restructure the tree into (a, b, c) and its returnType d *)
val arrow_type :
?arity:int ->
Parsetree.core_type ->
@@ -40,8 +40,8 @@ type if_condition_kind =
| IfLet of Parsetree.pattern * Parsetree.expression
(* if ... else if ... else ... is represented as nested expressions: if ... else { if ... }
- * The purpose of this function is to flatten nested ifs into one sequence.
- * Basically compute: ([if, else if, else if, else if], else) *)
+ * The purpose of this function is to flatten nested ifs into one sequence.
+ * Basically compute: ([if, else if, else if, else if], else) *)
val collect_if_expressions :
Parsetree.expression ->
(Location.t * if_condition_kind * Parsetree.expression) list
@@ -69,11 +69,11 @@ val fun_expr :
bool * Parsetree.attributes * fun_param_kind list * Parsetree.expression
(* example:
- * `makeCoordinate({
- * x: 1,
- * y: 2,
- * })`
- * Notice howe `({` and `})` "hug" or stick to each other *)
+ * `makeCoordinate({
+ * x: 1,
+ * y: 2,
+ * })`
+ * Notice howe `({` and `})` "hug" or stick to each other *)
val is_huggable_expression : Parsetree.expression -> bool
val is_huggable_pattern : Parsetree.pattern -> bool
diff --git a/analysis/vendor/res_syntax/res_scanner.ml b/analysis/vendor/res_syntax/res_scanner.ml
index 5d823a737..c57c0a642 100644
--- a/analysis/vendor/res_syntax/res_scanner.ml
+++ b/analysis/vendor/res_syntax/res_scanner.ml
@@ -248,7 +248,7 @@ let scan_number scanner =
in
scan_digits scanner ~base;
- (* *)
+ (* *)
let is_float =
if '.' == scanner.ch then (
next scanner;
@@ -559,8 +559,14 @@ let scan_single_line_comment scanner =
let scan_multi_line_comment scanner =
(* assumption: we're only ever using this helper in `scan` after detecting a comment *)
- let doc_comment = peek2 scanner = '*' && peek3 scanner <> '/' (* no /**/ *) in
- let standalone = doc_comment && peek3 scanner = '*' (* /*** *) in
+ let doc_comment =
+ peek2 scanner = '*' && peek3 scanner <> '/'
+ (* no /**/ *)
+ in
+ let standalone =
+ doc_comment && peek3 scanner = '*'
+ (* /*** *)
+ in
let content_start_off =
scanner.offset + if doc_comment then if standalone then 4 else 3 else 2
in
diff --git a/client/src/commands.ts b/client/src/commands.ts
index a26e07897..c75a85564 100644
--- a/client/src/commands.ts
+++ b/client/src/commands.ts
@@ -15,13 +15,13 @@ export const codeAnalysisWithReanalyze = (
diagnosticsCollection: DiagnosticCollection,
diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap,
outputChannel: OutputChannel,
- codeAnalysisRunningStatusBarItem: StatusBarItem
+ codeAnalysisRunningStatusBarItem: StatusBarItem,
) => {
runCodeAnalysisWithReanalyze(
targetDir,
diagnosticsCollection,
diagnosticsResultCodeActions,
outputChannel,
- codeAnalysisRunningStatusBarItem
+ codeAnalysisRunningStatusBarItem,
);
};
diff --git a/client/src/commands/code_analysis.ts b/client/src/commands/code_analysis.ts
index d8eeaec20..e739573f0 100644
--- a/client/src/commands/code_analysis.ts
+++ b/client/src/commands/code_analysis.ts
@@ -71,7 +71,7 @@ let classifyMessage = (msg: string) => {
let resultsToDiagnostics = (
results: DiagnosticsResultFormat,
- diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap
+ diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap,
): {
diagnosticsMap: Map;
} => {
@@ -99,7 +99,7 @@ let resultsToDiagnostics = (
let diagnostic = new Diagnostic(
issueLocationRange,
diagnosticText,
- DiagnosticSeverity.Warning
+ DiagnosticSeverity.Warning,
);
// Don't show reports about optional arguments.
@@ -132,11 +132,11 @@ let resultsToDiagnostics = (
new Range(
new Position(line, character),
- new Position(line, character)
+ new Position(line, character),
),
// reanalyze seems to add two extra spaces at the start of the line
// content to replace.
- text
+ text,
);
codeAction.edit = codeActionEdit;
@@ -166,9 +166,9 @@ let resultsToDiagnostics = (
Uri.parse(item.file),
new Range(
new Position(item.range[0], item.range[1]),
- new Position(item.range[2], item.range[3])
+ new Position(item.range[2], item.range[3]),
),
- ""
+ "",
);
codeAction.command = {
@@ -203,13 +203,13 @@ export const runCodeAnalysisWithReanalyze = (
diagnosticsCollection: DiagnosticCollection,
diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap,
outputChannel: OutputChannel,
- codeAnalysisRunningStatusBarItem: StatusBarItem
+ codeAnalysisRunningStatusBarItem: StatusBarItem,
) => {
let currentDocument = window.activeTextEditor.document;
let cwd = targetDir ?? path.dirname(currentDocument.uri.fsPath);
let projectRootPath: string | null = findProjectRootOfFileInDir(
- currentDocument.uri.fsPath
+ currentDocument.uri.fsPath,
);
// This little weird lookup is because in the legacy setup reanalyze needs to be
@@ -250,11 +250,11 @@ export const runCodeAnalysisWithReanalyze = (
// here.
if (e.includes("End_of_file")) {
window.showErrorMessage(
- `Something went wrong trying to run reanalyze. Please try cleaning and rebuilding your ReScript project.`
+ `Something went wrong trying to run reanalyze. Please try cleaning and rebuilding your ReScript project.`,
);
} else {
window.showErrorMessage(
- `Something went wrong trying to run reanalyze: '${e}'`
+ `Something went wrong trying to run reanalyze: '${e}'`,
);
}
});
@@ -270,7 +270,7 @@ export const runCodeAnalysisWithReanalyze = (
window
.showErrorMessage(
`Something went wrong when running the code analyzer.`,
- "See details in error log"
+ "See details in error log",
)
.then((_choice) => {
outputChannel.show();
@@ -278,12 +278,12 @@ export const runCodeAnalysisWithReanalyze = (
outputChannel.appendLine("\n\n>>>>");
outputChannel.appendLine(
- "Parsing JSON from reanalyze failed. The raw, invalid JSON can be reproduced by following the instructions below. Please run that command and report the issue + failing JSON on the extension bug tracker: https://github.com/rescript-lang/rescript-vscode/issues"
+ "Parsing JSON from reanalyze failed. The raw, invalid JSON can be reproduced by following the instructions below. Please run that command and report the issue + failing JSON on the extension bug tracker: https://github.com/rescript-lang/rescript-vscode/issues",
);
outputChannel.appendLine(
`> To reproduce, run "${binaryPath} ${opts.join(
- " "
- )}" in directory: "${cwd}"`
+ " ",
+ )}" in directory: "${cwd}"`,
);
outputChannel.appendLine("\n");
}
@@ -297,7 +297,7 @@ export const runCodeAnalysisWithReanalyze = (
let { diagnosticsMap } = resultsToDiagnostics(
json,
- diagnosticsResultCodeActions
+ diagnosticsResultCodeActions,
);
// This smoothens the experience of the diagnostics updating a bit by
diff --git a/client/src/commands/create_interface.ts b/client/src/commands/create_interface.ts
index cfef3f85c..929aea1ea 100644
--- a/client/src/commands/create_interface.ts
+++ b/client/src/commands/create_interface.ts
@@ -25,7 +25,7 @@ export const createInterface = (client: LanguageClient) => {
.showInformationMessage(
"Interface file already exists. Do you want to overwrite it?",
"Yes",
- "No"
+ "No",
)
.then((answer) => {
if (answer === "Yes") {
diff --git a/client/src/commands/dump_debug.ts b/client/src/commands/dump_debug.ts
index 9b41c4a9b..a845848c3 100644
--- a/client/src/commands/dump_debug.ts
+++ b/client/src/commands/dump_debug.ts
@@ -68,7 +68,7 @@ function runDebugDump({
p.stderr?.on("data", (e) => {
window.showErrorMessage(
- `Something went wrong trying to run debug dump: '${e}'`
+ `Something went wrong trying to run debug dump: '${e}'`,
);
resolve(e.toString());
});
@@ -124,7 +124,7 @@ export const dumpDebugRetrigger = () => {
export const dumpDebug = async (
context: ExtensionContext,
- statusBarItem: StatusBarItem
+ statusBarItem: StatusBarItem,
) => {
const editor = window.activeTextEditor;
@@ -139,7 +139,7 @@ export const dumpDebug = async (
let projectRootPath: string | null = findProjectRootOfFileInDir(filePath);
const binaryPath = getBinaryPath(
"rescript-editor-analysis.exe",
- projectRootPath
+ projectRootPath,
);
if (binaryPath === null) {
window.showErrorMessage("Binary executable not found.");
@@ -150,7 +150,7 @@ export const dumpDebug = async (
debugCommands.map((d) => d.title),
{
title: "Select call type",
- }
+ },
);
const callType = debugCommands.find((d) => d.title === callTypeTitle);
@@ -242,7 +242,7 @@ export const dumpDebug = async (
character.toString(),
endLine.toString(),
endChar.toString(),
- currentFile
+ currentFile,
);
break;
}
@@ -251,7 +251,7 @@ export const dumpDebug = async (
currentFile,
line.toString(),
character.toString(),
- "add-missing-cases" // TODO: Make selectable
+ "add-missing-cases", // TODO: Make selectable
);
break;
}
diff --git a/client/src/commands/switch_impl_intf.ts b/client/src/commands/switch_impl_intf.ts
index 9213ca1a3..b8f9ec968 100644
--- a/client/src/commands/switch_impl_intf.ts
+++ b/client/src/commands/switch_impl_intf.ts
@@ -19,7 +19,7 @@ export const switchImplIntf = async (client: LanguageClient) => {
if (!(isIntf || isImpl)) {
await window.showInformationMessage(
- "This command only can run on *.res or *.resi files."
+ "This command only can run on *.res or *.resi files.",
);
return;
}
@@ -37,7 +37,7 @@ export const switchImplIntf = async (client: LanguageClient) => {
// if interface doesn't exist, ask the user before creating.
const selection = await window.showInformationMessage(
"Do you want to create an interface *.resi?",
- ...["No", "Yes"]
+ ...["No", "Yes"],
);
if (selection !== "Yes") return;
diff --git a/client/src/extension.ts b/client/src/extension.ts
index 350d88718..5778d0c7e 100644
--- a/client/src/extension.ts
+++ b/client/src/extension.ts
@@ -1,366 +1,366 @@
-import * as path from "path";
-import {
- workspace,
- ExtensionContext,
- commands,
- languages,
- window,
- StatusBarAlignment,
- Uri,
- Range,
- Position,
- CodeAction,
- WorkspaceEdit,
- CodeActionKind,
- Diagnostic,
-} from "vscode";
-
-import {
- LanguageClient,
- LanguageClientOptions,
- ServerOptions,
- State,
- TransportKind,
-} from "vscode-languageclient/node";
-
-import * as customCommands from "./commands";
-import {
- DiagnosticsResultCodeActionsMap,
- statusBarItem,
-} from "./commands/code_analysis";
-
-let client: LanguageClient;
-
-// let taskProvider = tasks.registerTaskProvider('Run ReScript build', {
-// provideTasks: () => {
-// // if (!rakePromise) {
-// // rakePromise = getRakeTasks();
-// // }
-// // return rakePromise;
-
-// // taskDefinition: TaskDefinition,
-// // scope: WorkspaceFolder | TaskScope.Global | TaskScope.Workspace,
-// // name: string,
-// // source: string,
-// // execution ?: ProcessExecution | ShellExecution | CustomExecution,
-// // problemMatchers ?: string | string[]
-// return [
-// new Task(
-// {
-// type: 'bsb',
-// },
-// TaskScope.Workspace,
-// // definition.task,
-// 'build and watch',
-// 'bsb',
-// new ShellExecution(
-// // `./node_modules/.bin/bsb -make-world -w`
-// `pwd`
-// ),
-// "Hello"
-// )
-// ]
-// },
-// resolveTask(_task: Task): Task | undefined {
-// // const task = _task.definition.task;
-// // // A Rake task consists of a task and an optional file as specified in RakeTaskDefinition
-// // // Make sure that this looks like a Rake task by checking that there is a task.
-// // if (task) {
-// // // resolveTask requires that the same definition object be used.
-// // const definition: RakeTaskDefinition = _task.definition;
-// // return new Task(
-// // definition,
-// // definition.task,
-// // 'rake',
-// // new vscode.ShellExecution(`rake ${definition.task}`)
-// // );
-// // }
-// return undefined;
-// }
-// });
-
-export function activate(context: ExtensionContext) {
- let outputChannel = window.createOutputChannel(
- "ReScript Language Server",
- "rescript"
- );
-
- function createLanguageClient() {
- // The server is implemented in node
- let serverModule = context.asAbsolutePath(
- path.join("server", "out", "cli.js")
- );
- // The debug options for the server
- // --inspect=6009: runs the server in Node's Inspector mode so VS Code can attach to the server for debugging
- let debugOptions = { execArgv: ["--nolazy", "--inspect=6009"] };
-
- // If the extension is launched in debug mode then the debug server options are used
- // Otherwise the run options are used
- let serverOptions: ServerOptions = {
- run: {
- module: serverModule,
- args: ["--node-ipc"],
- transport: TransportKind.ipc,
- },
- debug: {
- module: serverModule,
- args: ["--node-ipc"],
- transport: TransportKind.ipc,
- options: debugOptions,
- },
- };
-
- // Options to control the language client
- let clientOptions: LanguageClientOptions = {
- documentSelector: [{ scheme: "file", language: "rescript" }],
- // We'll send the initial configuration in here, but this might be
- // problematic because every consumer of the LS will need to mimic this.
- // We'll leave it like this for now, but might be worth revisiting later on.
- initializationOptions: {
- extensionConfiguration: workspace.getConfiguration("rescript.settings"),
-
- // Keep this in sync with the `extensionClientCapabilities` type in the
- // server.
- extensionClientCapabilities: {
- supportsMarkdownLinks: true,
- },
- },
- outputChannel,
- markdown: {
- isTrusted: true,
- },
- };
-
- const client = new LanguageClient(
- "ReScriptLSP",
- "ReScript Language Server",
- serverOptions,
- clientOptions
- );
-
- // This sets up a listener that, if we're in code analysis mode, triggers
- // code analysis as the LS server reports that ReScript compilation has
- // finished. This is needed because code analysis must wait until
- // compilation has finished, and the most reliable source for that is the LS
- // server, that already keeps track of when the compiler finishes in order to
- // other provide fresh diagnostics.
- context.subscriptions.push(
- client.onDidChangeState(({ newState }) => {
- if (newState === State.Running) {
- context.subscriptions.push(
- client.onNotification("rescript/compilationFinished", () => {
- if (inCodeAnalysisState.active === true) {
- customCommands.codeAnalysisWithReanalyze(
- inCodeAnalysisState.activatedFromDirectory,
- diagnosticsCollection,
- diagnosticsResultCodeActions,
- outputChannel,
- codeAnalysisRunningStatusBarItem
- );
- }
- })
- );
- }
- })
- );
-
- return client;
- }
-
- // Create the language client and start the client.
- client = createLanguageClient();
-
- // Create a custom diagnostics collection, for cases where we want to report
- // diagnostics programatically from inside of the extension. The reason this
- // is separate from the diagnostics provided by the LS server itself is that
- // this should be possible to clear independently of the other diagnostics
- // coming from the ReScript compiler.
- let diagnosticsCollection = languages.createDiagnosticCollection("rescript");
-
- // This map will hold code actions produced by the code analysis, in a
- // format that's cheap to look up.
- let diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap = new Map();
- let codeAnalysisRunningStatusBarItem = window.createStatusBarItem(
- StatusBarAlignment.Right
- );
-
- let debugDumpStatusBarItem = window.createStatusBarItem(
- StatusBarAlignment.Right
- );
-
- let inCodeAnalysisState: {
- active: boolean;
- activatedFromDirectory: string | null;
- } = { active: false, activatedFromDirectory: null };
-
- // This code actions provider yields the code actions potentially extracted
- // from the code analysis to the editor.
- languages.registerCodeActionsProvider("rescript", {
- async provideCodeActions(document, rangeOrSelection) {
- let availableActions =
- diagnosticsResultCodeActions.get(document.uri.fsPath) ?? [];
-
- const allRemoveActionEdits = availableActions.filter(
- ({ codeAction }) => codeAction.title === "Remove unused"
- );
-
- const actions: CodeAction[] = availableActions
- .filter(
- ({ range }) =>
- range.contains(rangeOrSelection) || range.isEqual(rangeOrSelection)
- )
- .map(({ codeAction }) => codeAction);
-
- if (allRemoveActionEdits.length > 0) {
- const removeAllCodeAction = new CodeAction("Remove all unused in file");
- const edit = new WorkspaceEdit();
- allRemoveActionEdits.forEach((subEdit) => {
- subEdit.codeAction.edit.entries().forEach(([uri, [textEdit]]) => {
- edit.replace(uri, textEdit.range, textEdit.newText);
- });
- });
- removeAllCodeAction.kind = CodeActionKind.RefactorRewrite;
- removeAllCodeAction.edit = edit;
- actions.push(removeAllCodeAction);
- }
-
- return actions;
- },
- });
-
- // Register custom commands
- commands.registerCommand("rescript-vscode.create_interface", () => {
- customCommands.createInterface(client);
- });
-
- commands.registerCommand(
- "rescript-vscode.clear_diagnostic",
- (diagnostic: Diagnostic) => {
- const editor = window.activeTextEditor;
- if (!editor) {
- return;
- }
-
- const document = editor.document;
- const diagnostics = diagnosticsCollection.get(document.uri);
- const newDiagnostics = diagnostics.filter((d) => d !== diagnostic);
- diagnosticsCollection.set(document.uri, newDiagnostics);
- }
- );
-
- commands.registerCommand("rescript-vscode.open_compiled", () => {
- customCommands.openCompiled(client);
- });
-
- commands.registerCommand("rescript-vscode.debug-dump-start", () => {
- customCommands.dumpDebug(context, debugDumpStatusBarItem);
- });
-
- commands.registerCommand("rescript-vscode.debug-dump-retrigger", () => {
- customCommands.dumpDebugRetrigger();
- });
-
- commands.registerCommand(
- "rescript-vscode.go_to_location",
- async (fileUri: string, startLine: number, startCol: number) => {
- await window.showTextDocument(Uri.parse(fileUri), {
- selection: new Range(
- new Position(startLine, startCol),
- new Position(startLine, startCol)
- ),
- });
- }
- );
-
- // Starts the code analysis mode.
- commands.registerCommand("rescript-vscode.start_code_analysis", () => {
- // Save the directory this first ran from, and re-use that when continuously
- // running the analysis. This is so that the target of the analysis does not
- // change on subsequent runs, if there are multiple ReScript projects open
- // in the editor.
- let currentDocument = window.activeTextEditor.document;
-
- inCodeAnalysisState.active = true;
-
- // Pointing reanalyze to the dir of the current file path is fine, because
- // reanalyze will walk upwards looking for a bsconfig.json in order to find
- // the correct project root.
- inCodeAnalysisState.activatedFromDirectory = path.dirname(
- currentDocument.uri.fsPath
- );
-
- codeAnalysisRunningStatusBarItem.command =
- "rescript-vscode.stop_code_analysis";
- codeAnalysisRunningStatusBarItem.show();
- statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem);
-
- customCommands.codeAnalysisWithReanalyze(
- inCodeAnalysisState.activatedFromDirectory,
- diagnosticsCollection,
- diagnosticsResultCodeActions,
- outputChannel,
- codeAnalysisRunningStatusBarItem
- );
- });
-
- commands.registerCommand("rescript-vscode.stop_code_analysis", () => {
- inCodeAnalysisState.active = false;
- inCodeAnalysisState.activatedFromDirectory = null;
-
- diagnosticsCollection.clear();
- diagnosticsResultCodeActions.clear();
-
- codeAnalysisRunningStatusBarItem.hide();
- });
-
- commands.registerCommand("rescript-vscode.switch-impl-intf", () => {
- customCommands.switchImplIntf(client);
- });
-
- commands.registerCommand("rescript-vscode.restart_language_server", () => {
- client.stop().then(() => {
- client = createLanguageClient();
- client.start();
- });
- });
-
- // Start the client. This will also launch the server
- client.start();
-
- // Restart the language client automatically when certain configuration
- // changes. These are typically settings that affect the capabilities of the
- // language client, and because of that requires a full restart.
- context.subscriptions.push(
- workspace.onDidChangeConfiguration(({ affectsConfiguration }) => {
- // Put any configuration that, when changed, requires a full restart of
- // the server here. That will typically be any configuration that affects
- // the capabilities declared by the server, since those cannot be updated
- // on the fly, and require a full restart with new capabilities set when
- // initializing.
- if (
- affectsConfiguration("rescript.settings.inlayHints") ||
- affectsConfiguration("rescript.settings.codeLens") ||
- affectsConfiguration("rescript.settings.signatureHelp") ||
- affectsConfiguration("rescript.settings.incrementalTypechecking") ||
- affectsConfiguration("rescript.settings.cache")
- ) {
- commands.executeCommand("rescript-vscode.restart_language_server");
- } else {
- // Send a general message that configuration has updated. Clients
- // interested can then pull the new configuration as they see fit.
- client
- .sendNotification("workspace/didChangeConfiguration")
- .catch((err) => {
- window.showErrorMessage(String(err));
- });
- }
- })
- );
-}
-
-export function deactivate(): Thenable | undefined {
- if (!client) {
- return undefined;
- }
- return client.stop();
-}
+import * as path from "path";
+import {
+ workspace,
+ ExtensionContext,
+ commands,
+ languages,
+ window,
+ StatusBarAlignment,
+ Uri,
+ Range,
+ Position,
+ CodeAction,
+ WorkspaceEdit,
+ CodeActionKind,
+ Diagnostic,
+} from "vscode";
+
+import {
+ LanguageClient,
+ LanguageClientOptions,
+ ServerOptions,
+ State,
+ TransportKind,
+} from "vscode-languageclient/node";
+
+import * as customCommands from "./commands";
+import {
+ DiagnosticsResultCodeActionsMap,
+ statusBarItem,
+} from "./commands/code_analysis";
+
+let client: LanguageClient;
+
+// let taskProvider = tasks.registerTaskProvider('Run ReScript build', {
+// provideTasks: () => {
+// // if (!rakePromise) {
+// // rakePromise = getRakeTasks();
+// // }
+// // return rakePromise;
+
+// // taskDefinition: TaskDefinition,
+// // scope: WorkspaceFolder | TaskScope.Global | TaskScope.Workspace,
+// // name: string,
+// // source: string,
+// // execution ?: ProcessExecution | ShellExecution | CustomExecution,
+// // problemMatchers ?: string | string[]
+// return [
+// new Task(
+// {
+// type: 'bsb',
+// },
+// TaskScope.Workspace,
+// // definition.task,
+// 'build and watch',
+// 'bsb',
+// new ShellExecution(
+// // `./node_modules/.bin/bsb -make-world -w`
+// `pwd`
+// ),
+// "Hello"
+// )
+// ]
+// },
+// resolveTask(_task: Task): Task | undefined {
+// // const task = _task.definition.task;
+// // // A Rake task consists of a task and an optional file as specified in RakeTaskDefinition
+// // // Make sure that this looks like a Rake task by checking that there is a task.
+// // if (task) {
+// // // resolveTask requires that the same definition object be used.
+// // const definition: RakeTaskDefinition = _task.definition;
+// // return new Task(
+// // definition,
+// // definition.task,
+// // 'rake',
+// // new vscode.ShellExecution(`rake ${definition.task}`)
+// // );
+// // }
+// return undefined;
+// }
+// });
+
+export function activate(context: ExtensionContext) {
+ let outputChannel = window.createOutputChannel(
+ "ReScript Language Server",
+ "rescript",
+ );
+
+ function createLanguageClient() {
+ // The server is implemented in node
+ let serverModule = context.asAbsolutePath(
+ path.join("server", "out", "cli.js"),
+ );
+ // The debug options for the server
+ // --inspect=6009: runs the server in Node's Inspector mode so VS Code can attach to the server for debugging
+ let debugOptions = { execArgv: ["--nolazy", "--inspect=6009"] };
+
+ // If the extension is launched in debug mode then the debug server options are used
+ // Otherwise the run options are used
+ let serverOptions: ServerOptions = {
+ run: {
+ module: serverModule,
+ args: ["--node-ipc"],
+ transport: TransportKind.ipc,
+ },
+ debug: {
+ module: serverModule,
+ args: ["--node-ipc"],
+ transport: TransportKind.ipc,
+ options: debugOptions,
+ },
+ };
+
+ // Options to control the language client
+ let clientOptions: LanguageClientOptions = {
+ documentSelector: [{ scheme: "file", language: "rescript" }],
+ // We'll send the initial configuration in here, but this might be
+ // problematic because every consumer of the LS will need to mimic this.
+ // We'll leave it like this for now, but might be worth revisiting later on.
+ initializationOptions: {
+ extensionConfiguration: workspace.getConfiguration("rescript.settings"),
+
+ // Keep this in sync with the `extensionClientCapabilities` type in the
+ // server.
+ extensionClientCapabilities: {
+ supportsMarkdownLinks: true,
+ },
+ },
+ outputChannel,
+ markdown: {
+ isTrusted: true,
+ },
+ };
+
+ const client = new LanguageClient(
+ "ReScriptLSP",
+ "ReScript Language Server",
+ serverOptions,
+ clientOptions,
+ );
+
+ // This sets up a listener that, if we're in code analysis mode, triggers
+ // code analysis as the LS server reports that ReScript compilation has
+ // finished. This is needed because code analysis must wait until
+ // compilation has finished, and the most reliable source for that is the LS
+ // server, that already keeps track of when the compiler finishes in order to
+  // provide fresh diagnostics.
+ context.subscriptions.push(
+ client.onDidChangeState(({ newState }) => {
+ if (newState === State.Running) {
+ context.subscriptions.push(
+ client.onNotification("rescript/compilationFinished", () => {
+ if (inCodeAnalysisState.active === true) {
+ customCommands.codeAnalysisWithReanalyze(
+ inCodeAnalysisState.activatedFromDirectory,
+ diagnosticsCollection,
+ diagnosticsResultCodeActions,
+ outputChannel,
+ codeAnalysisRunningStatusBarItem,
+ );
+ }
+ }),
+ );
+ }
+ }),
+ );
+
+ return client;
+ }
+
+ // Create the language client and start the client.
+ client = createLanguageClient();
+
+ // Create a custom diagnostics collection, for cases where we want to report
+  // diagnostics programmatically from inside of the extension. The reason this
+ // is separate from the diagnostics provided by the LS server itself is that
+ // this should be possible to clear independently of the other diagnostics
+ // coming from the ReScript compiler.
+ let diagnosticsCollection = languages.createDiagnosticCollection("rescript");
+
+ // This map will hold code actions produced by the code analysis, in a
+ // format that's cheap to look up.
+ let diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap = new Map();
+ let codeAnalysisRunningStatusBarItem = window.createStatusBarItem(
+ StatusBarAlignment.Right,
+ );
+
+ let debugDumpStatusBarItem = window.createStatusBarItem(
+ StatusBarAlignment.Right,
+ );
+
+ let inCodeAnalysisState: {
+ active: boolean;
+ activatedFromDirectory: string | null;
+ } = { active: false, activatedFromDirectory: null };
+
+ // This code actions provider yields the code actions potentially extracted
+ // from the code analysis to the editor.
+ languages.registerCodeActionsProvider("rescript", {
+ async provideCodeActions(document, rangeOrSelection) {
+ let availableActions =
+ diagnosticsResultCodeActions.get(document.uri.fsPath) ?? [];
+
+ const allRemoveActionEdits = availableActions.filter(
+ ({ codeAction }) => codeAction.title === "Remove unused",
+ );
+
+ const actions: CodeAction[] = availableActions
+ .filter(
+ ({ range }) =>
+ range.contains(rangeOrSelection) || range.isEqual(rangeOrSelection),
+ )
+ .map(({ codeAction }) => codeAction);
+
+ if (allRemoveActionEdits.length > 0) {
+ const removeAllCodeAction = new CodeAction("Remove all unused in file");
+ const edit = new WorkspaceEdit();
+ allRemoveActionEdits.forEach((subEdit) => {
+ subEdit.codeAction.edit.entries().forEach(([uri, [textEdit]]) => {
+ edit.replace(uri, textEdit.range, textEdit.newText);
+ });
+ });
+ removeAllCodeAction.kind = CodeActionKind.RefactorRewrite;
+ removeAllCodeAction.edit = edit;
+ actions.push(removeAllCodeAction);
+ }
+
+ return actions;
+ },
+ });
+
+ // Register custom commands
+ commands.registerCommand("rescript-vscode.create_interface", () => {
+ customCommands.createInterface(client);
+ });
+
+ commands.registerCommand(
+ "rescript-vscode.clear_diagnostic",
+ (diagnostic: Diagnostic) => {
+ const editor = window.activeTextEditor;
+ if (!editor) {
+ return;
+ }
+
+ const document = editor.document;
+ const diagnostics = diagnosticsCollection.get(document.uri);
+ const newDiagnostics = diagnostics.filter((d) => d !== diagnostic);
+ diagnosticsCollection.set(document.uri, newDiagnostics);
+ },
+ );
+
+ commands.registerCommand("rescript-vscode.open_compiled", () => {
+ customCommands.openCompiled(client);
+ });
+
+ commands.registerCommand("rescript-vscode.debug-dump-start", () => {
+ customCommands.dumpDebug(context, debugDumpStatusBarItem);
+ });
+
+ commands.registerCommand("rescript-vscode.debug-dump-retrigger", () => {
+ customCommands.dumpDebugRetrigger();
+ });
+
+ commands.registerCommand(
+ "rescript-vscode.go_to_location",
+ async (fileUri: string, startLine: number, startCol: number) => {
+ await window.showTextDocument(Uri.parse(fileUri), {
+ selection: new Range(
+ new Position(startLine, startCol),
+ new Position(startLine, startCol),
+ ),
+ });
+ },
+ );
+
+ // Starts the code analysis mode.
+ commands.registerCommand("rescript-vscode.start_code_analysis", () => {
+ // Save the directory this first ran from, and re-use that when continuously
+ // running the analysis. This is so that the target of the analysis does not
+ // change on subsequent runs, if there are multiple ReScript projects open
+ // in the editor.
+ let currentDocument = window.activeTextEditor.document;
+
+ inCodeAnalysisState.active = true;
+
+ // Pointing reanalyze to the dir of the current file path is fine, because
+ // reanalyze will walk upwards looking for a bsconfig.json in order to find
+ // the correct project root.
+ inCodeAnalysisState.activatedFromDirectory = path.dirname(
+ currentDocument.uri.fsPath,
+ );
+
+ codeAnalysisRunningStatusBarItem.command =
+ "rescript-vscode.stop_code_analysis";
+ codeAnalysisRunningStatusBarItem.show();
+ statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem);
+
+ customCommands.codeAnalysisWithReanalyze(
+ inCodeAnalysisState.activatedFromDirectory,
+ diagnosticsCollection,
+ diagnosticsResultCodeActions,
+ outputChannel,
+ codeAnalysisRunningStatusBarItem,
+ );
+ });
+
+ commands.registerCommand("rescript-vscode.stop_code_analysis", () => {
+ inCodeAnalysisState.active = false;
+ inCodeAnalysisState.activatedFromDirectory = null;
+
+ diagnosticsCollection.clear();
+ diagnosticsResultCodeActions.clear();
+
+ codeAnalysisRunningStatusBarItem.hide();
+ });
+
+ commands.registerCommand("rescript-vscode.switch-impl-intf", () => {
+ customCommands.switchImplIntf(client);
+ });
+
+ commands.registerCommand("rescript-vscode.restart_language_server", () => {
+ client.stop().then(() => {
+ client = createLanguageClient();
+ client.start();
+ });
+ });
+
+ // Start the client. This will also launch the server
+ client.start();
+
+ // Restart the language client automatically when certain configuration
+ // changes. These are typically settings that affect the capabilities of the
+ // language client, and because of that requires a full restart.
+ context.subscriptions.push(
+ workspace.onDidChangeConfiguration(({ affectsConfiguration }) => {
+ // Put any configuration that, when changed, requires a full restart of
+ // the server here. That will typically be any configuration that affects
+ // the capabilities declared by the server, since those cannot be updated
+ // on the fly, and require a full restart with new capabilities set when
+ // initializing.
+ if (
+ affectsConfiguration("rescript.settings.inlayHints") ||
+ affectsConfiguration("rescript.settings.codeLens") ||
+ affectsConfiguration("rescript.settings.signatureHelp") ||
+ affectsConfiguration("rescript.settings.incrementalTypechecking") ||
+ affectsConfiguration("rescript.settings.cache")
+ ) {
+ commands.executeCommand("rescript-vscode.restart_language_server");
+ } else {
+ // Send a general message that configuration has updated. Clients
+ // interested can then pull the new configuration as they see fit.
+ client
+ .sendNotification("workspace/didChangeConfiguration")
+ .catch((err) => {
+ window.showErrorMessage(String(err));
+ });
+ }
+ }),
+ );
+}
+
+export function deactivate(): Thenable | undefined {
+ if (!client) {
+ return undefined;
+ }
+ return client.stop();
+}
diff --git a/client/src/utils.ts b/client/src/utils.ts
index 397c1a593..1474888ee 100644
--- a/client/src/utils.ts
+++ b/client/src/utils.ts
@@ -24,19 +24,19 @@ export const getLegacyBinaryProdPath = (b: binaryName) =>
"server",
"analysis_binaries",
platformDir,
- b
+ b,
);
export const getBinaryPath = (
binaryName: "rescript-editor-analysis.exe" | "rescript-tools.exe",
- projectRootPath: string | null = null
+ projectRootPath: string | null = null,
): string | null => {
const binaryFromCompilerPackage = path.join(
projectRootPath ?? "",
"node_modules",
"rescript",
platformDir,
- binaryName
+ binaryName,
);
if (projectRootPath != null && fs.existsSync(binaryFromCompilerPackage)) {
@@ -60,7 +60,7 @@ export const createFileInTempDir = (prefix = "", extension = "") => {
};
export let findProjectRootOfFileInDir = (
- source: DocumentUri
+ source: DocumentUri,
): null | DocumentUri => {
let dir = path.dirname(source);
if (
diff --git a/client/tsconfig.json b/client/tsconfig.json
index b31153f4e..b0924c856 100644
--- a/client/tsconfig.json
+++ b/client/tsconfig.json
@@ -1,18 +1,12 @@
-{
- "compilerOptions": {
- "module": "commonjs",
- "target": "es2019",
- "lib": [
- "ES2019"
- ],
- "outDir": "out",
- "rootDir": "src",
- "sourceMap": true
- },
- "include": [
- "src"
- ],
- "exclude": [
- "node_modules"
- ]
-}
+{
+ "compilerOptions": {
+ "module": "commonjs",
+ "target": "es2019",
+ "lib": ["ES2019"],
+ "outDir": "out",
+ "rootDir": "src",
+ "sourceMap": true
+ },
+ "include": ["src"],
+ "exclude": ["node_modules"]
+}
diff --git a/package-lock.json b/package-lock.json
index 204495aab..771876b5d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -17,6 +17,7 @@
"@types/semver": "^7.7.0",
"@types/vscode": "1.68.0",
"esbuild": "^0.20.1",
+ "prettier": "^3.6.2",
"typescript": "^5.8.3"
},
"engines": {
@@ -96,6 +97,22 @@
"@esbuild/win32-x64": "0.20.1"
}
},
+ "node_modules/prettier": {
+ "version": "3.6.2",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz",
+ "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "prettier": "bin/prettier.cjs"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/prettier/prettier?sponsor=1"
+ }
+ },
"node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
diff --git a/package.json b/package.json
index 0f249092c..5811157e8 100644
--- a/package.json
+++ b/package.json
@@ -258,6 +258,7 @@
"@types/semver": "^7.7.0",
"@types/vscode": "1.68.0",
"esbuild": "^0.20.1",
+ "prettier": "^3.6.2",
"typescript": "^5.8.3"
},
"dependencies": {
diff --git a/rescript.configuration.json b/rescript.configuration.json
index 1cfd33d59..297d9781c 100644
--- a/rescript.configuration.json
+++ b/rescript.configuration.json
@@ -1,93 +1,51 @@
{
- "comments": {
- "lineComment": "//",
- "blockComment": [
- "/*",
- "*/"
- ]
- },
- "brackets": [
- [
- "{",
- "}"
- ],
- [
- "[",
- "]"
- ],
- [
- "(",
- ")"
- ]
- ],
- "autoClosingPairs": [
- [
- "{",
- "}"
- ],
- [
- "[",
- "]"
- ],
- [
- "(",
- ")"
- ],
- [
- "\"",
- "\""
- ],
- [
- "`",
- "`"
- ],
- {
- "open": "/*",
- "close": " */",
- "notIn": ["string"]
- }
- ],
- "surroundingPairs": [
- [
- "{",
- "}"
- ],
- [
- "[",
- "]"
- ],
- [
- "(",
- ")"
- ],
- [
- "\"",
- "\""
- ],
- [
- "`",
- "`"
- ]
- ],
- "folding": {
- "markers": {
- "start": "^\\s*//\\s*#?region\\b",
- "end": "^\\s*//\\s*#?endregion\\b"
- }
- },
- "onEnterRules": [
- {
- "beforeText": { "pattern": "^\\s*/\\*(?!/)([^\\*]|\\*(?!/))*$" },
- "afterText": { "pattern": "^\\s*\\*/$" },
- "action": { "indent": "indentOutdent", "appendText": " "}
- },
- {
- "beforeText": { "pattern": "^\\s*/\\*(?!/)([^\\*]|\\*(?!/))*$" },
- "action": { "indent": "none", "appendText": " "}
- },
- {
- "beforeText": { "pattern": "^(\\t|[ ])*[ ]\\*/\\s*$" },
- "action": { "indent": "none", "removeText": 1 }
- }
- ]
+ "comments": {
+ "lineComment": "//",
+ "blockComment": ["/*", "*/"]
+ },
+ "brackets": [
+ ["{", "}"],
+ ["[", "]"],
+ ["(", ")"]
+ ],
+ "autoClosingPairs": [
+ ["{", "}"],
+ ["[", "]"],
+ ["(", ")"],
+ ["\"", "\""],
+ ["`", "`"],
+ {
+ "open": "/*",
+ "close": " */",
+ "notIn": ["string"]
+ }
+ ],
+ "surroundingPairs": [
+ ["{", "}"],
+ ["[", "]"],
+ ["(", ")"],
+ ["\"", "\""],
+ ["`", "`"]
+ ],
+ "folding": {
+ "markers": {
+ "start": "^\\s*//\\s*#?region\\b",
+ "end": "^\\s*//\\s*#?endregion\\b"
+ }
+ },
+ "onEnterRules": [
+ {
+ "beforeText": { "pattern": "^\\s*/\\*(?!/)([^\\*]|\\*(?!/))*$" },
+ "afterText": { "pattern": "^\\s*\\*/$" },
+ "action": { "indent": "indentOutdent", "appendText": " " }
+ },
+ {
+ "beforeText": { "pattern": "^\\s*/\\*(?!/)([^\\*]|\\*(?!/))*$" },
+ "action": { "indent": "none", "appendText": " " }
+ },
+ {
+ "beforeText": { "pattern": "^(\\t|[ ])*[ ]\\*/\\s*$" },
+ "action": { "indent": "none", "removeText": 1 }
+ }
+ ]
}
diff --git a/server/config.md b/server/config.md
index 644eae01e..e75125b1b 100644
--- a/server/config.md
+++ b/server/config.md
@@ -7,50 +7,50 @@ These configurations are sent to the server on [initialization](https://microsof
```typescript
interface config {
/**
- * Whether you want the extension to prompt for autostarting a ReScript build if a project is opened with no build running
- * @default true
- */
+ * Whether you want the extension to prompt for autostarting a ReScript build if a project is opened with no build running
+ * @default true
+ */
askToStartBuild: boolean;
/**
- * Inlay Hint config
- */
+ * Inlay Hint config
+ */
inlayHints: {
/**
- * Enable Inlay Hint
- * @defalt false
- */
+ * Enable Inlay Hint
+     * @default false
+ */
enable: boolean;
/**
- * Maximum length of character for inlay hints. Set to null to have an unlimited length. Inlay hints that exceed the maximum length will not be shown
- * @defalt 25
- */
+ * Maximum length of character for inlay hints. Set to null to have an unlimited length. Inlay hints that exceed the maximum length will not be shown
+     * @default 25
+ */
maxLength: number | null;
};
/**
- * Enable CodeLens
- * @default false
- */
+ * Enable CodeLens
+ * @default false
+ */
codeLens: boolean;
/**
- * Path to the directory where cross-platform ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.
- * @default null
- */
+ * Path to the directory where cross-platform ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.
+ * @default null
+ */
binaryPath: string | null;
/**
- * Path to the directory where platform-specific ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.
- * @default null
- */
+ * Path to the directory where platform-specific ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.
+ * @default null
+ */
platformPath: string | null;
/**
- * Signature Help config
- */
+ * Signature Help config
+ */
signatureHelp: {
/**
- * Enable Signature Help
- * @default true
- */
+ * Enable Signature Help
+ * @default true
+ */
enabled: boolean;
};
}
diff --git a/server/src/cli.ts b/server/src/cli.ts
index 749be0c6f..8f39083d8 100644
--- a/server/src/cli.ts
+++ b/server/src/cli.ts
@@ -3,7 +3,7 @@ import fs from "fs";
import path from "path";
import server from "./server";
-const args = process.argv.slice(2)
+const args = process.argv.slice(2);
const help = `ReScript Language Server
@@ -18,21 +18,23 @@ Options:
(() => {
switch (args[0]) {
- case '--stdio':
+ case "--stdio":
return server(true);
- case '--node-ipc':
+ case "--node-ipc":
return server(false);
- case '--version':
- case '-v':
- const { version } = JSON.parse(fs.readFileSync(path.join(__dirname, "..", "package.json")).toString())
+ case "--version":
+ case "-v":
+ const { version } = JSON.parse(
+ fs.readFileSync(path.join(__dirname, "..", "package.json")).toString(),
+ );
console.log(version);
process.exit(0);
- case '--help':
- case '-h':
+ case "--help":
+ case "-h":
console.log(help);
process.exit(0);
default:
console.log(help);
- process.exit(1)
+ process.exit(1);
}
})();
diff --git a/server/src/codeActions.ts b/server/src/codeActions.ts
index 77d82e3a2..fe15b4928 100644
--- a/server/src/codeActions.ts
+++ b/server/src/codeActions.ts
@@ -22,7 +22,7 @@ interface findCodeActionsConfig {
let wrapRangeInText = (
range: p.Range,
wrapStart: string,
- wrapEnd: string
+ wrapEnd: string,
): p.TextEdit[] => {
// We need to adjust the start of where we replace if this is a single
// character on a single line.
@@ -68,7 +68,7 @@ let wrapRangeInText = (
let insertBeforeEndingChar = (
range: p.Range,
- newText: string
+ newText: string,
): p.TextEdit[] => {
let beforeEndingChar = {
line: range.end.line,
@@ -196,7 +196,9 @@ interface codeActionExtractorConfig {
codeActions: filesCodeActions;
}
-type codeActionExtractor = (config: codeActionExtractorConfig) => Promise;
+type codeActionExtractor = (
+ config: codeActionExtractorConfig,
+) => Promise;
// This action extracts hints the compiler emits for misspelled identifiers, and
// offers to replace the misspelled name with the correct name suggested by the
@@ -271,13 +273,15 @@ let wrapInSome: codeActionExtractor = async ({
let lineIndexWithType = restOfMessage.findIndex((l) =>
l
.trim()
- .startsWith("but a pattern was expected which matches values of type")
+ .startsWith(
+ "but a pattern was expected which matches values of type",
+ ),
);
if (lineIndexWithType === -1) return false;
// The type is either on this line or the next
let [_, typ = ""] = restOfMessage[lineIndexWithType].split(
- "but a pattern was expected which matches values of type"
+ "but a pattern was expected which matches values of type",
);
if (typ.trim() === "") {
@@ -319,14 +323,14 @@ let handleUndefinedRecordFieldsAction = ({
file,
range,
diagnostic,
- todoValue
+ todoValue,
}: {
recordFieldNames: string[];
codeActions: filesCodeActions;
file: string;
range: p.Range;
diagnostic: p.Diagnostic;
- todoValue: string
+ todoValue: string;
}) => {
if (recordFieldNames != null) {
codeActions[file] = codeActions[file] || [];
@@ -452,7 +456,7 @@ let addUndefinedRecordFieldsV10: codeActionExtractor = async ({
diagnostic,
file,
range,
- todoValue: `failwith("TODO")`
+ todoValue: `failwith("TODO")`,
});
}
@@ -499,7 +503,7 @@ let addUndefinedRecordFieldsV11: codeActionExtractor = async ({
diagnostic,
file,
range,
- todoValue: `%todo`
+ todoValue: `%todo`,
});
}
@@ -563,7 +567,7 @@ let applyUncurried: codeActionExtractor = async ({
}) => {
if (
line.startsWith(
- "This is an uncurried ReScript function. It must be applied with a dot."
+ "This is an uncurried ReScript function. It must be applied with a dot.",
)
) {
const locOfOpenFnParens = {
@@ -692,7 +696,7 @@ let simpleTypeMismatches: codeActionExtractor = async ({
if (line.startsWith(lookFor)) {
let thisHasTypeArr = takeUntil(
[line.slice(lookFor.length), ...array.slice(index + 1)],
- "Somewhere wanted:"
+ "Somewhere wanted:",
);
let somewhereWantedArr = array
.slice(index + thisHasTypeArr.length)
@@ -734,7 +738,7 @@ let simpleTypeMismatches: codeActionExtractor = async ({
[file]: wrapRangeInText(
range,
"switch ",
- ` { | None => ${defaultValue} | Some(v) => v }`
+ ` { | None => ${defaultValue} | Some(v) => v }`,
),
},
},
diff --git a/server/src/constants.ts b/server/src/constants.ts
index f5becaef9..1242c454d 100644
--- a/server/src/constants.ts
+++ b/server/src/constants.ts
@@ -12,13 +12,13 @@ export let editorAnalysisName = "rescript-editor-analysis.exe";
export let builtinAnalysisDevPath = path.join(
path.dirname(__dirname),
"..",
- editorAnalysisName
+ editorAnalysisName,
);
export let builtinAnalysisProdPath = path.join(
path.dirname(__dirname),
"analysis_binaries",
platformDir,
- editorAnalysisName
+ editorAnalysisName,
);
export let rescriptBinName = "rescript";
diff --git a/server/src/incrementalCompilation.ts b/server/src/incrementalCompilation.ts
index ad69a18fd..cb6a65a04 100644
--- a/server/src/incrementalCompilation.ts
+++ b/server/src/incrementalCompilation.ts
@@ -21,7 +21,7 @@ function debug() {
const INCREMENTAL_FOLDER_NAME = "___incremental";
const INCREMENTAL_FILE_FOLDER_LOCATION = path.join(
c.compilerDirPartialPath,
- INCREMENTAL_FOLDER_NAME
+ INCREMENTAL_FOLDER_NAME,
);
type RewatchCompilerArgs = {
@@ -110,7 +110,7 @@ export function incrementalCompilationFileChanged(changedPath: string) {
}
cleanUpIncrementalFiles(
entry.file.sourceFilePath,
- entry.project.rootPath
+ entry.project.rootPath,
);
}
}
@@ -118,14 +118,14 @@ export function incrementalCompilationFileChanged(changedPath: string) {
export function removeIncrementalFileFolder(
projectRootPath: string,
- onAfterRemove?: () => void
+ onAfterRemove?: () => void,
) {
fs.rm(
path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION),
{ force: true, recursive: true },
(_) => {
onAfterRemove?.();
- }
+ },
);
}
@@ -137,14 +137,14 @@ export function recreateIncrementalFileFolder(projectRootPath: string) {
fs.mkdir(
path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION),
{ recursive: true },
- (_) => {}
+ (_) => {},
);
});
}
export function cleanUpIncrementalFiles(
filePath: string,
- projectRootPath: string
+ projectRootPath: string,
) {
const ext = filePath.endsWith(".resi") ? ".resi" : ".res";
const namespace = utils.getNamespaceNameFromConfigFile(projectRootPath);
@@ -162,9 +162,9 @@ export function cleanUpIncrementalFiles(
path.resolve(
projectRootPath,
INCREMENTAL_FILE_FOLDER_LOCATION,
- path.basename(filePath)
+ path.basename(filePath),
),
- (_) => {}
+ (_) => {},
);
[
@@ -176,24 +176,24 @@ export function cleanUpIncrementalFiles(
].forEach((file) => {
fs.unlink(
path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION, file),
- (_) => {}
+ (_) => {},
);
});
}
function getBscArgs(
- entry: IncrementallyCompiledFileInfo
+ entry: IncrementallyCompiledFileInfo,
): Promise | RewatchCompilerArgs | null> {
const buildNinjaPath = path.resolve(
entry.project.rootPath,
- c.buildNinjaPartialPath
+ c.buildNinjaPartialPath,
);
const rewatchLockfile = path.resolve(
entry.project.workspaceRootPath,
- c.rewatchLockPartialPath
+ c.rewatchLockPartialPath,
);
const rescriptLockfile = path.resolve(
entry.project.workspaceRootPath,
- c.rescriptLockPartialPath
+ c.rescriptLockPartialPath,
);
let buildSystem: "bsb" | "rewatch" | null = null;
@@ -209,13 +209,14 @@ function getBscArgs(
try {
stat = fs.statSync(rescriptLockfile);
buildSystem = "rewatch";
- }
- catch {}
+ } catch {}
if (buildSystem == null) {
console.log("Did not find build.ninja or rewatch.lock, cannot proceed..");
return Promise.resolve(null);
} else if (debug()) {
- console.log(`Using build system: ${buildSystem} for ${entry.file.sourceFilePath}`);
+ console.log(
+ `Using build system: ${buildSystem} for ${entry.file.sourceFilePath}`,
+ );
}
const bsbCacheEntry = entry.buildNinja;
const rewatchCacheEntry = entry.buildRewatch;
@@ -235,7 +236,7 @@ function getBscArgs(
) {
return Promise.resolve(rewatchCacheEntry.compilerArgs);
}
- return new Promise(async(resolve, _reject) => {
+ return new Promise(async (resolve, _reject) => {
function resolveResult(result: Array | RewatchCompilerArgs) {
if (stat != null && Array.isArray(result)) {
entry.buildSystem = "bsb";
@@ -307,46 +308,67 @@ function getBscArgs(
if (project?.rescriptVersion == null) return;
let rewatchPath = path.resolve(
entry.project.workspaceRootPath,
- "node_modules/@rolandpeelen/rewatch/rewatch"
+ "node_modules/@rolandpeelen/rewatch/rewatch",
);
let rescriptRewatchPath = null;
- if (semver.valid(project.rescriptVersion) &&
- semver.satisfies(project.rescriptVersion as string, ">11", { includePrerelease: true })) {
- rescriptRewatchPath = await utils.findRewatchBinary(entry.project.workspaceRootPath)
+ if (
+ semver.valid(project.rescriptVersion) &&
+ semver.satisfies(project.rescriptVersion as string, ">11", {
+ includePrerelease: true,
+ })
+ ) {
+ rescriptRewatchPath = await utils.findRewatchBinary(
+ entry.project.workspaceRootPath,
+ );
}
- if (semver.valid(project.rescriptVersion) &&
- semver.satisfies(project.rescriptVersion as string, ">=12.0.0-beta.1", { includePrerelease: true })) {
- rescriptRewatchPath = await utils.findRescriptExeBinary(entry.project.workspaceRootPath)
+ if (
+ semver.valid(project.rescriptVersion) &&
+ semver.satisfies(
+ project.rescriptVersion as string,
+ ">=12.0.0-beta.1",
+ { includePrerelease: true },
+ )
+ ) {
+ rescriptRewatchPath = await utils.findRescriptExeBinary(
+ entry.project.workspaceRootPath,
+ );
}
if (rescriptRewatchPath != null) {
rewatchPath = rescriptRewatchPath;
if (debug()) {
- console.log(`Found rewatch binary bundled with v12: ${rescriptRewatchPath}`)
+ console.log(
+ `Found rewatch binary bundled with v12: ${rescriptRewatchPath}`,
+ );
}
} else {
if (debug()) {
- console.log("Did not find rewatch binary bundled with v12")
+ console.log("Did not find rewatch binary bundled with v12");
}
}
-
- const rewatchArguments = semver.satisfies(project.rescriptVersion, ">=12.0.0-beta.2", { includePrerelease: true }) ? [
- "compiler-args",
- entry.file.sourceFilePath,
- ] : [
+
+ const rewatchArguments = semver.satisfies(
+ project.rescriptVersion,
+ ">=12.0.0-beta.2",
+ { includePrerelease: true },
+ )
+ ? ["compiler-args", entry.file.sourceFilePath]
+ : [
"--rescript-version",
project.rescriptVersion,
"--compiler-args",
entry.file.sourceFilePath,
];
- const bscExe = await utils.findBscExeBinary(entry.project.workspaceRootPath);
+ const bscExe = await utils.findBscExeBinary(
+ entry.project.workspaceRootPath,
+ );
const env = bscExe != null ? { RESCRIPT_BSC_EXE: bscExe } : undefined;
const compilerArgs = JSON.parse(
cp
.execFileSync(rewatchPath, rewatchArguments, { env })
.toString()
- .trim()
+ .trim(),
) as RewatchCompilerArgs;
resolveResult(compilerArgs);
} catch (e) {
@@ -407,7 +429,7 @@ function triggerIncrementalCompilationOfFile(
filePath: string,
fileContent: string,
send: send,
- onCompilationFinished?: () => void
+ onCompilationFinished?: () => void,
) {
let incrementalFileCacheEntry = incrementallyCompiledFileInfo.get(filePath);
if (incrementalFileCacheEntry == null) {
@@ -426,7 +448,7 @@ function triggerIncrementalCompilationOfFile(
const projectRewatchLockfile = path.resolve(
projectRootPath,
- c.rewatchLockPartialPath
+ c.rewatchLockPartialPath,
);
let foundRewatchLockfileInProjectRoot = false;
@@ -456,13 +478,13 @@ function triggerIncrementalCompilationOfFile(
const incrementalFolderPath = path.join(
projectRootPath,
- INCREMENTAL_FILE_FOLDER_LOCATION
+ INCREMENTAL_FILE_FOLDER_LOCATION,
);
let originalTypeFileLocation = path.resolve(
projectRootPath,
c.compilerDirPartialPath,
- path.relative(projectRootPath, filePath)
+ path.relative(projectRootPath, filePath),
);
const parsed = path.parse(originalTypeFileLocation);
@@ -496,11 +518,11 @@ function triggerIncrementalCompilationOfFile(
};
incrementalFileCacheEntry.project.callArgs = figureOutBscArgs(
- incrementalFileCacheEntry
+ incrementalFileCacheEntry,
);
originalTypeFileToFilePath.set(
incrementalFileCacheEntry.file.originalTypeFileLocation,
- incrementalFileCacheEntry.file.sourceFilePath
+ incrementalFileCacheEntry.file.sourceFilePath,
);
incrementallyCompiledFileInfo.set(filePath, incrementalFileCacheEntry);
}
@@ -539,7 +561,7 @@ async function figureOutBscArgs(entry: IncrementallyCompiledFileInfo) {
if (debug()) {
console.log(
"Found no project (or ReScript version) for " +
- entry.file.sourceFilePath
+ entry.file.sourceFilePath,
);
}
return null;
@@ -562,7 +584,7 @@ async function figureOutBscArgs(entry: IncrementallyCompiledFileInfo) {
if (config.extensionConfiguration.incrementalTypechecking?.acrossFiles) {
callArgs.push(
"-I",
- path.resolve(entry.project.rootPath, INCREMENTAL_FILE_FOLDER_LOCATION)
+ path.resolve(entry.project.rootPath, INCREMENTAL_FILE_FOLDER_LOCATION),
);
}
@@ -571,14 +593,14 @@ async function figureOutBscArgs(entry: IncrementallyCompiledFileInfo) {
if (isBsb) {
callArgs.push(
"-I",
- path.resolve(entry.project.rootPath, c.compilerDirPartialPath, value)
+ path.resolve(entry.project.rootPath, c.compilerDirPartialPath, value),
);
} else {
// TODO: once ReScript v12 is out we can remove this check for `.`
if (value === ".") {
callArgs.push(
"-I",
- path.resolve(entry.project.rootPath, c.compilerOcamlDirPartialPath)
+ path.resolve(entry.project.rootPath, c.compilerOcamlDirPartialPath),
);
} else {
callArgs.push("-I", value);
@@ -607,7 +629,9 @@ async function figureOutBscArgs(entry: IncrementallyCompiledFileInfo) {
// Only available in v11+
if (
semver.valid(project.rescriptVersion) &&
- semver.satisfies(project.rescriptVersion as string, ">=11", { includePrerelease: true })
+ semver.satisfies(project.rescriptVersion as string, ">=11", {
+ includePrerelease: true,
+ })
) {
callArgs.push("-ignore-parse-errors");
}
@@ -620,7 +644,7 @@ async function compileContents(
entry: IncrementallyCompiledFileInfo,
fileContent: string,
send: (msg: p.Message) => void,
- onCompilationFinished?: () => void
+ onCompilationFinished?: () => void,
) {
const triggerToken = entry.compilation?.triggerToken;
let callArgs = await entry.project.callArgs;
@@ -632,7 +656,7 @@ async function compileContents(
} else {
if (debug()) {
console.log(
- "Could not figure out call args. Maybe build.ninja does not exist yet?"
+ "Could not figure out call args. Maybe build.ninja does not exist yet?",
);
}
return;
@@ -648,10 +672,17 @@ async function compileContents(
try {
fs.writeFileSync(entry.file.incrementalFilePath, fileContent);
- let cwd = entry.buildSystem === "bsb" ? entry.project.rootPath : path.resolve(entry.project.rootPath, c.compilerDirPartialPath)
+ let cwd =
+ entry.buildSystem === "bsb"
+ ? entry.project.rootPath
+ : path.resolve(entry.project.rootPath, c.compilerDirPartialPath);
if (debug()) {
- console.log(`About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`);
- console.log(`${entry.project.bscBinaryLocation} ${callArgs.map(c => `"${c}"`).join(" ")}`);
+ console.log(
+ `About to invoke bsc from \"${cwd}\", used ${entry.buildSystem}`,
+ );
+ console.log(
+ `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `"${c}"`).join(" ")}`,
+ );
}
const process = cp.execFile(
entry.project.bscBinaryLocation,
@@ -663,12 +694,12 @@ async function compileContents(
console.log(
`Recompiled ${entry.file.sourceFileName} in ${
(performance.now() - startTime) / 1000
- }s`
+ }s`,
);
} else {
if (debug())
console.log(
- `Compilation of ${entry.file.sourceFileName} was killed.`
+ `Compilation of ${entry.file.sourceFileName} was killed.`,
);
}
let hasIgnoredErrorMessages = false;
@@ -683,7 +714,7 @@ async function compileContents(
// Reset compilation status as this compilation finished
entry.compilation = null;
const { result, codeActions } = await utils.parseCompilerLogOutput(
- `${stderr}\n#Done()`
+ `${stderr}\n#Done()`,
);
const actions = Object.values(codeActions)[0] ?? [];
@@ -715,10 +746,9 @@ async function compileContents(
.filter((d) => {
if (
!d.message.startsWith("Uninterpreted extension 'rescript.") &&
- (
- !d.message.includes(
- `/${INCREMENTAL_FOLDER_NAME}/${entry.file.sourceFileName}`
- ) ||
+ (!d.message.includes(
+ `/${INCREMENTAL_FOLDER_NAME}/${entry.file.sourceFileName}`,
+ ) ||
// The `Multiple definition of the name ` type error's
// message includes the filepath with LOC of the duplicate definition
d.message.startsWith("Multiple definition of the") ||
@@ -726,8 +756,7 @@ async function compileContents(
// type errors all include the filepath with LOC
d.message.startsWith("Signature mismatch") ||
d.message.startsWith("In this `with' constraint") ||
- d.message.startsWith("This `with' constraint on")
- )
+ d.message.startsWith("This `with' constraint on"))
) {
hasIgnoredErrorMessages = true;
return true;
@@ -745,13 +774,13 @@ async function compileContents(
hasReportedFeatureFailedError.add(entry.project.rootPath);
const logfile = path.resolve(
entry.project.incrementalFolderPath,
- "error.log"
+ "error.log",
);
fs.writeFileSync(
logfile,
`== BSC ARGS ==\n${callArgs?.join(
- " "
- )}\n\n== OUTPUT ==\n${stderr}`
+ " ",
+ )}\n\n== OUTPUT ==\n${stderr}`,
);
let params: p.ShowMessageParams = {
type: p.MessageType.Warning,
@@ -779,7 +808,7 @@ async function compileContents(
send(notification);
}
onCompilationFinished?.();
- }
+ },
);
entry.killCompilationListeners.push(() => {
process.kill("SIGKILL");
@@ -793,7 +822,7 @@ export function handleUpdateOpenedFile(
filePath: string,
fileContent: string,
send: send,
- onCompilationFinished?: () => void
+ onCompilationFinished?: () => void,
) {
if (debug()) {
console.log("Updated: " + filePath);
@@ -802,7 +831,7 @@ export function handleUpdateOpenedFile(
filePath,
fileContent,
send,
- onCompilationFinished
+ onCompilationFinished,
);
}
@@ -818,7 +847,7 @@ export function handleClosedFile(filePath: string) {
}
export function getCodeActionsFromIncrementalCompilation(
- filePath: string
+ filePath: string,
): Array | null {
const entry = incrementallyCompiledFileInfo.get(filePath);
if (entry != null) {
diff --git a/server/src/lookup.ts b/server/src/lookup.ts
index fa2c9908f..38dff3761 100644
--- a/server/src/lookup.ts
+++ b/server/src/lookup.ts
@@ -27,7 +27,7 @@ export const replaceFileExtension = (filePath: string, ext: string): string => {
// otherwise recursively check parent directories for it.
export const findFilePathFromProjectRoot = (
directory: p.DocumentUri | null, // This must be a directory and not a file!
- filePartialPath: string
+ filePartialPath: string,
): null | p.DocumentUri => {
if (directory == null) {
return null;
@@ -54,7 +54,7 @@ export const readConfig = (projDir: p.DocumentUri): BuildSchema | null => {
let configFile = fs.readFileSync(
fs.existsSync(rescriptJson) ? rescriptJson : bsconfigJson,
- { encoding: "utf-8" }
+ { encoding: "utf-8" },
);
let result: BuildSchema = JSON.parse(configFile);
@@ -110,7 +110,7 @@ export const getSuffixAndPathFragmentFromBsconfig = (bsconfig: BuildSchema) => {
export const getFilenameFromBsconfig = (
projDir: string,
- partialFilePath: string
+ partialFilePath: string,
): string | null => {
let bsconfig = readConfig(projDir);
@@ -128,17 +128,17 @@ export const getFilenameFromBsconfig = (
// Monorepo helpers
export const getFilenameFromRootBsconfig = (
projDir: string,
- partialFilePath: string
+ partialFilePath: string,
): string | null => {
let rootConfigPath = findFilePathFromProjectRoot(
path.join("..", projDir),
- c.rescriptJsonPartialPath
+ c.rescriptJsonPartialPath,
);
if (!rootConfigPath) {
rootConfigPath = findFilePathFromProjectRoot(
path.join("..", projDir),
- c.bsconfigPartialPath
+ c.bsconfigPartialPath,
);
}
diff --git a/server/src/server.ts b/server/src/server.ts
index d51433822..a386bcb10 100644
--- a/server/src/server.ts
+++ b/server/src/server.ts
@@ -60,14 +60,20 @@ let codeActionsFromDiagnostics: codeActions.filesCodeActions = {};
// will be properly defined later depending on the mode (stdio/node-rpc)
let send: (msg: p.Message) => void = (_) => {};
-let findRescriptBinary = async (projectRootPath: p.DocumentUri | null): Promise => {
- if (config.extensionConfiguration.binaryPath != null &&
- fs.existsSync(path.join(config.extensionConfiguration.binaryPath, "rescript"))) {
- return path.join(config.extensionConfiguration.binaryPath, "rescript")
+let findRescriptBinary = async (
+ projectRootPath: p.DocumentUri | null,
+): Promise => {
+ if (
+ config.extensionConfiguration.binaryPath != null &&
+ fs.existsSync(
+ path.join(config.extensionConfiguration.binaryPath, "rescript"),
+ )
+ ) {
+ return path.join(config.extensionConfiguration.binaryPath, "rescript");
}
- return utils.findRescriptBinary(projectRootPath)
-}
+ return utils.findRescriptBinary(projectRootPath);
+};
let createInterfaceRequest = new v.RequestType<
p.TextDocumentIdentifier,
@@ -82,7 +88,7 @@ let openCompiledFileRequest = new v.RequestType<
>("textDocument/openCompiled");
let getCurrentCompilerDiagnosticsForFile = (
- fileUri: string
+ fileUri: string,
): p.Diagnostic[] => {
let diagnostics: p.Diagnostic[] | null = null;
@@ -202,7 +208,10 @@ let debug = false;
let syncProjectConfigCache = async (rootPath: string) => {
try {
if (debug) console.log("syncing project config cache for " + rootPath);
- await utils.runAnalysisAfterSanityCheck(rootPath, ["cache-project", rootPath]);
+ await utils.runAnalysisAfterSanityCheck(rootPath, [
+ "cache-project",
+ rootPath,
+ ]);
if (debug) console.log("OK - synced project config cache for " + rootPath);
} catch (e) {
if (debug) console.error(e);
@@ -212,7 +221,10 @@ let syncProjectConfigCache = async (rootPath: string) => {
let deleteProjectConfigCache = async (rootPath: string) => {
try {
if (debug) console.log("deleting project config cache for " + rootPath);
- await utils.runAnalysisAfterSanityCheck(rootPath, ["cache-delete", rootPath]);
+ await utils.runAnalysisAfterSanityCheck(rootPath, [
+ "cache-delete",
+ rootPath,
+ ]);
if (debug) console.log("OK - deleted project config cache for " + rootPath);
} catch (e) {
if (debug) console.error(e);
@@ -220,33 +232,37 @@ let deleteProjectConfigCache = async (rootPath: string) => {
};
async function onWorkspaceDidChangeWatchedFiles(
- params: p.DidChangeWatchedFilesParams
+ params: p.DidChangeWatchedFilesParams,
) {
- await Promise.all(params.changes.map(async (change) => {
- if (change.uri.includes("build.ninja")) {
- if (config.extensionConfiguration.cache?.projectConfig?.enable === true) {
- let projectRoot = utils.findProjectRootOfFile(change.uri);
- if (projectRoot != null) {
- await syncProjectConfigCache(projectRoot);
- }
- }
- } else if (change.uri.includes("compiler.log")) {
- try {
- await sendUpdatedDiagnostics();
- sendCompilationFinishedMessage();
- if (config.extensionConfiguration.inlayHints?.enable === true) {
- sendInlayHintsRefresh();
+ await Promise.all(
+ params.changes.map(async (change) => {
+ if (change.uri.includes("build.ninja")) {
+ if (
+ config.extensionConfiguration.cache?.projectConfig?.enable === true
+ ) {
+ let projectRoot = utils.findProjectRootOfFile(change.uri);
+ if (projectRoot != null) {
+ await syncProjectConfigCache(projectRoot);
+ }
}
- if (config.extensionConfiguration.codeLens === true) {
- sendCodeLensRefresh();
+ } else if (change.uri.includes("compiler.log")) {
+ try {
+ await sendUpdatedDiagnostics();
+ sendCompilationFinishedMessage();
+ if (config.extensionConfiguration.inlayHints?.enable === true) {
+ sendInlayHintsRefresh();
+ }
+ if (config.extensionConfiguration.codeLens === true) {
+ sendCodeLensRefresh();
+ }
+ } catch {
+ console.log("Error while sending updated diagnostics");
}
- } catch {
- console.log("Error while sending updated diagnostics");
+ } else {
+ ic.incrementalCompilationFileChanged(fileURLToPath(change.uri));
}
- } else {
- ic.incrementalCompilationFileChanged(fileURLToPath(change.uri));
- }
- }));
+ }),
+ );
}
type clientSentBuildAction = {
@@ -274,12 +290,14 @@ let openedFile = async (fileUri: string, fileContent: string) => {
filesDiagnostics: {},
namespaceName:
namespaceName.kind === "success" ? namespaceName.result : null,
- rescriptVersion: await utils.findReScriptVersionForProjectRoot(projectRootPath),
+ rescriptVersion:
+ await utils.findReScriptVersionForProjectRoot(projectRootPath),
bsbWatcherByEditor: null,
bscBinaryLocation: await utils.findBscExeBinary(projectRootPath),
- editorAnalysisLocation: await utils.findEditorAnalysisBinary(projectRootPath),
+ editorAnalysisLocation:
+ await utils.findEditorAnalysisBinary(projectRootPath),
hasPromptedToStartBuild: /(\/|\\)node_modules(\/|\\)/.test(
- projectRootPath
+ projectRootPath,
)
? "never"
: false,
@@ -302,7 +320,7 @@ let openedFile = async (fileUri: string, fileContent: string) => {
// TODO: sometime stale .bsb.lock dangling. bsb -w knows .bsb.lock is
// stale. Use that logic
// TODO: close watcher when lang-server shuts down
- if (await findRescriptBinary(projectRootPath) != null) {
+ if ((await findRescriptBinary(projectRootPath)) != null) {
let payload: clientSentBuildAction = {
title: c.startBuildAction,
projectRootPath: projectRootPath,
@@ -333,7 +351,7 @@ let openedFile = async (fileUri: string, fileContent: string) => {
config.extensionConfiguration.binaryPath == null
? `Can't find ReScript binary in ${path.join(
projectRootPath,
- c.nodeModulesBinDir
+ c.nodeModulesBinDir,
)} or parent directories. Did you install it? It's required to use "rescript" > 9.1`
: `Can't find ReScript binary in the directory ${config.extensionConfiguration.binaryPath}`,
},
@@ -430,7 +448,7 @@ async function hover(msg: p.RequestMessage) {
tmpname,
Boolean(extensionClientCapabilities.supportsMarkdownLinks),
],
- msg
+ msg,
);
fs.unlink(tmpname, () => null);
return response;
@@ -449,7 +467,7 @@ async function inlayHint(msg: p.RequestMessage) {
params.range.end.line,
config.extensionConfiguration.inlayHints?.maxLength,
],
- msg
+ msg,
);
return response;
}
@@ -470,7 +488,7 @@ async function codeLens(msg: p.RequestMessage) {
const response = await utils.runAnalysisCommand(
filePath,
["codeLens", filePath],
- msg
+ msg,
);
return response;
}
@@ -502,7 +520,7 @@ async function signatureHelp(msg: p.RequestMessage) {
? "true"
: "false",
],
- msg
+ msg,
);
fs.unlink(tmpname, () => null);
return response;
@@ -515,7 +533,7 @@ async function definition(msg: p.RequestMessage) {
let response = await utils.runAnalysisCommand(
filePath,
["definition", filePath, params.position.line, params.position.character],
- msg
+ msg,
);
return response;
}
@@ -532,7 +550,7 @@ async function typeDefinition(msg: p.RequestMessage) {
params.position.line,
params.position.character,
],
- msg
+ msg,
);
return response;
}
@@ -541,10 +559,8 @@ async function references(msg: p.RequestMessage) {
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_references
let params = msg.params as p.ReferenceParams;
let filePath = fileURLToPath(params.textDocument.uri);
- let result: typeof p.ReferencesRequest.type = await utils.getReferencesForPosition(
- filePath,
- params.position
- );
+ let result: typeof p.ReferencesRequest.type =
+ await utils.getReferencesForPosition(filePath, params.position);
let response: p.ResponseMessage = {
jsonrpc: c.jsonrpcVersion,
id: msg.id,
@@ -554,13 +570,15 @@ async function references(msg: p.RequestMessage) {
return response;
}
-async function prepareRename(msg: p.RequestMessage): Promise {
+async function prepareRename(
+ msg: p.RequestMessage,
+): Promise {
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_prepareRename
let params = msg.params as p.PrepareRenameParams;
let filePath = fileURLToPath(params.textDocument.uri);
let locations: null | p.Location[] = await utils.getReferencesForPosition(
filePath,
- params.position
+ params.position,
);
let result: p.Range | null = null;
if (locations !== null) {
@@ -625,7 +643,7 @@ async function documentSymbol(msg: p.RequestMessage) {
filePath,
["documentSymbol", tmpname],
msg,
- /* projectRequired */ false
+ /* projectRequired */ false,
);
fs.unlink(tmpname, () => null);
return response;
@@ -661,7 +679,7 @@ async function semanticTokens(msg: p.RequestMessage) {
filePath,
["semanticTokens", tmpname],
msg,
- /* projectRequired */ false
+ /* projectRequired */ false,
);
fs.unlink(tmpname, () => null);
return response;
@@ -683,7 +701,7 @@ async function completion(msg: p.RequestMessage) {
params.position.character,
tmpname,
],
- msg
+ msg,
);
fs.unlink(tmpname, () => null);
return response;
@@ -702,7 +720,7 @@ async function completionResolve(msg: p.RequestMessage) {
let result = await utils.runAnalysisAfterSanityCheck(
data.filePath,
["completionResolve", data.filePath, data.modulePath],
- true
+ true,
);
item.documentation = { kind: "markdown", value: result };
}
@@ -728,7 +746,7 @@ async function codeAction(msg: p.RequestMessage): Promise {
if (utils.rangeContainsRange(range, params.range)) {
localResults.push(codeAction);
}
- }
+ },
);
fs.writeFileSync(tmpname, code, { encoding: "utf-8" });
@@ -743,7 +761,7 @@ async function codeAction(msg: p.RequestMessage): Promise {
params.range.end.character,
tmpname,
],
- msg
+ msg,
);
fs.unlink(tmpname, () => null);
@@ -845,7 +863,10 @@ let updateDiagnosticSyntax = async (fileUri: string, fileContent: string) => {
let compilerDiagnosticsForFile =
getCurrentCompilerDiagnosticsForFile(fileUri);
let syntaxDiagnosticsForFile: p.Diagnostic[] =
- await utils.runAnalysisAfterSanityCheck(filePath, ["diagnosticSyntax", tmpname]);
+ await utils.runAnalysisAfterSanityCheck(filePath, [
+ "diagnosticSyntax",
+ tmpname,
+ ]);
let notification: p.NotificationMessage = {
jsonrpc: c.jsonrpcVersion,
@@ -922,7 +943,7 @@ async function createInterface(msg: p.RequestMessage): Promise {
let cmiPartialPath = path.join(
path.dirname(resPartialPath),
- path.basename(resPartialPath, c.resExt) + suffixToAppend + c.cmiExt
+ path.basename(resPartialPath, c.resExt) + suffixToAppend + c.cmiExt,
);
let cmiPath = path.join(projDir, c.compilerDirPartialPath, cmiPartialPath);
@@ -946,7 +967,7 @@ async function createInterface(msg: p.RequestMessage): Promise {
let response = await utils.runAnalysisCommand(
filePath,
["createInterface", filePath, cmiPath],
- msg
+ msg,
);
let result = typeof response.result === "string" ? response.result : "";
@@ -1056,18 +1077,26 @@ async function onMessage(msg: p.Message) {
const watchers = Array.from(workspaceFolders).flatMap(
(projectRootPath) => [
{
- globPattern: path.join(projectRootPath, '**', c.compilerLogPartialPath),
+ globPattern: path.join(
+ projectRootPath,
+ "**",
+ c.compilerLogPartialPath,
+ ),
kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete,
},
{
- globPattern: path.join(projectRootPath, '**', c.buildNinjaPartialPath),
+ globPattern: path.join(
+ projectRootPath,
+ "**",
+ c.buildNinjaPartialPath,
+ ),
kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete,
},
{
- globPattern: `${path.join(projectRootPath, '**', c.compilerDirPartialPath)}/**/*.{cmt,cmi}`,
+ globPattern: `${path.join(projectRootPath, "**", c.compilerDirPartialPath)}/**/*.{cmt,cmi}`,
kind: p.WatchKind.Change | p.WatchKind.Delete,
- }
- ]
+ },
+ ],
);
const registrationParams: p.RegistrationParams = {
registrations: [
@@ -1094,7 +1123,10 @@ async function onMessage(msg: p.Message) {
let params = msg.params as p.DidOpenTextDocumentParams;
await openedFile(params.textDocument.uri, params.textDocument.text);
await sendUpdatedDiagnostics();
- await updateDiagnosticSyntax(params.textDocument.uri, params.textDocument.text);
+ await updateDiagnosticSyntax(
+ params.textDocument.uri,
+ params.textDocument.text,
+ );
} else if (msg.method === DidChangeTextDocumentNotification.method) {
let params = msg.params as p.DidChangeTextDocumentParams;
let extName = path.extname(params.textDocument.uri);
@@ -1106,11 +1138,11 @@ async function onMessage(msg: p.Message) {
// we currently only support full changes
updateOpenedFile(
params.textDocument.uri,
- changes[changes.length - 1].text
+ changes[changes.length - 1].text,
);
await updateDiagnosticSyntax(
params.textDocument.uri,
- changes[changes.length - 1].text
+ changes[changes.length - 1].text,
);
}
}
@@ -1159,7 +1191,7 @@ async function onMessage(msg: p.Message) {
extensionClientCapabilities.supportsSnippetSyntax = Boolean(
initParams.capabilities.textDocument?.completion?.completionItem
- ?.snippetSupport
+ ?.snippetSupport,
);
// send the list of features we support
@@ -1332,7 +1364,7 @@ async function onMessage(msg: p.Message) {
// without their settings overriding eachother. Not a problem now though
// as we'll likely only have "global" settings starting out.
let [configuration] = msg.result as [
- extensionConfiguration | null | undefined
+ extensionConfiguration | null | undefined,
];
if (configuration != null) {
config.extensionConfiguration = configuration;
@@ -1355,7 +1387,7 @@ async function onMessage(msg: p.Message) {
if (rescriptBinaryPath != null) {
let bsbProcess = utils.runBuildWatcherUsingValidBuildPath(
rescriptBinaryPath,
- projectRootPath
+ projectRootPath,
);
let root = projectsFiles.get(projectRootPath)!;
root.bsbWatcherByEditor = bsbProcess;
diff --git a/server/src/utils.ts b/server/src/utils.ts
index e961ca23b..9dd089799 100644
--- a/server/src/utils.ts
+++ b/server/src/utils.ts
@@ -28,7 +28,7 @@ export let createFileInTempDir = (extension = "") => {
};
let findProjectRootOfFileInDir = (
- source: p.DocumentUri
+ source: p.DocumentUri,
): null | p.DocumentUri => {
let dir = path.dirname(source);
if (
@@ -50,7 +50,7 @@ let findProjectRootOfFileInDir = (
// TODO: this doesn't handle file:/// scheme
export let findProjectRootOfFile = (
source: p.DocumentUri,
- allowDir?: boolean
+ allowDir?: boolean,
): null | p.DocumentUri => {
// First look in project files
let foundRootFromProjectFiles: string | null = null;
@@ -71,7 +71,7 @@ export let findProjectRootOfFile = (
} else {
const isDir = path.extname(source) === "";
return findProjectRootOfFileInDir(
- isDir && !allowDir ? path.join(source, "dummy.res") : source
+ isDir && !allowDir ? path.join(source, "dummy.res") : source,
);
}
};
@@ -81,7 +81,12 @@ export let findProjectRootOfFile = (
// We won't know which version is in the project root until we read and parse `{project_root}/node_modules/rescript/package.json`
let findBinary = async (
projectRootPath: p.DocumentUri | null,
- binary: "bsc.exe" | "rescript-editor-analysis.exe" | "rescript" | "rewatch.exe" | "rescript.exe"
+ binary:
+ | "bsc.exe"
+ | "rescript-editor-analysis.exe"
+ | "rescript"
+ | "rewatch.exe"
+ | "rescript.exe",
) => {
if (config.extensionConfiguration.platformPath != null) {
return path.join(config.extensionConfiguration.platformPath, binary);
@@ -89,54 +94,60 @@ let findBinary = async (
const rescriptDir = lookup.findFilePathFromProjectRoot(
projectRootPath,
- path.join("node_modules", "rescript")
+ path.join("node_modules", "rescript"),
);
if (rescriptDir == null) {
return null;
}
let rescriptVersion = null;
- let rescriptJSWrapperPath = null
+ let rescriptJSWrapperPath = null;
try {
const rescriptPackageJSONPath = path.join(rescriptDir, "package.json");
- const rescriptPackageJSON = JSON.parse(await fsAsync.readFile(rescriptPackageJSONPath, "utf-8"));
- rescriptVersion = rescriptPackageJSON.version
- rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript
+ const rescriptPackageJSON = JSON.parse(
+ await fsAsync.readFile(rescriptPackageJSONPath, "utf-8"),
+ );
+ rescriptVersion = rescriptPackageJSON.version;
+ rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript;
} catch (error) {
- return null
+ return null;
}
- let binaryPath: string | null = null
+ let binaryPath: string | null = null;
if (binary == "rescript") {
// Can't use the native bsb/rescript since we might need the watcher -w
// flag, which is only in the JS wrapper
- binaryPath = path.join(rescriptDir, rescriptJSWrapperPath)
+ binaryPath = path.join(rescriptDir, rescriptJSWrapperPath);
} else if (semver.gte(rescriptVersion, "12.0.0-alpha.13")) {
// TODO: export `binPaths` from `rescript` package so that we don't need to
// copy the logic for figuring out `target`.
const target = `${process.platform}-${process.arch}`;
- const targetPackagePath = path.join(rescriptDir, "..", `@rescript/${target}/bin.js`)
+ const targetPackagePath = path.join(
+ rescriptDir,
+ "..",
+ `@rescript/${target}/bin.js`,
+ );
const { binPaths } = await import(targetPackagePath);
if (binary == "bsc.exe") {
- binaryPath = binPaths.bsc_exe
+ binaryPath = binPaths.bsc_exe;
} else if (binary == "rescript-editor-analysis.exe") {
- binaryPath = binPaths.rescript_editor_analysis_exe
+ binaryPath = binPaths.rescript_editor_analysis_exe;
} else if (binary == "rewatch.exe") {
- binaryPath = binPaths.rewatch_exe
+ binaryPath = binPaths.rewatch_exe;
} else if (binary == "rescript.exe") {
- binaryPath = binPaths.rescript_exe
+ binaryPath = binPaths.rescript_exe;
}
} else {
- binaryPath = path.join(rescriptDir, c.platformDir, binary)
+ binaryPath = path.join(rescriptDir, c.platformDir, binary);
}
if (binaryPath != null && fs.existsSync(binaryPath)) {
- return binaryPath
+ return binaryPath;
} else {
- return null
+ return null;
}
-}
+};
export let findRescriptBinary = (projectRootPath: p.DocumentUri | null) =>
findBinary(projectRootPath, "rescript");
@@ -168,7 +179,7 @@ type formatCodeResult = execResult;
export let formatCode = (
bscPath: p.DocumentUri | null,
filePath: string,
- code: string
+ code: string,
): formatCodeResult => {
let extension = path.extname(filePath);
let formatTempFileFullPath = createFileInTempDir(extension);
@@ -203,12 +214,14 @@ export let formatCode = (
}
};
-export async function findReScriptVersionForProjectRoot(projectRootPath: string | null): Promise {
- if (projectRootPath == null) {
+export async function findReScriptVersionForProjectRoot(
+ projectRootPath: string | null,
+): Promise {
+ if (projectRootPath == null) {
return undefined;
}
- const bscExe = await findBscExeBinary(projectRootPath)
+ const bscExe = await findBscExeBinary(projectRootPath);
if (bscExe == null) {
return undefined;
@@ -216,7 +229,10 @@ export async function findReScriptVersionForProjectRoot(projectRootPath: string
try {
let version = childProcess.execSync(`${bscExe} -v`);
- return version.toString().replace(/rescript/gi, "").trim();
+ return version
+ .toString()
+ .replace(/rescript/gi, "")
+ .trim();
} catch (e) {
return undefined;
}
@@ -233,7 +249,7 @@ if (fs.existsSync(c.builtinAnalysisDevPath)) {
export let runAnalysisAfterSanityCheck = async (
filePath: p.DocumentUri,
args: Array,
- projectRequired = false
+ projectRequired = false,
) => {
let projectRootPath = findProjectRootOfFile(filePath);
if (projectRootPath == null && projectRequired) {
@@ -241,7 +257,7 @@ export let runAnalysisAfterSanityCheck = async (
}
let rescriptVersion =
projectsFiles.get(projectRootPath ?? "")?.rescriptVersion ??
- await findReScriptVersionForProjectRoot(projectRootPath)
+ (await findReScriptVersionForProjectRoot(projectRootPath));
let binaryPath = builtinBinaryPath;
@@ -304,9 +320,13 @@ export let runAnalysisCommand = async (
filePath: p.DocumentUri,
args: Array,
msg: RequestMessage,
- projectRequired = true
+ projectRequired = true,
) => {
- let result = await runAnalysisAfterSanityCheck(filePath, args, projectRequired);
+ let result = await runAnalysisAfterSanityCheck(
+ filePath,
+ args,
+ projectRequired,
+ );
let response: ResponseMessage = {
jsonrpc: c.jsonrpcVersion,
id: msg.id,
@@ -317,7 +337,7 @@ export let runAnalysisCommand = async (
export let getReferencesForPosition = async (
filePath: p.DocumentUri,
- position: p.Position
+ position: p.Position,
) =>
await runAnalysisAfterSanityCheck(filePath, [
"references",
@@ -333,7 +353,7 @@ export const toCamelCase = (text: string): string => {
};
export const getNamespaceNameFromConfigFile = (
- projDir: p.DocumentUri
+ projDir: p.DocumentUri,
): execResult => {
let config = lookup.readConfig(projDir);
let result = "";
@@ -359,7 +379,7 @@ export const getNamespaceNameFromConfigFile = (
export let getCompiledFilePath = (
filePath: string,
- projDir: string
+ projDir: string,
): execResult => {
let error: execResult = {
kind: "error",
@@ -379,7 +399,7 @@ export let getCompiledFilePath = (
if (!fs.existsSync(result)) {
let compiledPath = lookup.getFilenameFromRootBsconfig(
projDir,
- partialFilePath
+ partialFilePath,
);
if (!compiledPath) {
@@ -397,7 +417,7 @@ export let getCompiledFilePath = (
export let runBuildWatcherUsingValidBuildPath = (
buildPath: p.DocumentUri,
- projectRootPath: p.DocumentUri
+ projectRootPath: p.DocumentUri,
) => {
let cwdEnv = {
cwd: projectRootPath,
@@ -546,7 +566,7 @@ type parsedCompilerLogResult = {
linesWithParseErrors: string[];
};
export let parseCompilerLogOutput = async (
- content: string
+ content: string,
): Promise => {
type parsedDiagnostic = {
code: number | undefined;
@@ -568,7 +588,9 @@ export let parseCompilerLogOutput = async (
tag: undefined,
content: [],
});
- } else if (line.startsWith("FAILED: cannot make progress due to previous errors.")) {
+ } else if (
+ line.startsWith("FAILED: cannot make progress due to previous errors.")
+ ) {
// skip
} else if (line.startsWith("FAILED: dependency cycle")) {
// skip as we can't extract a filepath from this error message
@@ -674,7 +696,7 @@ export let parseCompilerLogOutput = async (
linesWithParseErrors.push(line);
} else {
parsedDiagnostics[parsedDiagnostics.length - 1].content.push(
- line.slice(2)
+ line.slice(2),
);
}
} else if (line.trim() != "") {
@@ -704,7 +726,7 @@ export let parseCompilerLogOutput = async (
}
// remove start and end whitespaces/newlines
- let message = diagnosticMessage.join("\n").trim()
+ let message = diagnosticMessage.join("\n").trim();
// vscode.Diagnostic throws an error if `message` is a blank string
if (message != "") {
@@ -740,7 +762,7 @@ export let parseCompilerLogOutput = async (
export let rangeContainsRange = (
range: p.Range,
- otherRange: p.Range
+ otherRange: p.Range,
): boolean => {
if (
otherRange.start.line < range.start.line ||
diff --git a/server/tsconfig.json b/server/tsconfig.json
index 9eb5378d1..5dab877e0 100644
--- a/server/tsconfig.json
+++ b/server/tsconfig.json
@@ -1,21 +1,15 @@
-{
- "compilerOptions": {
- "target": "es2019",
- "lib": [
- "ES2019"
- ],
- "module": "commonjs",
- "moduleResolution": "node",
- "sourceMap": true,
- "strict": true,
- "outDir": "out",
- "rootDir": "src",
- "esModuleInterop": true
- },
- "include": [
- "src"
- ],
- "exclude": [
- "node_modules"
- ]
-}
+{
+ "compilerOptions": {
+ "target": "es2019",
+ "lib": ["ES2019"],
+ "module": "commonjs",
+ "moduleResolution": "node",
+ "sourceMap": true,
+ "strict": true,
+ "outDir": "out",
+ "rootDir": "src",
+ "esModuleInterop": true
+ },
+ "include": ["src"],
+ "exclude": ["node_modules"]
+}
diff --git a/snippets.json b/snippets.json
index 47281539a..a9c248fb3 100644
--- a/snippets.json
+++ b/snippets.json
@@ -1,76 +1,56 @@
{
- "Module": {
- "prefix": [
- "module"
- ],
- "body": [
- "module ${1:Name} = {",
- "\t${2:// Module contents}",
- "}"
- ]
- },
- "Try": {
- "prefix": [
- "try"
- ],
- "body": [
- "try {",
- "\t${1:expression}",
- "} catch {",
- "| ${2:MyException} => ${3:expression}",
- "}"
- ]
- },
- "For Loop": {
- "prefix": [
- "for"
- ],
- "body": [
- "for ${1:i} in ${2:startValueInclusive} to ${3:endValueInclusive} {",
- "\t${4:Js.log(${1:i})}",
- "}"
- ]
- },
- "Reverse For Loop": {
- "prefix": [
- "for"
- ],
- "body": [
- "for ${1:i} in ${2:startValueInclusive} downto ${3:endValueInclusive} {",
- "\t${4:Js.log(${1:i})}",
- "}"
- ]
- },
- "Global External Object": {
- "prefix": [
- "external"
- ],
- "body": [
- "@val external ${1:setTimeout}: ${2:(unit => unit, int) => float} = \"${3:setTimeout}\""
- ]
- },
- "Global External Module": {
- "prefix": [
- "external"
- ],
- "body": [
- "@scope(\"${1:Math}\") @val external ${2:random}: ${3:unit => float} = \"${4:random}\""
- ]
- },
- "JS Module External": {
- "prefix": [
- "external"
- ],
- "body": [
- "@module(\"${1:path}\") external ${2:dirname}: ${3:string => string} = \"${4:dirname}\""
- ]
- },
- "JS Module Default External": {
- "prefix": [
- "external"
- ],
- "body": [
- "@module external ${1:leftPad}: ${2:(string, int) => string} = \"${3:leftPad}\""
- ]
- }
+ "Module": {
+ "prefix": ["module"],
+ "body": ["module ${1:Name} = {", "\t${2:// Module contents}", "}"]
+ },
+ "Try": {
+ "prefix": ["try"],
+ "body": [
+ "try {",
+ "\t${1:expression}",
+ "} catch {",
+ "| ${2:MyException} => ${3:expression}",
+ "}"
+ ]
+ },
+ "For Loop": {
+ "prefix": ["for"],
+ "body": [
+ "for ${1:i} in ${2:startValueInclusive} to ${3:endValueInclusive} {",
+ "\t${4:Js.log(${1:i})}",
+ "}"
+ ]
+ },
+ "Reverse For Loop": {
+ "prefix": ["for"],
+ "body": [
+ "for ${1:i} in ${2:startValueInclusive} downto ${3:endValueInclusive} {",
+ "\t${4:Js.log(${1:i})}",
+ "}"
+ ]
+ },
+ "Global External Object": {
+ "prefix": ["external"],
+ "body": [
+ "@val external ${1:setTimeout}: ${2:(unit => unit, int) => float} = \"${3:setTimeout}\""
+ ]
+ },
+ "Global External Module": {
+ "prefix": ["external"],
+ "body": [
+ "@scope(\"${1:Math}\") @val external ${2:random}: ${3:unit => float} = \"${4:random}\""
+ ]
+ },
+ "JS Module External": {
+ "prefix": ["external"],
+ "body": [
+ "@module(\"${1:path}\") external ${2:dirname}: ${3:string => string} = \"${4:dirname}\""
+ ]
+ },
+ "JS Module Default External": {
+ "prefix": ["external"],
+ "body": [
+ "@module external ${1:leftPad}: ${2:(string, int) => string} = \"${3:leftPad}\""
+ ]
+ }
}
diff --git a/tools/npm/getBinaryPath.js b/tools/npm/getBinaryPath.js
index 222b76fc6..e110a991c 100644
--- a/tools/npm/getBinaryPath.js
+++ b/tools/npm/getBinaryPath.js
@@ -9,7 +9,7 @@ function getBinaryPath() {
"..",
"binaries",
platformArch,
- "rescript-tools.exe"
+ "rescript-tools.exe",
);
return binPath;
}
diff --git a/tools/package-lock.json b/tools/package-lock.json
index 6f1cdd6cf..fc63a4a79 100644
--- a/tools/package-lock.json
+++ b/tools/package-lock.json
@@ -1,43 +1,43 @@
{
- "name": "@rescript/tools",
- "version": "0.6.6",
- "lockfileVersion": 2,
- "requires": true,
- "packages": {
- "": {
- "name": "@rescript/tools",
- "version": "0.6.6",
- "license": "MIT",
- "dependencies": {
- "rescript": "^11.0.0-rc.7"
- },
- "bin": {
- "rescript-tools": "npm/cli.js"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/rescript": {
- "version": "11.0.0-rc.7",
- "resolved": "https://registry.npmjs.org/rescript/-/rescript-11.0.0-rc.7.tgz",
- "integrity": "sha512-N2ImQVkzF4rzV5PBcQ8htKe4NPnqsS2DhtG9A4iic4eYsNim1Ixt8qyuD40WEYl4AZOuYUaEfJlvLTOV9wHogA==",
- "hasInstallScript": true,
- "bin": {
- "bsc": "bsc",
- "bstracing": "lib/bstracing",
- "rescript": "rescript"
- },
- "engines": {
- "node": ">=10"
- }
- }
- },
- "dependencies": {
- "rescript": {
- "version": "11.0.0-rc.7",
- "resolved": "https://registry.npmjs.org/rescript/-/rescript-11.0.0-rc.7.tgz",
- "integrity": "sha512-N2ImQVkzF4rzV5PBcQ8htKe4NPnqsS2DhtG9A4iic4eYsNim1Ixt8qyuD40WEYl4AZOuYUaEfJlvLTOV9wHogA=="
- }
- }
+ "name": "@rescript/tools",
+ "version": "0.6.6",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "@rescript/tools",
+ "version": "0.6.6",
+ "license": "MIT",
+ "dependencies": {
+ "rescript": "^11.0.0-rc.7"
+ },
+ "bin": {
+ "rescript-tools": "npm/cli.js"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/rescript": {
+ "version": "11.0.0-rc.7",
+ "resolved": "https://registry.npmjs.org/rescript/-/rescript-11.0.0-rc.7.tgz",
+ "integrity": "sha512-N2ImQVkzF4rzV5PBcQ8htKe4NPnqsS2DhtG9A4iic4eYsNim1Ixt8qyuD40WEYl4AZOuYUaEfJlvLTOV9wHogA==",
+ "hasInstallScript": true,
+ "bin": {
+ "bsc": "bsc",
+ "bstracing": "lib/bstracing",
+ "rescript": "rescript"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ }
+ },
+ "dependencies": {
+ "rescript": {
+ "version": "11.0.0-rc.7",
+ "resolved": "https://registry.npmjs.org/rescript/-/rescript-11.0.0-rc.7.tgz",
+ "integrity": "sha512-N2ImQVkzF4rzV5PBcQ8htKe4NPnqsS2DhtG9A4iic4eYsNim1Ixt8qyuD40WEYl4AZOuYUaEfJlvLTOV9wHogA=="
+ }
+ }
}
diff --git a/tools/package.json b/tools/package.json
index f2161e069..15c9adb77 100644
--- a/tools/package.json
+++ b/tools/package.json
@@ -1,42 +1,42 @@
{
- "name": "@rescript/tools",
- "description": "ReScript Tools",
- "version": "0.6.6",
- "author": "ReScript Team",
- "license": "MIT",
- "bin": {
- "rescript-tools": "npm/cli.js"
- },
- "keywords": [
- "ReScript",
- "Tools",
- "Docgen"
- ],
- "files": [
- "npm/cli.js",
- "npm/getBinaryPath.js",
- "npm/*.res",
- "npm/*.resi",
- "binaries",
- "rescript.json",
- "README.md"
- ],
- "engines": {
- "node": "*"
- },
- "homepage": "https://github.com/rescript-lang/rescript-vscode/tools/README.md",
- "repository": {
- "type": "git",
- "url": "https://github.com/rescript-lang/rescript-vscode",
- "directory": "tools"
- },
- "bugs": {
- "url": "https://github.com/rescript-lang/rescript-vscode/issues"
- },
- "scripts": {
- "build": "rescript build"
- },
- "dependencies": {
- "rescript": "^11.0.0-rc.7"
- }
+ "name": "@rescript/tools",
+ "description": "ReScript Tools",
+ "version": "0.6.6",
+ "author": "ReScript Team",
+ "license": "MIT",
+ "bin": {
+ "rescript-tools": "npm/cli.js"
+ },
+ "keywords": [
+ "ReScript",
+ "Tools",
+ "Docgen"
+ ],
+ "files": [
+ "npm/cli.js",
+ "npm/getBinaryPath.js",
+ "npm/*.res",
+ "npm/*.resi",
+ "binaries",
+ "rescript.json",
+ "README.md"
+ ],
+ "engines": {
+ "node": "*"
+ },
+ "homepage": "https://github.com/rescript-lang/rescript-vscode/tools/README.md",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/rescript-lang/rescript-vscode",
+ "directory": "tools"
+ },
+ "bugs": {
+ "url": "https://github.com/rescript-lang/rescript-vscode/issues"
+ },
+ "scripts": {
+ "build": "rescript build"
+ },
+ "dependencies": {
+ "rescript": "^11.0.0-rc.7"
+ }
}
diff --git a/tools/rescript.json b/tools/rescript.json
index cb24d1d8b..04a224e0a 100644
--- a/tools/rescript.json
+++ b/tools/rescript.json
@@ -11,4 +11,4 @@
"module": "commonjs",
"in-source": false
}
-}
\ No newline at end of file
+}
diff --git a/tools/src/tools.ml b/tools/src/tools.ml
index db5a052d6..4df780b3f 100644
--- a/tools/src/tools.ml
+++ b/tools/src/tools.ml
@@ -46,7 +46,8 @@ type docItem =
deprecated: string option;
detail: docItemDetail option;
source: source;
- (** Additional documentation for constructors and record fields, if available. *)
+ (** Additional documentation for constructors and record fields, if
+ available. *)
}
| Module of docsForModule
| ModuleType of {
diff --git a/tsconfig.json b/tsconfig.json
index 16ac76d66..cae98cfda 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,26 +1,20 @@
-{
- "compilerOptions": {
- "module": "commonjs",
- "target": "es2019",
- "lib": [
- "ES2019"
- ],
- "outDir": "out",
- "rootDir": "src",
- "sourceMap": true
- },
- "include": [
- "src"
- ],
- "exclude": [
- "node_modules"
- ],
- "references": [
- {
- "path": "./client"
- },
- {
- "path": "./server"
- }
- ]
-}
+{
+ "compilerOptions": {
+ "module": "commonjs",
+ "target": "es2019",
+ "lib": ["ES2019"],
+ "outDir": "out",
+ "rootDir": "src",
+ "sourceMap": true
+ },
+ "include": ["src"],
+ "exclude": ["node_modules"],
+ "references": [
+ {
+ "path": "./client"
+ },
+ {
+ "path": "./server"
+ }
+ ]
+}