mirror of
https://github.com/helix-editor/helix.git
synced 2025-10-06 00:13:28 +02:00
Compare commits
352 Commits
tree-house
...
209558645a
Author | SHA1 | Date | |
---|---|---|---|
|
209558645a | ||
|
987b04bd26 | ||
|
322bb1c189 | ||
|
34e0f7e82f | ||
|
97293c9f36 | ||
|
71038266e8 | ||
|
05a99c2cca | ||
|
14030d0b63 | ||
|
ff376b0d1a | ||
|
d25726f573 | ||
|
8f2af68b30 | ||
|
fe8e21a07f | ||
|
d0218f7e78 | ||
|
e2333f60ae | ||
|
70187c430c | ||
|
8058fefef0 | ||
|
0928e5ea1c | ||
|
b391185716 | ||
|
f59dc9e48f | ||
|
d63c2d2fea | ||
|
0a4207be32 | ||
|
3adc021c06 | ||
|
d1750a7502 | ||
|
c5f0a4bc22 | ||
|
4967229e85 | ||
|
68f11f9324 | ||
|
af74a61ad4 | ||
|
cfb5158cd1 | ||
|
e3fafb6bad | ||
|
6e9939a2d1 | ||
|
b08aba8e8e | ||
|
83abbe56df | ||
|
9cc912a63e | ||
|
fe1393cec8 | ||
|
392e444ff9 | ||
|
0ea5d87985 | ||
|
6b73c3c550 | ||
|
b309d72688 | ||
|
d546a799e5 | ||
|
7c37e8acea | ||
|
d4c91daa5e | ||
|
dc7c2acc08 | ||
|
99cea8c284 | ||
|
077c901be9 | ||
|
a5bf7c0d5e | ||
|
8ab20720da | ||
|
feeaec097a | ||
|
4f5eaa4186 | ||
|
7a5b618fe5 | ||
|
77ff51caa4 | ||
|
7e4e556f84 | ||
|
96c60198ec | ||
|
3dadd82c89 | ||
|
5a8fb732f2 | ||
|
8671882ee2 | ||
|
1d3e65fdbc | ||
|
f81b59fc15 | ||
|
cc8e890906 | ||
|
aa14cd38fc | ||
|
22a3b10dd8 | ||
|
535e6ee77b | ||
|
4b40b45527 | ||
|
95c378a764 | ||
|
74bb02ffe7 | ||
|
b81ee02db4 | ||
|
9ec07cf1f6 | ||
|
9f34f8b5ff | ||
|
3fb1443162 | ||
|
207c0e3899 | ||
|
f9f5fe6b12 | ||
|
e5e7fe43ce | ||
|
da4ede9535 | ||
|
6f26a257d5 | ||
|
b1eb9c09f4 | ||
|
b6ccbddc77 | ||
|
a4a2b50a50 | ||
|
e5d1f6c517 | ||
|
050e1d9b47 | ||
|
2b9cc20d23 | ||
|
9e3b510dc7 | ||
|
6b93c77033 | ||
|
fdaec3f972 | ||
|
8a898c88bc | ||
|
e0544d01f1 | ||
|
001efa801e | ||
|
00dbca93c4 | ||
|
6726c1f41c | ||
|
7747d3b93e | ||
|
4d0466d30c | ||
|
ed2807ae07 | ||
|
327f3852f4 | ||
|
155fde5178 | ||
|
f5a399c7f9 | ||
|
cb7188d5cc | ||
|
a44695e4e8 | ||
|
ef3a49d03c | ||
|
56fa9bf7c1 | ||
|
b5a9c34e14 | ||
|
e8e36a6a8e | ||
|
18572973e6 | ||
|
4a25f63169 | ||
|
0345400c41 | ||
|
43990ed0c8 | ||
|
178c55708a | ||
|
6c0d598183 | ||
|
5b5f6daab3 | ||
|
601c904e50 | ||
|
93cf3b1baf | ||
|
fdfc6df122 | ||
|
d2595930fa | ||
|
758f80a4fc | ||
|
e58b08d22a | ||
|
62f3cd3f5a | ||
|
39cccc23e5 | ||
|
f0be627dcb | ||
|
1bdd8ae784 | ||
|
2d5a19f081 | ||
|
d9fe4798fa | ||
|
39eec87284 | ||
|
285a7440a3 | ||
|
2f7cc9d0ae | ||
|
ca4f638dfd | ||
|
4480da752c | ||
|
1f7b593857 | ||
|
6807e32ec1 | ||
|
54e748b0ce | ||
|
c8224bcf4e | ||
|
1941f0b639 | ||
|
e17b80a5a2 | ||
|
6dc4722665 | ||
|
6ea3677b9f | ||
|
fe2291a59b | ||
|
a789ec7f4b | ||
|
6b511964bb | ||
|
ddbac29d14 | ||
|
f4557d0bff | ||
|
6479f74a57 | ||
|
27c90b7fff | ||
|
9ea190b729 | ||
|
c2782568f1 | ||
|
8dbc664a30 | ||
|
f72b6f758b | ||
|
4fd4588482 | ||
|
94c96cfe0e | ||
|
44b5413716 | ||
|
e15134beac | ||
|
22e60d6a71 | ||
|
8297d60ca0 | ||
|
4281228da3 | ||
|
395a71bf53 | ||
|
1e4bf6704a | ||
|
b01fbb4a22 | ||
|
f75a26cb9b | ||
|
21ae1c98fb | ||
|
7b8a4b7a51 | ||
|
715d4ae2d5 | ||
|
22b184b570 | ||
|
665ee4da22 | ||
|
ecd18e3eb2 | ||
|
e7f95ca6b2 | ||
|
4418e338e8 | ||
|
6c71fc00b2 | ||
|
727758e068 | ||
|
63eb1b870c | ||
|
2d5826d194 | ||
|
9f4ef2fc3d | ||
|
fd8aacc1a4 | ||
|
2ee11a0a9d | ||
|
9512cb9472 | ||
|
3658e97c2b | ||
|
ab668c2dfc | ||
|
ef2ebc5f24 | ||
|
5cda70e866 | ||
|
c67c3faa78 | ||
|
6fd1efd1c2 | ||
|
86f10ae24c | ||
|
d2f37b1559 | ||
|
e844a4365d | ||
|
ca7479ca88 | ||
|
7e1fbb05fd | ||
|
2f560914fb | ||
|
636cbe58e3 | ||
|
43187f2ed3 | ||
|
532f241287 | ||
|
ba04f53830 | ||
|
242353b2ba | ||
|
de898460b8 | ||
|
8e0f326ebb | ||
|
16d06643a4 | ||
|
9447a9cc93 | ||
|
febc3d03b3 | ||
|
06047808eb | ||
|
02fe437622 | ||
|
e88e48f41c | ||
|
fc53af9f4e | ||
|
3e5bb392fa | ||
|
479c3b5584 | ||
|
9789b27461 | ||
|
6c6607ef62 | ||
|
bcb6c20a84 | ||
|
f7ab5ec4a1 | ||
|
6a090471a8 | ||
|
6081a5df81 | ||
|
0043c16506 | ||
|
e5f9937c1d | ||
|
91dff9393d | ||
|
0ca12250bc | ||
|
4d782bbd18 | ||
|
b036fa0b9b | ||
|
0d799235f6 | ||
|
305f8bc165 | ||
|
e03f100187 | ||
|
f75d71844f | ||
|
d654a07d3d | ||
|
930340e646 | ||
|
44293dfd22 | ||
|
a9d51ef258 | ||
|
b9f980f567 | ||
|
c3c4895179 | ||
|
974ac9eaf3 | ||
|
60fce357fb | ||
|
4f985832bf | ||
|
43963473e3 | ||
|
2338b44909 | ||
|
58dfa158c2 | ||
|
171dfc60e5 | ||
|
250af462cd | ||
|
40a3fb9b92 | ||
|
c96642125f | ||
|
4a85171907 | ||
|
472a27e4f2 | ||
|
036729211a | ||
|
d3fb8fc9b8 | ||
|
684e108fd0 | ||
|
3c6c221d45 | ||
|
6b94d70f20 | ||
|
1491cbc8f3 | ||
|
e11794be37 | ||
|
fba644f2b4 | ||
|
24fe989596 | ||
|
fed3edcab7 | ||
|
4099465632 | ||
|
9100bce9aa | ||
|
f5dc8245ea | ||
|
362e97e927 | ||
|
ba54b6afe4 | ||
|
837627dd8a | ||
|
1246549afd | ||
|
ada8004ea5 | ||
|
205e7ece70 | ||
|
1315b7e2b1 | ||
|
52192ae29e | ||
|
fba1a6188a | ||
|
b90d8960a8 | ||
|
6e4ec96101 | ||
|
62f270e5d2 | ||
|
3b7aaddb13 | ||
|
ab97585b69 | ||
|
9dbfb9b4eb | ||
|
091f19f67c | ||
|
ae3eac8aeb | ||
|
fbe9785613 | ||
|
7092e30f8d | ||
|
705d467932 | ||
|
05a4d05646 | ||
|
2b26d27416 | ||
|
e773d6cc92 | ||
|
633c5fbf0f | ||
|
b75e95862c | ||
|
f4b488e380 | ||
|
7410fe35a3 | ||
|
637274c4d4 | ||
|
01341cbbf6 | ||
|
b1f4717356 | ||
|
25b299abc5 | ||
|
4dd4ba798c | ||
|
ca4ae7f287 | ||
|
d375f1e7f4 | ||
|
8d2870b94a | ||
|
f6878f62f7 | ||
|
6c43dc4962 | ||
|
1ea9050a5e | ||
|
2baff46b25 | ||
|
921ca08e1b | ||
|
17fb12bcf3 | ||
|
67f1fe20c3 | ||
|
8961ae1dc6 | ||
|
3366db0afb | ||
|
733ebcdaeb | ||
|
2bd7452fe0 | ||
|
7dcddf98c6 | ||
|
c9e7b0f84f | ||
|
2fbe7fc5b5 | ||
|
12523cd126 | ||
|
8d58f6ce8d | ||
|
702a961517 | ||
|
1023e8f964 | ||
|
223ceec10a | ||
|
cb1ec1b27e | ||
|
4098151591 | ||
|
1edf98262c | ||
|
237d875e7d | ||
|
b70b8df916 | ||
|
ae0dd313bd | ||
|
76029e5840 | ||
|
3a6c9747b8 | ||
|
ebf96bd469 | ||
|
5a1dcc2429 | ||
|
be1bf2f909 | ||
|
05ae617e1c | ||
|
e606652a96 | ||
|
df02ef6a99 | ||
|
3ceae88c3a | ||
|
b4e51ef895 | ||
|
f157a918a3 | ||
|
a7c3a43069 | ||
|
702b1d0a0f | ||
|
b0528bbac4 | ||
|
09bc67ad6d | ||
|
6be38642f4 | ||
|
f46222ced3 | ||
|
9bb80a74e1 | ||
|
be1cf090c3 | ||
|
a3b64b6da2 | ||
|
aea53523dd | ||
|
24e3ccc31b | ||
|
c94fde8d1c | ||
|
84e95d35ee | ||
|
447a6d3299 | ||
|
47547e94ad | ||
|
908b9edf28 | ||
|
63a1a94d92 | ||
|
bfd2c72715 | ||
|
4c8600967c | ||
|
313ef30f64 | ||
|
9bb370c91e | ||
|
cb1ecc9128 | ||
|
7a6bc53528 | ||
|
60a03a35c6 | ||
|
e4ef096945 | ||
|
9e7a6a5dbd | ||
|
1460a086df | ||
|
51d3b15557 | ||
|
e53462c78c | ||
|
fb45017a26 | ||
|
cbac427383 | ||
|
46cb177792 | ||
|
4784650ccf | ||
|
ece12dd74d | ||
|
72932a391b | ||
|
4c630c148a | ||
|
ac3c6ebaff |
4
.github/ISSUE_TEMPLATE/blank_issue.md
vendored
4
.github/ISSUE_TEMPLATE/blank_issue.md
vendored
@@ -1,4 +0,0 @@
|
||||
---
|
||||
name: Blank Issue
|
||||
about: Create a blank issue.
|
||||
---
|
9
.github/workflows/build.yml
vendored
9
.github/workflows/build.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install MSRV toolchain
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
@@ -45,12 +45,13 @@ jobs:
|
||||
name: Test Suite
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
HELIX_LOG_LEVEL: info
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install MSRV toolchain
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
@@ -84,7 +85,7 @@ jobs:
|
||||
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install MSRV toolchain
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
@@ -120,7 +121,7 @@ jobs:
|
||||
if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install MSRV toolchain
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
|
2
.github/workflows/cachix.yml
vendored
2
.github/workflows/cachix.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install nix
|
||||
uses: cachix/install-nix-action@v31
|
||||
|
2
.github/workflows/gh-pages.yml
vendored
2
.github/workflows/gh-pages.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- name: Setup mdBook
|
||||
uses: peaceiris/actions-mdbook@v2
|
||||
|
37
.github/workflows/release.yml
vendored
37
.github/workflows/release.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
@@ -58,23 +58,21 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false # don't fail other jobs if one fails
|
||||
matrix:
|
||||
build: [x86_64-linux, aarch64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc
|
||||
build: [x86_64-linux, aarch64-linux, x86_64-macos, x86_64-windows, aarch64-macos] #, x86_64-win-gnu, win32-msvc
|
||||
include:
|
||||
- build: x86_64-linux
|
||||
os: ubuntu-24.04
|
||||
# WARN: When changing this to a newer version, make sure that the GLIBC isnt too new, as this can cause issues
|
||||
# with portablity on older systems that dont follow ubuntus more rapid release cadence.
|
||||
os: ubuntu-22.04
|
||||
rust: stable
|
||||
target: x86_64-unknown-linux-gnu
|
||||
cross: false
|
||||
- build: aarch64-linux
|
||||
os: ubuntu-24.04-arm
|
||||
# Version should be kept in lockstep with the x86_64 version
|
||||
os: ubuntu-22.04-arm
|
||||
rust: stable
|
||||
target: aarch64-unknown-linux-gnu
|
||||
cross: false
|
||||
# - build: riscv64-linux
|
||||
# os: ubuntu-22.04
|
||||
# rust: stable
|
||||
# target: riscv64gc-unknown-linux-gnu
|
||||
# cross: true
|
||||
- build: x86_64-macos
|
||||
os: macos-latest
|
||||
rust: stable
|
||||
@@ -85,13 +83,16 @@ jobs:
|
||||
rust: stable
|
||||
target: x86_64-pc-windows-msvc
|
||||
cross: false
|
||||
# 23.03: build issues
|
||||
- build: aarch64-macos
|
||||
os: macos-latest
|
||||
rust: stable
|
||||
target: aarch64-apple-darwin
|
||||
cross: false
|
||||
skip_tests: true # x86_64 host can't run aarch64 code
|
||||
# - build: riscv64-linux
|
||||
# os: ubuntu-22.04
|
||||
# rust: stable
|
||||
# target: riscv64gc-unknown-linux-gnu
|
||||
# cross: true
|
||||
# - build: x86_64-win-gnu
|
||||
# os: windows-2019
|
||||
# rust: stable-x86_64-gnu
|
||||
@@ -103,16 +104,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Download grammars
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
|
||||
- name: Move grammars under runtime
|
||||
if: "!startsWith(matrix.os, 'windows')"
|
||||
run: |
|
||||
mkdir -p runtime/grammars/sources
|
||||
tar xJf grammars/grammars.tar.xz -C runtime/grammars/sources
|
||||
tar xJf grammars.tar.xz -C runtime/grammars/sources
|
||||
|
||||
# The rust-toolchain action ignores rust-toolchain.toml files.
|
||||
# Removing this before building with cargo ensures that the rust-toolchain
|
||||
@@ -213,7 +214,7 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p dist
|
||||
if [ "${{ matrix.os }}" = "windows-2019" ]; then
|
||||
if [ "${{ matrix.os }}" = "windows-latest" ]; then
|
||||
cp "target/${{ matrix.target }}/opt/hx.exe" "dist/"
|
||||
else
|
||||
cp "target/${{ matrix.target }}/opt/hx" "dist/"
|
||||
@@ -234,9 +235,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v5
|
||||
|
||||
- name: Build archive
|
||||
shell: bash
|
||||
@@ -291,7 +292,7 @@ jobs:
|
||||
file_glob: true
|
||||
tag: ${{ github.ref_name }}
|
||||
overwrite: true
|
||||
|
||||
|
||||
- name: Upload binaries as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: env.preview == 'true'
|
||||
|
300
CHANGELOG.md
300
CHANGELOG.md
@@ -20,6 +20,306 @@ Updated languages and queries:
|
||||
Packaging:
|
||||
-->
|
||||
|
||||
# 25.07.1 (2025-07-18)
|
||||
|
||||
This is a patch release which lowers the GLIBC requirements of the release artifacts published to GitHub ([#13983](https://github.com/helix-editor/helix/pull/13983))
|
||||
|
||||
# 25.07 (2025-07-15)
|
||||
|
||||
As always, a big thank you to all of the contributors! This release saw changes from 195 contributors.
|
||||
|
||||
Breaking changes:
|
||||
|
||||
* The parsing of the command line has been rewritten and now supports flags and expansions ([#12527](https://github.com/helix-editor/helix/pull/12527), [#13018](https://github.com/helix-editor/helix/pull/13018), [9574e55](https://github.com/helix-editor/helix/commit/9574e55), [2d4c2a1](https://github.com/helix-editor/helix/commit/2d4c2a1), [#13192](https://github.com/helix-editor/helix/pull/13192), [67f1fe2](https://github.com/helix-editor/helix/commit/67f1fe2), [#13466](https://github.com/helix-editor/helix/pull/13466), [#13467](https://github.com/helix-editor/helix/pull/13467), [#13840](https://github.com/helix-editor/helix/pull/13840))
|
||||
* Quoting and spaces are now handled differently. This can break existing keymaps which use typable commands, in particular `:sh`, `:set-option` or `:toggle-option`.
|
||||
* The `:rsort` command has been removed. Use the reverse flag instead: `:sort --reverse`
|
||||
|
||||
Features:
|
||||
|
||||
* Add a picker which explores directories ([#11285](https://github.com/helix-editor/helix/pull/11285), [d4aed40](https://github.com/helix-editor/helix/commit/d4aed40))
|
||||
* Allow cycling through multiple LSP Hover responses with `A-n`/`A-p` ([#10122](https://github.com/helix-editor/helix/pull/10122), [2367b20](https://github.com/helix-editor/helix/commit/2367b20))
|
||||
* Add support for incomplete LSP completions ([5c1f3f8](https://github.com/helix-editor/helix/commit/5c1f3f8))
|
||||
* Add support for EditorConfig ([#13056](https://github.com/helix-editor/helix/pull/13056), [#13443](https://github.com/helix-editor/helix/pull/13443))
|
||||
* Add support for LSP document colors ([#12308](https://github.com/helix-editor/helix/pull/12308), [d43de14](https://github.com/helix-editor/helix/commit/d43de14), [47cdd23](https://github.com/helix-editor/helix/commit/47cdd23), [ba54b6a](https://github.com/helix-editor/helix/commit/ba54b6a), [#13188](https://github.com/helix-editor/helix/pull/13188))
|
||||
* Support expansions in external formatter arguments ([#13429](https://github.com/helix-editor/helix/pull/13429))
|
||||
* Switch out the highlighter for the `tree-house` crate ([#12972](https://github.com/helix-editor/helix/pull/12972), [09bc67a](https://github.com/helix-editor/helix/commit/09bc67a), [a7c3a43](https://github.com/helix-editor/helix/commit/a7c3a43), [3ceae88](https://github.com/helix-editor/helix/commit/3ceae88), [05ae617](https://github.com/helix-editor/helix/commit/05ae617), [5a1dcc2](https://github.com/helix-editor/helix/commit/5a1dcc2), [ebf96bd](https://github.com/helix-editor/helix/commit/ebf96bd), [#13644](https://github.com/helix-editor/helix/pull/13644), [b1f4717](https://github.com/helix-editor/helix/commit/b1f4717), [7410fe3](https://github.com/helix-editor/helix/commit/7410fe3), [633c5fb](https://github.com/helix-editor/helix/commit/633c5fb), [362e97e](https://github.com/helix-editor/helix/commit/362e97e), [#13828](https://github.com/helix-editor/helix/pull/13828), [6fd1efd](https://github.com/helix-editor/helix/commit/6fd1efd))
|
||||
* This fixes a number of highlighter bugs.
|
||||
* Locals like parameter highlights are now highlighted even when the definition is not in view.
|
||||
* Markdown is now injected into rust doc comments (`///` and `//!`).
|
||||
* Add support for the DAP `startDebugging` reverse request ([#13403](https://github.com/helix-editor/helix/pull/13403))
|
||||
|
||||
Commands:
|
||||
|
||||
* Add `copy_between_registers` for interactive copying between two registers ([066e938](https://github.com/helix-editor/helix/commit/066e938))
|
||||
* Add `extend_to_file_{start,end}`, select-mode variants of `goto_file_{start,end}` ([#11767](https://github.com/helix-editor/helix/pull/11767))
|
||||
* Add `:!` alias for `:sh` and `:|` for `:pipe` ([#13263](https://github.com/helix-editor/helix/pull/13263))
|
||||
* Add `goto_column` and `extend_to_column` ([#13440](https://github.com/helix-editor/helix/pull/13440))
|
||||
* Add an `--insensitive`/`-i` flag to the `:sort` command ([#13560](https://github.com/helix-editor/helix/pull/13560))
|
||||
* Add `rotate_selections_first` and `rotate_selections_last` ([#13615](https://github.com/helix-editor/helix/pull/13615))
|
||||
* Add a `--no-format` flag for all `:write` commands ([2f56091](https://github.com/helix-editor/helix/commit/2f56091))
|
||||
* Add a `--skip-visible` flag for `:buffer-close-others` and `:buffer-close-others!` ([#5393](https://github.com/helix-editor/helix/pull/5393))
|
||||
|
||||
Usability improvements:
|
||||
|
||||
* Replace current file using `A-ret` in pickers rather than loading it in the background ([#12605](https://github.com/helix-editor/helix/pull/12605))
|
||||
* Set multiple selections when passing a file with multiple locations to `hx` ([#12192](https://github.com/helix-editor/helix/pull/12192))
|
||||
* Add path completion for multiple cursors ([#12550](https://github.com/helix-editor/helix/pull/12550), [c9dc940](https://github.com/helix-editor/helix/commit/c9dc940))
|
||||
* Truncate long prompt lines with "…" ([#12036](https://github.com/helix-editor/helix/pull/12036), [9d6ea77](https://github.com/helix-editor/helix/commit/9d6ea77), [0b9701e](https://github.com/helix-editor/helix/commit/0b9701e), [d3fb8fc](https://github.com/helix-editor/helix/commit/d3fb8fc))
|
||||
* Allow specifying languages in `:lsp-stop` and `:lsp-restart` ([#12578](https://github.com/helix-editor/helix/pull/12578), [3d7e273](https://github.com/helix-editor/helix/commit/3d7e273))
|
||||
* Add `m` (nearest matching pair) to infobox popups for `md` and `mr` ([#12650](https://github.com/helix-editor/helix/pull/12650))
|
||||
* Add a hint message in the statusline when using `:sort` on a single selection ([#12585](https://github.com/helix-editor/helix/pull/12585))
|
||||
* Avoid wrapping around in `goto_{next,prev}_diag` ([#12704](https://github.com/helix-editor/helix/pull/12704))
|
||||
* Support responses from multiple language servers for LSP goto-definition (and declaration, type definition and implementation) and goto-references ([f7394d5](https://github.com/helix-editor/helix/commit/f7394d5), [1a821ac](https://github.com/helix-editor/helix/commit/1a821ac), [d285a8a](https://github.com/helix-editor/helix/commit/d285a8a))
|
||||
* Show formatter errors in `:format` ([47f84d0](https://github.com/helix-editor/helix/commit/47f84d0))
|
||||
* Show typable command docs in keybinding infobox popups when the command takes no arguments ([e9c16b7](https://github.com/helix-editor/helix/commit/e9c16b7))
|
||||
* Add per-command titles to register selection infobox popups for `select_register`, `insert_register` and `copy_between_registers` ([e0da129](https://github.com/helix-editor/helix/commit/e0da129))
|
||||
* Add container name column to the LSP symbol picker ([#12930](https://github.com/helix-editor/helix/pull/12930))
|
||||
* Add a theme key for highlighting directories in completions and picker items ([#12855](https://github.com/helix-editor/helix/pull/12855), [7bebe0a](https://github.com/helix-editor/helix/commit/7bebe0a))
|
||||
* Add `editor.trim-final-newlines` and `editor.trim-trailing-whitespace` config options ([aa20eb8](https://github.com/helix-editor/helix/commit/aa20eb8))
|
||||
* Warn when the configured theme is unusable because true-color is not available ([#13058](https://github.com/helix-editor/helix/pull/13058))
|
||||
* Allow configuring `[workspace-]diagnostic` statusline element severities ([#13288](https://github.com/helix-editor/helix/pull/13288), [b0528bb](https://github.com/helix-editor/helix/commit/b0528bb))
|
||||
* Improve completion for shell commands ([#12883](https://github.com/helix-editor/helix/pull/12883), [532f241](https://github.com/helix-editor/helix/commit/532f241))
|
||||
* Show the primary selection index in the `selections` statusline element when there are multiple selections ([#12326](https://github.com/helix-editor/helix/pull/12326))
|
||||
* Use configured language server names when possible in `--health` output ([#13573](https://github.com/helix-editor/helix/pull/13573))
|
||||
* Add a statusline element for indentation style ([#13632](https://github.com/helix-editor/helix/pull/13632))
|
||||
* Set the working directory of language server commands to the workspace root ([#13691](https://github.com/helix-editor/helix/pull/13691))
|
||||
* Avoid jumpiness in the picker preview for languages with non-default tab widths ([#13761](https://github.com/helix-editor/helix/pull/13761))
|
||||
* Add a config option for limiting LSP inlay hint length ([#13742](https://github.com/helix-editor/helix/pull/13742))
|
||||
* Improve heuristics used in the diff gutter ([#13722](https://github.com/helix-editor/helix/pull/13722))
|
||||
* Allow moving a file with `:move` when its old path does not exist ([#13748](https://github.com/helix-editor/helix/pull/13748))
|
||||
* Allow moving a file into a directory with `:move` ([#13922](https://github.com/helix-editor/helix/pull/13922))
|
||||
* Show human-readable file sizes in the statusline message for file writes ([#13627](https://github.com/helix-editor/helix/pull/13627))
|
||||
* Add diagnostic source to the diagnosics pickers ([#13758](https://github.com/helix-editor/helix/pull/13758))
|
||||
* Show all active scopes under the cursor in `:tree-sitter-highlight-name` ([4a85171](https://github.com/helix-editor/helix/commit/4a85171))
|
||||
* Auto-close the LSP code-actions popup ([#13832](https://github.com/helix-editor/helix/pull/13832))
|
||||
* Add a configuration option for controlling atomic writes to disk ([#13656](https://github.com/helix-editor/helix/pull/13656))
|
||||
|
||||
Fixes:
|
||||
|
||||
* Fix panic from using `search_selection_detect_word_boundaries` (`*`) at the end of the file ([#12611](https://github.com/helix-editor/helix/pull/12611))
|
||||
* Discard placeholder text for zero tabstop `${0:placeholder}` ([#12647](https://github.com/helix-editor/helix/pull/12647))
|
||||
* Fix panic in `goto_file` (`gf`) on file names with non-ASCII characters ([#12673](https://github.com/helix-editor/helix/pull/12673))
|
||||
* Only accept unmodified characters in `goto_word` (`gw`) ([f5f9f49](https://github.com/helix-editor/helix/commit/f5f9f49), [0364521](https://github.com/helix-editor/helix/commit/0364521))
|
||||
* Skip recording keys pressed by macros while recording a macro ([#12733](https://github.com/helix-editor/helix/pull/12733))
|
||||
* Deny unknown fields in `editor.smart-tab` config ([28047fe](https://github.com/helix-editor/helix/commit/28047fe))
|
||||
* Fix soft-wrap word boundary detection for Unicode combining accent characters ([#12483](https://github.com/helix-editor/helix/pull/12483))
|
||||
* Fix clearing of infobox popups in `select_register` and `insert_register` commands ([e882a75](https://github.com/helix-editor/helix/commit/e882a75))
|
||||
* Fix handling of `stderr` of DAP child processes ([d0d1693](https://github.com/helix-editor/helix/commit/d0d1693))
|
||||
* Cancel all pending requests when a DAP session terminates ([26db541](https://github.com/helix-editor/helix/commit/26db541))
|
||||
* Properly discard out-of-date diagnostics ([313a647](https://github.com/helix-editor/helix/commit/313a647))
|
||||
* Fix display of multiple language servers in `hx --health` ([#12841](https://github.com/helix-editor/helix/pull/12841))
|
||||
* Respect `editor.default-yank-register` in `:yank-joined` ([#12890](https://github.com/helix-editor/helix/pull/12890))
|
||||
* Escape percent character when pasting the history register into the picker ([#12886](https://github.com/helix-editor/helix/pull/12886))
|
||||
* Render rulers before the cursor ([2d3b75a](https://github.com/helix-editor/helix/commit/2d3b75a))
|
||||
* Avoid inserting final newlines in empty files ([67879a1](https://github.com/helix-editor/helix/commit/67879a1))
|
||||
* Gracefully handle partial failure in multi-language-server requests ([#13156](https://github.com/helix-editor/helix/pull/13156), [14cab4b](https://github.com/helix-editor/helix/commit/14cab4b))
|
||||
* Improve LSP progress message display in the statusline ([#13180](https://github.com/helix-editor/helix/pull/13180))
|
||||
* Fix behavior of `<esc>` removing added indentation in documents with CRLF line endings ([702a961](https://github.com/helix-editor/helix/commit/702a961))
|
||||
* Append changes to document history before pushing jumplist jumps ([#13619](https://github.com/helix-editor/helix/pull/13619))
|
||||
* Fix overflow in the display of large chunks of text in the signature-help component ([#13566](https://github.com/helix-editor/helix/pull/13566))
|
||||
* Fix panic from clearing whitespace when changing multiple selections on one line ([#13673](https://github.com/helix-editor/helix/pull/13673))
|
||||
* Include formatting options in LSP range formatting request ([#13734](https://github.com/helix-editor/helix/pull/13734))
|
||||
* Consistently set statusline errors when LSP features are not available ([#12577](https://github.com/helix-editor/helix/pull/12577))
|
||||
* Fix `goto_file` on Windows ([#13770](https://github.com/helix-editor/helix/pull/13770))
|
||||
* Fix crash in `goto_word` (`gw`) when `editor.jump-label-alphabet` is configured to be empty ([#13863](https://github.com/helix-editor/helix/pull/13863))
|
||||
* Fix `open_above` / `open_below` (`o` / `O`) when using a count on a document with CRLF line-endings ([#13905](https://github.com/helix-editor/helix/pull/13905))
|
||||
|
||||
Themes:
|
||||
|
||||
* Update `modus` themes ([#12670](https://github.com/helix-editor/helix/pull/12670))
|
||||
* Update `snazzy` ([#11089](https://github.com/helix-editor/helix/pull/11089))
|
||||
* Update `gruber-darker` ([#12797](https://github.com/helix-editor/helix/pull/12797))
|
||||
* Update `cyan_light` ([#12864](https://github.com/helix-editor/helix/pull/12864), [#12891](https://github.com/helix-editor/helix/pull/12891))
|
||||
* Update `onedarker` ([#12833](https://github.com/helix-editor/helix/pull/12833))
|
||||
* Update `github_light` ([#12907](https://github.com/helix-editor/helix/pull/12907))
|
||||
* Update `kanagawa` ([#12895](https://github.com/helix-editor/helix/pull/12895))
|
||||
* Add `beans` ([#12963](https://github.com/helix-editor/helix/pull/12963))
|
||||
* Update `base16_transparent` ([#13080](https://github.com/helix-editor/helix/pull/13080))
|
||||
* Update `sunset` ([#13086](https://github.com/helix-editor/helix/pull/13086))
|
||||
* Add `carbon` ([#13067](https://github.com/helix-editor/helix/pull/13067))
|
||||
* Update `soralized` ([#13121](https://github.com/helix-editor/helix/pull/13121))
|
||||
* Add `focus_nova` ([#13144](https://github.com/helix-editor/helix/pull/13144))
|
||||
* Update `onedark` ([#13166](https://github.com/helix-editor/helix/pull/13166))
|
||||
* Update `adwaita-light` ([#13174](https://github.com/helix-editor/helix/pull/13174))
|
||||
* Add `earl_grey` ([#13203](https://github.com/helix-editor/helix/pull/13203))
|
||||
* Update `spacebones` ([#13213](https://github.com/helix-editor/helix/pull/13213))
|
||||
* Add `peachpuff` ([#13225](https://github.com/helix-editor/helix/pull/13225))
|
||||
* Update catppuccin themes ([#13262](https://github.com/helix-editor/helix/pull/13262))
|
||||
* Update gruvbox themes ([#13315](https://github.com/helix-editor/helix/pull/13315))
|
||||
* Update serika themes ([#13341](https://github.com/helix-editor/helix/pull/13341))
|
||||
* Add `gruvbox-material` ([#13311](https://github.com/helix-editor/helix/pull/13311))
|
||||
* Add `ashen` ([#13366](https://github.com/helix-editor/helix/pull/13366))
|
||||
* Update Zed themes ([#13370](https://github.com/helix-editor/helix/pull/13370))
|
||||
* Update Tokyonight themes ([#13375](https://github.com/helix-editor/helix/pull/13375))
|
||||
* Update `onelight` ([#13413](https://github.com/helix-editor/helix/pull/13413))
|
||||
* Add `ataraxia` ([#13390](https://github.com/helix-editor/helix/pull/13390))
|
||||
* Add `vesper` ([#13394](https://github.com/helix-editor/helix/pull/13394))
|
||||
* Add `kinda_nvim` and `kinda_nvim_light` ([#13406](https://github.com/helix-editor/helix/pull/13406))
|
||||
* Update `sonokai` ([#13410](https://github.com/helix-editor/helix/pull/13410))
|
||||
* Add `nyxvamp` themes ([#12185](https://github.com/helix-editor/helix/pull/12185))
|
||||
* Update nord themes ([#13574](https://github.com/helix-editor/helix/pull/13574))
|
||||
* Add `lapis_aquamarine` ([#13726](https://github.com/helix-editor/helix/pull/13726))
|
||||
* Add `sidra` ([#13575](https://github.com/helix-editor/helix/pull/13575))
|
||||
* Add `dark-synthwave` ([#13857](https://github.com/helix-editor/helix/pull/13857))
|
||||
* Update `rose_pine` ([#13908](https://github.com/helix-editor/helix/pull/13908))
|
||||
* Add `doom-one` ([#13933](https://github.com/helix-editor/helix/pull/13933))
|
||||
* Update `nightfox` ([#13957](https://github.com/helix-editor/helix/pull/13957))
|
||||
|
||||
New languages:
|
||||
|
||||
* Ghostty config ([#12703](https://github.com/helix-editor/helix/pull/12703))
|
||||
* Tera ([#12756](https://github.com/helix-editor/helix/pull/12756))
|
||||
* FGA ([#12763](https://github.com/helix-editor/helix/pull/12763))
|
||||
* CSV ([#11973](https://github.com/helix-editor/helix/pull/11973))
|
||||
* Yara ([#12753](https://github.com/helix-editor/helix/pull/12753))
|
||||
* Djot ([#12562](https://github.com/helix-editor/helix/pull/12562))
|
||||
* Ink ([#12773](https://github.com/helix-editor/helix/pull/12773))
|
||||
* Mail ([#12945](https://github.com/helix-editor/helix/pull/12945))
|
||||
* SourcePawn ([#13028](https://github.com/helix-editor/helix/pull/13028))
|
||||
* TLA+ ([#13081](https://github.com/helix-editor/helix/pull/13081))
|
||||
* Werk ([#13136](https://github.com/helix-editor/helix/pull/13136))
|
||||
* Debian control file ([#13245](https://github.com/helix-editor/helix/pull/13245))
|
||||
* WESL ([#13267](https://github.com/helix-editor/helix/pull/13267))
|
||||
* Fennel ([#13260](https://github.com/helix-editor/helix/pull/13260), [6081a5d](https://github.com/helix-editor/helix/commit/6081a5d))
|
||||
* Quarto ([#13339](https://github.com/helix-editor/helix/pull/13339))
|
||||
* Pug ([#13435](https://github.com/helix-editor/helix/pull/13435))
|
||||
* Slang ([#13449](https://github.com/helix-editor/helix/pull/13449))
|
||||
* Dunst config ([#13458](https://github.com/helix-editor/helix/pull/13458))
|
||||
* Luau ([#13702](https://github.com/helix-editor/helix/pull/13702))
|
||||
* Caddyfile ([#13859](https://github.com/helix-editor/helix/pull/13859))
|
||||
* Java properties ([#13874](https://github.com/helix-editor/helix/pull/13874))
|
||||
* Git notes ([#13885](https://github.com/helix-editor/helix/pull/13885))
|
||||
* systemd (split from INI) ([#13907](https://github.com/helix-editor/helix/pull/13907))
|
||||
* JSON-LD (split from JSON) ([#13925](https://github.com/helix-editor/helix/pull/13925))
|
||||
* Django HTML ([#13935](https://github.com/helix-editor/helix/pull/13935))
|
||||
|
||||
Updated languages and queries:
|
||||
|
||||
* Add `ruby-lsp` for Ruby ([#12511](https://github.com/helix-editor/helix/pull/12511))
|
||||
* Add `wat_server` for Wat ([#12581](https://github.com/helix-editor/helix/pull/12581))
|
||||
* Recognize `bun.lock` as JSONC ([fcf981b](https://github.com/helix-editor/helix/commit/fcf981b))
|
||||
* Update tree-sitter-rust ([#12607](https://github.com/helix-editor/helix/pull/12607), [1afa63d](https://github.com/helix-editor/helix/commit/1afa63d))
|
||||
* Fix configuration of `cs-lsp` ([#12615](https://github.com/helix-editor/helix/pull/12615))
|
||||
* Add `beancount-language-server` for Beancount ([#12610](https://github.com/helix-editor/helix/pull/12610))
|
||||
* Update tree-sitter-fish ([#12456](https://github.com/helix-editor/helix/pull/12456))
|
||||
* Add `fish-lsp` for Fish ([#12456](https://github.com/helix-editor/helix/pull/12456))
|
||||
* Update tree-sitter-ini ([#12456](https://github.com/helix-editor/helix/pull/12456), [#13088](https://github.com/helix-editor/helix/pull/13088))
|
||||
* Recognize `hgrc` as INI ([#12456](https://github.com/helix-editor/helix/pull/12456))
|
||||
* Restrict tagged template injection languages for ECMA languages ([#12217](https://github.com/helix-editor/helix/pull/12217))
|
||||
* Update tree-sitter-zig ([#11980](https://github.com/helix-editor/helix/pull/11980), [#12708](https://github.com/helix-editor/helix/pull/12708))
|
||||
* Update tree-sitter-elixir ([8bf9adf](https://github.com/helix-editor/helix/commit/8bf9adf))
|
||||
* Add `asm-lsp` for Assembly dialects ([#12684](https://github.com/helix-editor/helix/pull/12684))
|
||||
* Update tree-sitter-just ([#12692](https://github.com/helix-editor/helix/pull/12692))
|
||||
* Update tree-sitter-cairo ([#12712](https://github.com/helix-editor/helix/pull/12712))
|
||||
* Configure a comment token for Svelte ([#12743](https://github.com/helix-editor/helix/pull/12743))
|
||||
* Recognize `.sublime-*` files ([#12750](https://github.com/helix-editor/helix/pull/12750))
|
||||
* Highlight `$` tagged templates as shell commands in ECMA languages ([#12751](https://github.com/helix-editor/helix/pull/12751))
|
||||
* Add `#'` comment token for R ([#12748](https://github.com/helix-editor/helix/pull/12748))
|
||||
* Fix module/namespace highlight in Unison ([93fa990](https://github.com/helix-editor/helix/commit/93fa990))
|
||||
* Add missing `#not-eq?` and `#not-match?` highlights in TSQ ([3824010](https://github.com/helix-editor/helix/commit/3824010))
|
||||
* Reverse the precedence order of highlight queries ([#9458](https://github.com/helix-editor/helix/pull/9458), [#12777](https://github.com/helix-editor/helix/pull/12777), [#12795](https://github.com/helix-editor/helix/pull/12795), [144a4f4](https://github.com/helix-editor/helix/commit/144a4f4), [e1c26eb](https://github.com/helix-editor/helix/commit/e1c26eb), [e1060a2](https://github.com/helix-editor/helix/commit/e1060a2), [7f41670](https://github.com/helix-editor/helix/commit/7f41670), [#13293](https://github.com/helix-editor/helix/pull/13293))
|
||||
* Update Rust highlights ([b8bfc44](https://github.com/helix-editor/helix/commit/b8bfc44), [#12871](https://github.com/helix-editor/helix/pull/12871), [#13664](https://github.com/helix-editor/helix/pull/13664))
|
||||
* Add block comment configuration for PHP ([0ab403d](https://github.com/helix-editor/helix/commit/0ab403d))
|
||||
* Update Gren highlights ([#12769](https://github.com/helix-editor/helix/pull/12769))
|
||||
* Remove `ERROR` node highlighting from all highlight queries ([16ff063](https://github.com/helix-editor/helix/commit/16ff063))
|
||||
* Update tree-sitter-erlang and highlights ([18b9eb9](https://github.com/helix-editor/helix/commit/18b9eb9), [9f3b193](https://github.com/helix-editor/helix/commit/9f3b193), [12139a4](https://github.com/helix-editor/helix/commit/12139a4))
|
||||
* Update Nix injections ([#12776](https://github.com/helix-editor/helix/pull/12776), [#12774](https://github.com/helix-editor/helix/pull/12774), [#13851](https://github.com/helix-editor/helix/pull/13851))
|
||||
* Add indent queries for Nix ([#12829](https://github.com/helix-editor/helix/pull/12829))
|
||||
* Update Markdown highlights ([#12696](https://github.com/helix-editor/helix/pull/12696))
|
||||
* Recognize `xsl` as XML ([#12834](https://github.com/helix-editor/helix/pull/12834))
|
||||
* Remove deprecated `typst-lsp` config ([5a66270](https://github.com/helix-editor/helix/commit/5a66270))
|
||||
* Replace `pkgbuild-language-server` with `termux-language-server` ([c3c9a0d](https://github.com/helix-editor/helix/commit/c3c9a0d))
|
||||
* Update SQL highlights ([#12837](https://github.com/helix-editor/helix/pull/12837))
|
||||
* Recognize `mpd` and `smil` as XML ([#12916](https://github.com/helix-editor/helix/pull/12916))
|
||||
* Add indents and textobjects for Kotlin ([#12925](https://github.com/helix-editor/helix/pull/12925))
|
||||
* Fix module highlights in Koto ([7e87a36](https://github.com/helix-editor/helix/commit/7e87a36))
|
||||
* Update language servers for Protobuf ([#12936](https://github.com/helix-editor/helix/pull/12936))
|
||||
* Add `astro-ls` for Astro ([#12939](https://github.com/helix-editor/helix/pull/12939))
|
||||
* Fix recognition of "scons*" files as Python ([#12943](https://github.com/helix-editor/helix/pull/12943))
|
||||
* Update C# queries ([#12948](https://github.com/helix-editor/helix/pull/12948))
|
||||
* Add comment textobject to TOML ([#12952](https://github.com/helix-editor/helix/pull/12952))
|
||||
* Add `starpls` as Starlark language server ([#12958](https://github.com/helix-editor/helix/pull/12958))
|
||||
* Add `pkl-lsp` for PKL ([#12962](https://github.com/helix-editor/helix/pull/12962))
|
||||
* Add `kdlfmt` formatter for KDL ([#12967](https://github.com/helix-editor/helix/pull/12967))
|
||||
* Update CSS highlights ([#12497](https://github.com/helix-editor/helix/pull/12497), [fed3edc](https://github.com/helix-editor/helix/commit/fed3edc))
|
||||
* Add `harper-ls` ([#13029](https://github.com/helix-editor/helix/pull/13029))
|
||||
* Change `wgsl_analyzer` to `wgsl-analyzer` ([#13063](https://github.com/helix-editor/helix/pull/13063))
|
||||
* Update tree-sitter-vhdl ([#13091](https://github.com/helix-editor/helix/pull/13091))
|
||||
* Update tree-sitter-openscad ([#13033](https://github.com/helix-editor/helix/pull/13033))
|
||||
* Update Rust injections ([694b615](https://github.com/helix-editor/helix/commit/694b615), [1bd7a39](https://github.com/helix-editor/helix/commit/1bd7a39))
|
||||
* Update Ruby highlights ([#13055](https://github.com/helix-editor/helix/pull/13055))
|
||||
* Recognize `gitconfig` as an extension ([#13115](https://github.com/helix-editor/helix/pull/13115))
|
||||
* Add `///` comment token for Amber ([#13122](https://github.com/helix-editor/helix/pull/13122))
|
||||
* Add indent queries for Starlark ([#13126](https://github.com/helix-editor/helix/pull/13126))
|
||||
* Recognize more systemd file types as INI ([#13139](https://github.com/helix-editor/helix/pull/13139))
|
||||
* Update scheme queries ([#13143](https://github.com/helix-editor/helix/pull/13143))
|
||||
* Recognize `tmTheme` as XML ([#13202](https://github.com/helix-editor/helix/pull/13202))
|
||||
* Update `golangci-lint` command for v2 ([#13204](https://github.com/helix-editor/helix/pull/13204))
|
||||
* Add `just-lsp` for Just ([#13276](https://github.com/helix-editor/helix/pull/13276))
|
||||
* Add a tree-sitter-prolog grammar ([#11611](https://github.com/helix-editor/helix/pull/11611))
|
||||
* Fix typos in Ada queries ([#13251](https://github.com/helix-editor/helix/pull/13251))
|
||||
* Update mint language server args ([#13248](https://github.com/helix-editor/helix/pull/13248))
|
||||
* Update typescript highlights ([#13250](https://github.com/helix-editor/helix/pull/13250))
|
||||
* Update tree-sitter-jjdescription ([#13329](https://github.com/helix-editor/helix/pull/13329))
|
||||
* Add injection queries for Quint ([#13322](https://github.com/helix-editor/helix/pull/13322))
|
||||
* Update tree-sitter-scss and highlights ([#13414](https://github.com/helix-editor/helix/pull/13414))
|
||||
* Update tree-sitter-go-mod ([#13395](https://github.com/helix-editor/helix/pull/13395))
|
||||
* Update tree-sitter-svelte ([#13423](https://github.com/helix-editor/helix/pull/13423))
|
||||
* Update Lua highlights ([#13401](https://github.com/helix-editor/helix/pull/13401))
|
||||
* Update Go highlights ([#13425](https://github.com/helix-editor/helix/pull/13425), [25b299a](https://github.com/helix-editor/helix/commit/25b299a), [#13825](https://github.com/helix-editor/helix/pull/13825))
|
||||
* Recognize `.git-blame-ignore-revs` as gitignore ([#13460](https://github.com/helix-editor/helix/pull/13460))
|
||||
* Update Verilog highlights ([#13473](https://github.com/helix-editor/helix/pull/13473), [#13493](https://github.com/helix-editor/helix/pull/13493))
|
||||
* Update tree-sitter-v ([#13469](https://github.com/helix-editor/helix/pull/13469))
|
||||
* Update WGSL highlights ([#13479](https://github.com/helix-editor/helix/pull/13479))
|
||||
* Update Bash highlights ([#13477](https://github.com/helix-editor/helix/pull/13477))
|
||||
* Update tree-sitter-cpp ([#13504](https://github.com/helix-editor/helix/pull/13504))
|
||||
* Update rust-analyzer config to use server-side file watching ([#13432](https://github.com/helix-editor/helix/pull/13432))
|
||||
* Update Vue injections ([#13511](https://github.com/helix-editor/helix/pull/13511))
|
||||
* Recognize `sld` as Scheme ([#13528](https://github.com/helix-editor/helix/pull/13528))
|
||||
* Recognize more files as git-attributes ([#13540](https://github.com/helix-editor/helix/pull/13540))
|
||||
* Update tree-sitter-haskell and queries ([#13475](https://github.com/helix-editor/helix/pull/13475))
|
||||
* Align INI highlights with TOML ([#13589](https://github.com/helix-editor/helix/pull/13589))
|
||||
* Add tree-sitter-rust-format-args for `format_args!` injections in Rust ([#13533](https://github.com/helix-editor/helix/pull/13533), [#13657](https://github.com/helix-editor/helix/pull/13657), [4dd4ba7](https://github.com/helix-editor/helix/commit/4dd4ba7), [86f10ae](https://github.com/helix-editor/helix/commit/86f10ae))
|
||||
* Update Ungrammar highlights ([8d58f6c](https://github.com/helix-editor/helix/commit/8d58f6c))
|
||||
* Add `ty` language server for Python ([#13643](https://github.com/helix-editor/helix/pull/13643))
|
||||
* Add `clarinet` language server for Clarity ([#13647](https://github.com/helix-editor/helix/pull/13647))
|
||||
* Update prisma config to avoid a crash in the language server ([f6878f6](https://github.com/helix-editor/helix/commit/f6878f6))
|
||||
* Add `pyrefly` for Python ([#13713](https://github.com/helix-editor/helix/pull/13713))
|
||||
* Update Python highlights ([#13715](https://github.com/helix-editor/helix/pull/13715))
|
||||
* Update Mojo language server and formatter to `pixi` ([#13648](https://github.com/helix-editor/helix/pull/13648))
|
||||
* Add `tombi` for TOML ([#13723](https://github.com/helix-editor/helix/pull/13723))
|
||||
* Add `neocmakelsp` for CMake ([#13740](https://github.com/helix-editor/helix/pull/13740))
|
||||
* Update C and C++ highlights ([#13747](https://github.com/helix-editor/helix/pull/13747), [#13772](https://github.com/helix-editor/helix/pull/13772))
|
||||
* Highlight escape sequences in ECMA languages ([#13762](https://github.com/helix-editor/helix/pull/13762))
|
||||
* Add an external formatter config for Crystal ([#13759](https://github.com/helix-editor/helix/pull/13759))
|
||||
* Add `amber-lsp` for Amber ([#13763](https://github.com/helix-editor/helix/pull/13763))
|
||||
* Update HTML highlights ([#13753](https://github.com/helix-editor/helix/pull/13753))
|
||||
* Update tree-sitter-purescript and highlights ([#13782](https://github.com/helix-editor/helix/pull/13782))
|
||||
* Update tree-sitter-gleam and highlights ([#13793](https://github.com/helix-editor/helix/pull/13793), [#13807](https://github.com/helix-editor/helix/pull/13807), [#13813](https://github.com/helix-editor/helix/pull/13813))
|
||||
* Recognize Buck files as Starlark ([#13810](https://github.com/helix-editor/helix/pull/13810))
|
||||
* Use tree-sitter-crystal instead of tree-sitter-ruby for Crystal and add custom queries ([#13805](https://github.com/helix-editor/helix/pull/13805))
|
||||
* Update tree-sitter-twig ([#13689](https://github.com/helix-editor/helix/pull/13689))
|
||||
* Recognize `jsconfig.json` as JSONC, use as JavaScript and JSX roots ([#13822](https://github.com/helix-editor/helix/pull/13822))
|
||||
* Recognize `.gem/credentials` as YAML ([#13843](https://github.com/helix-editor/helix/pull/13843))
|
||||
* Update Dockerfile injections ([#13845](https://github.com/helix-editor/helix/pull/13845), [#13852](https://github.com/helix-editor/helix/pull/13852))
|
||||
* Change tree-sitter parser for Git commit message files ([44293df](https://github.com/helix-editor/helix/commit/44293df))
|
||||
* Recognize `mimeapps.list` as INI ([#13850](https://github.com/helix-editor/helix/pull/13850))
|
||||
* Update tree-sitter-odin, highlights and indents ([#13877](https://github.com/helix-editor/helix/pull/13877), [#13917](https://github.com/helix-editor/helix/pull/13917))
|
||||
* Add locals queries for C, improve parameter highlighting ([#13876](https://github.com/helix-editor/helix/pull/13876))
|
||||
* Add textobjects for QML ([#13855](https://github.com/helix-editor/helix/pull/13855))
|
||||
* Add comment tokens for DTD ([#13904](https://github.com/helix-editor/helix/pull/13904))
|
||||
* Add `dts-lsp` for DeviceTree ([#13907](https://github.com/helix-editor/helix/pull/13907))
|
||||
* Update gomod highlights ([#13913](https://github.com/helix-editor/helix/pull/13913))
|
||||
* Recognize `compose.yaml` and `compose.yml` as Docker Compose ([#13930](https://github.com/helix-editor/helix/pull/13930))
|
||||
|
||||
Packaging:
|
||||
|
||||
* Fix handling of spaces in Bash completion ([#12828](https://github.com/helix-editor/helix/pull/12828))
|
||||
* Refactor Nix flake ([#12831](https://github.com/helix-editor/helix/pull/12831), [#13024](https://github.com/helix-editor/helix/pull/13024), [cb1ecc9](https://github.com/helix-editor/helix/commit/cb1ecc9), [#13305](https://github.com/helix-editor/helix/pull/13305))
|
||||
* Add `ConsoleOnly` to `Helix.desktop` categories ([#13236](https://github.com/helix-editor/helix/pull/13236))
|
||||
* Drop Nix flake dependency on flake-utils ([60a03a3](https://github.com/helix-editor/helix/commit/60a03a3))
|
||||
* Increase the MSRV to 1.82 ([#13275](https://github.com/helix-editor/helix/pull/13275))
|
||||
|
||||
# 25.01.1 (2025-01-19)
|
||||
|
||||
25.01.1 is a patch release focusing on fixing bugs and panics from changes in 25.01.
|
||||
|
856
Cargo.lock
generated
856
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
13
Cargo.toml
13
Cargo.toml
@@ -37,19 +37,24 @@ package.helix-tui.opt-level = 2
|
||||
package.helix-term.opt-level = 2
|
||||
|
||||
[workspace.dependencies]
|
||||
tree-sitter = { version = "0.22" }
|
||||
tree-house = { version = "0.3.0", default-features = false }
|
||||
nucleo = "0.5.0"
|
||||
slotmap = "1.0.7"
|
||||
thiserror = "2.0"
|
||||
tempfile = "3.19.1"
|
||||
tempfile = "3.21.0"
|
||||
bitflags = "2.9"
|
||||
unicode-segmentation = "1.2"
|
||||
ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
|
||||
foldhash = "0.1"
|
||||
foldhash = "0.2"
|
||||
parking_lot = "0.12"
|
||||
futures-executor = "0.3"
|
||||
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
|
||||
tokio-stream = "0.1.17"
|
||||
toml = "0.9"
|
||||
termina = "0.1.0"
|
||||
|
||||
[workspace.package]
|
||||
version = "25.1.1"
|
||||
version = "25.7.1"
|
||||
edition = "2021"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
categories = ["editor"]
|
||||
|
@@ -11,6 +11,7 @@
|
||||
- [Textobjects](./textobjects.md)
|
||||
- [Syntax aware motions](./syntax-aware-motions.md)
|
||||
- [Pickers](./pickers.md)
|
||||
- [Jumplist](./jumplist.md)
|
||||
- [Keymap](./keymap.md)
|
||||
- [Command line](./command-line.md)
|
||||
- [Commands](./commands.md)
|
||||
@@ -28,3 +29,5 @@
|
||||
- [Adding textobject queries](./guides/textobject.md)
|
||||
- [Adding indent queries](./guides/indent.md)
|
||||
- [Adding injection queries](./guides/injection.md)
|
||||
- [Adding tags queries](./guides/tags.md)
|
||||
- [Adding rainbow bracket queries](./guides/rainbow_bracket_queries.md)
|
||||
|
@@ -35,10 +35,19 @@ RUSTFLAGS="-C target-feature=-crt-static"
|
||||
2. Compile from source:
|
||||
|
||||
```sh
|
||||
# Reproducible
|
||||
cargo install --path helix-term --locked
|
||||
```
|
||||
```sh
|
||||
# Optimized
|
||||
cargo install \
|
||||
--profile opt \
|
||||
--config 'build.rustflags=["-C", "target-cpu=native"]' \
|
||||
--path helix-term \
|
||||
--locked
|
||||
```
|
||||
|
||||
This command will create the `hx` executable and construct the tree-sitter
|
||||
Either command will create the `hx` executable and construct the tree-sitter
|
||||
grammars in the local `runtime` folder.
|
||||
|
||||
> 💡 If you do not want to fetch or build grammars, set an environment variable `HELIX_DISABLE_AUTO_GRAMMAR_BUILD`
|
||||
@@ -182,7 +191,7 @@ cargo deb -- --locked
|
||||
```
|
||||
|
||||
> 💡 This locks you into the `--release` profile. But you can also build helix in any way you like.
|
||||
> As long as you leave a `target/release/hx` file, it will get packaged with `cargo deb --no-build`
|
||||
> As long as you leave a `target/release/hx` file, it will get packaged with `cargo deb --no-build`
|
||||
|
||||
> 💡 Don't worry about the following:
|
||||
> ```
|
||||
|
@@ -47,6 +47,12 @@ The following variables are supported:
|
||||
| `cursor_column` | The column number of the primary cursor in the currently focused document, starting at 1. This is counted as the number of grapheme clusters from the start of the line rather than bytes or codepoints. |
|
||||
| `buffer_name` | The relative path of the currently focused document. `[scratch]` is expanded instead for scratch buffers. |
|
||||
| `line_ending` | A string containing the line ending of the currently focused document. For example on Unix systems this is usually a line-feed character (`\n`) but on Windows systems this may be a carriage-return plus a line-feed (`\r\n`). The line ending kind of the currently focused document can be inspected with the `:line-ending` command. |
|
||||
| `current_working_directory` | Current working directory |
|
||||
| `workspace_directory` | Nearest ancestor directory of the current working directory that contains `.git`, `.svn`, `.jj` or `.helix` |
|
||||
| `language` | A string containing the language name of the currently focused document.|
|
||||
| `selection` | A string containing the contents of the primary selection of the currently focused document. |
|
||||
| `selection_line_start` | The line number of the start of the primary selection in the currently focused document, starting at 1. |
|
||||
| `selection_line_end` | The line number of the end of the primary selection in the currently focused document, starting at 1. |
|
||||
|
||||
Aside from editor variables, the following expansions may be used:
|
||||
|
||||
|
@@ -19,6 +19,7 @@
|
||||
- [`[editor.soft-wrap]` Section](#editorsoft-wrap-section)
|
||||
- [`[editor.smart-tab]` Section](#editorsmart-tab-section)
|
||||
- [`[editor.inline-diagnostics]` Section](#editorinline-diagnostics-section)
|
||||
- [`[editor.word-completion]` Section](#editorword-completion-section)
|
||||
|
||||
### `[editor]` Section
|
||||
|
||||
@@ -53,14 +54,16 @@
|
||||
| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml` | `[]` |
|
||||
| `default-line-ending` | The line ending to use for new documents. Can be `native`, `lf`, `crlf`, `ff`, `cr` or `nel`. `native` uses the platform's native line ending (`crlf` on Windows, otherwise `lf`). | `native` |
|
||||
| `insert-final-newline` | Whether to automatically insert a trailing line-ending on write if missing | `true` |
|
||||
| `atomic-save` | Whether to use atomic operations to write documents to disk. This prevents data loss if the editor is interrupted while writing the file, but may confuse some file watching/hot reloading programs. | `true` |
|
||||
| `trim-final-newlines` | Whether to automatically remove line-endings after the final one on write | `false` |
|
||||
| `trim-trailing-whitespace` | Whether to automatically remove whitespace preceding line endings on write | `false` |
|
||||
| `popup-border` | Draw border around `popup`, `menu`, `all`, or `none` | `none` |
|
||||
| `indent-heuristic` | How the indentation for a newly inserted line is computed: `simple` just copies the indentation level from the previous line, `tree-sitter` computes the indentation based on the syntax tree and `hybrid` combines both approaches. If the chosen heuristic is not available, a different one will be used as a fallback (the fallback order being `hybrid` -> `tree-sitter` -> `simple`). | `hybrid`
|
||||
| `jump-label-alphabet` | The characters that are used to generate two character jump labels. Characters at the start of the alphabet are used first. | `"abcdefghijklmnopqrstuvwxyz"`
|
||||
| `end-of-line-diagnostics` | Minimum severity of diagnostics to render at the end of the line. Set to `disable` to disable entirely. Refer to the setting about `inline-diagnostics` for more details | "disable"
|
||||
| `end-of-line-diagnostics` | Minimum severity of diagnostics to render at the end of the line. Set to `disable` to disable entirely. Refer to the setting about `inline-diagnostics` for more details | `"hint"`
|
||||
| `clipboard-provider` | Which API to use for clipboard interaction. One of `pasteboard` (MacOS), `wayland`, `x-clip`, `x-sel`, `win-32-yank`, `termux`, `tmux`, `windows`, `termcode`, `none`, or a custom command set. | Platform and environment specific. |
|
||||
| `editor-config` | Whether to read settings from [EditorConfig](https://editorconfig.org) files | `true` |
|
||||
| `rainbow-brackets` | Whether to render rainbow colors for matching brackets. Requires tree-sitter `rainbows.scm` queries for the language. | `false` |
|
||||
|
||||
### `[editor.clipboard-provider]` Section
|
||||
|
||||
@@ -130,15 +133,17 @@ The following statusline elements can be configured:
|
||||
| `file-name` | The path/name of the opened file |
|
||||
| `file-absolute-path` | The absolute path/name of the opened file |
|
||||
| `file-base-name` | The basename of the opened file |
|
||||
| `current-working-directory` | The current working directory |
|
||||
| `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) |
|
||||
| `file-encoding` | The encoding of the opened file if it differs from UTF-8 |
|
||||
| `file-line-ending` | The file line endings (CRLF or LF) |
|
||||
| `file-indent-style` | The file indentation style |
|
||||
| `read-only-indicator` | An indicator that shows `[readonly]` when a file cannot be written |
|
||||
| `total-line-numbers` | The total line numbers of the opened file |
|
||||
| `file-type` | The type of the opened file |
|
||||
| `diagnostics` | The number of warnings and/or errors |
|
||||
| `workspace-diagnostics` | The number of warnings and/or errors on workspace |
|
||||
| `selections` | The number of active selections |
|
||||
| `selections` | The primary selection index out of the number of active selections |
|
||||
| `primary-selection-length` | The number of characters currently in primary selection |
|
||||
| `position` | The cursor position |
|
||||
| `position-percentage` | The cursor position as a percentage of the total number of lines |
|
||||
@@ -156,6 +161,7 @@ The following statusline elements can be configured:
|
||||
| `display-progress-messages` | Display LSP progress messages below statusline[^1] | `false` |
|
||||
| `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` |
|
||||
| `display-inlay-hints` | Display inlay hints[^2] | `false` |
|
||||
| `inlay-hints-length-limit` | Maximum displayed length (non-zero number) of inlay hints | Unset by default |
|
||||
| `display-color-swatches` | Show color swatches next to colors | `true` |
|
||||
| `display-signature-help-docs` | Display docs under signature help popup | `true` |
|
||||
| `snippets` | Enables snippet completions. Requires a server restart (`:lsp-restart`) to take effect after `:config-reload`/`:set`. | `true` |
|
||||
@@ -447,7 +453,7 @@ fn main() {
|
||||
|
||||
| Key | Description | Default |
|
||||
|------------|-------------|---------|
|
||||
| `cursor-line` | The minimum severity that a diagnostic must have to be shown inline on the line that contains the primary cursor. Set to `disable` to not show any diagnostics inline. This option does not have any effect when in insert-mode and will only take effect 350ms after moving the cursor to a different line. | `"disable"` |
|
||||
| `cursor-line` | The minimum severity that a diagnostic must have to be shown inline on the line that contains the primary cursor. Set to `disable` to not show any diagnostics inline. This option does not have any effect when in insert-mode and will only take effect 350ms after moving the cursor to a different line. | `"warning"` |
|
||||
| `other-lines` | The minimum severity that a diagnostic must have to be shown inline on a line that does not contain the cursor-line. Set to `disable` to not show any diagnostics inline. | `"disable"` |
|
||||
| `prefix-len` | How many horizontal bars `─` are rendered before the diagnostic text. | `1` |
|
||||
| `max-wrap` | Equivalent of the `editor.soft-wrap.max-wrap` option for diagnostics. | `20` |
|
||||
@@ -465,12 +471,20 @@ fn main() {
|
||||
}
|
||||
```
|
||||
|
||||
### `[editor.word-completion]` Section
|
||||
|
||||
The new diagnostic rendering is not yet enabled by default. As soon as end of line or inline diagnostics are enabled the old diagnostics rendering is automatically disabled. The recommended default settings are:
|
||||
Options for controlling completion of words from open buffers.
|
||||
|
||||
| Key | Description | Default |
|
||||
| --- | --- | --- |
|
||||
| `enable` | Whether word completion is enabled | `true` |
|
||||
| `trigger-length` | Number of word characters to type before triggering completion | `7` |
|
||||
|
||||
Example:
|
||||
|
||||
```toml
|
||||
[editor]
|
||||
end-of-line-diagnostics = "hint"
|
||||
[editor.inline-diagnostics]
|
||||
cursor-line = "warning" # show warnings and errors on the cursorline inline
|
||||
[editor.word-completion]
|
||||
enable = true
|
||||
# Set the trigger length lower so that words are completed more often
|
||||
trigger-length = 4
|
||||
```
|
||||
|
@@ -1,264 +1,299 @@
|
||||
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default language servers |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| ada | ✓ | ✓ | | `ada_language_server` |
|
||||
| adl | ✓ | ✓ | ✓ | |
|
||||
| agda | ✓ | | | |
|
||||
| amber | ✓ | | | |
|
||||
| astro | ✓ | | | `astro-ls` |
|
||||
| awk | ✓ | ✓ | | `awk-language-server` |
|
||||
| bash | ✓ | ✓ | ✓ | `bash-language-server` |
|
||||
| bass | ✓ | | | `bass` |
|
||||
| beancount | ✓ | | | `beancount-language-server` |
|
||||
| bibtex | ✓ | | | `texlab` |
|
||||
| bicep | ✓ | | | `bicep-langserver` |
|
||||
| bitbake | ✓ | | | `bitbake-language-server` |
|
||||
| blade | ✓ | | | |
|
||||
| blueprint | ✓ | | | `blueprint-compiler` |
|
||||
| c | ✓ | ✓ | ✓ | `clangd` |
|
||||
| c-sharp | ✓ | ✓ | | `OmniSharp` |
|
||||
| cabal | | | | `haskell-language-server-wrapper` |
|
||||
| cairo | ✓ | ✓ | ✓ | `cairo-language-server` |
|
||||
| capnp | ✓ | | ✓ | |
|
||||
| cel | ✓ | | | |
|
||||
| circom | ✓ | | | `circom-lsp` |
|
||||
| clojure | ✓ | | | `clojure-lsp` |
|
||||
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
|
||||
| codeql | ✓ | ✓ | | `codeql` |
|
||||
| comment | ✓ | | | |
|
||||
| common-lisp | ✓ | | ✓ | `cl-lsp` |
|
||||
| cpon | ✓ | | ✓ | |
|
||||
| cpp | ✓ | ✓ | ✓ | `clangd` |
|
||||
| crystal | ✓ | ✓ | | `crystalline` |
|
||||
| css | ✓ | | ✓ | `vscode-css-language-server` |
|
||||
| csv | ✓ | | | |
|
||||
| cue | ✓ | | | `cuelsp` |
|
||||
| cylc | ✓ | ✓ | ✓ | |
|
||||
| d | ✓ | ✓ | ✓ | `serve-d` |
|
||||
| dart | ✓ | ✓ | ✓ | `dart` |
|
||||
| dbml | ✓ | | | |
|
||||
| debian | ✓ | | | |
|
||||
| devicetree | ✓ | | | |
|
||||
| dhall | ✓ | ✓ | | `dhall-lsp-server` |
|
||||
| diff | ✓ | | | |
|
||||
| djot | ✓ | | | |
|
||||
| docker-compose | ✓ | ✓ | ✓ | `docker-compose-langserver`, `yaml-language-server` |
|
||||
| dockerfile | ✓ | ✓ | | `docker-langserver` |
|
||||
| dot | ✓ | | | `dot-language-server` |
|
||||
| dtd | ✓ | | | |
|
||||
| dune | ✓ | | | |
|
||||
| earthfile | ✓ | ✓ | ✓ | `earthlyls` |
|
||||
| edoc | ✓ | | | |
|
||||
| eex | ✓ | | | |
|
||||
| ejs | ✓ | | | |
|
||||
| elisp | ✓ | | | |
|
||||
| elixir | ✓ | ✓ | ✓ | `elixir-ls` |
|
||||
| elm | ✓ | ✓ | | `elm-language-server` |
|
||||
| elvish | ✓ | | | `elvish` |
|
||||
| env | ✓ | ✓ | | |
|
||||
| erb | ✓ | | | |
|
||||
| erlang | ✓ | ✓ | | `erlang_ls`, `elp` |
|
||||
| esdl | ✓ | | | |
|
||||
| fennel | ✓ | | | `fennel-ls` |
|
||||
| fga | ✓ | ✓ | ✓ | |
|
||||
| fidl | ✓ | | | |
|
||||
| fish | ✓ | ✓ | ✓ | `fish-lsp` |
|
||||
| forth | ✓ | | | `forth-lsp` |
|
||||
| fortran | ✓ | | ✓ | `fortls` |
|
||||
| fsharp | ✓ | | | `fsautocomplete` |
|
||||
| gas | ✓ | ✓ | | `asm-lsp` |
|
||||
| gdscript | ✓ | ✓ | ✓ | |
|
||||
| gemini | ✓ | | | |
|
||||
| gherkin | ✓ | | | |
|
||||
| ghostty | ✓ | | | |
|
||||
| git-attributes | ✓ | | | |
|
||||
| git-commit | ✓ | ✓ | | |
|
||||
| git-config | ✓ | ✓ | | |
|
||||
| git-ignore | ✓ | | | |
|
||||
| git-rebase | ✓ | | | |
|
||||
| gjs | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
|
||||
| gleam | ✓ | ✓ | | `gleam` |
|
||||
| glimmer | ✓ | | | `ember-language-server` |
|
||||
| glsl | ✓ | ✓ | ✓ | `glsl_analyzer` |
|
||||
| gn | ✓ | | | |
|
||||
| go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
|
||||
| godot-resource | ✓ | ✓ | | |
|
||||
| gomod | ✓ | | | `gopls` |
|
||||
| gotmpl | ✓ | | | `gopls` |
|
||||
| gowork | ✓ | | | `gopls` |
|
||||
| gpr | ✓ | | | `ada_language_server` |
|
||||
| graphql | ✓ | ✓ | | `graphql-lsp` |
|
||||
| gren | ✓ | ✓ | | |
|
||||
| groovy | ✓ | | | |
|
||||
| gts | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
|
||||
| hare | ✓ | | | |
|
||||
| haskell | ✓ | ✓ | | `haskell-language-server-wrapper` |
|
||||
| haskell-persistent | ✓ | | | |
|
||||
| hcl | ✓ | ✓ | ✓ | `terraform-ls` |
|
||||
| heex | ✓ | ✓ | | `elixir-ls` |
|
||||
| helm | ✓ | | | `helm_ls` |
|
||||
| hocon | ✓ | ✓ | ✓ | |
|
||||
| hoon | ✓ | | | |
|
||||
| hosts | ✓ | | | |
|
||||
| html | ✓ | | | `vscode-html-language-server`, `superhtml` |
|
||||
| hurl | ✓ | ✓ | ✓ | |
|
||||
| hyprlang | ✓ | | ✓ | `hyprls` |
|
||||
| idris | | | | `idris2-lsp` |
|
||||
| iex | ✓ | | | |
|
||||
| ini | ✓ | | | |
|
||||
| ink | ✓ | | | |
|
||||
| inko | ✓ | ✓ | ✓ | |
|
||||
| janet | ✓ | | | |
|
||||
| java | ✓ | ✓ | ✓ | `jdtls` |
|
||||
| javascript | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| jinja | ✓ | | | |
|
||||
| jjdescription | ✓ | | | |
|
||||
| jq | ✓ | ✓ | | `jq-lsp` |
|
||||
| jsdoc | ✓ | | | |
|
||||
| json | ✓ | ✓ | ✓ | `vscode-json-language-server` |
|
||||
| json5 | ✓ | | | |
|
||||
| jsonc | ✓ | | ✓ | `vscode-json-language-server` |
|
||||
| jsonnet | ✓ | | | `jsonnet-language-server` |
|
||||
| jsx | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| julia | ✓ | ✓ | ✓ | `julia` |
|
||||
| just | ✓ | ✓ | ✓ | `just-lsp` |
|
||||
| kdl | ✓ | ✓ | ✓ | |
|
||||
| koka | ✓ | | ✓ | `koka` |
|
||||
| kotlin | ✓ | ✓ | ✓ | `kotlin-language-server` |
|
||||
| koto | ✓ | ✓ | ✓ | `koto-ls` |
|
||||
| latex | ✓ | ✓ | | `texlab` |
|
||||
| ld | ✓ | | ✓ | |
|
||||
| ldif | ✓ | | | |
|
||||
| lean | ✓ | | | `lean` |
|
||||
| ledger | ✓ | | | |
|
||||
| llvm | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir-yaml | ✓ | | ✓ | |
|
||||
| log | ✓ | | | |
|
||||
| lpf | ✓ | | | |
|
||||
| lua | ✓ | ✓ | ✓ | `lua-language-server` |
|
||||
| mail | ✓ | ✓ | | |
|
||||
| make | ✓ | | ✓ | |
|
||||
| markdoc | ✓ | | | `markdoc-ls` |
|
||||
| markdown | ✓ | | | `marksman`, `markdown-oxide` |
|
||||
| markdown.inline | ✓ | | | |
|
||||
| matlab | ✓ | ✓ | ✓ | |
|
||||
| mermaid | ✓ | | | |
|
||||
| meson | ✓ | | ✓ | `mesonlsp` |
|
||||
| mint | | | | `mint` |
|
||||
| mojo | ✓ | ✓ | ✓ | `magic` |
|
||||
| move | ✓ | | | |
|
||||
| msbuild | ✓ | | ✓ | |
|
||||
| nasm | ✓ | ✓ | | `asm-lsp` |
|
||||
| nestedtext | ✓ | ✓ | ✓ | |
|
||||
| nginx | ✓ | | | |
|
||||
| nickel | ✓ | | ✓ | `nls` |
|
||||
| nim | ✓ | ✓ | ✓ | `nimlangserver` |
|
||||
| nix | ✓ | ✓ | ✓ | `nil`, `nixd` |
|
||||
| nu | ✓ | | | `nu` |
|
||||
| nunjucks | ✓ | | | |
|
||||
| ocaml | ✓ | | ✓ | `ocamllsp` |
|
||||
| ocaml-interface | ✓ | | | `ocamllsp` |
|
||||
| odin | ✓ | ✓ | ✓ | `ols` |
|
||||
| ohm | ✓ | ✓ | ✓ | |
|
||||
| opencl | ✓ | ✓ | ✓ | `clangd` |
|
||||
| openscad | ✓ | | | `openscad-lsp` |
|
||||
| org | ✓ | | | |
|
||||
| pascal | ✓ | ✓ | | `pasls` |
|
||||
| passwd | ✓ | | | |
|
||||
| pem | ✓ | | | |
|
||||
| perl | ✓ | ✓ | ✓ | `perlnavigator` |
|
||||
| pest | ✓ | ✓ | ✓ | `pest-language-server` |
|
||||
| php | ✓ | ✓ | ✓ | `intelephense` |
|
||||
| php-only | ✓ | | | |
|
||||
| pkgbuild | ✓ | ✓ | ✓ | `termux-language-server`, `bash-language-server` |
|
||||
| pkl | ✓ | | ✓ | `pkl-lsp` |
|
||||
| po | ✓ | ✓ | | |
|
||||
| pod | ✓ | | | |
|
||||
| ponylang | ✓ | ✓ | ✓ | |
|
||||
| powershell | ✓ | | | |
|
||||
| prisma | ✓ | ✓ | | `prisma-language-server` |
|
||||
| prolog | ✓ | | ✓ | `swipl` |
|
||||
| protobuf | ✓ | ✓ | ✓ | `buf`, `pb`, `protols` |
|
||||
| prql | ✓ | | | |
|
||||
| purescript | ✓ | ✓ | | `purescript-language-server` |
|
||||
| python | ✓ | ✓ | ✓ | `ruff`, `jedi-language-server`, `pylsp` |
|
||||
| qml | ✓ | | ✓ | `qmlls` |
|
||||
| quarto | ✓ | | ✓ | |
|
||||
| quint | ✓ | | | `quint-language-server` |
|
||||
| r | ✓ | | | `R` |
|
||||
| racket | ✓ | | ✓ | `racket` |
|
||||
| regex | ✓ | | | |
|
||||
| rego | ✓ | | | `regols` |
|
||||
| rescript | ✓ | ✓ | | `rescript-language-server` |
|
||||
| rmarkdown | ✓ | | ✓ | `R` |
|
||||
| robot | ✓ | | | `robotframework_ls` |
|
||||
| ron | ✓ | | ✓ | |
|
||||
| rst | ✓ | | | |
|
||||
| ruby | ✓ | ✓ | ✓ | `ruby-lsp`, `solargraph` |
|
||||
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
|
||||
| sage | ✓ | ✓ | | |
|
||||
| scala | ✓ | ✓ | ✓ | `metals` |
|
||||
| scheme | ✓ | | ✓ | |
|
||||
| scss | ✓ | | | `vscode-css-language-server` |
|
||||
| slint | ✓ | ✓ | ✓ | `slint-lsp` |
|
||||
| smali | ✓ | | ✓ | |
|
||||
| smithy | ✓ | | | `cs` |
|
||||
| sml | ✓ | | | |
|
||||
| snakemake | ✓ | | ✓ | `pylsp` |
|
||||
| solidity | ✓ | ✓ | | `solc` |
|
||||
| sourcepawn | ✓ | ✓ | | `sourcepawn-studio` |
|
||||
| spade | ✓ | | ✓ | `spade-language-server` |
|
||||
| spicedb | ✓ | | | |
|
||||
| sql | ✓ | ✓ | | |
|
||||
| sshclientconfig | ✓ | | | |
|
||||
| starlark | ✓ | ✓ | ✓ | `starpls` |
|
||||
| strace | ✓ | | | |
|
||||
| supercollider | ✓ | | | |
|
||||
| svelte | ✓ | | ✓ | `svelteserver` |
|
||||
| sway | ✓ | ✓ | ✓ | `forc` |
|
||||
| swift | ✓ | ✓ | | `sourcekit-lsp` |
|
||||
| t32 | ✓ | | | |
|
||||
| tablegen | ✓ | ✓ | ✓ | |
|
||||
| tact | ✓ | ✓ | ✓ | |
|
||||
| task | ✓ | | | |
|
||||
| tcl | ✓ | | ✓ | |
|
||||
| teal | ✓ | | | `teal-language-server` |
|
||||
| templ | ✓ | | | `templ` |
|
||||
| tera | ✓ | | | |
|
||||
| textproto | ✓ | ✓ | ✓ | |
|
||||
| tfvars | ✓ | | ✓ | `terraform-ls` |
|
||||
| thrift | ✓ | | | |
|
||||
| tlaplus | ✓ | | | |
|
||||
| todotxt | ✓ | | | |
|
||||
| toml | ✓ | ✓ | | `taplo` |
|
||||
| tsq | ✓ | | | `ts_query_ls` |
|
||||
| tsx | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| twig | ✓ | | | |
|
||||
| typescript | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| typespec | ✓ | ✓ | ✓ | `tsp-server` |
|
||||
| typst | ✓ | | | `tinymist` |
|
||||
| ungrammar | ✓ | | | |
|
||||
| unison | ✓ | ✓ | ✓ | |
|
||||
| uxntal | ✓ | | | |
|
||||
| v | ✓ | ✓ | ✓ | `v-analyzer` |
|
||||
| vala | ✓ | ✓ | | `vala-language-server` |
|
||||
| vento | ✓ | | | |
|
||||
| verilog | ✓ | ✓ | | `svlangserver` |
|
||||
| vhdl | ✓ | | | `vhdl_ls` |
|
||||
| vhs | ✓ | | | |
|
||||
| vue | ✓ | | | `vue-language-server` |
|
||||
| wast | ✓ | | | |
|
||||
| wat | ✓ | | | `wat_server` |
|
||||
| webc | ✓ | | | |
|
||||
| werk | ✓ | | | |
|
||||
| wesl | ✓ | ✓ | | |
|
||||
| wgsl | ✓ | | | `wgsl-analyzer` |
|
||||
| wit | ✓ | | ✓ | |
|
||||
| wren | ✓ | ✓ | ✓ | |
|
||||
| xit | ✓ | | | |
|
||||
| xml | ✓ | | ✓ | |
|
||||
| xtc | ✓ | | | |
|
||||
| yaml | ✓ | ✓ | ✓ | `yaml-language-server`, `ansible-language-server` |
|
||||
| yara | ✓ | | | `yls` |
|
||||
| yuck | ✓ | | | |
|
||||
| zig | ✓ | ✓ | ✓ | `zls` |
|
||||
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Code Navigation Tags | Rainbow Brackets | Default language servers |
|
||||
| --- | --- | --- | --- | --- | --- | --- |
|
||||
| ada | ✓ | ✓ | | | | `ada_language_server` |
|
||||
| adl | ✓ | ✓ | ✓ | | | |
|
||||
| agda | ✓ | | | | | |
|
||||
| alloy | ✓ | | | | | |
|
||||
| amber | ✓ | | | | | `amber-lsp` |
|
||||
| astro | ✓ | | | | | `astro-ls` |
|
||||
| awk | ✓ | ✓ | | | | `awk-language-server` |
|
||||
| bash | ✓ | ✓ | ✓ | ✓ | ✓ | `bash-language-server` |
|
||||
| bass | ✓ | | | | | `bass` |
|
||||
| beancount | ✓ | | | | | `beancount-language-server` |
|
||||
| bibtex | ✓ | | | | | `texlab` |
|
||||
| bicep | ✓ | | | | | `bicep-langserver` |
|
||||
| bitbake | ✓ | | | | | `bitbake-language-server` |
|
||||
| blade | ✓ | ✓ | | | ✓ | |
|
||||
| blueprint | ✓ | | | | | `blueprint-compiler` |
|
||||
| c | ✓ | ✓ | ✓ | ✓ | ✓ | `clangd` |
|
||||
| c-sharp | ✓ | ✓ | | ✓ | | `OmniSharp` |
|
||||
| cabal | | | | | | `haskell-language-server-wrapper` |
|
||||
| caddyfile | ✓ | ✓ | ✓ | | | |
|
||||
| cairo | ✓ | ✓ | ✓ | | | `cairo-language-server` |
|
||||
| capnp | ✓ | | ✓ | | | |
|
||||
| cel | ✓ | | | | | |
|
||||
| circom | ✓ | | | | | `circom-lsp` |
|
||||
| clarity | ✓ | | | | | `clarinet` |
|
||||
| clojure | ✓ | | | | ✓ | `clojure-lsp` |
|
||||
| cmake | ✓ | ✓ | ✓ | | | `neocmakelsp`, `cmake-language-server` |
|
||||
| codeql | ✓ | ✓ | | | | `codeql` |
|
||||
| comment | ✓ | | | | | |
|
||||
| common-lisp | ✓ | | ✓ | | ✓ | `cl-lsp` |
|
||||
| cpon | ✓ | | ✓ | | | |
|
||||
| cpp | ✓ | ✓ | ✓ | ✓ | ✓ | `clangd` |
|
||||
| cross-config | ✓ | ✓ | | | ✓ | `taplo`, `tombi` |
|
||||
| crystal | ✓ | ✓ | ✓ | ✓ | | `crystalline`, `ameba-ls` |
|
||||
| css | ✓ | | ✓ | | ✓ | `vscode-css-language-server` |
|
||||
| csv | ✓ | | | | | |
|
||||
| cue | ✓ | | | | | `cuelsp` |
|
||||
| cylc | ✓ | ✓ | ✓ | | | |
|
||||
| cython | ✓ | | ✓ | ✓ | | |
|
||||
| d | ✓ | ✓ | ✓ | | | `serve-d` |
|
||||
| dart | ✓ | ✓ | ✓ | | | `dart` |
|
||||
| dbml | ✓ | | | | | |
|
||||
| debian | ✓ | | | | | |
|
||||
| devicetree | ✓ | | | | | `dts-lsp` |
|
||||
| dhall | ✓ | ✓ | | | | `dhall-lsp-server` |
|
||||
| diff | ✓ | | | | | |
|
||||
| djot | ✓ | | | | | |
|
||||
| docker-bake | ✓ | ✓ | ✓ | ✓ | ✓ | `docker-language-server` |
|
||||
| docker-compose | ✓ | ✓ | ✓ | | | `docker-compose-langserver`, `yaml-language-server`, `docker-language-server` |
|
||||
| dockerfile | ✓ | ✓ | | | | `docker-langserver`, `docker-language-server` |
|
||||
| dot | ✓ | | | | | `dot-language-server` |
|
||||
| doxyfile | ✓ | ✓ | ✓ | ✓ | | |
|
||||
| dtd | ✓ | | | | | |
|
||||
| dune | ✓ | | | | | |
|
||||
| dunstrc | ✓ | | | | | |
|
||||
| earthfile | ✓ | ✓ | ✓ | | | `earthlyls` |
|
||||
| edoc | ✓ | | | | | |
|
||||
| eex | ✓ | | | | | |
|
||||
| ejs | ✓ | | | | | |
|
||||
| elisp | ✓ | | | ✓ | | |
|
||||
| elixir | ✓ | ✓ | ✓ | ✓ | ✓ | `elixir-ls`, `expert` |
|
||||
| elm | ✓ | ✓ | | ✓ | | `elm-language-server` |
|
||||
| elvish | ✓ | | | | | `elvish` |
|
||||
| env | ✓ | ✓ | | | | |
|
||||
| erb | ✓ | | | | | |
|
||||
| erlang | ✓ | ✓ | | ✓ | ✓ | `erlang_ls`, `elp` |
|
||||
| esdl | ✓ | | | | | |
|
||||
| fennel | ✓ | | | | | `fennel-ls` |
|
||||
| fga | ✓ | ✓ | ✓ | | | |
|
||||
| fidl | ✓ | | | | | |
|
||||
| fish | ✓ | ✓ | ✓ | | | `fish-lsp` |
|
||||
| flatbuffers | ✓ | | | | | |
|
||||
| forth | ✓ | | | | | `forth-lsp` |
|
||||
| fortran | ✓ | | ✓ | | | `fortls` |
|
||||
| fsharp | ✓ | | | | | `fsautocomplete` |
|
||||
| gas | ✓ | ✓ | | | | `asm-lsp` |
|
||||
| gdscript | ✓ | ✓ | ✓ | ✓ | | |
|
||||
| gemini | ✓ | | | | | |
|
||||
| gherkin | ✓ | | | | | |
|
||||
| ghostty | ✓ | | | | | |
|
||||
| git-attributes | ✓ | | | | | |
|
||||
| git-cliff-config | ✓ | ✓ | | | ✓ | `taplo`, `tombi` |
|
||||
| git-commit | ✓ | ✓ | | | | |
|
||||
| git-config | ✓ | ✓ | | | | |
|
||||
| git-ignore | ✓ | | | | | |
|
||||
| git-notes | ✓ | | | | | |
|
||||
| git-rebase | ✓ | | | | | |
|
||||
| gitlab-ci | ✓ | ✓ | ✓ | ✓ | ✓ | `yaml-language-server`, `gitlab-ci-ls` |
|
||||
| gjs | ✓ | ✓ | ✓ | ✓ | | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
|
||||
| gleam | ✓ | ✓ | | | ✓ | `gleam` |
|
||||
| glimmer | ✓ | | | | | `ember-language-server` |
|
||||
| glsl | ✓ | ✓ | ✓ | | | `glsl_analyzer` |
|
||||
| gn | ✓ | | | | | |
|
||||
| go | ✓ | ✓ | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
|
||||
| go-format-string | ✓ | | | | ✓ | |
|
||||
| godot-resource | ✓ | ✓ | | | | |
|
||||
| gomod | ✓ | | | | | `gopls` |
|
||||
| gotmpl | ✓ | | | | | `gopls` |
|
||||
| gowork | ✓ | | | | | `gopls` |
|
||||
| gpr | ✓ | | | | | `ada_language_server` |
|
||||
| graphql | ✓ | ✓ | | | | `graphql-lsp` |
|
||||
| gren | ✓ | ✓ | | | | |
|
||||
| groovy | ✓ | | | | | |
|
||||
| gts | ✓ | ✓ | ✓ | ✓ | | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
|
||||
| hare | ✓ | | | | | |
|
||||
| haskell | ✓ | ✓ | | | | `haskell-language-server-wrapper` |
|
||||
| haskell-persistent | ✓ | | | | | |
|
||||
| hcl | ✓ | ✓ | ✓ | | | `terraform-ls` |
|
||||
| hdl | ✓ | | | | | `hdls` |
|
||||
| heex | ✓ | ✓ | | | | `elixir-ls`, `expert` |
|
||||
| helm | ✓ | | | | | `helm_ls` |
|
||||
| hocon | ✓ | ✓ | ✓ | | | |
|
||||
| hoon | ✓ | | | | | |
|
||||
| hosts | ✓ | | | | | |
|
||||
| html | ✓ | ✓ | | | ✓ | `vscode-html-language-server`, `superhtml` |
|
||||
| htmldjango | ✓ | | | | | `djlsp`, `vscode-html-language-server`, `superhtml` |
|
||||
| hurl | ✓ | ✓ | ✓ | | | |
|
||||
| hyprlang | ✓ | | ✓ | | | `hyprls` |
|
||||
| idris | | | | | | `idris2-lsp` |
|
||||
| iex | ✓ | | | | | |
|
||||
| ini | ✓ | | | | | |
|
||||
| ink | ✓ | | | | | |
|
||||
| inko | ✓ | ✓ | ✓ | ✓ | | |
|
||||
| janet | ✓ | | ✓ | | ✓ | |
|
||||
| java | ✓ | ✓ | ✓ | ✓ | ✓ | `jdtls` |
|
||||
| javascript | ✓ | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| jinja | ✓ | | | | | |
|
||||
| jjconfig | ✓ | ✓ | ✓ | | | `taplo`, `tombi` |
|
||||
| jjdescription | ✓ | | | | | |
|
||||
| jjrevset | ✓ | | | | | |
|
||||
| jjtemplate | ✓ | | | | | |
|
||||
| jq | ✓ | ✓ | | | | `jq-lsp` |
|
||||
| jsdoc | ✓ | | | | | |
|
||||
| json | ✓ | ✓ | ✓ | | ✓ | `vscode-json-language-server` |
|
||||
| json-ld | ✓ | ✓ | ✓ | | | `vscode-json-language-server` |
|
||||
| json5 | ✓ | | | | | |
|
||||
| jsonc | ✓ | | ✓ | | | `vscode-json-language-server` |
|
||||
| jsonnet | ✓ | | | | | `jsonnet-language-server` |
|
||||
| jsx | ✓ | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| julia | ✓ | ✓ | ✓ | | | `julia` |
|
||||
| just | ✓ | ✓ | ✓ | ✓ | | `just-lsp` |
|
||||
| kconfig | ✓ | | ✓ | | | |
|
||||
| kdl | ✓ | ✓ | ✓ | ✓ | | |
|
||||
| koka | ✓ | | ✓ | | | `koka` |
|
||||
| kotlin | ✓ | ✓ | ✓ | ✓ | | `kotlin-language-server` |
|
||||
| koto | ✓ | ✓ | ✓ | | | `koto-ls` |
|
||||
| latex | ✓ | ✓ | | | | `texlab` |
|
||||
| ld | ✓ | | ✓ | | | |
|
||||
| ldif | ✓ | | | | | |
|
||||
| lean | ✓ | | | | | `lake` |
|
||||
| ledger | ✓ | | | | | |
|
||||
| llvm | ✓ | ✓ | ✓ | | | |
|
||||
| llvm-mir | ✓ | ✓ | ✓ | | | |
|
||||
| llvm-mir-yaml | ✓ | | ✓ | | | |
|
||||
| log | ✓ | | | | | |
|
||||
| lpf | ✓ | | | | | |
|
||||
| lua | ✓ | ✓ | ✓ | | ✓ | `lua-language-server` |
|
||||
| luap | ✓ | | | | | |
|
||||
| luau | ✓ | ✓ | ✓ | | | `luau-lsp` |
|
||||
| mail | ✓ | ✓ | | | | |
|
||||
| make | ✓ | | ✓ | | | |
|
||||
| markdoc | ✓ | | | | | `markdoc-ls` |
|
||||
| markdown | ✓ | | | ✓ | | `marksman`, `markdown-oxide` |
|
||||
| markdown-rustdoc | ✓ | | | | | |
|
||||
| markdown.inline | ✓ | | | | | |
|
||||
| matlab | ✓ | ✓ | ✓ | | | |
|
||||
| mermaid | ✓ | | | | | |
|
||||
| meson | ✓ | | ✓ | | | `mesonlsp` |
|
||||
| mint | | | | | | `mint` |
|
||||
| mojo | ✓ | ✓ | ✓ | | | `pixi` |
|
||||
| move | ✓ | | | | | |
|
||||
| msbuild | ✓ | | ✓ | | | |
|
||||
| nasm | ✓ | ✓ | | | | `asm-lsp` |
|
||||
| nestedtext | ✓ | ✓ | ✓ | | | |
|
||||
| nginx | ✓ | | | | | |
|
||||
| nickel | ✓ | | ✓ | | | `nls` |
|
||||
| nim | ✓ | ✓ | ✓ | | | `nimlangserver` |
|
||||
| nix | ✓ | ✓ | ✓ | | ✓ | `nil`, `nixd` |
|
||||
| nu | ✓ | | | | | `nu` |
|
||||
| nunjucks | ✓ | | | | | |
|
||||
| ocaml | ✓ | | ✓ | | | `ocamllsp` |
|
||||
| ocaml-interface | ✓ | | | | | `ocamllsp` |
|
||||
| odin | ✓ | ✓ | ✓ | | | `ols` |
|
||||
| ohm | ✓ | ✓ | ✓ | | | |
|
||||
| opencl | ✓ | ✓ | ✓ | | | `clangd` |
|
||||
| openscad | ✓ | | | | | `openscad-lsp` |
|
||||
| org | ✓ | | | | | |
|
||||
| pascal | ✓ | ✓ | | | | `pasls` |
|
||||
| passwd | ✓ | | | | | |
|
||||
| pem | ✓ | | | | | |
|
||||
| perl | ✓ | ✓ | ✓ | | | `perlnavigator` |
|
||||
| pest | ✓ | ✓ | ✓ | | | `pest-language-server` |
|
||||
| php | ✓ | ✓ | ✓ | ✓ | ✓ | `intelephense` |
|
||||
| php-only | ✓ | | | ✓ | | |
|
||||
| pip-requirements | ✓ | | | | | |
|
||||
| pkgbuild | ✓ | ✓ | ✓ | | | `termux-language-server`, `bash-language-server` |
|
||||
| pkl | ✓ | | ✓ | | | `pkl-lsp` |
|
||||
| po | ✓ | ✓ | | | | |
|
||||
| pod | ✓ | | | | | |
|
||||
| ponylang | ✓ | ✓ | ✓ | | | |
|
||||
| powershell | ✓ | | | | | |
|
||||
| prisma | ✓ | ✓ | | | | `prisma-language-server` |
|
||||
| prolog | ✓ | | ✓ | | | `swipl` |
|
||||
| properties | ✓ | ✓ | | | | |
|
||||
| protobuf | ✓ | ✓ | ✓ | ✓ | | `buf`, `pb`, `protols` |
|
||||
| prql | ✓ | | | | | |
|
||||
| pug | ✓ | | | | | |
|
||||
| purescript | ✓ | ✓ | | | | `purescript-language-server` |
|
||||
| python | ✓ | ✓ | ✓ | ✓ | ✓ | `ty`, `ruff`, `jedi-language-server`, `pylsp` |
|
||||
| qml | ✓ | ✓ | ✓ | | | `qmlls` |
|
||||
| quarto | ✓ | | ✓ | | | |
|
||||
| quint | ✓ | | | | | `quint-language-server` |
|
||||
| r | ✓ | | | | | `R` |
|
||||
| racket | ✓ | | ✓ | | ✓ | `racket` |
|
||||
| regex | ✓ | | | | ✓ | |
|
||||
| rego | ✓ | | | | | `regols` |
|
||||
| rescript | ✓ | ✓ | | | | `rescript-language-server` |
|
||||
| rmarkdown | ✓ | | ✓ | | | `R` |
|
||||
| robot | ✓ | | | | | `robotframework_ls` |
|
||||
| robots.txt | ✓ | ✓ | | ✓ | | |
|
||||
| ron | ✓ | | ✓ | ✓ | ✓ | |
|
||||
| rst | ✓ | | | | | |
|
||||
| ruby | ✓ | ✓ | ✓ | ✓ | ✓ | `ruby-lsp`, `solargraph` |
|
||||
| rust | ✓ | ✓ | ✓ | ✓ | ✓ | `rust-analyzer` |
|
||||
| rust-format-args | ✓ | | | | | |
|
||||
| rust-format-args-macro | ✓ | ✓ | ✓ | | ✓ | |
|
||||
| sage | ✓ | ✓ | | | | |
|
||||
| scala | ✓ | ✓ | ✓ | | | `metals` |
|
||||
| scheme | ✓ | ✓ | ✓ | | ✓ | |
|
||||
| scss | ✓ | | | | ✓ | `vscode-css-language-server` |
|
||||
| shellcheckrc | ✓ | ✓ | | | | |
|
||||
| slang | ✓ | ✓ | ✓ | | | `slangd` |
|
||||
| slint | ✓ | ✓ | ✓ | | | `slint-lsp` |
|
||||
| smali | ✓ | | ✓ | | | |
|
||||
| smithy | ✓ | | | | | `cs` |
|
||||
| sml | ✓ | | | | | |
|
||||
| snakemake | ✓ | | ✓ | | | `pylsp` |
|
||||
| solidity | ✓ | ✓ | | | | `solc` |
|
||||
| sourcepawn | ✓ | ✓ | | | | `sourcepawn-studio` |
|
||||
| spade | ✓ | | ✓ | | | `spade-language-server` |
|
||||
| spicedb | ✓ | | | ✓ | | |
|
||||
| sql | ✓ | ✓ | | | | |
|
||||
| sshclientconfig | ✓ | | | | | |
|
||||
| starlark | ✓ | ✓ | ✓ | | ✓ | `starpls` |
|
||||
| strace | ✓ | | | | | |
|
||||
| strictdoc | ✓ | | | ✓ | | |
|
||||
| supercollider | ✓ | | | | | |
|
||||
| svelte | ✓ | | ✓ | | | `svelteserver` |
|
||||
| sway | ✓ | ✓ | ✓ | | | `forc` |
|
||||
| swift | ✓ | ✓ | | | ✓ | `sourcekit-lsp` |
|
||||
| systemd | ✓ | | | | | `systemd-lsp` |
|
||||
| systemverilog | ✓ | | | | | |
|
||||
| t32 | ✓ | | | | | |
|
||||
| tablegen | ✓ | ✓ | ✓ | | | |
|
||||
| tact | ✓ | ✓ | ✓ | | | |
|
||||
| task | ✓ | | | | | |
|
||||
| tcl | ✓ | | ✓ | | | |
|
||||
| teal | ✓ | | | | | `teal-language-server` |
|
||||
| templ | ✓ | | | | | `templ` |
|
||||
| tera | ✓ | | | | | |
|
||||
| textproto | ✓ | ✓ | ✓ | | | |
|
||||
| tfvars | ✓ | | ✓ | | | `terraform-ls` |
|
||||
| thrift | ✓ | | | | | |
|
||||
| tlaplus | ✓ | | | | | |
|
||||
| todotxt | ✓ | | | | | |
|
||||
| toml | ✓ | ✓ | | | ✓ | `taplo`, `tombi` |
|
||||
| tsq | ✓ | | | | ✓ | `ts_query_ls` |
|
||||
| tsx | ✓ | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| twig | ✓ | | | | | |
|
||||
| typescript | ✓ | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| typespec | ✓ | ✓ | ✓ | | | `tsp-server` |
|
||||
| typst | ✓ | | | ✓ | | `tinymist` |
|
||||
| ungrammar | ✓ | | | | | |
|
||||
| unison | ✓ | ✓ | ✓ | | | |
|
||||
| uxntal | ✓ | | | | | |
|
||||
| v | ✓ | ✓ | ✓ | | | `v-analyzer` |
|
||||
| vala | ✓ | ✓ | | | | `vala-language-server` |
|
||||
| vento | ✓ | | | | | |
|
||||
| verilog | ✓ | ✓ | | | | `svlangserver` |
|
||||
| vhdl | ✓ | | | | | `vhdl_ls` |
|
||||
| vhs | ✓ | | | | | |
|
||||
| vim | ✓ | | | | | |
|
||||
| vue | ✓ | | | | | `vue-language-server` |
|
||||
| wast | ✓ | | | | | |
|
||||
| wat | ✓ | | | | | `wat_server` |
|
||||
| webc | ✓ | | | | | |
|
||||
| werk | ✓ | | | | | |
|
||||
| wesl | ✓ | ✓ | | | | |
|
||||
| wgsl | ✓ | ✓ | ✓ | ✓ | ✓ | `wgsl-analyzer` |
|
||||
| wit | ✓ | | ✓ | | | |
|
||||
| wren | ✓ | ✓ | ✓ | | | |
|
||||
| xit | ✓ | | | | | |
|
||||
| xml | ✓ | ✓ | ✓ | | ✓ | |
|
||||
| xtc | ✓ | | | | | |
|
||||
| yaml | ✓ | ✓ | ✓ | | ✓ | `yaml-language-server`, `ansible-language-server` |
|
||||
| yara | ✓ | | | | | `yls` |
|
||||
| yuck | ✓ | | | | | |
|
||||
| zig | ✓ | ✓ | ✓ | | | `zls` |
|
||||
|
@@ -106,10 +106,14 @@
|
||||
| `code_action` | Perform code action | normal: `` <space>a ``, select: `` <space>a `` |
|
||||
| `buffer_picker` | Open buffer picker | normal: `` <space>b ``, select: `` <space>b `` |
|
||||
| `jumplist_picker` | Open jumplist picker | normal: `` <space>j ``, select: `` <space>j `` |
|
||||
| `symbol_picker` | Open symbol picker | normal: `` <space>s ``, select: `` <space>s `` |
|
||||
| `symbol_picker` | Open symbol picker | |
|
||||
| `syntax_symbol_picker` | Open symbol picker from syntax information | |
|
||||
| `lsp_or_syntax_symbol_picker` | Open symbol picker from LSP or syntax information | normal: `` <space>s ``, select: `` <space>s `` |
|
||||
| `changed_file_picker` | Open changed file picker | normal: `` <space>g ``, select: `` <space>g `` |
|
||||
| `select_references_to_symbol_under_cursor` | Select symbol references | normal: `` <space>h ``, select: `` <space>h `` |
|
||||
| `workspace_symbol_picker` | Open workspace symbol picker | normal: `` <space>S ``, select: `` <space>S `` |
|
||||
| `workspace_symbol_picker` | Open workspace symbol picker | |
|
||||
| `syntax_workspace_symbol_picker` | Open workspace symbol picker from syntax information | |
|
||||
| `lsp_or_syntax_workspace_symbol_picker` | Open workspace symbol picker from LSP or syntax information | normal: `` <space>S ``, select: `` <space>S `` |
|
||||
| `diagnostics_picker` | Open diagnostic picker | normal: `` <space>d ``, select: `` <space>d `` |
|
||||
| `workspace_diagnostics_picker` | Open workspace diagnostic picker | normal: `` <space>D ``, select: `` <space>D `` |
|
||||
| `last_picker` | Open last picker | normal: `` <space>' ``, select: `` <space>' `` |
|
||||
@@ -168,6 +172,8 @@
|
||||
| `smart_tab` | Insert tab if all cursors have all whitespace to their left; otherwise, run a separate command. | insert: `` <tab> `` |
|
||||
| `insert_tab` | Insert tab char | insert: `` <S-tab> `` |
|
||||
| `insert_newline` | Insert newline char | insert: `` <C-j> ``, `` <ret> `` |
|
||||
| `insert_char_interactive` | Insert an interactively-chosen char | |
|
||||
| `append_char_interactive` | Append an interactively-chosen char | |
|
||||
| `delete_char_backward` | Delete previous char | insert: `` <C-h> ``, `` <backspace> ``, `` <S-backspace> `` |
|
||||
| `delete_char_forward` | Delete next char | insert: `` <C-d> ``, `` <del> `` |
|
||||
| `delete_word_backward` | Delete previous word | insert: `` <C-w> ``, `` <A-backspace> `` |
|
||||
@@ -267,6 +273,8 @@
|
||||
| `goto_prev_comment` | Goto previous comment | normal: `` [c ``, select: `` [c `` |
|
||||
| `goto_next_test` | Goto next test | normal: `` ]T ``, select: `` ]T `` |
|
||||
| `goto_prev_test` | Goto previous test | normal: `` [T ``, select: `` [T `` |
|
||||
| `goto_next_xml_element` | Goto next (X)HTML element | normal: `` ]x ``, select: `` ]x `` |
|
||||
| `goto_prev_xml_element` | Goto previous (X)HTML element | normal: `` [x ``, select: `` [x `` |
|
||||
| `goto_next_entry` | Goto next pairing | normal: `` ]e ``, select: `` ]e `` |
|
||||
| `goto_prev_entry` | Goto previous pairing | normal: `` [e ``, select: `` [e `` |
|
||||
| `goto_next_paragraph` | Goto next paragraph | normal: `` ]p ``, select: `` ]p `` |
|
||||
@@ -301,5 +309,7 @@
|
||||
| `command_palette` | Open command palette | normal: `` <space>? ``, select: `` <space>? `` |
|
||||
| `goto_word` | Jump to a two-character label | normal: `` gw `` |
|
||||
| `extend_to_word` | Extend to a two-character label | select: `` gw `` |
|
||||
| `goto_next_tabstop` | goto next snippet placeholder | |
|
||||
| `goto_prev_tabstop` | goto previous snippet placeholder | |
|
||||
| `goto_next_tabstop` | Goto next snippet placeholder | |
|
||||
| `goto_prev_tabstop` | Goto previous snippet placeholder | |
|
||||
| `rotate_selections_first` | Make the first selection your primary one | |
|
||||
| `rotate_selections_last` | Make the last selection your primary one | |
|
||||
|
@@ -78,7 +78,7 @@
|
||||
| `:log-open` | Open the helix log file. |
|
||||
| `:insert-output` | Run shell command, inserting output before each selection. |
|
||||
| `:append-output` | Run shell command, appending output after each selection. |
|
||||
| `:pipe`, `:|` | Pipe each selection to the shell command. |
|
||||
| `:pipe`, `:\|` | Pipe each selection to the shell command. |
|
||||
| `:pipe-to` | Pipe each selection to the shell command, ignoring output. |
|
||||
| `:run-shell-command`, `:sh`, `:!` | Run a shell command |
|
||||
| `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. |
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Guides
|
||||
|
||||
This section contains guides for adding new language server configurations,
|
||||
tree-sitter grammars, textobject queries, and other similar items.
|
||||
tree-sitter grammars, textobject and rainbow bracket queries, and other similar items.
|
||||
|
132
book/src/guides/rainbow_bracket_queries.md
Normal file
132
book/src/guides/rainbow_bracket_queries.md
Normal file
@@ -0,0 +1,132 @@
|
||||
# Adding Rainbow Bracket Queries
|
||||
|
||||
Helix uses `rainbows.scm` tree-sitter query files to provide rainbow bracket
|
||||
functionality.
|
||||
|
||||
Tree-sitter queries are documented in the tree-sitter online documentation.
|
||||
If you're writing queries for the first time, be sure to check out the section
|
||||
on [syntax highlighting queries] and on [query syntax].
|
||||
|
||||
Rainbow queries have two captures: `@rainbow.scope` and `@rainbow.bracket`.
|
||||
`@rainbow.scope` should capture any node that increases the nesting level
|
||||
while `@rainbow.bracket` should capture any bracket nodes. Put another way:
|
||||
`@rainbow.scope` switches to the next rainbow color for all nodes in the tree
|
||||
under it while `@rainbow.bracket` paints captured nodes with the current
|
||||
rainbow color.
|
||||
|
||||
For an example, let's add rainbow queries for the tree-sitter query (TSQ)
|
||||
language itself. These queries will go into a
|
||||
`runtime/queries/tsq/rainbows.scm` file in the repository root.
|
||||
|
||||
First we'll add the `@rainbow.bracket` captures. TSQ only has parentheses and
|
||||
square brackets:
|
||||
|
||||
```tsq
|
||||
["(" ")" "[" "]"] @rainbow.bracket
|
||||
```
|
||||
|
||||
The ordering of the nodes within the alternation (square brackets) is not
|
||||
taken into consideration.
|
||||
|
||||
> Note: Why are these nodes quoted? Most syntax highlights capture text
|
||||
> surrounded by parentheses. These are _named nodes_ and correspond to the
|
||||
> names of rules in the grammar. Brackets are usually written in tree-sitter
|
||||
> grammars as literal strings, for example:
|
||||
>
|
||||
> ```js
|
||||
> {
|
||||
> // ...
|
||||
> arguments: seq("(", repeat($.argument), ")"),
|
||||
> // ...
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> Nodes written as literal strings in tree-sitter grammars may be captured
|
||||
> in queries with those same literal strings.
|
||||
|
||||
Then we'll add `@rainbow.scope` captures. The easiest way to do this is to
|
||||
view the `grammar.js` file in the tree-sitter grammar's repository. For TSQ,
|
||||
that file is [here][tsq grammar.js]. As we scroll down the `grammar.js`, we
|
||||
see that the `(alternation)` (L36), `(group)` (L57), `(named_node)` (L59),
|
||||
`(predicate)` (L87) and `(wildcard_node)` (L97) nodes all contain literal
|
||||
parentheses or square brackets in their definitions. These nodes are all
|
||||
direct parents of brackets and happen to also be the nodes we want to change
|
||||
to the next rainbow color, so we capture them as `@rainbow.scope`.
|
||||
|
||||
```tsq
|
||||
[
|
||||
(group)
|
||||
(named_node)
|
||||
(wildcard_node)
|
||||
(predicate)
|
||||
(alternation)
|
||||
] @rainbow.scope
|
||||
```
|
||||
|
||||
This strategy works as a rule of thumb for most programming and configuration
|
||||
languages. Markup languages can be trickier and may take additional
|
||||
experimentation to find the correct nodes to use for scopes and brackets.
|
||||
|
||||
The `:tree-sitter-subtree` command shows the syntax tree under the primary
|
||||
selection in S-expression format and can be a useful tool for determining how
|
||||
to write a query.
|
||||
|
||||
### Properties
|
||||
|
||||
The `rainbow.include-children` property may be applied to `@rainbow.scope`
|
||||
captures. By default, all `@rainbow.bracket` captures must be direct descendants
|
||||
of a node captured with `@rainbow.scope` in a syntax tree in order to be
|
||||
highlighted. The `rainbow.include-children` property disables that check and
|
||||
allows `@rainbow.bracket` captures to be highlighted if they are direct or
|
||||
indirect descendants of some node captured with `@rainbow.scope`.
|
||||
|
||||
For example, this property is used in the HTML rainbow queries.
|
||||
|
||||
For a document like `<a>link</a>`, the syntax tree is:
|
||||
|
||||
```tsq
|
||||
(element ; <a>link</a>
|
||||
(start_tag ; <a>
|
||||
(tag_name)) ; a
|
||||
(text) ; link
|
||||
(end_tag ; </a>
|
||||
(tag_name))) ; a
|
||||
```
|
||||
|
||||
If we want to highlight the `<`, `>` and `</` nodes with rainbow colors, we
|
||||
capture them as `@rainbow.bracket`:
|
||||
|
||||
```tsq
|
||||
["<" ">" "</"] @rainbow.bracket
|
||||
```
|
||||
|
||||
And we capture `(element)` as `@rainbow.scope` because `(element)` nodes nest
|
||||
within each other: they increment the nesting level and switch to the next
|
||||
color in the rainbow.
|
||||
|
||||
```tsq
|
||||
(element) @rainbow.scope
|
||||
```
|
||||
|
||||
But this combination of `@rainbow.scope` and `@rainbow.bracket` will not
|
||||
highlight any nodes. `<`, `>` and `</` are children of the `(start_tag)` and
|
||||
`(end_tag)` nodes. We can't capture `(start_tag)` and `(end_tag)` as
|
||||
`@rainbow.scope` because they don't nest other elements. We can fix this case
|
||||
by removing the requirement that `<`, `>` and `</` are direct descendants of
|
||||
`(element)` using the `rainbow.include-children` property.
|
||||
|
||||
```tsq
|
||||
((element) @rainbow.scope
|
||||
(#set! rainbow.include-children))
|
||||
```
|
||||
|
||||
With this property set, `<`, `>`, and `</` will highlight with rainbow colors
|
||||
even though they aren't direct descendants of the `(element)` node.
|
||||
|
||||
`rainbow.include-children` is not necessary for the vast majority of programming
|
||||
languages. It is only necessary when the node that increments the nesting level
|
||||
(changes rainbow color) is not the direct parent of the bracket node.
|
||||
|
||||
[syntax highlighting queries]: https://tree-sitter.github.io/tree-sitter/syntax-highlighting#highlights
|
||||
[query syntax]: https://tree-sitter.github.io/tree-sitter/using-parsers#pattern-matching-with-queries
|
||||
[tsq grammar.js]: https://github.com/the-mikedavis/tree-sitter-tsq/blob/48b5e9f82ae0a4727201626f33a17f69f8e0ff86/grammar.js
|
34
book/src/guides/tags.md
Normal file
34
book/src/guides/tags.md
Normal file
@@ -0,0 +1,34 @@
|
||||
## Adding tags queries
|
||||
|
||||
See tree-sitter's documentation on [Code Navigation Systems] for more
|
||||
background on tags queries.
|
||||
|
||||
Helix provides LSP-like features such as document and workspace symbol pickers
|
||||
out-of-the-box for languages with `tags.scm` queries based on syntax trees. To
|
||||
be analyzed a language must have a tree-sitter grammar and a `tags.scm` query
|
||||
file which pattern matches interesting nodes from syntax trees.
|
||||
|
||||
Query files should be placed in `runtime/queries/{language}/tags.scm`
|
||||
when contributing to Helix. You may place these under your local runtime
|
||||
directory (`~/.config/helix/runtime` in Linux for example) for the sake of
|
||||
testing.
|
||||
|
||||
The following [captures][tree-sitter-captures] are recognized:
|
||||
|
||||
| Capture name |
|
||||
|--- |
|
||||
| `definition.class` |
|
||||
| `definition.constant` |
|
||||
| `definition.function` |
|
||||
| `definition.interface` |
|
||||
| `definition.macro` |
|
||||
| `definition.module` |
|
||||
| `definition.struct` |
|
||||
| `definition.type` |
|
||||
|
||||
[Example query files][example-queries] can be found in the Helix GitHub
|
||||
repository.
|
||||
|
||||
[Code Navigation Systems]: https://tree-sitter.github.io/tree-sitter/4-code-navigation.html
|
||||
[tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers/queries/index.html
|
||||
[example-queries]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+path%3A%2A%2A/tags.scm&type=Code
|
@@ -23,10 +23,13 @@ The following [captures][tree-sitter-captures] are recognized:
|
||||
| `test.inside` |
|
||||
| `test.around` |
|
||||
| `parameter.inside` |
|
||||
| `parameter.around` |
|
||||
| `comment.inside` |
|
||||
| `comment.around` |
|
||||
| `entry.inside` |
|
||||
| `entry.around` |
|
||||
| `xml-element.inside` |
|
||||
| `xml-element.around` |
|
||||
|
||||
[Example query files][textobject-examples] can be found in the helix GitHub repository.
|
||||
|
||||
|
36
book/src/jumplist.md
Normal file
36
book/src/jumplist.md
Normal file
@@ -0,0 +1,36 @@
|
||||
## Using the jumplist
|
||||
|
||||
To help with quick navigation, Helix maintains a list of "jumps" called the jumplist.
|
||||
Whenever you make a significant movement (see next section), Helix stores your selections from before the move as a jump.
|
||||
A jump serves as a kind of checkpoint, allowing you to jump to a separate location, make edits, and return to where you were with your previous selections.
|
||||
This way, the jumplist tracks both your previous location and your selections.
|
||||
You can manually save a jump by using `Ctrl-s`.
|
||||
To jump backward in the jumplist, use `Ctrl-o`; to go forward, use `Ctrl-i`. To view and select from the full jumplist, use `Space-j` to open the jumplist picker.
|
||||
|
||||
### What makes a jump
|
||||
The following is a non-exhaustive list of which actions add a jump to the jumplist:
|
||||
- Switching buffers
|
||||
- Using the buffer picker, going to the next/previous buffer
|
||||
- Going to the last accessed/modified file
|
||||
- Making a new file (`:new FILE`)
|
||||
- Opening a file (`:open FILE`)
|
||||
- Includes `:log-open`, `:config-open`, `:config-open-workspace`, `:tutor`
|
||||
- Navigating by pickers, global search, or the file explorer
|
||||
- `goto_file` (`gf`)
|
||||
- Big in-file movements
|
||||
- `select_regex` (`s`)
|
||||
- `split_regex` (`S`)
|
||||
- `search` (`/`)
|
||||
- `keep_selections` and `remove_selections` (`K` and `<A-K>`)
|
||||
- `goto_file_start` (`gg`)
|
||||
- `goto_file_end`
|
||||
- `goto_last_line` (`ge`)
|
||||
- `:goto 123` / `:123` / `123G`
|
||||
- `goto_definition` (`gd`)
|
||||
- `goto_declaration` (`gD`)
|
||||
- `goto_type_definition` (`gy`)
|
||||
- `goto_reference` (`gr`)
|
||||
- Other
|
||||
- `Ctrl-s` manually creates a jump
|
||||
- Trying to close a modified buffer can switch you to that buffer and create a jump
|
||||
- The debugger can create jumps as you jump stack frames
|
@@ -35,6 +35,8 @@ Normal mode is the default mode when you launch helix. You can return to it from
|
||||
|
||||
> NOTE: Unlike Vim, `f`, `F`, `t` and `T` are not confined to the current line.
|
||||
|
||||
> Hereafter, `<n>` represents an integer by typing a sequence of digits.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `h`, `Left` | Move left | `move_char_left` |
|
||||
@@ -47,11 +49,11 @@ Normal mode is the default mode when you launch helix. You can return to it from
|
||||
| `W` | Move next WORD start | `move_next_long_word_start` |
|
||||
| `B` | Move previous WORD start | `move_prev_long_word_start` |
|
||||
| `E` | Move next WORD end | `move_next_long_word_end` |
|
||||
| `t` | Find 'till next char | `find_till_char` |
|
||||
| `t` | Find till next char | `find_till_char` |
|
||||
| `f` | Find next char | `find_next_char` |
|
||||
| `T` | Find 'till previous char | `till_prev_char` |
|
||||
| `T` | Find till previous char | `till_prev_char` |
|
||||
| `F` | Find previous char | `find_prev_char` |
|
||||
| `G` | Go to line number `<n>` | `goto_line` |
|
||||
| `<n>G`, `<n>gg` | Go to line number `<n>` | `goto_line` |
|
||||
| `Alt-.` | Repeat last motion (`f`, `t`, `m`, `[` or `]`) | `repeat_last_motion` |
|
||||
| `Home` | Move to the start of the line | `goto_line_start` |
|
||||
| `End` | Move to the end of the line | `goto_line_end` |
|
||||
@@ -212,8 +214,10 @@ Jumps to various locations.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `g` | Go to line number `<n>` else start of file | `goto_file_start` |
|
||||
| <code>|</code> | Go to column number `<n>` else start of line | `goto_column` |
|
||||
| `<n>g`| Go to line number `<n>` | `goto_file_start` |
|
||||
| `g` | Go to the start of the file | `goto_file_start` |
|
||||
| <code><n>|</code> | Go to column number `<n>` | `goto_column` |
|
||||
| <code>|</code> | Go to the start of line | `goto_column` |
|
||||
| `e` | Go to the end of the file | `goto_last_line` |
|
||||
| `f` | Go to files in the selections | `goto_file` |
|
||||
| `h` | Go to the start of the line | `goto_line_start` |
|
||||
@@ -348,30 +352,32 @@ Displays the signature of the selected completion item. Remapping currently not
|
||||
|
||||
These mappings are in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired).
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` |
|
||||
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
|
||||
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
|
||||
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
|
||||
| `]f` | Go to next function (**TS**) | `goto_next_function` |
|
||||
| `[f` | Go to previous function (**TS**) | `goto_prev_function` |
|
||||
| `]t` | Go to next type definition (**TS**) | `goto_next_class` |
|
||||
| `[t` | Go to previous type definition (**TS**) | `goto_prev_class` |
|
||||
| `]a` | Go to next argument/parameter (**TS**) | `goto_next_parameter` |
|
||||
| `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` |
|
||||
| `]c` | Go to next comment (**TS**) | `goto_next_comment` |
|
||||
| `[c` | Go to previous comment (**TS**) | `goto_prev_comment` |
|
||||
| `]T` | Go to next test (**TS**) | `goto_next_test` |
|
||||
| `[T` | Go to previous test (**TS**) | `goto_prev_test` |
|
||||
| `]p` | Go to next paragraph | `goto_next_paragraph` |
|
||||
| `[p` | Go to previous paragraph | `goto_prev_paragraph` |
|
||||
| `]g` | Go to next change | `goto_next_change` |
|
||||
| `[g` | Go to previous change | `goto_prev_change` |
|
||||
| `]G` | Go to last change | `goto_last_change` |
|
||||
| `[G` | Go to first change | `goto_first_change` |
|
||||
| `]Space` | Add newline below | `add_newline_below` |
|
||||
| `[Space` | Add newline above | `add_newline_above` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` |
|
||||
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
|
||||
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
|
||||
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
|
||||
| `]f` | Go to next function (**TS**) | `goto_next_function` |
|
||||
| `[f` | Go to previous function (**TS**) | `goto_prev_function` |
|
||||
| `]t` | Go to next type definition (**TS**) | `goto_next_class` |
|
||||
| `[t` | Go to previous type definition (**TS**) | `goto_prev_class` |
|
||||
| `]a` | Go to next argument/parameter (**TS**) | `goto_next_parameter` |
|
||||
| `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` |
|
||||
| `]c` | Go to next comment (**TS**) | `goto_next_comment` |
|
||||
| `[c` | Go to previous comment (**TS**) | `goto_prev_comment` |
|
||||
| `]T` | Go to next test (**TS**) | `goto_next_test` |
|
||||
| `[T` | Go to previous test (**TS**) | `goto_prev_test` |
|
||||
| `]p` | Go to next paragraph | `goto_next_paragraph` |
|
||||
| `[p` | Go to previous paragraph | `goto_prev_paragraph` |
|
||||
| `]g` | Go to next change | `goto_next_change` |
|
||||
| `[g` | Go to previous change | `goto_prev_change` |
|
||||
| `]G` | Go to last change | `goto_last_change` |
|
||||
| `[G` | Go to first change | `goto_first_change` |
|
||||
| `[x` | Go to next (X)HTML element | `goto_next_xml_element` |
|
||||
| `]x` | Go to previous (X)HTML element | `goto_prev_xml_element` |
|
||||
| `]Space` | Add newline below | `add_newline_below` |
|
||||
| `[Space` | Add newline above | `add_newline_above` |
|
||||
|
||||
## Insert mode
|
||||
|
||||
|
@@ -71,8 +71,10 @@ These configuration keys are available:
|
||||
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` |
|
||||
| `rulers` | Overrides the `editor.rulers` config key for the language. |
|
||||
| `path-completion` | Overrides the `editor.path-completion` config key for the language. |
|
||||
| `word-completion` | Overrides the [`editor.word-completion`](./editor.md#editorword-completion-section) configuration for the language. |
|
||||
| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. |
|
||||
| `persistent-diagnostic-sources` | An array of LSP diagnostic sources assumed unchanged when the language server resends the same set of diagnostics. Helix can track the position for these diagnostics internally instead. Useful for diagnostics that are recomputed on save.
|
||||
| `rainbow-brackets` | Overrides the `editor.rainbow-brackets` config key for the language |
|
||||
|
||||
### File-type detection and the `file-types` key
|
||||
|
||||
@@ -109,7 +111,7 @@ of the formatter command. In particular, the `%{buffer_name}` variable can be pa
|
||||
argument to the formatter:
|
||||
|
||||
```toml
|
||||
formatter = { command = "mylang-formatter" , args = ["--stdin", "--stdin-filename %{buffer_name}"] }
|
||||
formatter = { command = "mylang-formatter" , args = ["--stdin", "--stdin-filename", "%{buffer_name}"] }
|
||||
```
|
||||
|
||||
## Language Server configuration
|
||||
|
@@ -15,6 +15,8 @@ Helix' keymap and interaction model ([Using Helix](#usage.md)) is easier to adop
|
||||
| [Visual Studio Code](https://code.visualstudio.com/) | [Helix for VS Code](https://marketplace.visualstudio.com/items?itemName=jasew.vscode-helix-emulation) extension|
|
||||
| [Zed](https://zed.dev/) | native via keybindings ([Bug](https://github.com/zed-industries/zed/issues/4642)) |
|
||||
| [CodeMirror](https://codemirror.net/) | [codemirror-helix](https://gitlab.com/_rvidal/codemirror-helix) |
|
||||
| [Lite XL](https://lite-xl.com/) | [lite-modal-hx](https://codeberg.org/Mandarancio/lite-modal-hx) |
|
||||
| [Lapce](https://lap.dev/lapce/) | | Requested: https://github.com/lapce/lapce/issues/281 |
|
||||
|
||||
|
||||
## Shells
|
||||
|
@@ -2,7 +2,6 @@
|
||||
|
||||
- [Linux](#linux)
|
||||
- [Ubuntu/Debian](#ubuntudebian)
|
||||
- [Ubuntu (PPA)](#ubuntu-ppa)
|
||||
- [Fedora/RHEL](#fedorarhel)
|
||||
- [Arch Linux extra](#arch-linux-extra)
|
||||
- [NixOS](#nixos)
|
||||
@@ -26,21 +25,11 @@ The following third party repositories are available:
|
||||
|
||||
### Ubuntu/Debian
|
||||
|
||||
Install the Debian package from the release page.
|
||||
Install the Debian package [from the release page](https://github.com/helix-editor/helix/releases/latest).
|
||||
|
||||
If you are running a system older than Ubuntu 22.04, Mint 21, or Debian 12, you can build the `.deb` file locally
|
||||
[from source](./building-from-source.md#building-the-debian-package).
|
||||
|
||||
### Ubuntu (PPA)
|
||||
|
||||
Add the `PPA` for Helix:
|
||||
|
||||
```sh
|
||||
sudo add-apt-repository ppa:maveonair/helix-editor
|
||||
sudo apt update
|
||||
sudo apt install helix
|
||||
```
|
||||
|
||||
### Fedora/RHEL
|
||||
|
||||
```sh
|
||||
|
@@ -89,24 +89,26 @@ Cmd-s = ":write" # Cmd or Win or Meta and 's' to write
|
||||
|
||||
Special keys are encoded as follows:
|
||||
|
||||
| Key name | Representation |
|
||||
| --- | --- |
|
||||
| Backspace | `"backspace"` |
|
||||
| Space | `"space"` |
|
||||
| Return/Enter | `"ret"` |
|
||||
| Left | `"left"` |
|
||||
| Right | `"right"` |
|
||||
| Up | `"up"` |
|
||||
| Down | `"down"` |
|
||||
| Home | `"home"` |
|
||||
| End | `"end"` |
|
||||
| Page Up | `"pageup"` |
|
||||
| Page Down | `"pagedown"` |
|
||||
| Tab | `"tab"` |
|
||||
| Delete | `"del"` |
|
||||
| Insert | `"ins"` |
|
||||
| Null | `"null"` |
|
||||
| Escape | `"esc"` |
|
||||
| Key name | Representation |
|
||||
| --- | --- |
|
||||
| Backspace | `"backspace"` |
|
||||
| Space | `"space"` |
|
||||
| Return/Enter | `"ret"` |
|
||||
| Left | `"left"` |
|
||||
| Right | `"right"` |
|
||||
| Up | `"up"` |
|
||||
| Down | `"down"` |
|
||||
| Home | `"home"` |
|
||||
| End | `"end"` |
|
||||
| Page Up | `"pageup"` |
|
||||
| Page Down | `"pagedown"` |
|
||||
| Tab | `"tab"` |
|
||||
| Delete | `"del"` |
|
||||
| Insert | `"ins"` |
|
||||
| Null | `"null"` |
|
||||
| Escape | `"esc"` |
|
||||
| Less Than (<) | `"lt"` |
|
||||
| Greater Than (>) | `"gt"` |
|
||||
|
||||
Keys can be disabled by binding them to the `no_op` command.
|
||||
|
||||
|
@@ -24,6 +24,7 @@ function or block of code.
|
||||
| `c` | Comment |
|
||||
| `T` | Test |
|
||||
| `g` | Change |
|
||||
| `x` | (X)HTML element |
|
||||
|
||||
> 💡 `f`, `t`, etc. need a tree-sitter grammar active for the current
|
||||
document and a special tree-sitter query file to work properly. [Only
|
||||
|
@@ -130,6 +130,17 @@ inherits = "boo_berry"
|
||||
berry = "#2A2A4D"
|
||||
```
|
||||
|
||||
### Rainbow
|
||||
|
||||
The `rainbow` key is used for rainbow highlight for matching brackets.
|
||||
The key is a list of styles.
|
||||
|
||||
```toml
|
||||
rainbow = ["#ff0000", "#ffa500", "#fff000", { fg = "#00ff00", modifiers = ["bold"] }]
|
||||
```
|
||||
|
||||
Colors from the palette and modifiers may be used.
|
||||
|
||||
### Scopes
|
||||
|
||||
The following is a list of scopes available to use for styling:
|
||||
@@ -171,8 +182,10 @@ We use a similar set of scopes as
|
||||
|
||||
- `comment` - Code comments
|
||||
- `line` - Single line comments (`//`)
|
||||
- `documentation` - Line documentation comments (e.g. `///` in Rust)
|
||||
- `block` - Block comments (e.g. (`/* */`)
|
||||
- `documentation` - Documentation comments (e.g. `///` in Rust)
|
||||
- `documentation` - Block documentation comments (e.g. `/** */` in Rust)
|
||||
- `unused` - Unused variables and patterns, e.g. `_` and `_foo`
|
||||
|
||||
- `variable` - Variables
|
||||
- `builtin` - Reserved language variables (`self`, `this`, `super`, etc.)
|
||||
|
@@ -47,6 +47,12 @@
|
||||
<content_rating type="oars-1.1" />
|
||||
|
||||
<releases>
|
||||
<release version="25.07.1" date="2025-07-18">
|
||||
<url>https://github.com/helix-editor/helix/releases/tag/25.07.1</url>
|
||||
</release>
|
||||
<release version="25.07" date="2025-07-15">
|
||||
<url>https://helix-editor.com/news/release-25-07-highlights/</url>
|
||||
</release>
|
||||
<release version="25.01.1" date="2025-01-19">
|
||||
<url>https://github.com/helix-editor/helix/releases/tag/25.01.1</url>
|
||||
</release>
|
||||
|
@@ -13,8 +13,8 @@ _hx() {
|
||||
return 0
|
||||
;;
|
||||
--health)
|
||||
languages=$(hx --health | tail -n '+7' | awk '{print $1}' | sed 's/\x1b\[[0-9;]*m//g')
|
||||
mapfile -t COMPREPLY < <(compgen -W """$languages""" -- "$cur")
|
||||
languages=$(hx --health all-languages | tail -n '+2' | awk '{print $1}' | sed 's/\x1b\[[0-9;]*m//g')
|
||||
mapfile -t COMPREPLY < <(compgen -W """clipboard languages all-languages all $languages""" -- "$cur")
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
|
@@ -4,6 +4,10 @@
|
||||
complete -c hx -s h -l help -d "Prints help information"
|
||||
complete -c hx -l tutor -d "Loads the tutorial"
|
||||
complete -c hx -l health -xa "(__hx_langs_ops)" -d "Checks for errors"
|
||||
complete -c hx -l health -xka all -d "Prints all diagnostic informations"
|
||||
complete -c hx -l health -xka all-languages -d "Lists all languages"
|
||||
complete -c hx -l health -xka languages -d "Lists user configured languages"
|
||||
complete -c hx -l health -xka clipboard -d "Prints system clipboard provider"
|
||||
complete -c hx -s g -l grammar -x -a "fetch build" -d "Fetch or build tree-sitter grammars"
|
||||
complete -c hx -s v -o vv -o vvv -d "Increases logging verbosity"
|
||||
complete -c hx -s V -l version -d "Prints version information"
|
||||
@@ -14,5 +18,5 @@ complete -c hx -l log -r -d "Specifies a file to use for logging"
|
||||
complete -c hx -s w -l working-dir -d "Specify initial working directory" -xa "(__fish_complete_directories)"
|
||||
|
||||
function __hx_langs_ops
|
||||
hx --health languages | tail -n '+2' | string replace -fr '^(\S+) .*' '$1'
|
||||
hx --health all-languages | tail -n '+2' | string replace -fr '^(\S+) .*' '$1'
|
||||
end
|
||||
|
@@ -5,8 +5,8 @@
|
||||
# The help message won't be overridden though, so it will still be present here
|
||||
|
||||
def health_categories [] {
|
||||
let languages = ^hx --health languages | detect columns | get Language | filter { $in != null }
|
||||
let completions = [ "all", "clipboard", "languages" ] | append $languages
|
||||
let languages = ^hx --health all-languages | detect columns | get Language | where { $in != null }
|
||||
let completions = [ "all", "clipboard", "languages", "all-languages" ] | append $languages
|
||||
return $completions
|
||||
}
|
||||
|
||||
|
@@ -25,7 +25,7 @@ _hx() {
|
||||
|
||||
case "$state" in
|
||||
health)
|
||||
local languages=($(hx --health | tail -n '+11' | awk '{print $1}' | sed 's/\x1b\[[0-9;]*m//g;s/[✘✓]//g'))
|
||||
local languages=($(hx --health all-languages | tail -n '+2' | awk '{print $1}' | sed 's/\x1b\[[0-9;]*m//g;s/[✘✓]//g'))
|
||||
_values 'language' $languages
|
||||
;;
|
||||
grammar)
|
||||
|
@@ -6,7 +6,8 @@
|
||||
installShellFiles,
|
||||
git,
|
||||
gitRev ? null,
|
||||
...
|
||||
grammarOverlays ? [],
|
||||
includeGrammarIf ? _: true,
|
||||
}: let
|
||||
fs = lib.fileset;
|
||||
|
||||
@@ -28,7 +29,7 @@
|
||||
# that they reside in. It is built by calling the derivation in the
|
||||
# grammars.nix file, then taking the runtime directory in the git repo
|
||||
# and hooking symlinks up to it.
|
||||
grammars = callPackage ./grammars.nix {};
|
||||
grammars = callPackage ./grammars.nix {inherit grammarOverlays includeGrammarIf;};
|
||||
runtimeDir = runCommand "helix-runtime" {} ''
|
||||
mkdir -p $out
|
||||
ln -s ${./runtime}/* $out
|
||||
@@ -45,6 +46,8 @@ in
|
||||
allowBuiltinFetchGit = true;
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [ runtimeDir ];
|
||||
|
||||
nativeBuildInputs = [
|
||||
installShellFiles
|
||||
git
|
||||
|
@@ -13,7 +13,7 @@ Some suggestions to get started:
|
||||
- Instead of running a release version of Helix, while developing you may want to run in debug mode with `cargo run` which is way faster to compile
|
||||
- Looking for even faster compile times? Give a try to [mold](https://github.com/rui314/mold)
|
||||
- If your preferred language is missing, integrating a tree-sitter grammar for
|
||||
it and defining syntax highlight queries for it is straight forward and
|
||||
it and defining syntax highlight queries for it is straightforward and
|
||||
doesn't require much knowledge of the internals.
|
||||
- If you don't use the Nix development shell and are getting your rust-analyzer binary from rustup, you may need to run `rustup component add rust-analyzer`.
|
||||
This is because `rust-toolchain.toml` selects our MSRV for the development toolchain but doesn't download the matching rust-analyzer automatically.
|
||||
|
@@ -1,10 +1,12 @@
|
||||
|
||||
| Crate | Description |
|
||||
| ----------- | ----------- |
|
||||
| helix-stdx | Extensions to the standard library (similar to [`rust-analyzer`'s](https://github.com/rust-lang/rust-analyzer/blob/ea413f67a8f730b4211c09e103f8207c62e7dbc3/crates/stdx/Cargo.toml#L5)) |
|
||||
| helix-core | Core editing primitives, functional. |
|
||||
| helix-lsp | Language server client |
|
||||
| helix-lsp-types | Language Server Protocol type definitions |
|
||||
| helix-dap | Debug Adapter Protocol (DAP) client |
|
||||
| helix-event | Primitives for defining and handling events within the editor |
|
||||
| helix-loader | Functions for building, fetching, and loading external resources |
|
||||
| helix-view | UI abstractions for use in backends, imperative shell. |
|
||||
| helix-term | Terminal UI |
|
||||
@@ -110,3 +112,17 @@ The `main` function sets up a new `Application` that runs the event loop.
|
||||
## TUI / Term
|
||||
|
||||
TODO: document Component and rendering related stuff
|
||||
|
||||
## Event
|
||||
|
||||
The `helix-event` crate defines primitives for defining and acting on events
|
||||
within the editor. "Events" cover things like opening, changing and closing of
|
||||
documents, starting and stopping of language servers and more.
|
||||
|
||||
`helix-event` has tools for defining events and registering _hooks_ which run
|
||||
any time an event is emitted. `helix-event` also provides `AsyncHook` - a tool
|
||||
for running cancellable tasks which run after events with _debouncing_.
|
||||
|
||||
See the `AsyncHook` type for more information. Events can be created within the
|
||||
`events!` macro. Synchronous hooks can be created with `register_hook!`. And
|
||||
editor-wide events can be sent to hooks with `helix_event::dispatch`.
|
||||
|
34
flake.lock
generated
34
flake.lock
generated
@@ -1,23 +1,5 @@
|
||||
{
|
||||
"nodes": {
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1740560979,
|
||||
@@ -36,7 +18,6 @@
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
@@ -60,21 +41,6 @@
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
|
131
flake.nix
131
flake.nix
@@ -3,7 +3,6 @@
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
@@ -13,77 +12,89 @@
|
||||
outputs = {
|
||||
self,
|
||||
nixpkgs,
|
||||
flake-utils,
|
||||
rust-overlay,
|
||||
...
|
||||
}: let
|
||||
inherit (nixpkgs) lib;
|
||||
systems = [
|
||||
"x86_64-linux"
|
||||
"aarch64-linux"
|
||||
"x86_64-darwin"
|
||||
"aarch64-darwin"
|
||||
];
|
||||
eachSystem = lib.genAttrs systems;
|
||||
pkgsFor = eachSystem (system:
|
||||
import nixpkgs {
|
||||
localSystem.system = system;
|
||||
overlays = [(import rust-overlay) self.overlays.helix];
|
||||
});
|
||||
gitRev = self.rev or self.dirtyRev or null;
|
||||
in
|
||||
flake-utils.lib.eachDefaultSystem (system: let
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [(import rust-overlay)];
|
||||
};
|
||||
in {
|
||||
packages = eachSystem (system: {
|
||||
inherit (pkgsFor.${system}) helix;
|
||||
/*
|
||||
The default Helix build. Uses the latest stable Rust toolchain, and unstable
|
||||
nixpkgs.
|
||||
|
||||
# Get Helix's MSRV toolchain to build with by default.
|
||||
msrvToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
|
||||
msrvPlatform = pkgs.makeRustPlatform {
|
||||
cargo = msrvToolchain;
|
||||
rustc = msrvToolchain;
|
||||
};
|
||||
in {
|
||||
packages = rec {
|
||||
helix = pkgs.callPackage ./default.nix {inherit gitRev;};
|
||||
The build inputs can be overridden with the following:
|
||||
|
||||
/**
|
||||
The default Helix build. Uses the latest stable Rust toolchain, and unstable
|
||||
nixpkgs.
|
||||
|
||||
The build inputs can be overriden with the following:
|
||||
|
||||
packages.${system}.default.override { rustPlatform = newPlatform; };
|
||||
|
||||
Overriding a derivation attribute can be done as well:
|
||||
|
||||
packages.${system}.default.overrideAttrs { buildType = "debug"; };
|
||||
*/
|
||||
default = helix;
|
||||
};
|
||||
packages.${system}.default.override { rustPlatform = newPlatform; };
|
||||
|
||||
checks.helix = self.outputs.packages.${system}.helix.override {
|
||||
buildType = "debug";
|
||||
rustPlatform = msrvPlatform;
|
||||
};
|
||||
Overriding a derivation attribute can be done as well:
|
||||
|
||||
# Devshell behavior is preserved.
|
||||
devShells.default = let
|
||||
commonRustFlagsEnv = "-C link-arg=-fuse-ld=lld -C target-cpu=native --cfg tokio_unstable";
|
||||
platformRustFlagsEnv = pkgs.lib.optionalString pkgs.stdenv.isLinux "-Clink-arg=-Wl,--no-rosegment";
|
||||
in
|
||||
pkgs.mkShell
|
||||
{
|
||||
inputsFrom = [self.checks.${system}.helix];
|
||||
nativeBuildInputs = with pkgs;
|
||||
[
|
||||
lld
|
||||
cargo-flamegraph
|
||||
rust-bin.nightly.latest.rust-analyzer
|
||||
]
|
||||
++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin)
|
||||
++ (lib.optional stdenv.isLinux lldb)
|
||||
++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation);
|
||||
shellHook = ''
|
||||
export RUST_BACKTRACE="1"
|
||||
export RUSTFLAGS="''${RUSTFLAGS:-""} ${commonRustFlagsEnv} ${platformRustFlagsEnv}"
|
||||
'';
|
||||
packages.${system}.default.overrideAttrs { buildType = "debug"; };
|
||||
*/
|
||||
default = self.packages.${system}.helix;
|
||||
});
|
||||
checks =
|
||||
lib.mapAttrs (system: pkgs: let
|
||||
# Get Helix's MSRV toolchain to build with by default.
|
||||
msrvToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
|
||||
msrvPlatform = pkgs.makeRustPlatform {
|
||||
cargo = msrvToolchain;
|
||||
rustc = msrvToolchain;
|
||||
};
|
||||
})
|
||||
// {
|
||||
overlays.default = final: prev: {
|
||||
in {
|
||||
helix = self.packages.${system}.helix.override {
|
||||
rustPlatform = msrvPlatform;
|
||||
};
|
||||
})
|
||||
pkgsFor;
|
||||
|
||||
# Devshell behavior is preserved.
|
||||
devShells =
|
||||
lib.mapAttrs (system: pkgs: {
|
||||
default = let
|
||||
commonRustFlagsEnv = "-C link-arg=-fuse-ld=lld -C target-cpu=native --cfg tokio_unstable";
|
||||
platformRustFlagsEnv = lib.optionalString pkgs.stdenv.isLinux "-Clink-arg=-Wl,--no-rosegment";
|
||||
in
|
||||
pkgs.mkShell {
|
||||
inputsFrom = [self.checks.${system}.helix];
|
||||
nativeBuildInputs = with pkgs;
|
||||
[
|
||||
lld
|
||||
cargo-flamegraph
|
||||
rust-bin.nightly.latest.rust-analyzer
|
||||
]
|
||||
++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin)
|
||||
++ (lib.optional stdenv.isLinux lldb)
|
||||
++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation);
|
||||
shellHook = ''
|
||||
export RUST_BACKTRACE="1"
|
||||
export RUSTFLAGS="''${RUSTFLAGS:-""} ${commonRustFlagsEnv} ${platformRustFlagsEnv}"
|
||||
'';
|
||||
};
|
||||
})
|
||||
pkgsFor;
|
||||
|
||||
overlays = {
|
||||
helix = final: prev: {
|
||||
helix = final.callPackage ./default.nix {inherit gitRev;};
|
||||
};
|
||||
};
|
||||
|
||||
default = self.overlays.helix;
|
||||
};
|
||||
};
|
||||
nixConfig = {
|
||||
extra-substituters = ["https://helix.cachix.org"];
|
||||
extra-trusted-public-keys = ["helix.cachix.org-1:ejp9KQpR1FBI2onstMQ34yogDm4OgU2ru6lIwPvuCVs="];
|
||||
|
13
grammars.nix
13
grammars.nix
@@ -1,22 +1,13 @@
|
||||
{
|
||||
stdenv,
|
||||
lib,
|
||||
runCommandLocal,
|
||||
runCommand,
|
||||
yj,
|
||||
includeGrammarIf ? _: true,
|
||||
grammarOverlays ? [],
|
||||
...
|
||||
}: let
|
||||
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
|
||||
# before parsing
|
||||
languages-json = runCommandLocal "languages-toml-to-json" {} ''
|
||||
${yj}/bin/yj -t < ${./languages.toml} > $out
|
||||
'';
|
||||
languagesConfig =
|
||||
if lib.versionAtLeast builtins.nixVersion "2.6.0"
|
||||
then builtins.fromTOML (builtins.readFile ./languages.toml)
|
||||
else builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
|
||||
builtins.fromTOML (builtins.readFile ./languages.toml);
|
||||
isGitGrammar = grammar:
|
||||
builtins.hasAttr "source" grammar
|
||||
&& builtins.hasAttr "git" grammar.source
|
||||
@@ -96,8 +87,6 @@
|
||||
$CC -c src/parser.c -o parser.o $FLAGS
|
||||
$CXX -shared -o $NAME.so *.o
|
||||
|
||||
ls -al
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
|
@@ -32,23 +32,21 @@ unicode-segmentation.workspace = true
|
||||
unicode-width = "=0.1.12"
|
||||
unicode-general-category = "1.0"
|
||||
slotmap.workspace = true
|
||||
tree-sitter.workspace = true
|
||||
tree-house.workspace = true
|
||||
once_cell = "1.21"
|
||||
arc-swap = "1"
|
||||
regex = "1"
|
||||
bitflags.workspace = true
|
||||
ahash = "0.8.11"
|
||||
hashbrown = { version = "0.14.5", features = ["raw"] }
|
||||
foldhash.workspace = true
|
||||
url = "2.5.4"
|
||||
|
||||
log = "0.4"
|
||||
anyhow = "1.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
toml = "0.8"
|
||||
|
||||
imara-diff = "0.1.8"
|
||||
toml.workspace = true
|
||||
|
||||
imara-diff = "0.2.0"
|
||||
encoding_rs = "0.8"
|
||||
|
||||
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
|
||||
|
@@ -4,7 +4,8 @@
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::{
|
||||
syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction,
|
||||
syntax::config::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril,
|
||||
Transaction,
|
||||
};
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
use std::borrow::Cow;
|
||||
|
@@ -16,6 +16,7 @@ pub struct CompletionItem {
|
||||
pub enum CompletionProvider {
|
||||
Lsp(LanguageServerId),
|
||||
Path,
|
||||
Word,
|
||||
}
|
||||
|
||||
impl From<LanguageServerId> for CompletionProvider {
|
||||
|
@@ -1,4 +1,4 @@
|
||||
use crate::syntax::{Configuration, Loader, LoaderError};
|
||||
use crate::syntax::{config::Configuration, Loader, LoaderError};
|
||||
|
||||
/// Language configuration based on built-in languages.toml.
|
||||
pub fn default_lang_config() -> Configuration {
|
||||
|
@@ -1,51 +1,22 @@
|
||||
use std::ops::Range;
|
||||
use std::time::Instant;
|
||||
|
||||
use imara_diff::intern::InternedInput;
|
||||
use imara_diff::Algorithm;
|
||||
use imara_diff::{Algorithm, Diff, Hunk, IndentHeuristic, IndentLevel, InternedInput};
|
||||
use ropey::RopeSlice;
|
||||
|
||||
use crate::{ChangeSet, Rope, Tendril, Transaction};
|
||||
|
||||
/// A `imara_diff::Sink` that builds a `ChangeSet` for a character diff of a hunk
|
||||
struct CharChangeSetBuilder<'a> {
|
||||
res: &'a mut ChangeSet,
|
||||
hunk: &'a InternedInput<char>,
|
||||
pos: u32,
|
||||
}
|
||||
|
||||
impl imara_diff::Sink for CharChangeSetBuilder<'_> {
|
||||
type Out = ();
|
||||
fn process_change(&mut self, before: Range<u32>, after: Range<u32>) {
|
||||
self.res.retain((before.start - self.pos) as usize);
|
||||
self.res.delete(before.len());
|
||||
self.pos = before.end;
|
||||
|
||||
let res = self.hunk.after[after.start as usize..after.end as usize]
|
||||
.iter()
|
||||
.map(|&token| self.hunk.interner[token])
|
||||
.collect();
|
||||
|
||||
self.res.insert(res);
|
||||
}
|
||||
|
||||
fn finish(self) -> Self::Out {
|
||||
self.res.retain(self.hunk.before.len() - self.pos as usize);
|
||||
}
|
||||
}
|
||||
|
||||
struct LineChangeSetBuilder<'a> {
|
||||
struct ChangeSetBuilder<'a> {
|
||||
res: ChangeSet,
|
||||
after: RopeSlice<'a>,
|
||||
file: &'a InternedInput<RopeSlice<'a>>,
|
||||
current_hunk: InternedInput<char>,
|
||||
char_diff: Diff,
|
||||
pos: u32,
|
||||
}
|
||||
|
||||
impl imara_diff::Sink for LineChangeSetBuilder<'_> {
|
||||
type Out = ChangeSet;
|
||||
|
||||
fn process_change(&mut self, before: Range<u32>, after: Range<u32>) {
|
||||
impl ChangeSetBuilder<'_> {
|
||||
fn process_hunk(&mut self, before: Range<u32>, after: Range<u32>) {
|
||||
let len = self.file.before[self.pos as usize..before.start as usize]
|
||||
.iter()
|
||||
.map(|&it| self.file.interner[it].len_chars())
|
||||
@@ -109,25 +80,36 @@ impl imara_diff::Sink for LineChangeSetBuilder<'_> {
|
||||
.flat_map(|&it| self.file.interner[it].chars());
|
||||
self.current_hunk.update_before(hunk_before);
|
||||
self.current_hunk.update_after(hunk_after);
|
||||
|
||||
// the histogram heuristic does not work as well
|
||||
// for characters because the same characters often reoccur
|
||||
// use myer diff instead
|
||||
imara_diff::diff(
|
||||
self.char_diff.compute_with(
|
||||
Algorithm::Myers,
|
||||
&self.current_hunk,
|
||||
CharChangeSetBuilder {
|
||||
res: &mut self.res,
|
||||
hunk: &self.current_hunk,
|
||||
pos: 0,
|
||||
},
|
||||
&self.current_hunk.before,
|
||||
&self.current_hunk.after,
|
||||
self.current_hunk.interner.num_tokens(),
|
||||
);
|
||||
let mut pos = 0;
|
||||
for Hunk { before, after } in self.char_diff.hunks() {
|
||||
self.res.retain((before.start - pos) as usize);
|
||||
self.res.delete(before.len());
|
||||
pos = before.end;
|
||||
|
||||
let res = self.current_hunk.after[after.start as usize..after.end as usize]
|
||||
.iter()
|
||||
.map(|&token| self.current_hunk.interner[token])
|
||||
.collect();
|
||||
|
||||
self.res.insert(res);
|
||||
}
|
||||
self.res
|
||||
.retain(self.current_hunk.before.len() - pos as usize);
|
||||
// reuse allocations
|
||||
self.current_hunk.clear();
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(mut self) -> Self::Out {
|
||||
fn finish(mut self) -> ChangeSet {
|
||||
let len = self.file.before[self.pos as usize..]
|
||||
.iter()
|
||||
.map(|&it| self.file.interner[it].len_chars())
|
||||
@@ -140,7 +122,7 @@ impl imara_diff::Sink for LineChangeSetBuilder<'_> {
|
||||
|
||||
struct RopeLines<'a>(RopeSlice<'a>);
|
||||
|
||||
impl<'a> imara_diff::intern::TokenSource for RopeLines<'a> {
|
||||
impl<'a> imara_diff::TokenSource for RopeLines<'a> {
|
||||
type Token = RopeSlice<'a>;
|
||||
type Tokenizer = ropey::iter::Lines<'a>;
|
||||
|
||||
@@ -161,15 +143,23 @@ pub fn compare_ropes(before: &Rope, after: &Rope) -> Transaction {
|
||||
let res = ChangeSet::with_capacity(32);
|
||||
let after = after.slice(..);
|
||||
let file = InternedInput::new(RopeLines(before.slice(..)), RopeLines(after));
|
||||
let builder = LineChangeSetBuilder {
|
||||
let mut builder = ChangeSetBuilder {
|
||||
res,
|
||||
file: &file,
|
||||
after,
|
||||
pos: 0,
|
||||
current_hunk: InternedInput::default(),
|
||||
char_diff: Diff::default(),
|
||||
};
|
||||
|
||||
let res = imara_diff::diff(Algorithm::Histogram, &file, builder).into();
|
||||
let mut diff = Diff::compute(Algorithm::Histogram, &file);
|
||||
diff.postprocess_with_heuristic(
|
||||
&file,
|
||||
IndentHeuristic::new(|token| IndentLevel::for_ascii_line(file.interner[token].bytes(), 4)),
|
||||
);
|
||||
for hunk in diff.hunks() {
|
||||
builder.process_hunk(hunk.before, hunk.after)
|
||||
}
|
||||
let res = builder.finish().into();
|
||||
|
||||
log::debug!(
|
||||
"rope diff took {}s",
|
||||
|
@@ -228,6 +228,7 @@ impl FromStr for Ini {
|
||||
let glob = GlobBuilder::new(&glob_str)
|
||||
.literal_separator(true)
|
||||
.backslash_escape(true)
|
||||
.empty_alternates(true)
|
||||
.build()?;
|
||||
ini.sections.push(Section {
|
||||
glob: glob.compile_matcher(),
|
||||
|
@@ -242,34 +242,6 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the given char position is a grapheme boundary.
|
||||
#[must_use]
|
||||
pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
|
||||
// Bounds check
|
||||
debug_assert!(char_idx <= slice.len_chars());
|
||||
|
||||
// We work with bytes for this, so convert.
|
||||
let byte_idx = slice.char_to_byte(char_idx);
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
|
||||
|
||||
// Determine if the given position is a grapheme cluster boundary.
|
||||
loop {
|
||||
match gc.is_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(n) => return n,
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
|
||||
gc.provide_context(ctx_chunk, ctx_byte_start);
|
||||
}
|
||||
Err(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A highly compressed Cow<'a, str> that holds
|
||||
/// atmost u31::MAX bytes and is readonly
|
||||
pub struct GraphemeStr<'a> {
|
||||
|
@@ -1,14 +1,18 @@
|
||||
use std::{borrow::Cow, collections::HashMap, iter};
|
||||
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
|
||||
use tree_house::TREE_SITTER_MATCH_LIMIT;
|
||||
|
||||
use crate::{
|
||||
chars::{char_is_line_ending, char_is_whitespace},
|
||||
graphemes::{grapheme_width, tab_width_at},
|
||||
syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
|
||||
tree_sitter::Node,
|
||||
Position, Rope, RopeSlice, Tendril,
|
||||
syntax::{self, config::IndentationHeuristic},
|
||||
tree_sitter::{
|
||||
self,
|
||||
query::{InvalidPredicateError, UserPredicate},
|
||||
Capture, Grammar, InactiveQueryCursor, Node, Pattern, Query, QueryMatch, RopeInput,
|
||||
},
|
||||
Position, Rope, RopeSlice, Syntax, Tendril,
|
||||
};
|
||||
|
||||
/// Enum representing indentation style.
|
||||
@@ -149,6 +153,12 @@ pub fn auto_detect_indent_style(document_text: &Rope) -> Option<IndentStyle> {
|
||||
// Give more weight to tabs, because their presence is a very
|
||||
// strong indicator.
|
||||
histogram[0] *= 2;
|
||||
// Gives less weight to single indent, as single spaces are
|
||||
// often used in certain languages' comment systems and rarely
|
||||
// used as the actual document indentation.
|
||||
if histogram[1] > 1 {
|
||||
histogram[1] /= 2;
|
||||
}
|
||||
|
||||
histogram
|
||||
};
|
||||
@@ -279,18 +289,164 @@ fn add_indent_level(
|
||||
|
||||
/// Return true if only whitespace comes before the node on its line.
|
||||
/// If given, new_line_byte_pos is treated the same way as any existing newline.
|
||||
fn is_first_in_line(node: Node, text: RopeSlice, new_line_byte_pos: Option<usize>) -> bool {
|
||||
let mut line_start_byte_pos = text.line_to_byte(node.start_position().row);
|
||||
fn is_first_in_line(node: &Node, text: RopeSlice, new_line_byte_pos: Option<u32>) -> bool {
|
||||
let line = text.byte_to_line(node.start_byte() as usize);
|
||||
let mut line_start_byte_pos = text.line_to_byte(line) as u32;
|
||||
if let Some(pos) = new_line_byte_pos {
|
||||
if line_start_byte_pos < pos && pos <= node.start_byte() {
|
||||
line_start_byte_pos = pos;
|
||||
}
|
||||
}
|
||||
text.byte_slice(line_start_byte_pos..node.start_byte())
|
||||
text.byte_slice(line_start_byte_pos as usize..node.start_byte() as usize)
|
||||
.chars()
|
||||
.all(|c| c.is_whitespace())
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct IndentQueryPredicates {
|
||||
not_kind_eq: Vec<(Capture, Box<str>)>,
|
||||
same_line: Option<(Capture, Capture, bool)>,
|
||||
one_line: Option<(Capture, bool)>,
|
||||
}
|
||||
|
||||
impl IndentQueryPredicates {
|
||||
fn are_satisfied(
|
||||
&self,
|
||||
match_: &QueryMatch,
|
||||
text: RopeSlice,
|
||||
new_line_byte_pos: Option<u32>,
|
||||
) -> bool {
|
||||
for (capture, not_expected_kind) in self.not_kind_eq.iter() {
|
||||
let node = match_.nodes_for_capture(*capture).next();
|
||||
if node.is_some_and(|n| n.kind() == not_expected_kind.as_ref()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((capture1, capture2, negated)) = self.same_line {
|
||||
let n1 = match_.nodes_for_capture(capture1).next();
|
||||
let n2 = match_.nodes_for_capture(capture2).next();
|
||||
let satisfied = n1.zip(n2).is_some_and(|(n1, n2)| {
|
||||
let n1_line = get_node_start_line(text, n1, new_line_byte_pos);
|
||||
let n2_line = get_node_start_line(text, n2, new_line_byte_pos);
|
||||
let same_line = n1_line == n2_line;
|
||||
same_line != negated
|
||||
});
|
||||
|
||||
if !satisfied {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((capture, negated)) = self.one_line {
|
||||
let node = match_.nodes_for_capture(capture).next();
|
||||
let satisfied = node.is_some_and(|node| {
|
||||
let start_line = get_node_start_line(text, node, new_line_byte_pos);
|
||||
let end_line = get_node_end_line(text, node, new_line_byte_pos);
|
||||
let one_line = end_line == start_line;
|
||||
one_line != negated
|
||||
});
|
||||
|
||||
if !satisfied {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct IndentQuery {
|
||||
query: Query,
|
||||
properties: HashMap<Pattern, IndentScope>,
|
||||
predicates: HashMap<Pattern, IndentQueryPredicates>,
|
||||
indent_capture: Option<Capture>,
|
||||
indent_always_capture: Option<Capture>,
|
||||
outdent_capture: Option<Capture>,
|
||||
outdent_always_capture: Option<Capture>,
|
||||
align_capture: Option<Capture>,
|
||||
anchor_capture: Option<Capture>,
|
||||
extend_capture: Option<Capture>,
|
||||
extend_prevent_once_capture: Option<Capture>,
|
||||
}
|
||||
|
||||
impl IndentQuery {
|
||||
pub fn new(grammar: Grammar, source: &str) -> Result<Self, tree_sitter::query::ParseError> {
|
||||
let mut properties = HashMap::new();
|
||||
let mut predicates: HashMap<Pattern, IndentQueryPredicates> = HashMap::new();
|
||||
let query = Query::new(grammar, source, |pattern, predicate| match predicate {
|
||||
UserPredicate::SetProperty { key: "scope", val } => {
|
||||
let scope = match val {
|
||||
Some("all") => IndentScope::All,
|
||||
Some("tail") => IndentScope::Tail,
|
||||
Some(other) => {
|
||||
return Err(format!("unknown scope (#set! scope \"{other}\")").into())
|
||||
}
|
||||
None => return Err("missing scope value (#set! scope ...)".into()),
|
||||
};
|
||||
|
||||
properties.insert(pattern, scope);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
UserPredicate::Other(predicate) => {
|
||||
let name = predicate.name();
|
||||
match name {
|
||||
"not-kind-eq?" => {
|
||||
predicate.check_arg_count(2)?;
|
||||
let capture = predicate.capture_arg(0)?;
|
||||
let not_expected_kind = predicate.str_arg(1)?;
|
||||
|
||||
predicates
|
||||
.entry(pattern)
|
||||
.or_default()
|
||||
.not_kind_eq
|
||||
.push((capture, not_expected_kind.into()));
|
||||
Ok(())
|
||||
}
|
||||
"same-line?" | "not-same-line?" => {
|
||||
predicate.check_arg_count(2)?;
|
||||
let capture1 = predicate.capture_arg(0)?;
|
||||
let capture2 = predicate.capture_arg(1)?;
|
||||
let negated = name == "not-same-line?";
|
||||
|
||||
predicates.entry(pattern).or_default().same_line =
|
||||
Some((capture1, capture2, negated));
|
||||
Ok(())
|
||||
}
|
||||
"one-line?" | "not-one-line?" => {
|
||||
predicate.check_arg_count(1)?;
|
||||
let capture = predicate.capture_arg(0)?;
|
||||
let negated = name == "not-one-line?";
|
||||
|
||||
predicates.entry(pattern).or_default().one_line = Some((capture, negated));
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(InvalidPredicateError::unknown(UserPredicate::Other(
|
||||
predicate,
|
||||
))),
|
||||
}
|
||||
}
|
||||
_ => Err(InvalidPredicateError::unknown(predicate)),
|
||||
})?;
|
||||
|
||||
Ok(Self {
|
||||
properties,
|
||||
predicates,
|
||||
indent_capture: query.get_capture("indent"),
|
||||
indent_always_capture: query.get_capture("indent.always"),
|
||||
outdent_capture: query.get_capture("outdent"),
|
||||
outdent_always_capture: query.get_capture("outdent.always"),
|
||||
align_capture: query.get_capture("align"),
|
||||
anchor_capture: query.get_capture("anchor"),
|
||||
extend_capture: query.get_capture("extend"),
|
||||
extend_prevent_once_capture: query.get_capture("extend.prevent-once"),
|
||||
query,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The total indent for some line of code.
|
||||
/// This is usually constructed in one of 2 ways:
|
||||
/// - Successively add indent captures to get the (added) indent from a single line
|
||||
@@ -453,16 +609,16 @@ struct IndentQueryResult<'a> {
|
||||
extend_captures: HashMap<usize, Vec<ExtendCapture>>,
|
||||
}
|
||||
|
||||
fn get_node_start_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
|
||||
let mut node_line = node.start_position().row;
|
||||
fn get_node_start_line(text: RopeSlice, node: &Node, new_line_byte_pos: Option<u32>) -> usize {
|
||||
let mut node_line = text.byte_to_line(node.start_byte() as usize);
|
||||
// Adjust for the new line that will be inserted
|
||||
if new_line_byte_pos.is_some_and(|pos| node.start_byte() >= pos) {
|
||||
node_line += 1;
|
||||
}
|
||||
node_line
|
||||
}
|
||||
fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
|
||||
let mut node_line = node.end_position().row;
|
||||
fn get_node_end_line(text: RopeSlice, node: &Node, new_line_byte_pos: Option<u32>) -> usize {
|
||||
let mut node_line = text.byte_to_line(node.end_byte() as usize);
|
||||
// Adjust for the new line that will be inserted (with a strict inequality since end_byte is exclusive)
|
||||
if new_line_byte_pos.is_some_and(|pos| node.end_byte() > pos) {
|
||||
node_line += 1;
|
||||
@@ -471,175 +627,96 @@ fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
|
||||
}
|
||||
|
||||
fn query_indents<'a>(
|
||||
query: &Query,
|
||||
query: &IndentQuery,
|
||||
syntax: &Syntax,
|
||||
cursor: &mut QueryCursor,
|
||||
text: RopeSlice<'a>,
|
||||
range: std::ops::Range<usize>,
|
||||
new_line_byte_pos: Option<usize>,
|
||||
range: std::ops::Range<u32>,
|
||||
new_line_byte_pos: Option<u32>,
|
||||
) -> IndentQueryResult<'a> {
|
||||
let mut indent_captures: HashMap<usize, Vec<IndentCapture>> = HashMap::new();
|
||||
let mut extend_captures: HashMap<usize, Vec<ExtendCapture>> = HashMap::new();
|
||||
cursor.set_byte_range(range);
|
||||
|
||||
let mut cursor = InactiveQueryCursor::new(range, TREE_SITTER_MATCH_LIMIT).execute_query(
|
||||
&query.query,
|
||||
&syntax.tree().root_node(),
|
||||
RopeInput::new(text),
|
||||
);
|
||||
|
||||
// Iterate over all captures from the query
|
||||
for m in cursor.matches(query, syntax.tree().root_node(), RopeProvider(text)) {
|
||||
while let Some(m) = cursor.next_match() {
|
||||
// Skip matches where not all custom predicates are fulfilled
|
||||
if !query.general_predicates(m.pattern_index).iter().all(|pred| {
|
||||
match pred.operator.as_ref() {
|
||||
"not-kind-eq?" => match (pred.args.first(), pred.args.get(1)) {
|
||||
(
|
||||
Some(QueryPredicateArg::Capture(capture_idx)),
|
||||
Some(QueryPredicateArg::String(kind)),
|
||||
) => {
|
||||
let node = m.nodes_for_capture_index(*capture_idx).next();
|
||||
match node {
|
||||
Some(node) => node.kind()!=kind.as_ref(),
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string");
|
||||
}
|
||||
},
|
||||
"same-line?" | "not-same-line?" => {
|
||||
match (pred.args.first(), pred.args.get(1)) {
|
||||
(
|
||||
Some(QueryPredicateArg::Capture(capt1)),
|
||||
Some(QueryPredicateArg::Capture(capt2))
|
||||
) => {
|
||||
let n1 = m.nodes_for_capture_index(*capt1).next();
|
||||
let n2 = m.nodes_for_capture_index(*capt2).next();
|
||||
match (n1, n2) {
|
||||
(Some(n1), Some(n2)) => {
|
||||
let n1_line = get_node_start_line(n1, new_line_byte_pos);
|
||||
let n2_line = get_node_start_line(n2, new_line_byte_pos);
|
||||
let same_line = n1_line == n2_line;
|
||||
same_line==(pred.operator.as_ref()=="same-line?")
|
||||
}
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
panic!("Invalid indent query: Arguments to \"{}\" must be 2 captures", pred.operator);
|
||||
}
|
||||
}
|
||||
}
|
||||
"one-line?" | "not-one-line?" => match pred.args.first() {
|
||||
Some(QueryPredicateArg::Capture(capture_idx)) => {
|
||||
let node = m.nodes_for_capture_index(*capture_idx).next();
|
||||
|
||||
match node {
|
||||
Some(node) => {
|
||||
let (start_line, end_line) = (get_node_start_line(node,new_line_byte_pos), get_node_end_line(node, new_line_byte_pos));
|
||||
let one_line = end_line == start_line;
|
||||
one_line != (pred.operator.as_ref() == "not-one-line?")
|
||||
},
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string");
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
panic!(
|
||||
"Invalid indent query: Unknown predicate (\"{}\")",
|
||||
pred.operator
|
||||
);
|
||||
}
|
||||
}
|
||||
}) {
|
||||
if query
|
||||
.predicates
|
||||
.get(&m.pattern())
|
||||
.is_some_and(|preds| !preds.are_satisfied(&m, text, new_line_byte_pos))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// A list of pairs (node_id, indent_capture) that are added by this match.
|
||||
// They cannot be added to indent_captures immediately since they may depend on other captures (such as an @anchor).
|
||||
let mut added_indent_captures: Vec<(usize, IndentCapture)> = Vec::new();
|
||||
// The row/column position of the optional anchor in this query
|
||||
let mut anchor: Option<tree_sitter::Node> = None;
|
||||
for capture in m.captures {
|
||||
let capture_name = query.capture_names()[capture.index as usize];
|
||||
let capture_type = match capture_name {
|
||||
"indent" => IndentCaptureType::Indent,
|
||||
"indent.always" => IndentCaptureType::IndentAlways,
|
||||
"outdent" => IndentCaptureType::Outdent,
|
||||
"outdent.always" => IndentCaptureType::OutdentAlways,
|
||||
// The alignment will be updated to the correct value at the end, when the anchor is known.
|
||||
"align" => IndentCaptureType::Align(RopeSlice::from("")),
|
||||
"anchor" => {
|
||||
if anchor.is_some() {
|
||||
log::error!("Invalid indent query: Encountered more than one @anchor in the same match.")
|
||||
} else {
|
||||
anchor = Some(capture.node);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
"extend" => {
|
||||
extend_captures
|
||||
.entry(capture.node.id())
|
||||
.or_insert_with(|| Vec::with_capacity(1))
|
||||
.push(ExtendCapture::Extend);
|
||||
continue;
|
||||
}
|
||||
"extend.prevent-once" => {
|
||||
extend_captures
|
||||
.entry(capture.node.id())
|
||||
.or_insert_with(|| Vec::with_capacity(1))
|
||||
.push(ExtendCapture::PreventOnce);
|
||||
continue;
|
||||
}
|
||||
_ => {
|
||||
// Ignore any unknown captures (these may be needed for predicates such as #match?)
|
||||
continue;
|
||||
let mut anchor: Option<&Node> = None;
|
||||
for matched_node in m.matched_nodes() {
|
||||
let node_id = matched_node.node.id();
|
||||
let capture = Some(matched_node.capture);
|
||||
let capture_type = if capture == query.indent_capture {
|
||||
IndentCaptureType::Indent
|
||||
} else if capture == query.indent_always_capture {
|
||||
IndentCaptureType::IndentAlways
|
||||
} else if capture == query.outdent_capture {
|
||||
IndentCaptureType::Outdent
|
||||
} else if capture == query.outdent_always_capture {
|
||||
IndentCaptureType::OutdentAlways
|
||||
} else if capture == query.align_capture {
|
||||
IndentCaptureType::Align(RopeSlice::from(""))
|
||||
} else if capture == query.anchor_capture {
|
||||
if anchor.is_some() {
|
||||
log::error!("Invalid indent query: Encountered more than one @anchor in the same match.")
|
||||
} else {
|
||||
anchor = Some(&matched_node.node);
|
||||
}
|
||||
continue;
|
||||
} else if capture == query.extend_capture {
|
||||
extend_captures
|
||||
.entry(node_id)
|
||||
.or_insert_with(|| Vec::with_capacity(1))
|
||||
.push(ExtendCapture::Extend);
|
||||
continue;
|
||||
} else if capture == query.extend_prevent_once_capture {
|
||||
extend_captures
|
||||
.entry(node_id)
|
||||
.or_insert_with(|| Vec::with_capacity(1))
|
||||
.push(ExtendCapture::PreventOnce);
|
||||
continue;
|
||||
} else {
|
||||
// Ignore any unknown captures (these may be needed for predicates such as #match?)
|
||||
continue;
|
||||
};
|
||||
let scope = capture_type.default_scope();
|
||||
let mut indent_capture = IndentCapture {
|
||||
|
||||
// Apply additional settings for this capture
|
||||
let scope = query
|
||||
.properties
|
||||
.get(&m.pattern())
|
||||
.copied()
|
||||
.unwrap_or_else(|| capture_type.default_scope());
|
||||
let indent_capture = IndentCapture {
|
||||
capture_type,
|
||||
scope,
|
||||
};
|
||||
// Apply additional settings for this capture
|
||||
for property in query.property_settings(m.pattern_index) {
|
||||
match property.key.as_ref() {
|
||||
"scope" => {
|
||||
indent_capture.scope = match property.value.as_deref() {
|
||||
Some("all") => IndentScope::All,
|
||||
Some("tail") => IndentScope::Tail,
|
||||
Some(s) => {
|
||||
panic!("Invalid indent query: Unknown value for \"scope\" property (\"{}\")", s);
|
||||
}
|
||||
None => {
|
||||
panic!(
|
||||
"Invalid indent query: Missing value for \"scope\" property"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
panic!(
|
||||
"Invalid indent query: Unknown property \"{}\"",
|
||||
property.key
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
added_indent_captures.push((capture.node.id(), indent_capture))
|
||||
added_indent_captures.push((node_id, indent_capture))
|
||||
}
|
||||
for (node_id, mut capture) in added_indent_captures {
|
||||
// Set the anchor for all align queries.
|
||||
if let IndentCaptureType::Align(_) = capture.capture_type {
|
||||
let anchor = match anchor {
|
||||
None => {
|
||||
log::error!(
|
||||
"Invalid indent query: @align requires an accompanying @anchor."
|
||||
);
|
||||
continue;
|
||||
}
|
||||
Some(anchor) => anchor,
|
||||
let Some(anchor) = anchor else {
|
||||
log::error!("Invalid indent query: @align requires an accompanying @anchor.");
|
||||
continue;
|
||||
};
|
||||
let line = text.byte_to_line(anchor.start_byte() as usize);
|
||||
let line_start = text.line_to_byte(line);
|
||||
capture.capture_type = IndentCaptureType::Align(
|
||||
text.line(anchor.start_position().row)
|
||||
.byte_slice(0..anchor.start_position().column),
|
||||
text.byte_slice(line_start..anchor.start_byte() as usize),
|
||||
);
|
||||
}
|
||||
indent_captures
|
||||
@@ -691,13 +768,15 @@ fn extend_nodes<'a>(
|
||||
// - the cursor is on the same line as the end of the node OR
|
||||
// - the line that the cursor is on is more indented than the
|
||||
// first line of the node
|
||||
if deepest_preceding.end_position().row == line {
|
||||
if text.byte_to_line(deepest_preceding.end_byte() as usize) == line {
|
||||
extend_node = true;
|
||||
} else {
|
||||
let cursor_indent =
|
||||
indent_level_for_line(text.line(line), tab_width, indent_width);
|
||||
let node_indent = indent_level_for_line(
|
||||
text.line(deepest_preceding.start_position().row),
|
||||
text.line(
|
||||
text.byte_to_line(deepest_preceding.start_byte() as usize),
|
||||
),
|
||||
tab_width,
|
||||
indent_width,
|
||||
);
|
||||
@@ -714,7 +793,7 @@ fn extend_nodes<'a>(
|
||||
if node_captured && stop_extend {
|
||||
stop_extend = false;
|
||||
} else if extend_node && !stop_extend {
|
||||
*node = deepest_preceding;
|
||||
*node = deepest_preceding.clone();
|
||||
break;
|
||||
}
|
||||
// If the tree contains a syntax error, `deepest_preceding` may not
|
||||
@@ -731,17 +810,17 @@ fn extend_nodes<'a>(
|
||||
/// - The indent captures for all relevant nodes.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn init_indent_query<'a, 'b>(
|
||||
query: &Query,
|
||||
query: &IndentQuery,
|
||||
syntax: &'a Syntax,
|
||||
text: RopeSlice<'b>,
|
||||
tab_width: usize,
|
||||
indent_width: usize,
|
||||
line: usize,
|
||||
byte_pos: usize,
|
||||
new_line_byte_pos: Option<usize>,
|
||||
byte_pos: u32,
|
||||
new_line_byte_pos: Option<u32>,
|
||||
) -> Option<(Node<'a>, HashMap<usize, Vec<IndentCapture<'b>>>)> {
|
||||
// The innermost tree-sitter node which is considered for the indent
|
||||
// computation. It may change if some predeceding node is extended
|
||||
// computation. It may change if some preceding node is extended
|
||||
let mut node = syntax
|
||||
.tree()
|
||||
.root_node()
|
||||
@@ -751,37 +830,25 @@ fn init_indent_query<'a, 'b>(
|
||||
// The query range should intersect with all nodes directly preceding
|
||||
// the position of the indent query in case one of them is extended.
|
||||
let mut deepest_preceding = None; // The deepest node preceding the indent query position
|
||||
let mut tree_cursor = node.walk();
|
||||
for child in node.children(&mut tree_cursor) {
|
||||
for child in node.children() {
|
||||
if child.byte_range().end <= byte_pos {
|
||||
deepest_preceding = Some(child);
|
||||
deepest_preceding = Some(child.clone());
|
||||
}
|
||||
}
|
||||
deepest_preceding = deepest_preceding.map(|mut prec| {
|
||||
// Get the deepest directly preceding node
|
||||
while prec.child_count() > 0 {
|
||||
prec = prec.child(prec.child_count() - 1).unwrap();
|
||||
prec = prec.child(prec.child_count() - 1).unwrap().clone();
|
||||
}
|
||||
prec
|
||||
});
|
||||
let query_range = deepest_preceding
|
||||
.as_ref()
|
||||
.map(|prec| prec.byte_range().end - 1..byte_pos + 1)
|
||||
.unwrap_or(byte_pos..byte_pos + 1);
|
||||
|
||||
crate::syntax::PARSER.with(|ts_parser| {
|
||||
let mut ts_parser = ts_parser.borrow_mut();
|
||||
let mut cursor = ts_parser.cursors.pop().unwrap_or_default();
|
||||
let query_result = query_indents(
|
||||
query,
|
||||
syntax,
|
||||
&mut cursor,
|
||||
text,
|
||||
query_range,
|
||||
new_line_byte_pos,
|
||||
);
|
||||
ts_parser.cursors.push(cursor);
|
||||
(query_result, deepest_preceding)
|
||||
})
|
||||
let query_result = query_indents(query, syntax, text, query_range, new_line_byte_pos);
|
||||
(query_result, deepest_preceding)
|
||||
};
|
||||
let extend_captures = query_result.extend_captures;
|
||||
|
||||
@@ -839,7 +906,7 @@ fn init_indent_query<'a, 'b>(
|
||||
/// ```
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn treesitter_indent_for_pos<'a>(
|
||||
query: &Query,
|
||||
query: &IndentQuery,
|
||||
syntax: &Syntax,
|
||||
tab_width: usize,
|
||||
indent_width: usize,
|
||||
@@ -848,7 +915,7 @@ pub fn treesitter_indent_for_pos<'a>(
|
||||
pos: usize,
|
||||
new_line: bool,
|
||||
) -> Option<Indentation<'a>> {
|
||||
let byte_pos = text.char_to_byte(pos);
|
||||
let byte_pos = text.char_to_byte(pos) as u32;
|
||||
let new_line_byte_pos = new_line.then_some(byte_pos);
|
||||
let (mut node, mut indent_captures) = init_indent_query(
|
||||
query,
|
||||
@@ -868,7 +935,7 @@ pub fn treesitter_indent_for_pos<'a>(
|
||||
let mut indent_for_line_below = Indentation::default();
|
||||
|
||||
loop {
|
||||
let is_first = is_first_in_line(node, text, new_line_byte_pos);
|
||||
let is_first = is_first_in_line(&node, text, new_line_byte_pos);
|
||||
|
||||
// Apply all indent definitions for this node.
|
||||
// Since we only iterate over each node once, we can remove the
|
||||
@@ -891,8 +958,8 @@ pub fn treesitter_indent_for_pos<'a>(
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
let node_line = get_node_start_line(node, new_line_byte_pos);
|
||||
let parent_line = get_node_start_line(parent, new_line_byte_pos);
|
||||
let node_line = get_node_start_line(text, &node, new_line_byte_pos);
|
||||
let parent_line = get_node_start_line(text, &parent, new_line_byte_pos);
|
||||
|
||||
if node_line != parent_line {
|
||||
// Don't add indent for the line below the line of the query
|
||||
@@ -914,8 +981,9 @@ pub fn treesitter_indent_for_pos<'a>(
|
||||
} else {
|
||||
// Only add the indentation for the line below if that line
|
||||
// is not after the line that the indentation is calculated for.
|
||||
if (node.start_position().row < line)
|
||||
|| (new_line && node.start_position().row == line && node.start_byte() < byte_pos)
|
||||
let node_start_line = text.byte_to_line(node.start_byte() as usize);
|
||||
if node_start_line < line
|
||||
|| (new_line && node_start_line == line && node.start_byte() < byte_pos)
|
||||
{
|
||||
result.add_line(indent_for_line_below);
|
||||
}
|
||||
@@ -930,7 +998,7 @@ pub fn treesitter_indent_for_pos<'a>(
|
||||
/// This is done either using treesitter, or if that's not available by copying the indentation from the current line
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn indent_for_newline(
|
||||
language_config: Option<&LanguageConfiguration>,
|
||||
loader: &syntax::Loader,
|
||||
syntax: Option<&Syntax>,
|
||||
indent_heuristic: &IndentationHeuristic,
|
||||
indent_style: &IndentStyle,
|
||||
@@ -947,7 +1015,7 @@ pub fn indent_for_newline(
|
||||
Some(syntax),
|
||||
) = (
|
||||
indent_heuristic,
|
||||
language_config.and_then(|config| config.indent_query()),
|
||||
syntax.and_then(|syntax| loader.indent_query(syntax.root_language())),
|
||||
syntax,
|
||||
) {
|
||||
if let Some(indent) = treesitter_indent_for_pos(
|
||||
@@ -1015,10 +1083,10 @@ pub fn indent_for_newline(
|
||||
indent_style.as_str().repeat(indent_level)
|
||||
}
|
||||
|
||||
pub fn get_scopes(syntax: Option<&Syntax>, text: RopeSlice, pos: usize) -> Vec<&'static str> {
|
||||
pub fn get_scopes<'a>(syntax: Option<&'a Syntax>, text: RopeSlice, pos: usize) -> Vec<&'a str> {
|
||||
let mut scopes = Vec::new();
|
||||
if let Some(syntax) = syntax {
|
||||
let pos = text.char_to_byte(pos);
|
||||
let pos = text.char_to_byte(pos) as u32;
|
||||
let mut node = match syntax
|
||||
.tree()
|
||||
.root_node()
|
||||
|
@@ -53,7 +53,7 @@ pub use smartstring::SmartString;
|
||||
pub type Tendril = SmartString<smartstring::LazyCompact>;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use {regex, tree_sitter};
|
||||
pub use {regex, tree_house::tree_sitter};
|
||||
|
||||
pub use position::{
|
||||
char_idx_at_visual_offset, coords_at_pos, pos_at_coords, softwrapped_dimensions,
|
||||
@@ -73,3 +73,5 @@ pub use line_ending::{LineEnding, NATIVE_LINE_ENDING};
|
||||
pub use transaction::{Assoc, Change, ChangeSet, Deletion, Operation, Transaction};
|
||||
|
||||
pub use uri::Uri;
|
||||
|
||||
pub use tree_house::Language;
|
||||
|
@@ -1,7 +1,7 @@
|
||||
use std::iter;
|
||||
|
||||
use crate::tree_sitter::Node;
|
||||
use ropey::RopeSlice;
|
||||
use tree_sitter::Node;
|
||||
|
||||
use crate::movement::Direction::{self, Backward, Forward};
|
||||
use crate::Syntax;
|
||||
@@ -75,7 +75,7 @@ fn find_pair(
|
||||
pos_: usize,
|
||||
traverse_parents: bool,
|
||||
) -> Option<usize> {
|
||||
let pos = doc.char_to_byte(pos_);
|
||||
let pos = doc.char_to_byte(pos_) as u32;
|
||||
|
||||
let root = syntax.tree_for_byte_range(pos, pos).root_node();
|
||||
let mut node = root.descendant_for_byte_range(pos, pos)?;
|
||||
@@ -128,7 +128,7 @@ fn find_pair(
|
||||
if find_pair_end(doc, sibling.prev_sibling(), start_char, end_char, Backward)
|
||||
.is_some()
|
||||
{
|
||||
return doc.try_byte_to_char(sibling.start_byte()).ok();
|
||||
return doc.try_byte_to_char(sibling.start_byte() as usize).ok();
|
||||
}
|
||||
}
|
||||
} else if node.is_named() {
|
||||
@@ -144,9 +144,9 @@ fn find_pair(
|
||||
if node.child_count() != 0 {
|
||||
return None;
|
||||
}
|
||||
let node_start = doc.byte_to_char(node.start_byte());
|
||||
find_matching_bracket_plaintext(doc.byte_slice(node.byte_range()), pos_ - node_start)
|
||||
.map(|pos| pos + node_start)
|
||||
let node_start = doc.byte_to_char(node.start_byte() as usize);
|
||||
let node_text = doc.byte_slice(node.start_byte() as usize..node.end_byte() as usize);
|
||||
find_matching_bracket_plaintext(node_text, pos_ - node_start).map(|pos| pos + node_start)
|
||||
}
|
||||
|
||||
/// Returns the position of the matching bracket under cursor.
|
||||
@@ -304,7 +304,7 @@ fn as_char(doc: RopeSlice, node: &Node) -> Option<(usize, char)> {
|
||||
if node.byte_range().len() != 1 {
|
||||
return None;
|
||||
}
|
||||
let pos = doc.try_byte_to_char(node.start_byte()).ok()?;
|
||||
let pos = doc.try_byte_to_char(node.start_byte() as usize).ok()?;
|
||||
Some((pos, doc.char(pos)))
|
||||
}
|
||||
|
||||
|
@@ -1,7 +1,6 @@
|
||||
use std::{cmp::Reverse, iter};
|
||||
use std::{borrow::Cow, cmp::Reverse, iter};
|
||||
|
||||
use ropey::iter::Chars;
|
||||
use tree_sitter::{Node, QueryCursor};
|
||||
|
||||
use crate::{
|
||||
char_idx_at_visual_offset,
|
||||
@@ -13,9 +12,10 @@ use crate::{
|
||||
},
|
||||
line_ending::rope_is_line_ending,
|
||||
position::char_idx_at_visual_block_offset,
|
||||
syntax::LanguageConfiguration,
|
||||
syntax,
|
||||
text_annotations::TextAnnotations,
|
||||
textobject::TextObject,
|
||||
tree_sitter::Node,
|
||||
visual_offset_from_block, Range, RopeSlice, Selection, Syntax,
|
||||
};
|
||||
|
||||
@@ -560,21 +560,23 @@ fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> boo
|
||||
|
||||
/// Finds the range of the next or previous textobject in the syntax sub-tree of `node`.
|
||||
/// Returns the range in the forwards direction.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn goto_treesitter_object(
|
||||
slice: RopeSlice,
|
||||
range: Range,
|
||||
object_name: &str,
|
||||
dir: Direction,
|
||||
slice_tree: Node,
|
||||
lang_config: &LanguageConfiguration,
|
||||
slice_tree: &Node,
|
||||
syntax: &Syntax,
|
||||
loader: &syntax::Loader,
|
||||
count: usize,
|
||||
) -> Range {
|
||||
let textobject_query = loader.textobject_query(syntax.root_language());
|
||||
let get_range = move |range: Range| -> Option<Range> {
|
||||
let byte_pos = slice.char_to_byte(range.cursor(slice));
|
||||
|
||||
let cap_name = |t: TextObject| format!("{}.{}", object_name, t);
|
||||
let mut cursor = QueryCursor::new();
|
||||
let nodes = lang_config.textobject_query()?.capture_nodes_any(
|
||||
let nodes = textobject_query?.capture_nodes_any(
|
||||
&[
|
||||
&cap_name(TextObject::Movement),
|
||||
&cap_name(TextObject::Around),
|
||||
@@ -582,7 +584,6 @@ pub fn goto_treesitter_object(
|
||||
],
|
||||
slice_tree,
|
||||
slice,
|
||||
&mut cursor,
|
||||
)?;
|
||||
|
||||
let node = match dir {
|
||||
@@ -617,14 +618,15 @@ pub fn goto_treesitter_object(
|
||||
last_range
|
||||
}
|
||||
|
||||
fn find_parent_start(mut node: Node) -> Option<Node> {
|
||||
fn find_parent_start<'tree>(node: &Node<'tree>) -> Option<Node<'tree>> {
|
||||
let start = node.start_byte();
|
||||
let mut node = Cow::Borrowed(node);
|
||||
|
||||
while node.start_byte() >= start || !node.is_named() {
|
||||
node = node.parent()?;
|
||||
node = Cow::Owned(node.parent()?);
|
||||
}
|
||||
|
||||
Some(node)
|
||||
Some(node.into_owned())
|
||||
}
|
||||
|
||||
pub fn move_parent_node_end(
|
||||
@@ -635,8 +637,8 @@ pub fn move_parent_node_end(
|
||||
movement: Movement,
|
||||
) -> Selection {
|
||||
selection.transform(|range| {
|
||||
let start_from = text.char_to_byte(range.from());
|
||||
let start_to = text.char_to_byte(range.to());
|
||||
let start_from = text.char_to_byte(range.from()) as u32;
|
||||
let start_to = text.char_to_byte(range.to()) as u32;
|
||||
|
||||
let mut node = match syntax.named_descendant_for_byte_range(start_from, start_to) {
|
||||
Some(node) => node,
|
||||
@@ -654,18 +656,18 @@ pub fn move_parent_node_end(
|
||||
// moving forward, we always want to move one past the end of the
|
||||
// current node, so use the end byte of the current node, which is an exclusive
|
||||
// end of the range
|
||||
Direction::Forward => text.byte_to_char(node.end_byte()),
|
||||
Direction::Forward => text.byte_to_char(node.end_byte() as usize),
|
||||
|
||||
// moving backward, we want the cursor to land on the start char of
|
||||
// the current node, or if it is already at the start of a node, to traverse up to
|
||||
// the parent
|
||||
Direction::Backward => {
|
||||
let end_head = text.byte_to_char(node.start_byte());
|
||||
let end_head = text.byte_to_char(node.start_byte() as usize);
|
||||
|
||||
// if we're already on the beginning, look up to the parent
|
||||
if end_head == range.cursor(text) {
|
||||
node = find_parent_start(node).unwrap_or(node);
|
||||
text.byte_to_char(node.start_byte())
|
||||
node = find_parent_start(&node).unwrap_or(node);
|
||||
text.byte_to_char(node.start_byte() as usize)
|
||||
} else {
|
||||
end_head
|
||||
}
|
||||
|
@@ -4,8 +4,8 @@ pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection)
|
||||
let cursor = &mut syntax.walk();
|
||||
|
||||
selection.transform(|range| {
|
||||
let from = text.char_to_byte(range.from());
|
||||
let to = text.char_to_byte(range.to());
|
||||
let from = text.char_to_byte(range.from()) as u32;
|
||||
let to = text.char_to_byte(range.to()) as u32;
|
||||
|
||||
let byte_range = from..to;
|
||||
cursor.reset_to_byte_range(from, to);
|
||||
@@ -17,8 +17,8 @@ pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection)
|
||||
}
|
||||
|
||||
let node = cursor.node();
|
||||
let from = text.byte_to_char(node.start_byte());
|
||||
let to = text.byte_to_char(node.end_byte());
|
||||
let from = text.byte_to_char(node.start_byte() as usize);
|
||||
let to = text.byte_to_char(node.end_byte() as usize);
|
||||
|
||||
Range::new(to, from).with_direction(range.direction())
|
||||
})
|
||||
@@ -53,10 +53,10 @@ pub fn select_next_sibling(syntax: &Syntax, text: RopeSlice, selection: Selectio
|
||||
}
|
||||
|
||||
pub fn select_all_siblings(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
|
||||
selection.transform_iter(|range| {
|
||||
let mut cursor = syntax.walk();
|
||||
let mut cursor = syntax.walk();
|
||||
selection.transform_iter(move |range| {
|
||||
let (from, to) = range.into_byte_range(text);
|
||||
cursor.reset_to_byte_range(from, to);
|
||||
cursor.reset_to_byte_range(from as u32, to as u32);
|
||||
|
||||
if !cursor.goto_parent_with(|parent| parent.child_count() > 1) {
|
||||
return vec![range].into_iter();
|
||||
@@ -67,21 +67,18 @@ pub fn select_all_siblings(syntax: &Syntax, text: RopeSlice, selection: Selectio
|
||||
}
|
||||
|
||||
pub fn select_all_children(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
|
||||
selection.transform_iter(|range| {
|
||||
let mut cursor = syntax.walk();
|
||||
let mut cursor = syntax.walk();
|
||||
selection.transform_iter(move |range| {
|
||||
let (from, to) = range.into_byte_range(text);
|
||||
cursor.reset_to_byte_range(from, to);
|
||||
cursor.reset_to_byte_range(from as u32, to as u32);
|
||||
select_children(&mut cursor, text, range).into_iter()
|
||||
})
|
||||
}
|
||||
|
||||
fn select_children<'n>(
|
||||
cursor: &'n mut TreeCursor<'n>,
|
||||
text: RopeSlice,
|
||||
range: Range,
|
||||
) -> Vec<Range> {
|
||||
fn select_children(cursor: &mut TreeCursor, text: RopeSlice, range: Range) -> Vec<Range> {
|
||||
let children = cursor
|
||||
.named_children()
|
||||
.children()
|
||||
.filter(|child| child.is_named())
|
||||
.map(|child| Range::from_node(child, text, range.direction()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -98,7 +95,7 @@ pub fn select_prev_sibling(syntax: &Syntax, text: RopeSlice, selection: Selectio
|
||||
text,
|
||||
selection,
|
||||
|cursor| {
|
||||
while !cursor.goto_prev_sibling() {
|
||||
while !cursor.goto_previous_sibling() {
|
||||
if !cursor.goto_parent() {
|
||||
break;
|
||||
}
|
||||
@@ -121,16 +118,16 @@ where
|
||||
let cursor = &mut syntax.walk();
|
||||
|
||||
selection.transform(|range| {
|
||||
let from = text.char_to_byte(range.from());
|
||||
let to = text.char_to_byte(range.to());
|
||||
let from = text.char_to_byte(range.from()) as u32;
|
||||
let to = text.char_to_byte(range.to()) as u32;
|
||||
|
||||
cursor.reset_to_byte_range(from, to);
|
||||
|
||||
motion(cursor);
|
||||
|
||||
let node = cursor.node();
|
||||
let from = text.byte_to_char(node.start_byte());
|
||||
let to = text.byte_to_char(node.end_byte());
|
||||
let from = text.byte_to_char(node.start_byte() as usize);
|
||||
let to = text.byte_to_char(node.end_byte() as usize);
|
||||
|
||||
Range::new(from, to).with_direction(direction.unwrap_or_else(|| range.direction()))
|
||||
})
|
||||
|
@@ -89,11 +89,6 @@ impl From<(usize, usize)> for Position {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Position> for tree_sitter::Point {
|
||||
fn from(pos: Position) -> Self {
|
||||
Self::new(pos.row, pos.col)
|
||||
}
|
||||
}
|
||||
/// Convert a character index to (line, column) coordinates.
|
||||
///
|
||||
/// column in `char` count which can be used for row:column display in
|
||||
|
@@ -9,13 +9,13 @@ use crate::{
|
||||
},
|
||||
line_ending::get_line_ending,
|
||||
movement::Direction,
|
||||
tree_sitter::Node,
|
||||
Assoc, ChangeSet, RopeSlice,
|
||||
};
|
||||
use helix_stdx::range::is_subset;
|
||||
use helix_stdx::rope::{self, RopeSliceExt};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use std::{borrow::Cow, iter, slice};
|
||||
use tree_sitter::Node;
|
||||
|
||||
/// A single selection range.
|
||||
///
|
||||
@@ -76,8 +76,8 @@ impl Range {
|
||||
}
|
||||
|
||||
pub fn from_node(node: Node, text: RopeSlice, direction: Direction) -> Self {
|
||||
let from = text.byte_to_char(node.start_byte());
|
||||
let to = text.byte_to_char(node.end_byte());
|
||||
let from = text.byte_to_char(node.start_byte() as usize);
|
||||
let to = text.byte_to_char(node.end_byte() as usize);
|
||||
Range::new(from, to).with_direction(direction)
|
||||
}
|
||||
|
||||
|
@@ -1,6 +1,6 @@
|
||||
use std::ops::{Index, IndexMut};
|
||||
|
||||
use hashbrown::HashSet;
|
||||
use foldhash::HashSet;
|
||||
use helix_stdx::range::{is_exact_subset, is_subset};
|
||||
use helix_stdx::Range;
|
||||
use ropey::Rope;
|
||||
@@ -35,7 +35,7 @@ impl ActiveSnippet {
|
||||
let snippet = Self {
|
||||
ranges: snippet.ranges,
|
||||
tabstops: snippet.tabstops,
|
||||
active_tabstops: HashSet::new(),
|
||||
active_tabstops: HashSet::default(),
|
||||
current_tabstop: TabstopIdx(0),
|
||||
};
|
||||
(snippet.tabstops.len() != 1).then_some(snippet)
|
||||
|
File diff suppressed because it is too large
Load Diff
629
helix-core/src/syntax/config.rs
Normal file
629
helix-core/src/syntax/config.rs
Normal file
@@ -0,0 +1,629 @@
|
||||
use crate::{auto_pairs::AutoPairs, diagnostic::Severity, Language};
|
||||
|
||||
use globset::GlobSet;
|
||||
use helix_stdx::rope;
|
||||
use serde::{ser::SerializeSeq as _, Deserialize, Serialize};
|
||||
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt::{self, Display},
|
||||
num::NonZeroU8,
|
||||
path::PathBuf,
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct Configuration {
|
||||
pub language: Vec<LanguageConfiguration>,
|
||||
#[serde(default)]
|
||||
pub language_server: HashMap<String, LanguageServerConfiguration>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||
pub struct LanguageConfiguration {
|
||||
#[serde(skip)]
|
||||
pub(super) language: Option<Language>,
|
||||
|
||||
#[serde(rename = "name")]
|
||||
pub language_id: String, // c-sharp, rust, tsx
|
||||
#[serde(rename = "language-id")]
|
||||
// see the table under https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem
|
||||
pub language_server_language_id: Option<String>, // csharp, rust, typescriptreact, for the language-server
|
||||
pub scope: String, // source.rust
|
||||
pub file_types: Vec<FileType>, // filename extension or ends_with? <Gemfile, rb, etc>
|
||||
#[serde(default)]
|
||||
pub shebangs: Vec<String>, // interpreter(s) associated with language
|
||||
#[serde(default)]
|
||||
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
|
||||
#[serde(
|
||||
default,
|
||||
skip_serializing,
|
||||
deserialize_with = "from_comment_tokens",
|
||||
alias = "comment-token"
|
||||
)]
|
||||
pub comment_tokens: Option<Vec<String>>,
|
||||
#[serde(
|
||||
default,
|
||||
skip_serializing,
|
||||
deserialize_with = "from_block_comment_tokens"
|
||||
)]
|
||||
pub block_comment_tokens: Option<Vec<BlockCommentToken>>,
|
||||
pub text_width: Option<usize>,
|
||||
pub soft_wrap: Option<SoftWrap>,
|
||||
|
||||
#[serde(default)]
|
||||
pub auto_format: bool,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub formatter: Option<FormatterConfiguration>,
|
||||
|
||||
/// If set, overrides `editor.path-completion`.
|
||||
pub path_completion: Option<bool>,
|
||||
/// If set, overrides `editor.word-completion`.
|
||||
pub word_completion: Option<WordCompletion>,
|
||||
|
||||
#[serde(default)]
|
||||
pub diagnostic_severity: Severity,
|
||||
|
||||
pub grammar: Option<String>, // tree-sitter grammar name, defaults to language_id
|
||||
|
||||
// content_regex
|
||||
#[serde(default, skip_serializing, deserialize_with = "deserialize_regex")]
|
||||
pub injection_regex: Option<rope::Regex>,
|
||||
// first_line_regex
|
||||
//
|
||||
#[serde(
|
||||
default,
|
||||
skip_serializing_if = "Vec::is_empty",
|
||||
serialize_with = "serialize_lang_features",
|
||||
deserialize_with = "deserialize_lang_features"
|
||||
)]
|
||||
pub language_servers: Vec<LanguageServerFeatures>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub indent: Option<IndentationConfiguration>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub debugger: Option<DebugAdapterConfig>,
|
||||
|
||||
/// Automatic insertion of pairs to parentheses, brackets,
|
||||
/// etc. Defaults to true. Optionally, this can be a list of 2-tuples
|
||||
/// to specify a list of characters to pair. This overrides the
|
||||
/// global setting.
|
||||
#[serde(default, skip_serializing, deserialize_with = "deserialize_auto_pairs")]
|
||||
pub auto_pairs: Option<AutoPairs>,
|
||||
|
||||
pub rulers: Option<Vec<u16>>, // if set, override editor's rulers
|
||||
|
||||
/// Hardcoded LSP root directories relative to the workspace root, like `examples` or `tools/fuzz`.
|
||||
/// Falling back to the current working directory if none are configured.
|
||||
pub workspace_lsp_roots: Option<Vec<PathBuf>>,
|
||||
#[serde(default)]
|
||||
pub persistent_diagnostic_sources: Vec<String>,
|
||||
/// Overrides the `editor.rainbow-brackets` config key for the language.
|
||||
pub rainbow_brackets: Option<bool>,
|
||||
}
|
||||
|
||||
impl LanguageConfiguration {
|
||||
pub fn language(&self) -> Language {
|
||||
// This value must be set by `super::Loader::new`.
|
||||
self.language.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub enum FileType {
|
||||
/// The extension of the file, either the `Path::extension` or the full
|
||||
/// filename if the file does not have an extension.
|
||||
Extension(String),
|
||||
/// A Unix-style path glob. This is compared to the file's absolute path, so
|
||||
/// it can be used to detect files based on their directories. If the glob
|
||||
/// is not an absolute path and does not already start with a glob pattern,
|
||||
/// a glob pattern will be prepended to it.
|
||||
Glob(globset::Glob),
|
||||
}
|
||||
|
||||
impl Serialize for FileType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
use serde::ser::SerializeMap;
|
||||
|
||||
match self {
|
||||
FileType::Extension(extension) => serializer.serialize_str(extension),
|
||||
FileType::Glob(glob) => {
|
||||
let mut map = serializer.serialize_map(Some(1))?;
|
||||
map.serialize_entry("glob", glob.glob())?;
|
||||
map.end()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for FileType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::de::Deserializer<'de>,
|
||||
{
|
||||
struct FileTypeVisitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for FileTypeVisitor {
|
||||
type Value = FileType;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
formatter.write_str("string or table")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(FileType::Extension(value.to_string()))
|
||||
}
|
||||
|
||||
fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
|
||||
where
|
||||
M: serde::de::MapAccess<'de>,
|
||||
{
|
||||
match map.next_entry::<String, String>()? {
|
||||
Some((key, mut glob)) if key == "glob" => {
|
||||
// If the glob isn't an absolute path or already starts
|
||||
// with a glob pattern, add a leading glob so we
|
||||
// properly match relative paths.
|
||||
if !glob.starts_with('/') && !glob.starts_with("*/") {
|
||||
glob.insert_str(0, "*/");
|
||||
}
|
||||
|
||||
globset::Glob::new(glob.as_str())
|
||||
.map(FileType::Glob)
|
||||
.map_err(|err| {
|
||||
serde::de::Error::custom(format!("invalid `glob` pattern: {}", err))
|
||||
})
|
||||
}
|
||||
Some((key, _value)) => Err(serde::de::Error::custom(format!(
|
||||
"unknown key in `file-types` list: {}",
|
||||
key
|
||||
))),
|
||||
None => Err(serde::de::Error::custom(
|
||||
"expected a `suffix` key in the `file-types` entry",
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_any(FileTypeVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_comment_tokens<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum CommentTokens {
|
||||
Multiple(Vec<String>),
|
||||
Single(String),
|
||||
}
|
||||
Ok(
|
||||
Option::<CommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
|
||||
CommentTokens::Single(val) => vec![val],
|
||||
CommentTokens::Multiple(vals) => vals,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct BlockCommentToken {
|
||||
pub start: String,
|
||||
pub end: String,
|
||||
}
|
||||
|
||||
impl Default for BlockCommentToken {
|
||||
fn default() -> Self {
|
||||
BlockCommentToken {
|
||||
start: "/*".to_string(),
|
||||
end: "*/".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn from_block_comment_tokens<'de, D>(
|
||||
deserializer: D,
|
||||
) -> Result<Option<Vec<BlockCommentToken>>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum BlockCommentTokens {
|
||||
Multiple(Vec<BlockCommentToken>),
|
||||
Single(BlockCommentToken),
|
||||
}
|
||||
Ok(
|
||||
Option::<BlockCommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
|
||||
BlockCommentTokens::Single(val) => vec![val],
|
||||
BlockCommentTokens::Multiple(vals) => vals,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum LanguageServerFeature {
|
||||
Format,
|
||||
GotoDeclaration,
|
||||
GotoDefinition,
|
||||
GotoTypeDefinition,
|
||||
GotoReference,
|
||||
GotoImplementation,
|
||||
// Goto, use bitflags, combining previous Goto members?
|
||||
SignatureHelp,
|
||||
Hover,
|
||||
DocumentHighlight,
|
||||
Completion,
|
||||
CodeAction,
|
||||
WorkspaceCommand,
|
||||
DocumentSymbols,
|
||||
WorkspaceSymbols,
|
||||
// Symbols, use bitflags, see above?
|
||||
Diagnostics,
|
||||
RenameSymbol,
|
||||
InlayHints,
|
||||
DocumentColors,
|
||||
}
|
||||
|
||||
impl Display for LanguageServerFeature {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
use LanguageServerFeature::*;
|
||||
let feature = match self {
|
||||
Format => "format",
|
||||
GotoDeclaration => "goto-declaration",
|
||||
GotoDefinition => "goto-definition",
|
||||
GotoTypeDefinition => "goto-type-definition",
|
||||
GotoReference => "goto-reference",
|
||||
GotoImplementation => "goto-implementation",
|
||||
SignatureHelp => "signature-help",
|
||||
Hover => "hover",
|
||||
DocumentHighlight => "document-highlight",
|
||||
Completion => "completion",
|
||||
CodeAction => "code-action",
|
||||
WorkspaceCommand => "workspace-command",
|
||||
DocumentSymbols => "document-symbols",
|
||||
WorkspaceSymbols => "workspace-symbols",
|
||||
Diagnostics => "diagnostics",
|
||||
RenameSymbol => "rename-symbol",
|
||||
InlayHints => "inlay-hints",
|
||||
DocumentColors => "document-colors",
|
||||
};
|
||||
write!(f, "{feature}",)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(untagged, rename_all = "kebab-case", deny_unknown_fields)]
|
||||
enum LanguageServerFeatureConfiguration {
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
Features {
|
||||
#[serde(default, skip_serializing_if = "HashSet::is_empty")]
|
||||
only_features: HashSet<LanguageServerFeature>,
|
||||
#[serde(default, skip_serializing_if = "HashSet::is_empty")]
|
||||
except_features: HashSet<LanguageServerFeature>,
|
||||
name: String,
|
||||
},
|
||||
Simple(String),
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct LanguageServerFeatures {
|
||||
pub name: String,
|
||||
pub only: HashSet<LanguageServerFeature>,
|
||||
pub excluded: HashSet<LanguageServerFeature>,
|
||||
}
|
||||
|
||||
impl LanguageServerFeatures {
|
||||
pub fn has_feature(&self, feature: LanguageServerFeature) -> bool {
|
||||
(self.only.is_empty() || self.only.contains(&feature)) && !self.excluded.contains(&feature)
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_lang_features<'de, D>(
|
||||
deserializer: D,
|
||||
) -> Result<Vec<LanguageServerFeatures>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let raw: Vec<LanguageServerFeatureConfiguration> = Deserialize::deserialize(deserializer)?;
|
||||
let res = raw
|
||||
.into_iter()
|
||||
.map(|config| match config {
|
||||
LanguageServerFeatureConfiguration::Simple(name) => LanguageServerFeatures {
|
||||
name,
|
||||
..Default::default()
|
||||
},
|
||||
LanguageServerFeatureConfiguration::Features {
|
||||
only_features,
|
||||
except_features,
|
||||
name,
|
||||
} => LanguageServerFeatures {
|
||||
name,
|
||||
only: only_features,
|
||||
excluded: except_features,
|
||||
},
|
||||
})
|
||||
.collect();
|
||||
Ok(res)
|
||||
}
|
||||
fn serialize_lang_features<S>(
|
||||
map: &Vec<LanguageServerFeatures>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut serializer = serializer.serialize_seq(Some(map.len()))?;
|
||||
for features in map {
|
||||
let features = if features.only.is_empty() && features.excluded.is_empty() {
|
||||
LanguageServerFeatureConfiguration::Simple(features.name.to_owned())
|
||||
} else {
|
||||
LanguageServerFeatureConfiguration::Features {
|
||||
only_features: features.only.clone(),
|
||||
except_features: features.excluded.clone(),
|
||||
name: features.name.to_owned(),
|
||||
}
|
||||
};
|
||||
serializer.serialize_element(&features)?;
|
||||
}
|
||||
serializer.end()
|
||||
}
|
||||
|
||||
fn deserialize_required_root_patterns<'de, D>(deserializer: D) -> Result<Option<GlobSet>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let patterns = Vec::<String>::deserialize(deserializer)?;
|
||||
if patterns.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let mut builder = globset::GlobSetBuilder::new();
|
||||
for pattern in patterns {
|
||||
let glob = globset::Glob::new(&pattern).map_err(serde::de::Error::custom)?;
|
||||
builder.add(glob);
|
||||
}
|
||||
builder.build().map(Some).map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct LanguageServerConfiguration {
|
||||
pub command: String,
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
pub args: Vec<String>,
|
||||
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
|
||||
pub environment: HashMap<String, String>,
|
||||
#[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")]
|
||||
pub config: Option<serde_json::Value>,
|
||||
#[serde(default = "default_timeout")]
|
||||
pub timeout: u64,
|
||||
#[serde(
|
||||
default,
|
||||
skip_serializing,
|
||||
deserialize_with = "deserialize_required_root_patterns"
|
||||
)]
|
||||
pub required_root_patterns: Option<GlobSet>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct FormatterConfiguration {
|
||||
pub command: String,
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
pub args: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct AdvancedCompletion {
|
||||
pub name: Option<String>,
|
||||
pub completion: Option<String>,
|
||||
pub default: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case", untagged)]
|
||||
pub enum DebugConfigCompletion {
|
||||
Named(String),
|
||||
Advanced(AdvancedCompletion),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum DebugArgumentValue {
|
||||
String(String),
|
||||
Array(Vec<String>),
|
||||
Boolean(bool),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct DebugTemplate {
|
||||
pub name: String,
|
||||
pub request: String,
|
||||
#[serde(default)]
|
||||
pub completion: Vec<DebugConfigCompletion>,
|
||||
pub args: HashMap<String, DebugArgumentValue>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct DebugAdapterConfig {
|
||||
pub name: String,
|
||||
pub transport: String,
|
||||
#[serde(default)]
|
||||
pub command: String,
|
||||
#[serde(default)]
|
||||
pub args: Vec<String>,
|
||||
pub port_arg: Option<String>,
|
||||
pub templates: Vec<DebugTemplate>,
|
||||
#[serde(default)]
|
||||
pub quirks: DebuggerQuirks,
|
||||
}
|
||||
|
||||
// Different workarounds for adapters' differences
|
||||
#[derive(Debug, Default, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub struct DebuggerQuirks {
|
||||
#[serde(default)]
|
||||
pub absolute_paths: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct IndentationConfiguration {
|
||||
#[serde(deserialize_with = "deserialize_tab_width")]
|
||||
pub tab_width: usize,
|
||||
pub unit: String,
|
||||
}
|
||||
|
||||
/// How the indentation for a newly inserted line should be determined.
|
||||
/// If the selected heuristic is not available (e.g. because the current
|
||||
/// language has no tree-sitter indent queries), a simpler one will be used.
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum IndentationHeuristic {
|
||||
/// Just copy the indentation of the line that the cursor is currently on.
|
||||
Simple,
|
||||
/// Use tree-sitter indent queries to compute the expected absolute indentation level of the new line.
|
||||
TreeSitter,
|
||||
/// Use tree-sitter indent queries to compute the expected difference in indentation between the new line
|
||||
/// and the line before. Add this to the actual indentation level of the line before.
|
||||
#[default]
|
||||
Hybrid,
|
||||
}
|
||||
|
||||
/// Configuration for auto pairs
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields, untagged)]
|
||||
pub enum AutoPairConfig {
|
||||
/// Enables or disables auto pairing. False means disabled. True means to use the default pairs.
|
||||
Enable(bool),
|
||||
|
||||
/// The mappings of pairs.
|
||||
Pairs(HashMap<char, char>),
|
||||
}
|
||||
|
||||
impl Default for AutoPairConfig {
|
||||
fn default() -> Self {
|
||||
AutoPairConfig::Enable(true)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&AutoPairConfig> for Option<AutoPairs> {
|
||||
fn from(auto_pair_config: &AutoPairConfig) -> Self {
|
||||
match auto_pair_config {
|
||||
AutoPairConfig::Enable(false) => None,
|
||||
AutoPairConfig::Enable(true) => Some(AutoPairs::default()),
|
||||
AutoPairConfig::Pairs(pairs) => Some(AutoPairs::new(pairs.iter())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AutoPairConfig> for Option<AutoPairs> {
|
||||
fn from(auto_pairs_config: AutoPairConfig) -> Self {
|
||||
(&auto_pairs_config).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for AutoPairConfig {
|
||||
type Err = std::str::ParseBoolError;
|
||||
|
||||
// only do bool parsing for runtime setting
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let enable: bool = s.parse()?;
|
||||
Ok(AutoPairConfig::Enable(enable))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
|
||||
pub struct SoftWrap {
|
||||
/// Soft wrap lines that exceed viewport width. Default to off
|
||||
// NOTE: Option on purpose because the struct is shared between language config and global config.
|
||||
// By default the option is None so that the language config falls back to the global config unless explicitly set.
|
||||
pub enable: Option<bool>,
|
||||
/// Maximum space left free at the end of the line.
|
||||
/// This space is used to wrap text at word boundaries. If that is not possible within this limit
|
||||
/// the word is simply split at the end of the line.
|
||||
///
|
||||
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
|
||||
///
|
||||
/// Default to 20
|
||||
pub max_wrap: Option<u16>,
|
||||
/// Maximum number of indentation that can be carried over from the previous line when softwrapping.
|
||||
/// If a line is indented further then this limit it is rendered at the start of the viewport instead.
|
||||
///
|
||||
/// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
|
||||
///
|
||||
/// Default to 40
|
||||
pub max_indent_retain: Option<u16>,
|
||||
/// Indicator placed at the beginning of softwrapped lines
|
||||
///
|
||||
/// Defaults to ↪
|
||||
pub wrap_indicator: Option<String>,
|
||||
/// Softwrap at `text_width` instead of viewport width if it is shorter
|
||||
pub wrap_at_text_width: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
|
||||
pub struct WordCompletion {
|
||||
pub enable: Option<bool>,
|
||||
pub trigger_length: Option<NonZeroU8>,
|
||||
}
|
||||
|
||||
fn deserialize_regex<'de, D>(deserializer: D) -> Result<Option<rope::Regex>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Option::<String>::deserialize(deserializer)?
|
||||
.map(|buf| rope::Regex::new(&buf).map_err(serde::de::Error::custom))
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn deserialize_lsp_config<'de, D>(deserializer: D) -> Result<Option<serde_json::Value>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Option::<toml::Value>::deserialize(deserializer)?
|
||||
.map(|toml| toml.try_into().map_err(serde::de::Error::custom))
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn deserialize_tab_width<'de, D>(deserializer: D) -> Result<usize, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
usize::deserialize(deserializer).and_then(|n| {
|
||||
if n > 0 && n <= 16 {
|
||||
Ok(n)
|
||||
} else {
|
||||
Err(serde::de::Error::custom(
|
||||
"tab width must be a value from 1 to 16 inclusive",
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn deserialize_auto_pairs<'de, D>(deserializer: D) -> Result<Option<AutoPairs>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(Option::<AutoPairConfig>::deserialize(deserializer)?.and_then(AutoPairConfig::into))
|
||||
}
|
||||
|
||||
fn default_timeout() -> u64 {
|
||||
20
|
||||
}
|
@@ -1,264 +0,0 @@
|
||||
use std::{cmp::Reverse, ops::Range};
|
||||
|
||||
use super::{LanguageLayer, LayerId};
|
||||
|
||||
use slotmap::HopSlotMap;
|
||||
use tree_sitter::Node;
|
||||
|
||||
/// The byte range of an injection layer.
|
||||
///
|
||||
/// Injection ranges may overlap, but all overlapping parts are subsets of their parent ranges.
|
||||
/// This allows us to sort the ranges ahead of time in order to efficiently find a range that
|
||||
/// contains a point with maximum depth.
|
||||
#[derive(Debug)]
|
||||
struct InjectionRange {
|
||||
start: usize,
|
||||
end: usize,
|
||||
layer_id: LayerId,
|
||||
depth: u32,
|
||||
}
|
||||
|
||||
pub struct TreeCursor<'a> {
|
||||
layers: &'a HopSlotMap<LayerId, LanguageLayer>,
|
||||
root: LayerId,
|
||||
current: LayerId,
|
||||
injection_ranges: Vec<InjectionRange>,
|
||||
// TODO: Ideally this would be a `tree_sitter::TreeCursor<'a>` but
|
||||
// that returns very surprising results in testing.
|
||||
cursor: Node<'a>,
|
||||
}
|
||||
|
||||
impl<'a> TreeCursor<'a> {
|
||||
pub(super) fn new(layers: &'a HopSlotMap<LayerId, LanguageLayer>, root: LayerId) -> Self {
|
||||
let mut injection_ranges = Vec::new();
|
||||
|
||||
for (layer_id, layer) in layers.iter() {
|
||||
// Skip the root layer
|
||||
if layer.parent.is_none() {
|
||||
continue;
|
||||
}
|
||||
for byte_range in layer.ranges.iter() {
|
||||
let range = InjectionRange {
|
||||
start: byte_range.start_byte,
|
||||
end: byte_range.end_byte,
|
||||
layer_id,
|
||||
depth: layer.depth,
|
||||
};
|
||||
injection_ranges.push(range);
|
||||
}
|
||||
}
|
||||
|
||||
injection_ranges.sort_unstable_by_key(|range| (range.end, Reverse(range.depth)));
|
||||
|
||||
let cursor = layers[root].tree().root_node();
|
||||
|
||||
Self {
|
||||
layers,
|
||||
root,
|
||||
current: root,
|
||||
injection_ranges,
|
||||
cursor,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn node(&self) -> Node<'a> {
|
||||
self.cursor
|
||||
}
|
||||
|
||||
pub fn goto_parent(&mut self) -> bool {
|
||||
if let Some(parent) = self.node().parent() {
|
||||
self.cursor = parent;
|
||||
return true;
|
||||
}
|
||||
|
||||
// If we are already on the root layer, we cannot ascend.
|
||||
if self.current == self.root {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Ascend to the parent layer.
|
||||
let range = self.node().byte_range();
|
||||
let parent_id = self.layers[self.current]
|
||||
.parent
|
||||
.expect("non-root layers have a parent");
|
||||
self.current = parent_id;
|
||||
let root = self.layers[self.current].tree().root_node();
|
||||
self.cursor = root
|
||||
.descendant_for_byte_range(range.start, range.end)
|
||||
.unwrap_or(root);
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub fn goto_parent_with<P>(&mut self, predicate: P) -> bool
|
||||
where
|
||||
P: Fn(&Node) -> bool,
|
||||
{
|
||||
while self.goto_parent() {
|
||||
if predicate(&self.node()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Finds the injection layer that has exactly the same range as the given `range`.
|
||||
fn layer_id_of_byte_range(&self, search_range: Range<usize>) -> Option<LayerId> {
|
||||
let start_idx = self
|
||||
.injection_ranges
|
||||
.partition_point(|range| range.end < search_range.end);
|
||||
|
||||
self.injection_ranges[start_idx..]
|
||||
.iter()
|
||||
.take_while(|range| range.end == search_range.end)
|
||||
.find_map(|range| (range.start == search_range.start).then_some(range.layer_id))
|
||||
}
|
||||
|
||||
fn goto_first_child_impl(&mut self, named: bool) -> bool {
|
||||
// Check if the current node's range is an exact injection layer range.
|
||||
if let Some(layer_id) = self
|
||||
.layer_id_of_byte_range(self.node().byte_range())
|
||||
.filter(|&layer_id| layer_id != self.current)
|
||||
{
|
||||
// Switch to the child layer.
|
||||
self.current = layer_id;
|
||||
self.cursor = self.layers[self.current].tree().root_node();
|
||||
return true;
|
||||
}
|
||||
|
||||
let child = if named {
|
||||
self.cursor.named_child(0)
|
||||
} else {
|
||||
self.cursor.child(0)
|
||||
};
|
||||
|
||||
if let Some(child) = child {
|
||||
// Otherwise descend in the current tree.
|
||||
self.cursor = child;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn goto_first_child(&mut self) -> bool {
|
||||
self.goto_first_child_impl(false)
|
||||
}
|
||||
|
||||
pub fn goto_first_named_child(&mut self) -> bool {
|
||||
self.goto_first_child_impl(true)
|
||||
}
|
||||
|
||||
fn goto_next_sibling_impl(&mut self, named: bool) -> bool {
|
||||
let sibling = if named {
|
||||
self.cursor.next_named_sibling()
|
||||
} else {
|
||||
self.cursor.next_sibling()
|
||||
};
|
||||
|
||||
if let Some(sibling) = sibling {
|
||||
self.cursor = sibling;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn goto_next_sibling(&mut self) -> bool {
|
||||
self.goto_next_sibling_impl(false)
|
||||
}
|
||||
|
||||
pub fn goto_next_named_sibling(&mut self) -> bool {
|
||||
self.goto_next_sibling_impl(true)
|
||||
}
|
||||
|
||||
fn goto_prev_sibling_impl(&mut self, named: bool) -> bool {
|
||||
let sibling = if named {
|
||||
self.cursor.prev_named_sibling()
|
||||
} else {
|
||||
self.cursor.prev_sibling()
|
||||
};
|
||||
|
||||
if let Some(sibling) = sibling {
|
||||
self.cursor = sibling;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn goto_prev_sibling(&mut self) -> bool {
|
||||
self.goto_prev_sibling_impl(false)
|
||||
}
|
||||
|
||||
pub fn goto_prev_named_sibling(&mut self) -> bool {
|
||||
self.goto_prev_sibling_impl(true)
|
||||
}
|
||||
|
||||
/// Finds the injection layer that contains the given start-end range.
|
||||
fn layer_id_containing_byte_range(&self, start: usize, end: usize) -> LayerId {
|
||||
let start_idx = self
|
||||
.injection_ranges
|
||||
.partition_point(|range| range.end < end);
|
||||
|
||||
self.injection_ranges[start_idx..]
|
||||
.iter()
|
||||
.take_while(|range| range.start < end || range.depth > 1)
|
||||
.find_map(|range| (range.start <= start).then_some(range.layer_id))
|
||||
.unwrap_or(self.root)
|
||||
}
|
||||
|
||||
pub fn reset_to_byte_range(&mut self, start: usize, end: usize) {
|
||||
self.current = self.layer_id_containing_byte_range(start, end);
|
||||
let root = self.layers[self.current].tree().root_node();
|
||||
self.cursor = root.descendant_for_byte_range(start, end).unwrap_or(root);
|
||||
}
|
||||
|
||||
/// Returns an iterator over the children of the node the TreeCursor is on
|
||||
/// at the time this is called.
|
||||
pub fn children(&'a mut self) -> ChildIter<'a> {
|
||||
let parent = self.node();
|
||||
|
||||
ChildIter {
|
||||
cursor: self,
|
||||
parent,
|
||||
named: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns an iterator over the named children of the node the TreeCursor is on
|
||||
/// at the time this is called.
|
||||
pub fn named_children(&'a mut self) -> ChildIter<'a> {
|
||||
let parent = self.node();
|
||||
|
||||
ChildIter {
|
||||
cursor: self,
|
||||
parent,
|
||||
named: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ChildIter<'n> {
|
||||
cursor: &'n mut TreeCursor<'n>,
|
||||
parent: Node<'n>,
|
||||
named: bool,
|
||||
}
|
||||
|
||||
impl<'n> Iterator for ChildIter<'n> {
|
||||
type Item = Node<'n>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
// first iteration, just visit the first child
|
||||
if self.cursor.node() == self.parent {
|
||||
self.cursor
|
||||
.goto_first_child_impl(self.named)
|
||||
.then(|| self.cursor.node())
|
||||
} else {
|
||||
self.cursor
|
||||
.goto_next_sibling_impl(self.named)
|
||||
.then(|| self.cursor.node())
|
||||
}
|
||||
}
|
||||
}
|
@@ -5,7 +5,7 @@ use std::ops::Range;
|
||||
use std::ptr::NonNull;
|
||||
|
||||
use crate::doc_formatter::FormattedGrapheme;
|
||||
use crate::syntax::Highlight;
|
||||
use crate::syntax::{Highlight, OverlayHighlights};
|
||||
use crate::{Position, Tendril};
|
||||
|
||||
/// An inline annotation is continuous text shown
|
||||
@@ -300,10 +300,7 @@ impl<'a> TextAnnotations<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn collect_overlay_highlights(
|
||||
&self,
|
||||
char_range: Range<usize>,
|
||||
) -> Vec<(usize, Range<usize>)> {
|
||||
pub fn collect_overlay_highlights(&self, char_range: Range<usize>) -> OverlayHighlights {
|
||||
let mut highlights = Vec::new();
|
||||
self.reset_pos(char_range.start);
|
||||
for char_idx in char_range {
|
||||
@@ -311,11 +308,11 @@ impl<'a> TextAnnotations<'a> {
|
||||
// we don't know the number of chars the original grapheme takes
|
||||
// however it doesn't matter as highlight boundaries are automatically
|
||||
// aligned to grapheme boundaries in the rendering code
|
||||
highlights.push((highlight.0, char_idx..char_idx + 1))
|
||||
highlights.push((highlight, char_idx..char_idx + 1));
|
||||
}
|
||||
}
|
||||
|
||||
highlights
|
||||
OverlayHighlights::Heterogenous { highlights }
|
||||
}
|
||||
|
||||
/// Add new inline annotations.
|
||||
|
@@ -1,13 +1,12 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use ropey::RopeSlice;
|
||||
use tree_sitter::{Node, QueryCursor};
|
||||
|
||||
use crate::chars::{categorize_char, char_is_whitespace, CharCategory};
|
||||
use crate::graphemes::{next_grapheme_boundary, prev_grapheme_boundary};
|
||||
use crate::line_ending::rope_is_line_ending;
|
||||
use crate::movement::Direction;
|
||||
use crate::syntax::LanguageConfiguration;
|
||||
use crate::syntax;
|
||||
use crate::Range;
|
||||
use crate::{surround, Syntax};
|
||||
|
||||
@@ -260,18 +259,18 @@ pub fn textobject_treesitter(
|
||||
range: Range,
|
||||
textobject: TextObject,
|
||||
object_name: &str,
|
||||
slice_tree: Node,
|
||||
lang_config: &LanguageConfiguration,
|
||||
syntax: &Syntax,
|
||||
loader: &syntax::Loader,
|
||||
_count: usize,
|
||||
) -> Range {
|
||||
let root = syntax.tree().root_node();
|
||||
let textobject_query = loader.textobject_query(syntax.root_language());
|
||||
let get_range = move || -> Option<Range> {
|
||||
let byte_pos = slice.char_to_byte(range.cursor(slice));
|
||||
|
||||
let capture_name = format!("{}.{}", object_name, textobject); // eg. function.inner
|
||||
let mut cursor = QueryCursor::new();
|
||||
let node = lang_config
|
||||
.textobject_query()?
|
||||
.capture_nodes(&capture_name, slice_tree, slice, &mut cursor)?
|
||||
let node = textobject_query?
|
||||
.capture_nodes(&capture_name, &root, slice)?
|
||||
.filter(|node| node.byte_range().contains(&byte_pos))
|
||||
.min_by_key(|node| node.byte_range().len())?;
|
||||
|
||||
|
@@ -19,6 +19,16 @@ pub enum Operation {
|
||||
Insert(Tendril),
|
||||
}
|
||||
|
||||
impl Operation {
|
||||
/// The number of characters affected by the operation.
|
||||
pub fn len_chars(&self) -> usize {
|
||||
match self {
|
||||
Self::Retain(n) | Self::Delete(n) => *n,
|
||||
Self::Insert(s) => s.chars().count(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub enum Assoc {
|
||||
Before,
|
||||
|
@@ -1,12 +1,11 @@
|
||||
use arc_swap::ArcSwap;
|
||||
use helix_core::{
|
||||
indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle},
|
||||
syntax::{Configuration, Loader},
|
||||
syntax::{config::Configuration, Loader},
|
||||
Syntax,
|
||||
};
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
use ropey::Rope;
|
||||
use std::{ops::Range, path::PathBuf, process::Command, sync::Arc};
|
||||
use std::{ops::Range, path::PathBuf, process::Command};
|
||||
|
||||
#[test]
|
||||
fn test_treesitter_indent_rust() {
|
||||
@@ -196,17 +195,12 @@ fn test_treesitter_indent(
|
||||
runtime.push("../runtime");
|
||||
std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap());
|
||||
|
||||
let language_config = loader.language_config_for_scope(lang_scope).unwrap();
|
||||
let language = loader.language_for_scope(lang_scope).unwrap();
|
||||
let language_config = loader.language(language).config();
|
||||
let indent_style = IndentStyle::from_str(&language_config.indent.as_ref().unwrap().unit);
|
||||
let highlight_config = language_config.highlight_config(&[]).unwrap();
|
||||
let text = doc.slice(..);
|
||||
let syntax = Syntax::new(
|
||||
text,
|
||||
highlight_config,
|
||||
Arc::new(ArcSwap::from_pointee(loader)),
|
||||
)
|
||||
.unwrap();
|
||||
let indent_query = language_config.indent_query().unwrap();
|
||||
let syntax = Syntax::new(text, language, &loader).unwrap();
|
||||
let indent_query = loader.indent_query(language).unwrap();
|
||||
|
||||
for i in 0..doc.len_lines() {
|
||||
let line = text.line(i);
|
||||
|
@@ -22,6 +22,11 @@ serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
|
||||
thiserror.workspace = true
|
||||
slotmap.workspace = true
|
||||
futures-executor.workspace = true
|
||||
futures-util.workspace = true
|
||||
tokio-stream.workspace = true
|
||||
|
||||
|
||||
[dev-dependencies]
|
||||
fern = "0.7"
|
||||
|
@@ -1,10 +1,11 @@
|
||||
use crate::{
|
||||
requests::DisconnectArguments,
|
||||
registry::DebugAdapterId,
|
||||
requests::{DisconnectArguments, TerminateArguments},
|
||||
transport::{Payload, Request, Response, Transport},
|
||||
types::*,
|
||||
Error, Result,
|
||||
};
|
||||
use helix_core::syntax::DebuggerQuirks;
|
||||
use helix_core::syntax::config::{DebugAdapterConfig, DebuggerQuirks};
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
@@ -27,12 +28,14 @@ use tokio::{
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Client {
|
||||
id: usize,
|
||||
id: DebugAdapterId,
|
||||
_process: Option<Child>,
|
||||
server_tx: UnboundedSender<Payload>,
|
||||
request_counter: AtomicU64,
|
||||
connection_type: Option<ConnectionType>,
|
||||
starting_request_args: Option<Value>,
|
||||
/// The socket address of the debugger, if using TCP transport.
|
||||
pub socket: Option<SocketAddr>,
|
||||
pub caps: Option<DebuggerCapabilities>,
|
||||
// thread_id -> frames
|
||||
pub stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
|
||||
@@ -41,23 +44,20 @@ pub struct Client {
|
||||
/// Currently active frame for the current thread.
|
||||
pub active_frame: Option<usize>,
|
||||
pub quirks: DebuggerQuirks,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum ConnectionType {
|
||||
Launch,
|
||||
Attach,
|
||||
/// The config which was used to start this debugger.
|
||||
pub config: Option<DebugAdapterConfig>,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
// Spawn a process and communicate with it by either TCP or stdio
|
||||
// The returned stream includes the Client ID so consumers can differentiate between multiple clients
|
||||
pub async fn process(
|
||||
transport: &str,
|
||||
command: &str,
|
||||
args: Vec<&str>,
|
||||
port_arg: Option<&str>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
id: DebugAdapterId,
|
||||
) -> Result<(Self, UnboundedReceiver<(DebugAdapterId, Payload)>)> {
|
||||
if command.is_empty() {
|
||||
return Result::Err(Error::Other(anyhow!("Command not provided")));
|
||||
}
|
||||
@@ -72,9 +72,9 @@ impl Client {
|
||||
rx: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
tx: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
err: Option<Box<dyn AsyncBufRead + Unpin + Send>>,
|
||||
id: usize,
|
||||
id: DebugAdapterId,
|
||||
process: Option<Child>,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
) -> Result<(Self, UnboundedReceiver<(DebugAdapterId, Payload)>)> {
|
||||
let (server_rx, server_tx) = Transport::start(rx, tx, err, id);
|
||||
let (client_tx, client_rx) = unbounded_channel();
|
||||
|
||||
@@ -86,22 +86,24 @@ impl Client {
|
||||
caps: None,
|
||||
connection_type: None,
|
||||
starting_request_args: None,
|
||||
socket: None,
|
||||
stack_frames: HashMap::new(),
|
||||
thread_states: HashMap::new(),
|
||||
thread_id: None,
|
||||
active_frame: None,
|
||||
quirks: DebuggerQuirks::default(),
|
||||
config: None,
|
||||
};
|
||||
|
||||
tokio::spawn(Self::recv(server_rx, client_tx));
|
||||
tokio::spawn(Self::recv(id, server_rx, client_tx));
|
||||
|
||||
Ok((client, client_rx))
|
||||
}
|
||||
|
||||
pub async fn tcp(
|
||||
addr: std::net::SocketAddr,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
id: DebugAdapterId,
|
||||
) -> Result<(Self, UnboundedReceiver<(DebugAdapterId, Payload)>)> {
|
||||
let stream = TcpStream::connect(addr).await?;
|
||||
let (rx, tx) = stream.into_split();
|
||||
Self::streams(Box::new(BufReader::new(rx)), Box::new(tx), None, id, None)
|
||||
@@ -110,8 +112,8 @@ impl Client {
|
||||
pub fn stdio(
|
||||
cmd: &str,
|
||||
args: Vec<&str>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
id: DebugAdapterId,
|
||||
) -> Result<(Self, UnboundedReceiver<(DebugAdapterId, Payload)>)> {
|
||||
// Resolve path to the binary
|
||||
let cmd = helix_stdx::env::which(cmd)?;
|
||||
|
||||
@@ -162,8 +164,8 @@ impl Client {
|
||||
cmd: &str,
|
||||
args: Vec<&str>,
|
||||
port_format: &str,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
id: DebugAdapterId,
|
||||
) -> Result<(Self, UnboundedReceiver<(DebugAdapterId, Payload)>)> {
|
||||
let port = Self::get_port().await.unwrap();
|
||||
|
||||
let process = Command::new(cmd)
|
||||
@@ -178,40 +180,49 @@ impl Client {
|
||||
|
||||
// Wait for adapter to become ready for connection
|
||||
time::sleep(time::Duration::from_millis(500)).await;
|
||||
|
||||
let stream = TcpStream::connect(SocketAddr::new(
|
||||
IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
|
||||
port,
|
||||
))
|
||||
.await?;
|
||||
let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), port);
|
||||
let stream = TcpStream::connect(socket).await?;
|
||||
|
||||
let (rx, tx) = stream.into_split();
|
||||
Self::streams(
|
||||
let mut result = Self::streams(
|
||||
Box::new(BufReader::new(rx)),
|
||||
Box::new(tx),
|
||||
None,
|
||||
id,
|
||||
Some(process),
|
||||
)
|
||||
);
|
||||
|
||||
// Set the socket address for the client
|
||||
if let Ok((client, _)) = &mut result {
|
||||
client.socket = Some(socket);
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
async fn recv(mut server_rx: UnboundedReceiver<Payload>, client_tx: UnboundedSender<Payload>) {
|
||||
async fn recv(
|
||||
id: DebugAdapterId,
|
||||
mut server_rx: UnboundedReceiver<Payload>,
|
||||
client_tx: UnboundedSender<(DebugAdapterId, Payload)>,
|
||||
) {
|
||||
while let Some(msg) = server_rx.recv().await {
|
||||
match msg {
|
||||
Payload::Event(ev) => {
|
||||
client_tx.send(Payload::Event(ev)).expect("Failed to send");
|
||||
client_tx
|
||||
.send((id, Payload::Event(ev)))
|
||||
.expect("Failed to send");
|
||||
}
|
||||
Payload::Response(_) => unreachable!(),
|
||||
Payload::Request(req) => {
|
||||
client_tx
|
||||
.send(Payload::Request(req))
|
||||
.send((id, Payload::Request(req)))
|
||||
.expect("Failed to send");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> usize {
|
||||
pub fn id(&self) -> DebugAdapterId {
|
||||
self.id
|
||||
}
|
||||
|
||||
@@ -354,6 +365,14 @@ impl Client {
|
||||
self.call::<requests::Disconnect>(args)
|
||||
}
|
||||
|
||||
pub fn terminate(
|
||||
&mut self,
|
||||
args: Option<TerminateArguments>,
|
||||
) -> impl Future<Output = Result<Value>> {
|
||||
self.connection_type = None;
|
||||
self.call::<requests::Terminate>(args)
|
||||
}
|
||||
|
||||
pub fn launch(&mut self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
|
||||
self.connection_type = Some(ConnectionType::Launch);
|
||||
self.starting_request_args = Some(args.clone());
|
||||
|
@@ -1,8 +1,9 @@
|
||||
mod client;
|
||||
pub mod registry;
|
||||
mod transport;
|
||||
mod types;
|
||||
|
||||
pub use client::{Client, ConnectionType};
|
||||
pub use client::Client;
|
||||
pub use transport::{Payload, Response, Transport};
|
||||
pub use types::*;
|
||||
|
||||
@@ -31,6 +32,7 @@ pub type Result<T> = core::result::Result<T, Error>;
|
||||
#[derive(Debug)]
|
||||
pub enum Request {
|
||||
RunInTerminal(<requests::RunInTerminal as types::Request>::Arguments),
|
||||
StartDebugging(<requests::StartDebugging as types::Request>::Arguments),
|
||||
}
|
||||
|
||||
impl Request {
|
||||
@@ -40,6 +42,7 @@ impl Request {
|
||||
let arguments = arguments.unwrap_or_default();
|
||||
let request = match command {
|
||||
requests::RunInTerminal::COMMAND => Self::RunInTerminal(parse_value(arguments)?),
|
||||
requests::StartDebugging::COMMAND => Self::StartDebugging(parse_value(arguments)?),
|
||||
_ => return Err(Error::Unhandled),
|
||||
};
|
||||
|
||||
|
114
helix-dap/src/registry.rs
Normal file
114
helix-dap/src/registry.rs
Normal file
@@ -0,0 +1,114 @@
|
||||
use crate::{Client, Payload, Result, StackFrame};
|
||||
use futures_executor::block_on;
|
||||
use futures_util::stream::SelectAll;
|
||||
use helix_core::syntax::config::DebugAdapterConfig;
|
||||
use slotmap::SlotMap;
|
||||
use std::fmt;
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
|
||||
/// The resgistry is a struct that manages and owns multiple debugger clients
|
||||
/// This holds the responsibility of managing the lifecycle of each client
|
||||
/// plus showing the heirarcihical nature betweeen them
|
||||
pub struct Registry {
|
||||
inner: SlotMap<DebugAdapterId, Client>,
|
||||
/// The active debugger client
|
||||
///
|
||||
/// TODO: You can have multiple active debuggers, so the concept of a single active debugger
|
||||
/// may need to be changed
|
||||
current_client_id: Option<DebugAdapterId>,
|
||||
/// A stream of incoming messages from all debuggers
|
||||
pub incoming: SelectAll<UnboundedReceiverStream<(DebugAdapterId, Payload)>>,
|
||||
}
|
||||
|
||||
impl Registry {
|
||||
/// Creates a new DebuggerService instance
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
inner: SlotMap::with_key(),
|
||||
current_client_id: None,
|
||||
incoming: SelectAll::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start_client(
|
||||
&mut self,
|
||||
socket: Option<std::net::SocketAddr>,
|
||||
config: &DebugAdapterConfig,
|
||||
) -> Result<DebugAdapterId> {
|
||||
self.inner.try_insert_with_key(|id| {
|
||||
let result = match socket {
|
||||
Some(socket) => block_on(Client::tcp(socket, id)),
|
||||
None => block_on(Client::process(
|
||||
&config.transport,
|
||||
&config.command,
|
||||
config.args.iter().map(|arg| arg.as_str()).collect(),
|
||||
config.port_arg.as_deref(),
|
||||
id,
|
||||
)),
|
||||
};
|
||||
|
||||
let (mut client, receiver) = result?;
|
||||
self.incoming.push(UnboundedReceiverStream::new(receiver));
|
||||
|
||||
client.config = Some(config.clone());
|
||||
block_on(client.initialize(config.name.clone()))?;
|
||||
client.quirks = config.quirks.clone();
|
||||
|
||||
Ok(client)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn remove_client(&mut self, id: DebugAdapterId) {
|
||||
self.inner.remove(id);
|
||||
}
|
||||
|
||||
pub fn get_client(&self, id: DebugAdapterId) -> Option<&Client> {
|
||||
self.inner.get(id)
|
||||
}
|
||||
|
||||
pub fn get_client_mut(&mut self, id: DebugAdapterId) -> Option<&mut Client> {
|
||||
self.inner.get_mut(id)
|
||||
}
|
||||
|
||||
pub fn get_active_client(&self) -> Option<&Client> {
|
||||
self.current_client_id.and_then(|id| self.get_client(id))
|
||||
}
|
||||
|
||||
pub fn get_active_client_mut(&mut self) -> Option<&mut Client> {
|
||||
self.current_client_id
|
||||
.and_then(|id| self.get_client_mut(id))
|
||||
}
|
||||
|
||||
pub fn set_active_client(&mut self, id: DebugAdapterId) {
|
||||
if self.get_client(id).is_some() {
|
||||
self.current_client_id = Some(id);
|
||||
} else {
|
||||
self.current_client_id = None;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unset_active_client(&mut self) {
|
||||
self.current_client_id = None;
|
||||
}
|
||||
|
||||
pub fn current_stack_frame(&self) -> Option<&StackFrame> {
|
||||
self.get_active_client()
|
||||
.and_then(|debugger| debugger.current_stack_frame())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Registry {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
slotmap::new_key_type! {
|
||||
pub struct DebugAdapterId;
|
||||
}
|
||||
|
||||
impl fmt::Display for DebugAdapterId {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self.0)
|
||||
}
|
||||
}
|
@@ -1,10 +1,10 @@
|
||||
use crate::{Error, Result};
|
||||
use crate::{registry::DebugAdapterId, Error, Result};
|
||||
use anyhow::Context;
|
||||
use log::{error, info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use std::{collections::HashMap, fmt::Debug};
|
||||
use tokio::{
|
||||
io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWrite, AsyncWriteExt},
|
||||
sync::{
|
||||
@@ -52,7 +52,7 @@ pub enum Payload {
|
||||
#[derive(Debug)]
|
||||
pub struct Transport {
|
||||
#[allow(unused)]
|
||||
id: usize,
|
||||
id: DebugAdapterId,
|
||||
pending_requests: Mutex<HashMap<u64, Sender<Result<Response>>>>,
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ impl Transport {
|
||||
server_stdout: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
server_stdin: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
server_stderr: Option<Box<dyn AsyncBufRead + Unpin + Send>>,
|
||||
id: usize,
|
||||
id: DebugAdapterId,
|
||||
) -> (UnboundedReceiver<Payload>, UnboundedSender<Payload>) {
|
||||
let (client_tx, rx) = unbounded_channel();
|
||||
let (tx, client_rx) = unbounded_channel();
|
||||
@@ -73,7 +73,7 @@ impl Transport {
|
||||
|
||||
let transport = Arc::new(transport);
|
||||
|
||||
tokio::spawn(Self::recv(transport.clone(), server_stdout, client_tx));
|
||||
tokio::spawn(Self::recv(id, transport.clone(), server_stdout, client_tx));
|
||||
tokio::spawn(Self::send(transport, server_stdin, client_rx));
|
||||
if let Some(stderr) = server_stderr {
|
||||
tokio::spawn(Self::err(stderr));
|
||||
@@ -83,12 +83,14 @@ impl Transport {
|
||||
}
|
||||
|
||||
async fn recv_server_message(
|
||||
id: DebugAdapterId,
|
||||
reader: &mut Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
buffer: &mut String,
|
||||
content: &mut Vec<u8>,
|
||||
) -> Result<Payload> {
|
||||
let mut content_length = None;
|
||||
loop {
|
||||
buffer.truncate(0);
|
||||
buffer.clear();
|
||||
if reader.read_line(buffer).await? == 0 {
|
||||
return Err(Error::StreamClosed);
|
||||
};
|
||||
@@ -117,17 +119,17 @@ impl Transport {
|
||||
}
|
||||
|
||||
let content_length = content_length.context("missing content length")?;
|
||||
content.resize(content_length, 0);
|
||||
reader.read_exact(content).await?;
|
||||
let msg = std::str::from_utf8(content).context("invalid utf8 from server")?;
|
||||
|
||||
//TODO: reuse vector
|
||||
let mut content = vec![0; content_length];
|
||||
reader.read_exact(&mut content).await?;
|
||||
let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?;
|
||||
|
||||
info!("<- DAP {}", msg);
|
||||
info!("[{}] <- DAP {}", id, msg);
|
||||
|
||||
// try parsing as output (server response) or call (server request)
|
||||
let output: serde_json::Result<Payload> = serde_json::from_str(msg);
|
||||
|
||||
content.clear();
|
||||
|
||||
Ok(output?)
|
||||
}
|
||||
|
||||
@@ -163,7 +165,7 @@ impl Transport {
|
||||
server_stdin: &mut Box<dyn AsyncWrite + Unpin + Send>,
|
||||
request: String,
|
||||
) -> Result<()> {
|
||||
info!("-> DAP {}", request);
|
||||
info!("[{}] -> DAP {}", self.id, request);
|
||||
|
||||
// send the headers
|
||||
server_stdin
|
||||
@@ -178,15 +180,18 @@ impl Transport {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_response(res: Response) -> Result<Response> {
|
||||
fn process_response(&self, res: Response) -> Result<Response> {
|
||||
if res.success {
|
||||
info!("<- DAP success in response to {}", res.request_seq);
|
||||
info!(
|
||||
"[{}] <- DAP success in response to {}",
|
||||
self.id, res.request_seq
|
||||
);
|
||||
|
||||
Ok(res)
|
||||
} else {
|
||||
error!(
|
||||
"<- DAP error {:?} ({:?}) for command #{} {}",
|
||||
res.message, res.body, res.request_seq, res.command
|
||||
"[{}] <- DAP error {:?} ({:?}) for command #{} {}",
|
||||
self.id, res.message, res.body, res.request_seq, res.command
|
||||
);
|
||||
|
||||
Err(Error::Other(anyhow::format_err!("{:?}", res.body)))
|
||||
@@ -204,7 +209,7 @@ impl Transport {
|
||||
let tx = self.pending_requests.lock().await.remove(&request_seq);
|
||||
|
||||
match tx {
|
||||
Some(tx) => match tx.send(Self::process_response(res)).await {
|
||||
Some(tx) => match tx.send(self.process_response(res)).await {
|
||||
Ok(_) => (),
|
||||
Err(_) => error!(
|
||||
"Tried sending response into a closed channel (id={:?}), original request likely timed out",
|
||||
@@ -224,12 +229,12 @@ impl Transport {
|
||||
ref seq,
|
||||
..
|
||||
}) => {
|
||||
info!("<- DAP request {} #{}", command, seq);
|
||||
info!("[{}] <- DAP request {} #{}", self.id, command, seq);
|
||||
client_tx.send(msg).expect("Failed to send");
|
||||
Ok(())
|
||||
}
|
||||
Payload::Event(ref event) => {
|
||||
info!("<- DAP event {:?}", event);
|
||||
info!("[{}] <- DAP event {:?}", self.id, event);
|
||||
client_tx.send(msg).expect("Failed to send");
|
||||
Ok(())
|
||||
}
|
||||
@@ -237,17 +242,26 @@ impl Transport {
|
||||
}
|
||||
|
||||
async fn recv(
|
||||
id: DebugAdapterId,
|
||||
transport: Arc<Self>,
|
||||
mut server_stdout: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
client_tx: UnboundedSender<Payload>,
|
||||
) {
|
||||
let mut recv_buffer = String::new();
|
||||
let mut content_buffer = Vec::new();
|
||||
loop {
|
||||
match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await {
|
||||
match Self::recv_server_message(
|
||||
id,
|
||||
&mut server_stdout,
|
||||
&mut recv_buffer,
|
||||
&mut content_buffer,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(msg) => match transport.process_server_message(&client_tx, msg).await {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
error!("err: <- {err:?}");
|
||||
error!(" [{id}] err: <- {err:?}");
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
@@ -438,6 +438,21 @@ pub mod requests {
|
||||
const COMMAND: &'static str = "disconnect";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct TerminateArguments {
|
||||
pub restart: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Terminate {}
|
||||
|
||||
impl Request for Terminate {
|
||||
type Arguments = Option<TerminateArguments>;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "terminate";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ConfigurationDone {}
|
||||
|
||||
@@ -752,6 +767,21 @@ pub mod requests {
|
||||
type Result = RunInTerminalResponse;
|
||||
const COMMAND: &'static str = "runInTerminal";
|
||||
}
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StartDebuggingArguments {
|
||||
pub request: ConnectionType,
|
||||
pub configuration: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StartDebugging {}
|
||||
|
||||
impl Request for StartDebugging {
|
||||
type Arguments = StartDebuggingArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "startDebugging";
|
||||
}
|
||||
}
|
||||
|
||||
// Events
|
||||
@@ -992,6 +1022,13 @@ pub mod events {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ConnectionType {
|
||||
Launch,
|
||||
Attach,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_deserialize_module_id_from_number() {
|
||||
let raw = r#"{"id": 0, "name": "Name"}"#;
|
||||
|
@@ -13,7 +13,7 @@ homepage.workspace = true
|
||||
|
||||
[dependencies]
|
||||
foldhash.workspace = true
|
||||
hashbrown = "0.15"
|
||||
hashbrown = "0.16"
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
|
||||
# the event registry is essentially read only but must be an rwlock so we can
|
||||
# setup new events on initialization, hardware-lock-elision hugely benefits this case
|
||||
|
@@ -19,9 +19,8 @@ helix-stdx = { path = "../helix-stdx" }
|
||||
|
||||
anyhow = "1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
toml = "0.8"
|
||||
toml.workspace = true
|
||||
etcetera = "0.10"
|
||||
tree-sitter.workspace = true
|
||||
once_cell = "1.21"
|
||||
log = "0.4"
|
||||
|
||||
@@ -32,5 +31,4 @@ cc = { version = "1" }
|
||||
threadpool = { version = "1.0" }
|
||||
tempfile.workspace = true
|
||||
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||
libloading = "0.8"
|
||||
tree-house.workspace = true
|
||||
|
@@ -9,7 +9,7 @@ use std::{
|
||||
sync::mpsc::channel,
|
||||
};
|
||||
use tempfile::TempPath;
|
||||
use tree_sitter::Language;
|
||||
use tree_house::tree_sitter::Grammar;
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
@@ -61,28 +61,21 @@ const BUILD_TARGET: &str = env!("BUILD_TARGET");
|
||||
const REMOTE_NAME: &str = "origin";
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
pub fn get_language(name: &str) -> Result<Language> {
|
||||
pub fn get_language(name: &str) -> Result<Option<Grammar>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn get_language(name: &str) -> Result<Language> {
|
||||
use libloading::{Library, Symbol};
|
||||
pub fn get_language(name: &str) -> Result<Option<Grammar>> {
|
||||
let mut rel_library_path = PathBuf::new().join("grammars").join(name);
|
||||
rel_library_path.set_extension(DYLIB_EXTENSION);
|
||||
let library_path = crate::runtime_file(&rel_library_path);
|
||||
if !library_path.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let library = unsafe { Library::new(&library_path) }
|
||||
.with_context(|| format!("Error opening dynamic library {:?}", library_path))?;
|
||||
let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_"));
|
||||
let language = unsafe {
|
||||
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
|
||||
.get(language_fn_name.as_bytes())
|
||||
.with_context(|| format!("Failed to load symbol {}", language_fn_name))?;
|
||||
language_fn()
|
||||
};
|
||||
std::mem::forget(library);
|
||||
Ok(language)
|
||||
let grammar = unsafe { Grammar::new(name, &library_path) }?;
|
||||
Ok(Some(grammar))
|
||||
}
|
||||
|
||||
fn ensure_git_is_available() -> Result<()> {
|
||||
@@ -220,6 +213,27 @@ fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
|
||||
Ok(grammars)
|
||||
}
|
||||
|
||||
pub fn get_grammar_names() -> Result<Option<HashSet<String>>> {
|
||||
let config: Configuration = crate::config::user_lang_config()
|
||||
.context("Could not parse languages.toml")?
|
||||
.try_into()?;
|
||||
|
||||
let grammars = match config.grammar_selection {
|
||||
Some(GrammarSelection::Only { only: selections }) => Some(selections),
|
||||
Some(GrammarSelection::Except { except: rejections }) => Some(
|
||||
config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.map(|grammar| grammar.grammar_id)
|
||||
.filter(|id| !rejections.contains(id))
|
||||
.collect(),
|
||||
),
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok(grammars)
|
||||
}
|
||||
|
||||
fn run_parallel<F, Res>(grammars: Vec<GrammarConfiguration>, job: F) -> Vec<(String, Result<Res>)>
|
||||
where
|
||||
F: Fn(GrammarConfiguration) -> Result<Res> + Send + 'static + Clone,
|
||||
|
@@ -244,7 +244,12 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi
|
||||
/// Otherwise (workspace, false) is returned
|
||||
pub fn find_workspace() -> (PathBuf, bool) {
|
||||
let current_dir = current_working_dir();
|
||||
for ancestor in current_dir.ancestors() {
|
||||
find_workspace_in(current_dir)
|
||||
}
|
||||
|
||||
pub fn find_workspace_in(dir: impl AsRef<Path>) -> (PathBuf, bool) {
|
||||
let dir = dir.as_ref();
|
||||
for ancestor in dir.ancestors() {
|
||||
if ancestor.join(".git").exists()
|
||||
|| ancestor.join(".svn").exists()
|
||||
|| ancestor.join(".jj").exists()
|
||||
@@ -254,7 +259,7 @@ pub fn find_workspace() -> (PathBuf, bool) {
|
||||
}
|
||||
}
|
||||
|
||||
(current_dir, true)
|
||||
(dir.to_owned(), true)
|
||||
}
|
||||
|
||||
fn default_config_file() -> PathBuf {
|
||||
|
@@ -23,7 +23,7 @@ license = "MIT"
|
||||
[dependencies]
|
||||
bitflags.workspace = true
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
serde_json = "1.0.143"
|
||||
url = {version = "2.5.4", features = ["serde"]}
|
||||
|
||||
[features]
|
||||
|
@@ -19,14 +19,14 @@ helix-loader = { path = "../helix-loader" }
|
||||
helix-lsp-types = { path = "../helix-lsp-types" }
|
||||
|
||||
anyhow = "1.0"
|
||||
futures-executor = "0.3"
|
||||
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
|
||||
futures-executor.workspace = true
|
||||
futures-util.workspace = true
|
||||
globset = "0.4.16"
|
||||
log = "0.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
tokio = { version = "1.44", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio-stream = "0.1.17"
|
||||
tokio = { version = "1.47", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio-stream.workspace = true
|
||||
parking_lot.workspace = true
|
||||
arc-swap = "1"
|
||||
slotmap.workspace = true
|
||||
|
@@ -10,7 +10,7 @@ use crate::lsp::{
|
||||
DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, SignatureHelp, Url,
|
||||
WorkspaceFolder, WorkspaceFoldersChangeEvent,
|
||||
};
|
||||
use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope};
|
||||
use helix_core::{find_workspace, syntax::config::LanguageServerFeature, ChangeSet, Rope};
|
||||
use helix_loader::VERSION_AND_GIT_HASH;
|
||||
use helix_stdx::path;
|
||||
use parking_lot::Mutex;
|
||||
@@ -176,6 +176,29 @@ impl Client {
|
||||
self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new())
|
||||
}
|
||||
|
||||
/// Merge FormattingOptions with 'config.format' and return it
|
||||
fn get_merged_formatting_options(
|
||||
&self,
|
||||
options: lsp::FormattingOptions,
|
||||
) -> lsp::FormattingOptions {
|
||||
let config_format = self
|
||||
.config
|
||||
.as_ref()
|
||||
.and_then(|cfg| cfg.get("format"))
|
||||
.and_then(|fmt| HashMap::<String, lsp::FormattingProperty>::deserialize(fmt).ok());
|
||||
|
||||
if let Some(mut properties) = config_format {
|
||||
// passed in options take precedence over 'config.format'
|
||||
properties.extend(options.properties);
|
||||
lsp::FormattingOptions {
|
||||
properties,
|
||||
..options
|
||||
}
|
||||
} else {
|
||||
options
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity, clippy::too_many_arguments)]
|
||||
pub fn start(
|
||||
cmd: &str,
|
||||
@@ -201,6 +224,7 @@ impl Client {
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.current_dir(&root_path)
|
||||
// make sure the process is reaped on drop
|
||||
.kill_on_drop(true)
|
||||
.spawn();
|
||||
@@ -1167,23 +1191,7 @@ impl Client {
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
// merge FormattingOptions with 'config.format'
|
||||
let config_format = self
|
||||
.config
|
||||
.as_ref()
|
||||
.and_then(|cfg| cfg.get("format"))
|
||||
.and_then(|fmt| HashMap::<String, lsp::FormattingProperty>::deserialize(fmt).ok());
|
||||
|
||||
let options = if let Some(mut properties) = config_format {
|
||||
// passed in options take precedence over 'config.format'
|
||||
properties.extend(options.properties);
|
||||
lsp::FormattingOptions {
|
||||
properties,
|
||||
..options
|
||||
}
|
||||
} else {
|
||||
options
|
||||
};
|
||||
let options = self.get_merged_formatting_options(options);
|
||||
|
||||
let params = lsp::DocumentFormattingParams {
|
||||
text_document,
|
||||
@@ -1209,6 +1217,8 @@ impl Client {
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
let options = self.get_merged_formatting_options(options);
|
||||
|
||||
let params = lsp::DocumentRangeFormattingParams {
|
||||
text_document,
|
||||
range,
|
||||
|
@@ -12,7 +12,7 @@ pub use jsonrpc::Call;
|
||||
pub use lsp::{Position, Url};
|
||||
|
||||
use futures_util::stream::select_all::SelectAll;
|
||||
use helix_core::syntax::{
|
||||
use helix_core::syntax::config::{
|
||||
LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures,
|
||||
};
|
||||
use helix_stdx::path;
|
||||
|
@@ -90,11 +90,12 @@ impl Transport {
|
||||
async fn recv_server_message(
|
||||
reader: &mut (impl AsyncBufRead + Unpin + Send),
|
||||
buffer: &mut String,
|
||||
content: &mut Vec<u8>,
|
||||
language_server_name: &str,
|
||||
) -> Result<ServerMessage> {
|
||||
let mut content_length = None;
|
||||
loop {
|
||||
buffer.truncate(0);
|
||||
buffer.clear();
|
||||
if reader.read_line(buffer).await? == 0 {
|
||||
return Err(Error::StreamClosed);
|
||||
};
|
||||
@@ -126,17 +127,17 @@ impl Transport {
|
||||
}
|
||||
|
||||
let content_length = content_length.context("missing content length")?;
|
||||
|
||||
//TODO: reuse vector
|
||||
let mut content = vec![0; content_length];
|
||||
reader.read_exact(&mut content).await?;
|
||||
let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?;
|
||||
content.resize(content_length, 0);
|
||||
reader.read_exact(content).await?;
|
||||
let msg = std::str::from_utf8(content).context("invalid utf8 from server")?;
|
||||
|
||||
info!("{language_server_name} <- {msg}");
|
||||
|
||||
// try parsing as output (server response) or call (server request)
|
||||
let output: serde_json::Result<ServerMessage> = serde_json::from_str(msg);
|
||||
|
||||
content.clear();
|
||||
|
||||
Ok(output?)
|
||||
}
|
||||
|
||||
@@ -255,9 +256,15 @@ impl Transport {
|
||||
client_tx: UnboundedSender<(LanguageServerId, jsonrpc::Call)>,
|
||||
) {
|
||||
let mut recv_buffer = String::new();
|
||||
let mut content_buffer = Vec::new();
|
||||
loop {
|
||||
match Self::recv_server_message(&mut server_stdout, &mut recv_buffer, &transport.name)
|
||||
.await
|
||||
match Self::recv_server_message(
|
||||
&mut server_stdout,
|
||||
&mut recv_buffer,
|
||||
&mut content_buffer,
|
||||
&transport.name,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(msg) => {
|
||||
match transport
|
||||
|
@@ -15,15 +15,15 @@ homepage.workspace = true
|
||||
dunce = "1.0"
|
||||
etcetera = "0.10"
|
||||
ropey.workspace = true
|
||||
which = "7.0"
|
||||
which = "8.0"
|
||||
regex-cursor = "0.1.5"
|
||||
bitflags.workspace = true
|
||||
once_cell = "1.21"
|
||||
regex-automata = "0.4.9"
|
||||
regex-automata = "0.4.10"
|
||||
unicode-segmentation.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] }
|
||||
windows-sys = { version = "0.61", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
rustix = { version = "1.0", features = ["fs"] }
|
||||
|
@@ -1,3 +1,4 @@
|
||||
//! Functions for working with the host environment.
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
ffi::{OsStr, OsString},
|
||||
@@ -10,9 +11,9 @@ use once_cell::sync::Lazy;
|
||||
// We keep the CWD as a static so that we can access it in places where we don't have access to the Editor
|
||||
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
|
||||
|
||||
// Get the current working directory.
|
||||
// This information is managed internally as the call to std::env::current_dir
|
||||
// might fail if the cwd has been deleted.
|
||||
/// Get the current working directory.
|
||||
/// This information is managed internally as the call to std::env::current_dir
|
||||
/// might fail if the cwd has been deleted.
|
||||
pub fn current_working_dir() -> PathBuf {
|
||||
if let Some(path) = &*CWD.read().unwrap() {
|
||||
return path.clone();
|
||||
@@ -37,6 +38,7 @@ pub fn current_working_dir() -> PathBuf {
|
||||
cwd
|
||||
}
|
||||
|
||||
/// Update the current working directory.
|
||||
pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<Option<PathBuf>> {
|
||||
let path = crate::path::canonicalize(path);
|
||||
std::env::set_current_dir(&path)?;
|
||||
@@ -45,14 +47,17 @@ pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<Option
|
||||
Ok(cwd.replace(path))
|
||||
}
|
||||
|
||||
/// Checks if the given environment variable is set.
|
||||
pub fn env_var_is_set(env_var_name: &str) -> bool {
|
||||
std::env::var_os(env_var_name).is_some()
|
||||
}
|
||||
|
||||
/// Checks if a binary with the given name exists.
|
||||
pub fn binary_exists<T: AsRef<OsStr>>(binary_name: T) -> bool {
|
||||
which::which(binary_name).is_ok()
|
||||
}
|
||||
|
||||
/// Attempts to find a binary of the given name. See [which](https://linux.die.net/man/1/which).
|
||||
pub fn which<T: AsRef<OsStr>>(
|
||||
binary_name: T,
|
||||
) -> Result<std::path::PathBuf, ExecutableNotFoundError> {
|
||||
|
@@ -1,3 +1,4 @@
|
||||
//! Functions for managine file metadata.
|
||||
//! From <https://github.com/Freaky/faccess>
|
||||
|
||||
use std::io;
|
||||
@@ -70,6 +71,16 @@ mod imp {
|
||||
perms.set_mode(new_perms);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use std::fs::{File, FileTimes};
|
||||
use std::os::macos::fs::FileTimesExt;
|
||||
|
||||
let to_file = File::options().write(true).open(to)?;
|
||||
let times = FileTimes::new().set_created(from_meta.created()?);
|
||||
to_file.set_times(times)?;
|
||||
}
|
||||
|
||||
std::fs::set_permissions(to, perms)?;
|
||||
|
||||
Ok(())
|
||||
@@ -108,7 +119,13 @@ mod imp {
|
||||
|
||||
use std::ffi::c_void;
|
||||
|
||||
use std::os::windows::{ffi::OsStrExt, fs::OpenOptionsExt, io::AsRawHandle};
|
||||
use std::os::windows::{
|
||||
ffi::OsStrExt,
|
||||
fs::{FileTimesExt, OpenOptionsExt},
|
||||
io::AsRawHandle,
|
||||
};
|
||||
|
||||
use std::fs::{File, FileTimes};
|
||||
|
||||
struct SecurityDescriptor {
|
||||
sd: PSECURITY_DESCRIPTOR,
|
||||
@@ -412,6 +429,10 @@ mod imp {
|
||||
let meta = std::fs::metadata(from)?;
|
||||
let perms = meta.permissions();
|
||||
|
||||
let to_file = File::options().write(true).open(to)?;
|
||||
let times = FileTimes::new().set_created(meta.created()?);
|
||||
to_file.set_times(times)?;
|
||||
|
||||
std::fs::set_permissions(to, perms)?;
|
||||
|
||||
Ok(())
|
||||
|
@@ -1,3 +1,6 @@
|
||||
//! Extensions to the standard library. A collection of helper functions
|
||||
//! used throughout helix.
|
||||
|
||||
pub mod env;
|
||||
pub mod faccess;
|
||||
pub mod path;
|
||||
|
@@ -1,3 +1,5 @@
|
||||
//! Functions for working with [Path].
|
||||
|
||||
pub use etcetera::home_dir;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex_cursor::{engines::meta::Regex, Input};
|
||||
@@ -140,6 +142,7 @@ pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
|
||||
normalize(path)
|
||||
}
|
||||
|
||||
/// Convert path into a relative path
|
||||
pub fn get_relative_path<'a, P>(path: P) -> Cow<'a, Path>
|
||||
where
|
||||
P: Into<Cow<'a, Path>>,
|
||||
|
@@ -1,3 +1,5 @@
|
||||
//! Provides [Range] type expanding on [RangeBounds].
|
||||
|
||||
use std::ops::{self, RangeBounds};
|
||||
|
||||
/// A range of `char`s within the text.
|
||||
@@ -66,6 +68,7 @@ pub fn is_subset<const ALLOW_EMPTY: bool>(
|
||||
}
|
||||
}
|
||||
|
||||
/// Similar to is_subset but requires each element of `super_set` to be matched
|
||||
pub fn is_exact_subset(
|
||||
mut super_set: impl Iterator<Item = Range>,
|
||||
mut sub_set: impl Iterator<Item = Range>,
|
||||
|
@@ -1,3 +1,4 @@
|
||||
//! Functions and types for working with [RopeSlice]
|
||||
use std::fmt;
|
||||
use std::ops::{Bound, RangeBounds};
|
||||
|
||||
@@ -8,6 +9,7 @@ use ropey::iter::Chunks;
|
||||
use ropey::RopeSlice;
|
||||
use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete};
|
||||
|
||||
/// Additional utility functions for [RopeSlice]
|
||||
pub trait RopeSliceExt<'a>: Sized {
|
||||
fn ends_with(self, text: &str) -> bool;
|
||||
fn starts_with(self, text: &str) -> bool;
|
||||
@@ -135,7 +137,9 @@ pub trait RopeSliceExt<'a>: Sized {
|
||||
/// let graphemes: Vec<_> = text.graphemes().collect();
|
||||
/// assert_eq!(graphemes.as_slice(), &["😶🌫️", "🏴☠️", "🖼️"]);
|
||||
/// ```
|
||||
fn graphemes(self) -> RopeGraphemes<'a>;
|
||||
fn graphemes(self) -> RopeGraphemes<'a> {
|
||||
self.graphemes_at(0)
|
||||
}
|
||||
/// Returns an iterator over the grapheme clusters in the slice, reversed.
|
||||
///
|
||||
/// The returned iterator starts at the end of the slice and ends at the beginning of the
|
||||
@@ -150,7 +154,127 @@ pub trait RopeSliceExt<'a>: Sized {
|
||||
/// let graphemes: Vec<_> = text.graphemes_rev().collect();
|
||||
/// assert_eq!(graphemes.as_slice(), &["🖼️", "🏴☠️", "😶🌫️"]);
|
||||
/// ```
|
||||
fn graphemes_rev(self) -> RevRopeGraphemes<'a>;
|
||||
fn graphemes_rev(self) -> RopeGraphemes<'a>;
|
||||
/// Returns an iterator over the grapheme clusters in the slice at the given byte index.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use ropey::Rope;
|
||||
/// # use helix_stdx::rope::RopeSliceExt;
|
||||
/// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
|
||||
/// // 14 is the byte index of the pirate flag's starting cluster boundary.
|
||||
/// let graphemes: Vec<_> = text.slice(..).graphemes_at(14).collect();
|
||||
/// assert_eq!(graphemes.as_slice(), &["🏴☠️", "🖼️"]);
|
||||
/// // 27 is the byte index of the pirate flag's ending cluster boundary.
|
||||
/// let graphemes: Vec<_> = text.slice(..).graphemes_at(27).reversed().collect();
|
||||
/// assert_eq!(graphemes.as_slice(), &["🏴☠️", "😶🌫️"]);
|
||||
/// ```
|
||||
fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a>;
|
||||
/// Returns an iterator over the grapheme clusters in a rope and the byte index where each
|
||||
/// grapheme cluster starts.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use ropey::Rope;
|
||||
/// # use helix_stdx::rope::RopeSliceExt;
|
||||
/// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
|
||||
/// let slice = text.slice(..);
|
||||
/// let graphemes: Vec<_> = slice.grapheme_indices_at(0).collect();
|
||||
/// assert_eq!(
|
||||
/// graphemes.as_slice(),
|
||||
/// &[(0, "😶🌫️".into()), (14, "🏴☠️".into()), (27, "🖼️".into())]
|
||||
/// );
|
||||
/// let graphemes: Vec<_> = slice.grapheme_indices_at(slice.len_bytes()).reversed().collect();
|
||||
/// assert_eq!(
|
||||
/// graphemes.as_slice(),
|
||||
/// &[(27, "🖼️".into()), (14, "🏴☠️".into()), (0, "😶🌫️".into())]
|
||||
/// );
|
||||
/// ```
|
||||
fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a>;
|
||||
/// Finds the byte index of the next grapheme boundary after `byte_idx`.
|
||||
///
|
||||
/// If the byte index lies on the last grapheme cluster in the slice then this function
|
||||
/// returns `RopeSlice::len_bytes`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use ropey::Rope;
|
||||
/// # use helix_stdx::rope::RopeSliceExt;
|
||||
/// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
|
||||
/// let slice = text.slice(..);
|
||||
/// let mut byte_idx = 0;
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("😶🌫️".into()));
|
||||
/// byte_idx = slice.next_grapheme_boundary(byte_idx);
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🏴☠️".into()));
|
||||
///
|
||||
/// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
|
||||
/// // functionally the same as `ceil_grapheme_boundary`.
|
||||
/// assert_eq!(slice.next_grapheme_boundary(byte_idx - 1), byte_idx);
|
||||
/// assert_eq!(slice.next_grapheme_boundary(byte_idx - 2), byte_idx);
|
||||
/// assert_eq!(slice.next_grapheme_boundary(byte_idx + 1), slice.next_grapheme_boundary(byte_idx));
|
||||
/// assert_eq!(slice.next_grapheme_boundary(byte_idx + 2), slice.next_grapheme_boundary(byte_idx));
|
||||
///
|
||||
/// byte_idx = slice.next_grapheme_boundary(byte_idx);
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🖼️".into()));
|
||||
/// byte_idx = slice.next_grapheme_boundary(byte_idx);
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).next(), None);
|
||||
/// assert_eq!(byte_idx, slice.len_bytes());
|
||||
/// ```
|
||||
fn next_grapheme_boundary(self, byte_idx: usize) -> usize {
|
||||
self.nth_next_grapheme_boundary(byte_idx, 1)
|
||||
}
|
||||
/// Finds the byte index of the `n`th grapheme cluster after the given `byte_idx`.
|
||||
///
|
||||
/// If there are fewer than `n` grapheme clusters after `byte_idx` in the rope then this
|
||||
/// function returns `RopeSlice::len_bytes`.
|
||||
///
|
||||
/// This is functionally equivalent to calling `next_grapheme_boundary` `n` times but is more
|
||||
/// efficient.
|
||||
fn nth_next_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
|
||||
/// Finds the byte index of the previous grapheme boundary before `byte_idx`.
|
||||
///
|
||||
/// If the byte index lies on the first grapheme cluster in the slice then this function
|
||||
/// returns zero.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use ropey::Rope;
|
||||
/// # use helix_stdx::rope::RopeSliceExt;
|
||||
/// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
|
||||
/// let slice = text.slice(..);
|
||||
/// let mut byte_idx = text.len_bytes();
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🖼️".into()));
|
||||
/// byte_idx = slice.prev_grapheme_boundary(byte_idx);
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🏴☠️".into()));
|
||||
///
|
||||
/// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
|
||||
/// // functionally the same as `floor_grapheme_boundary`.
|
||||
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 1), byte_idx);
|
||||
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 2), byte_idx);
|
||||
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 1), slice.prev_grapheme_boundary(byte_idx));
|
||||
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 2), slice.prev_grapheme_boundary(byte_idx));
|
||||
///
|
||||
/// byte_idx = slice.prev_grapheme_boundary(byte_idx);
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("😶🌫️".into()));
|
||||
/// byte_idx = slice.prev_grapheme_boundary(byte_idx);
|
||||
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), None);
|
||||
/// assert_eq!(byte_idx, 0);
|
||||
/// ```
|
||||
fn prev_grapheme_boundary(self, byte_idx: usize) -> usize {
|
||||
self.nth_prev_grapheme_boundary(byte_idx, 1)
|
||||
}
|
||||
/// Finds the byte index of the `n`th grapheme cluster before the given `byte_idx`.
|
||||
///
|
||||
/// If there are fewer than `n` grapheme clusters before `byte_idx` in the rope then this
|
||||
/// function returns zero.
|
||||
///
|
||||
/// This is functionally equivalent to calling `prev_grapheme_boundary` `n` times but is more
|
||||
/// efficient.
|
||||
fn nth_prev_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
|
||||
}
|
||||
|
||||
impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
|
||||
@@ -335,31 +459,111 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn graphemes(self) -> RopeGraphemes<'a> {
|
||||
let mut chunks = self.chunks();
|
||||
let first_chunk = chunks.next().unwrap_or("");
|
||||
fn graphemes_rev(self) -> RopeGraphemes<'a> {
|
||||
self.graphemes_at(self.len_bytes()).reversed()
|
||||
}
|
||||
|
||||
fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a> {
|
||||
// Bounds check
|
||||
assert!(byte_idx <= self.len_bytes());
|
||||
|
||||
let (mut chunks, chunk_byte_idx, _, _) = self.chunks_at_byte(byte_idx);
|
||||
let current_chunk = chunks.next().unwrap_or("");
|
||||
|
||||
RopeGraphemes {
|
||||
text: self,
|
||||
chunks,
|
||||
cur_chunk: first_chunk,
|
||||
cur_chunk_start: 0,
|
||||
cursor: GraphemeCursor::new(0, self.len_bytes(), true),
|
||||
current_chunk,
|
||||
chunk_byte_idx,
|
||||
cursor: GraphemeCursor::new(byte_idx, self.len_bytes(), true),
|
||||
is_reversed: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn graphemes_rev(self) -> RevRopeGraphemes<'a> {
|
||||
let (mut chunks, mut cur_chunk_start, _, _) = self.chunks_at_byte(self.len_bytes());
|
||||
chunks.reverse();
|
||||
let first_chunk = chunks.next().unwrap_or("");
|
||||
cur_chunk_start -= first_chunk.len();
|
||||
RevRopeGraphemes {
|
||||
text: self,
|
||||
chunks,
|
||||
cur_chunk: first_chunk,
|
||||
cur_chunk_start,
|
||||
cursor: GraphemeCursor::new(self.len_bytes(), self.len_bytes(), true),
|
||||
fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a> {
|
||||
// Bounds check
|
||||
assert!(byte_idx <= self.len_bytes());
|
||||
RopeGraphemeIndices {
|
||||
front_offset: byte_idx,
|
||||
iter: self.graphemes_at(byte_idx),
|
||||
is_reversed: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn nth_next_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
|
||||
// Bounds check
|
||||
assert!(byte_idx <= self.len_bytes());
|
||||
|
||||
byte_idx = self.floor_char_boundary(byte_idx);
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);
|
||||
|
||||
// Find the nth next grapheme cluster boundary.
|
||||
for _ in 0..n {
|
||||
loop {
|
||||
match gc.next_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(None) => return self.len_bytes(),
|
||||
Ok(Some(boundary)) => {
|
||||
byte_idx = boundary;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::NextChunk) => {
|
||||
chunk_byte_idx += chunk.len();
|
||||
let (a, _, _, _) = self.chunk_at_byte(chunk_byte_idx);
|
||||
chunk = a;
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let ctx_chunk = self.chunk_at_byte(n - 1).0;
|
||||
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
byte_idx
|
||||
}
|
||||
|
||||
fn nth_prev_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
|
||||
// Bounds check
|
||||
assert!(byte_idx <= self.len_bytes());
|
||||
|
||||
byte_idx = self.ceil_char_boundary(byte_idx);
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);
|
||||
|
||||
for _ in 0..n {
|
||||
loop {
|
||||
match gc.prev_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(None) => return 0,
|
||||
Ok(Some(boundary)) => {
|
||||
byte_idx = boundary;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::PrevChunk) => {
|
||||
let (a, b, _, _) = self.chunk_at_byte(chunk_byte_idx - 1);
|
||||
chunk = a;
|
||||
chunk_byte_idx = b;
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let ctx_chunk = self.chunk_at_byte(n - 1).0;
|
||||
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
byte_idx
|
||||
}
|
||||
}
|
||||
|
||||
// copied from std
|
||||
@@ -370,13 +574,19 @@ const fn is_utf8_char_boundary(b: u8) -> bool {
|
||||
}
|
||||
|
||||
/// An iterator over the graphemes of a `RopeSlice`.
|
||||
///
|
||||
/// This iterator is cursor-like: rather than implementing DoubleEndedIterator it can be reversed
|
||||
/// like a cursor. This style matches `Bytes` and `Chars` iterator types in Ropey and is more
|
||||
/// natural and useful for wrapping `GraphemeCursor`.
|
||||
#[derive(Clone)]
|
||||
pub struct RopeGraphemes<'a> {
|
||||
text: RopeSlice<'a>,
|
||||
chunks: Chunks<'a>,
|
||||
cur_chunk: &'a str,
|
||||
cur_chunk_start: usize,
|
||||
current_chunk: &'a str,
|
||||
/// Byte index of the start of the current chunk.
|
||||
chunk_byte_idx: usize,
|
||||
cursor: GraphemeCursor,
|
||||
is_reversed: bool,
|
||||
}
|
||||
|
||||
impl fmt::Debug for RopeGraphemes<'_> {
|
||||
@@ -384,112 +594,178 @@ impl fmt::Debug for RopeGraphemes<'_> {
|
||||
f.debug_struct("RopeGraphemes")
|
||||
.field("text", &self.text)
|
||||
.field("chunks", &self.chunks)
|
||||
.field("cur_chunk", &self.cur_chunk)
|
||||
.field("cur_chunk_start", &self.cur_chunk_start)
|
||||
.field("current_chunk", &self.current_chunk)
|
||||
.field("chunk_byte_idx", &self.chunk_byte_idx)
|
||||
// .field("cursor", &self.cursor)
|
||||
.field("is_reversed", &self.is_reversed)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RopeGraphemes<'a> {
|
||||
#[allow(clippy::should_implement_trait)]
|
||||
pub fn next(&mut self) -> Option<RopeSlice<'a>> {
|
||||
if self.is_reversed {
|
||||
self.prev_impl()
|
||||
} else {
|
||||
self.next_impl()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prev(&mut self) -> Option<RopeSlice<'a>> {
|
||||
if self.is_reversed {
|
||||
self.next_impl()
|
||||
} else {
|
||||
self.prev_impl()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reverse(&mut self) {
|
||||
self.is_reversed = !self.is_reversed;
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn reversed(mut self) -> Self {
|
||||
self.reverse();
|
||||
self
|
||||
}
|
||||
|
||||
fn next_impl(&mut self) -> Option<RopeSlice<'a>> {
|
||||
let a = self.cursor.cur_cursor();
|
||||
let b;
|
||||
loop {
|
||||
match self
|
||||
.cursor
|
||||
.next_boundary(self.current_chunk, self.chunk_byte_idx)
|
||||
{
|
||||
Ok(None) => return None,
|
||||
Ok(Some(boundary)) => {
|
||||
b = boundary;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::NextChunk) => {
|
||||
self.chunk_byte_idx += self.current_chunk.len();
|
||||
self.current_chunk = self.chunks.next().unwrap_or("");
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(idx)) => {
|
||||
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
|
||||
self.cursor.provide_context(chunk, byte_idx);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
if a < self.chunk_byte_idx {
|
||||
Some(self.text.byte_slice(a..b))
|
||||
} else {
|
||||
let a2 = a - self.chunk_byte_idx;
|
||||
let b2 = b - self.chunk_byte_idx;
|
||||
Some((&self.current_chunk[a2..b2]).into())
|
||||
}
|
||||
}
|
||||
|
||||
fn prev_impl(&mut self) -> Option<RopeSlice<'a>> {
|
||||
let a = self.cursor.cur_cursor();
|
||||
let b;
|
||||
loop {
|
||||
match self
|
||||
.cursor
|
||||
.prev_boundary(self.current_chunk, self.chunk_byte_idx)
|
||||
{
|
||||
Ok(None) => return None,
|
||||
Ok(Some(boundary)) => {
|
||||
b = boundary;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::PrevChunk) => {
|
||||
self.current_chunk = self.chunks.prev().unwrap_or("");
|
||||
self.chunk_byte_idx -= self.current_chunk.len();
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(idx)) => {
|
||||
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
|
||||
self.cursor.provide_context(chunk, byte_idx);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
if a >= self.chunk_byte_idx + self.current_chunk.len() {
|
||||
Some(self.text.byte_slice(b..a))
|
||||
} else {
|
||||
let a2 = a - self.chunk_byte_idx;
|
||||
let b2 = b - self.chunk_byte_idx;
|
||||
Some((&self.current_chunk[b2..a2]).into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for RopeGraphemes<'a> {
|
||||
type Item = RopeSlice<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let a = self.cursor.cur_cursor();
|
||||
let b;
|
||||
loop {
|
||||
match self
|
||||
.cursor
|
||||
.next_boundary(self.cur_chunk, self.cur_chunk_start)
|
||||
{
|
||||
Ok(None) => {
|
||||
return None;
|
||||
}
|
||||
Ok(Some(n)) => {
|
||||
b = n;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::NextChunk) => {
|
||||
self.cur_chunk_start += self.cur_chunk.len();
|
||||
self.cur_chunk = self.chunks.next().unwrap_or("");
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(idx)) => {
|
||||
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
|
||||
self.cursor.provide_context(chunk, byte_idx);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
RopeGraphemes::next(self)
|
||||
}
|
||||
}
|
||||
|
||||
if a < self.cur_chunk_start {
|
||||
Some(self.text.byte_slice(a..b))
|
||||
/// An iterator over the grapheme clusters in a rope and the byte indices where each grapheme
|
||||
/// cluster starts.
|
||||
///
|
||||
/// This iterator wraps `RopeGraphemes` and is also cursor-like. Use `reverse` or `reversed` to
|
||||
/// toggle the direction of the iterator. See [RopeGraphemes].
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RopeGraphemeIndices<'a> {
|
||||
front_offset: usize,
|
||||
iter: RopeGraphemes<'a>,
|
||||
is_reversed: bool,
|
||||
}
|
||||
|
||||
impl<'a> RopeGraphemeIndices<'a> {
|
||||
#[allow(clippy::should_implement_trait)]
|
||||
pub fn next(&mut self) -> Option<(usize, RopeSlice<'a>)> {
|
||||
if self.is_reversed {
|
||||
self.prev_impl()
|
||||
} else {
|
||||
let a2 = a - self.cur_chunk_start;
|
||||
let b2 = b - self.cur_chunk_start;
|
||||
Some((&self.cur_chunk[a2..b2]).into())
|
||||
self.next_impl()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the graphemes of a `RopeSlice` in reverse.
|
||||
#[derive(Clone)]
|
||||
pub struct RevRopeGraphemes<'a> {
|
||||
text: RopeSlice<'a>,
|
||||
chunks: Chunks<'a>,
|
||||
cur_chunk: &'a str,
|
||||
cur_chunk_start: usize,
|
||||
cursor: GraphemeCursor,
|
||||
}
|
||||
pub fn prev(&mut self) -> Option<(usize, RopeSlice<'a>)> {
|
||||
if self.is_reversed {
|
||||
self.next_impl()
|
||||
} else {
|
||||
self.prev_impl()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for RevRopeGraphemes<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("RevRopeGraphemes")
|
||||
.field("text", &self.text)
|
||||
.field("chunks", &self.chunks)
|
||||
.field("cur_chunk", &self.cur_chunk)
|
||||
.field("cur_chunk_start", &self.cur_chunk_start)
|
||||
// .field("cursor", &self.cursor)
|
||||
.finish()
|
||||
pub fn reverse(&mut self) {
|
||||
self.is_reversed = !self.is_reversed;
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn reversed(mut self) -> Self {
|
||||
self.reverse();
|
||||
self
|
||||
}
|
||||
|
||||
fn next_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
|
||||
let slice = self.iter.next()?;
|
||||
let idx = self.front_offset;
|
||||
self.front_offset += slice.len_bytes();
|
||||
Some((idx, slice))
|
||||
}
|
||||
|
||||
fn prev_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
|
||||
let slice = self.iter.prev()?;
|
||||
self.front_offset -= slice.len_bytes();
|
||||
Some((self.front_offset, slice))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for RevRopeGraphemes<'a> {
|
||||
type Item = RopeSlice<'a>;
|
||||
impl<'a> Iterator for RopeGraphemeIndices<'a> {
|
||||
type Item = (usize, RopeSlice<'a>);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let a = self.cursor.cur_cursor();
|
||||
let b;
|
||||
loop {
|
||||
match self
|
||||
.cursor
|
||||
.prev_boundary(self.cur_chunk, self.cur_chunk_start)
|
||||
{
|
||||
Ok(None) => {
|
||||
return None;
|
||||
}
|
||||
Ok(Some(n)) => {
|
||||
b = n;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::PrevChunk) => {
|
||||
self.cur_chunk = self.chunks.next().unwrap_or("");
|
||||
self.cur_chunk_start -= self.cur_chunk.len();
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(idx)) => {
|
||||
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
|
||||
self.cursor.provide_context(chunk, byte_idx);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
if a >= self.cur_chunk_start + self.cur_chunk.len() {
|
||||
Some(self.text.byte_slice(b..a))
|
||||
} else {
|
||||
let a2 = a - self.cur_chunk_start;
|
||||
let b2 = b - self.cur_chunk_start;
|
||||
Some((&self.cur_chunk[b2..a2]).into())
|
||||
}
|
||||
RopeGraphemeIndices::next(self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -54,14 +54,14 @@ anyhow = "1"
|
||||
once_cell = "1.21"
|
||||
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
|
||||
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
|
||||
crossterm = { version = "0.28", features = ["event-stream"] }
|
||||
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["termina"] }
|
||||
termina = { workspace = true, features = ["event-stream"] }
|
||||
signal-hook = "0.3"
|
||||
tokio-stream = "0.1"
|
||||
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
|
||||
arc-swap = { version = "1.7.1" }
|
||||
termini = "1"
|
||||
indexmap = "2.9"
|
||||
indexmap = "2.11"
|
||||
|
||||
# Logging
|
||||
fern = "0.7"
|
||||
@@ -82,7 +82,7 @@ open = "5.3.2"
|
||||
url = "2.5.4"
|
||||
|
||||
# config
|
||||
toml = "0.8"
|
||||
toml.workspace = true
|
||||
|
||||
serde_json = "1.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
@@ -91,12 +91,11 @@ serde = { version = "1.0", features = ["derive"] }
|
||||
grep-regex = "0.1.13"
|
||||
grep-searcher = "0.1.14"
|
||||
|
||||
dashmap = "6.0"
|
||||
|
||||
[target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
|
||||
signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
|
||||
libc = "0.2.172"
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc"] }
|
||||
libc = "0.2.175"
|
||||
|
||||
[build-dependencies]
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
@@ -30,28 +30,27 @@ use crate::{
|
||||
};
|
||||
|
||||
use log::{debug, error, info, warn};
|
||||
#[cfg(not(feature = "integration"))]
|
||||
use std::io::stdout;
|
||||
use std::{io::stdin, path::Path, sync::Arc};
|
||||
use std::{
|
||||
io::{stdin, IsTerminal},
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
#[cfg(not(windows))]
|
||||
use anyhow::Context;
|
||||
use anyhow::Error;
|
||||
use anyhow::{Context, Error};
|
||||
|
||||
use crossterm::{event::Event as CrosstermEvent, tty::IsTty};
|
||||
#[cfg(not(windows))]
|
||||
use {signal_hook::consts::signal, signal_hook_tokio::Signals};
|
||||
#[cfg(windows)]
|
||||
type Signals = futures_util::stream::Empty<()>;
|
||||
|
||||
#[cfg(not(feature = "integration"))]
|
||||
use tui::backend::CrosstermBackend;
|
||||
use tui::backend::TerminaBackend;
|
||||
|
||||
#[cfg(feature = "integration")]
|
||||
use tui::backend::TestBackend;
|
||||
|
||||
#[cfg(not(feature = "integration"))]
|
||||
type TerminalBackend = CrosstermBackend<std::io::Stdout>;
|
||||
type TerminalBackend = TerminaBackend;
|
||||
|
||||
#[cfg(feature = "integration")]
|
||||
type TerminalBackend = TestBackend;
|
||||
@@ -104,7 +103,8 @@ impl Application {
|
||||
let theme_loader = theme::Loader::new(&theme_parent_dirs);
|
||||
|
||||
#[cfg(not(feature = "integration"))]
|
||||
let backend = CrosstermBackend::new(stdout(), &config.editor);
|
||||
let backend = TerminaBackend::new((&config.editor).into())
|
||||
.context("failed to create terminal backend")?;
|
||||
|
||||
#[cfg(feature = "integration")]
|
||||
let backend = TestBackend::new(120, 150);
|
||||
@@ -123,7 +123,11 @@ impl Application {
|
||||
})),
|
||||
handlers,
|
||||
);
|
||||
Self::load_configured_theme(&mut editor, &config.load());
|
||||
Self::load_configured_theme(
|
||||
&mut editor,
|
||||
&config.load(),
|
||||
terminal.backend().supports_true_color(),
|
||||
);
|
||||
|
||||
let keys = Box::new(Map::new(Arc::clone(&config), |config: &Config| {
|
||||
&config.keys
|
||||
@@ -214,7 +218,7 @@ impl Application {
|
||||
} else {
|
||||
editor.new_file(Action::VerticalSplit);
|
||||
}
|
||||
} else if stdin().is_tty() || cfg!(feature = "integration") {
|
||||
} else if stdin().is_terminal() || cfg!(feature = "integration") {
|
||||
editor.new_file(Action::VerticalSplit);
|
||||
} else {
|
||||
editor
|
||||
@@ -282,7 +286,7 @@ impl Application {
|
||||
|
||||
pub async fn event_loop<S>(&mut self, input_stream: &mut S)
|
||||
where
|
||||
S: Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin,
|
||||
S: Stream<Item = std::io::Result<termina::Event>> + Unpin,
|
||||
{
|
||||
self.render().await;
|
||||
|
||||
@@ -295,7 +299,7 @@ impl Application {
|
||||
|
||||
pub async fn event_loop_until_idle<S>(&mut self, input_stream: &mut S) -> bool
|
||||
where
|
||||
S: Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin,
|
||||
S: Stream<Item = std::io::Result<termina::Event>> + Unpin,
|
||||
{
|
||||
loop {
|
||||
if self.editor.should_close() {
|
||||
@@ -356,6 +360,8 @@ impl Application {
|
||||
}
|
||||
|
||||
pub fn handle_config_events(&mut self, config_event: ConfigEvent) {
|
||||
let old_editor_config = self.editor.config();
|
||||
|
||||
match config_event {
|
||||
ConfigEvent::Refresh => self.refresh_config(),
|
||||
|
||||
@@ -365,7 +371,7 @@ impl Application {
|
||||
ConfigEvent::Update(editor_config) => {
|
||||
let mut app_config = (*self.config.load().clone()).clone();
|
||||
app_config.editor = *editor_config;
|
||||
if let Err(err) = self.terminal.reconfigure(app_config.editor.clone().into()) {
|
||||
if let Err(err) = self.terminal.reconfigure((&app_config.editor).into()) {
|
||||
self.editor.set_error(err.to_string());
|
||||
};
|
||||
self.config.store(Arc::new(app_config));
|
||||
@@ -374,7 +380,7 @@ impl Application {
|
||||
|
||||
// Update all the relevant members in the editor after updating
|
||||
// the configuration.
|
||||
self.editor.refresh_config();
|
||||
self.editor.refresh_config(&old_editor_config);
|
||||
|
||||
// reset view position in case softwrap was enabled/disabled
|
||||
let scrolloff = self.editor.config().scrolloff;
|
||||
@@ -384,33 +390,37 @@ impl Application {
|
||||
}
|
||||
}
|
||||
|
||||
/// refresh language config after config change
|
||||
fn refresh_language_config(&mut self) -> Result<(), Error> {
|
||||
let lang_loader = helix_core::config::user_lang_loader()?;
|
||||
|
||||
self.editor.syn_loader.store(Arc::new(lang_loader));
|
||||
for document in self.editor.documents.values_mut() {
|
||||
document.detect_language(self.editor.syn_loader.clone());
|
||||
let diagnostics = Editor::doc_diagnostics(
|
||||
&self.editor.language_servers,
|
||||
&self.editor.diagnostics,
|
||||
document,
|
||||
);
|
||||
document.replace_diagnostics(diagnostics, &[], None);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn refresh_config(&mut self) {
|
||||
let mut refresh_config = || -> Result<(), Error> {
|
||||
let default_config = Config::load_default()
|
||||
.map_err(|err| anyhow::anyhow!("Failed to load config: {}", err))?;
|
||||
self.refresh_language_config()?;
|
||||
// Refresh theme after config change
|
||||
Self::load_configured_theme(&mut self.editor, &default_config);
|
||||
self.terminal
|
||||
.reconfigure(default_config.editor.clone().into())?;
|
||||
|
||||
// Update the syntax language loader before setting the theme. Setting the theme will
|
||||
// call `Loader::set_scopes` which must be done before the documents are re-parsed for
|
||||
// the sake of locals highlighting.
|
||||
let lang_loader = helix_core::config::user_lang_loader()?;
|
||||
self.editor.syn_loader.store(Arc::new(lang_loader));
|
||||
Self::load_configured_theme(
|
||||
&mut self.editor,
|
||||
&default_config,
|
||||
self.terminal.backend().supports_true_color(),
|
||||
);
|
||||
|
||||
// Re-parse any open documents with the new language config.
|
||||
let lang_loader = self.editor.syn_loader.load();
|
||||
for document in self.editor.documents.values_mut() {
|
||||
// Re-detect .editorconfig
|
||||
document.detect_editor_config();
|
||||
document.detect_language(&lang_loader);
|
||||
let diagnostics = Editor::doc_diagnostics(
|
||||
&self.editor.language_servers,
|
||||
&self.editor.diagnostics,
|
||||
document,
|
||||
);
|
||||
document.replace_diagnostics(diagnostics, &[], None);
|
||||
}
|
||||
|
||||
self.terminal.reconfigure((&default_config.editor).into())?;
|
||||
// Store new config
|
||||
self.config.store(Arc::new(default_config));
|
||||
Ok(())
|
||||
@@ -427,8 +437,8 @@ impl Application {
|
||||
}
|
||||
|
||||
/// Load the theme set in configuration
|
||||
fn load_configured_theme(editor: &mut Editor, config: &Config) {
|
||||
let true_color = config.editor.true_color || crate::true_color();
|
||||
fn load_configured_theme(editor: &mut Editor, config: &Config, terminal_true_color: bool) {
|
||||
let true_color = terminal_true_color || config.editor.true_color || crate::true_color();
|
||||
let theme = config
|
||||
.theme
|
||||
.as_ref()
|
||||
@@ -500,7 +510,7 @@ impl Application {
|
||||
// https://github.com/neovim/neovim/issues/12322
|
||||
// https://github.com/neovim/neovim/pull/13084
|
||||
for retries in 1..=10 {
|
||||
match self.claim_term().await {
|
||||
match self.terminal.claim() {
|
||||
Ok(()) => break,
|
||||
Err(err) if retries == 10 => panic!("Failed to claim terminal: {}", err),
|
||||
Err(_) => continue,
|
||||
@@ -570,16 +580,41 @@ impl Application {
|
||||
doc.set_last_saved_revision(doc_save_event.revision, doc_save_event.save_time);
|
||||
|
||||
let lines = doc_save_event.text.len_lines();
|
||||
let bytes = doc_save_event.text.len_bytes();
|
||||
let size = doc_save_event.text.len_bytes();
|
||||
|
||||
enum Size {
|
||||
Bytes(u16),
|
||||
HumanReadable(f32, &'static str),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Size {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Bytes(bytes) => write!(f, "{bytes}B"),
|
||||
Self::HumanReadable(size, suffix) => write!(f, "{size:.1}{suffix}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let size = if size < 1024 {
|
||||
Size::Bytes(size as u16)
|
||||
} else {
|
||||
const SUFFIX: [&str; 4] = ["B", "KiB", "MiB", "GiB"];
|
||||
let mut size = size as f32;
|
||||
let mut i = 0;
|
||||
while i < SUFFIX.len() - 1 && size >= 1024.0 {
|
||||
size /= 1024.0;
|
||||
i += 1;
|
||||
}
|
||||
Size::HumanReadable(size, SUFFIX[i])
|
||||
};
|
||||
|
||||
self.editor
|
||||
.set_doc_path(doc_save_event.doc_id, &doc_save_event.path);
|
||||
// TODO: fix being overwritten by lsp
|
||||
self.editor.set_status(format!(
|
||||
"'{}' written, {}L {}B",
|
||||
"'{}' written, {lines}L {size}",
|
||||
get_relative_path(&doc_save_event.path).to_string_lossy(),
|
||||
lines,
|
||||
bytes
|
||||
));
|
||||
}
|
||||
|
||||
@@ -601,8 +636,8 @@ impl Application {
|
||||
// limit render calls for fast language server messages
|
||||
helix_event::request_redraw();
|
||||
}
|
||||
EditorEvent::DebuggerEvent(payload) => {
|
||||
let needs_render = self.editor.handle_debugger_message(payload).await;
|
||||
EditorEvent::DebuggerEvent((id, payload)) => {
|
||||
let needs_render = self.editor.handle_debugger_message(id, payload).await;
|
||||
if needs_render {
|
||||
self.render().await;
|
||||
}
|
||||
@@ -624,7 +659,7 @@ impl Application {
|
||||
false
|
||||
}
|
||||
|
||||
pub async fn handle_terminal_events(&mut self, event: std::io::Result<CrosstermEvent>) {
|
||||
pub async fn handle_terminal_events(&mut self, event: std::io::Result<termina::Event>) {
|
||||
let mut cx = crate::compositor::Context {
|
||||
editor: &mut self.editor,
|
||||
jobs: &mut self.jobs,
|
||||
@@ -632,9 +667,9 @@ impl Application {
|
||||
};
|
||||
// Handle key events
|
||||
let should_redraw = match event.unwrap() {
|
||||
CrosstermEvent::Resize(width, height) => {
|
||||
termina::Event::WindowResized(termina::WindowSize { rows, cols, .. }) => {
|
||||
self.terminal
|
||||
.resize(Rect::new(0, 0, width, height))
|
||||
.resize(Rect::new(0, 0, cols, rows))
|
||||
.expect("Unable to resize terminal");
|
||||
|
||||
let area = self.terminal.size().expect("couldn't get terminal size");
|
||||
@@ -642,11 +677,11 @@ impl Application {
|
||||
self.compositor.resize(area);
|
||||
|
||||
self.compositor
|
||||
.handle_event(&Event::Resize(width, height), &mut cx)
|
||||
.handle_event(&Event::Resize(cols, rows), &mut cx)
|
||||
}
|
||||
// Ignore keyboard release events.
|
||||
CrosstermEvent::Key(crossterm::event::KeyEvent {
|
||||
kind: crossterm::event::KeyEventKind::Release,
|
||||
termina::Event::Key(termina::event::KeyEvent {
|
||||
kind: termina::event::KeyEventKind::Release,
|
||||
..
|
||||
}) => false,
|
||||
event => self.compositor.handle_event(&event.into(), &mut cx),
|
||||
@@ -1088,36 +1123,48 @@ impl Application {
|
||||
lsp::ShowDocumentResult { success: true }
|
||||
}
|
||||
|
||||
async fn claim_term(&mut self) -> std::io::Result<()> {
|
||||
let terminal_config = self.config.load().editor.clone().into();
|
||||
self.terminal.claim(terminal_config)
|
||||
}
|
||||
|
||||
fn restore_term(&mut self) -> std::io::Result<()> {
|
||||
let terminal_config = self.config.load().editor.clone().into();
|
||||
use helix_view::graphics::CursorKind;
|
||||
self.terminal
|
||||
.backend_mut()
|
||||
.show_cursor(CursorKind::Block)
|
||||
.ok();
|
||||
self.terminal.restore(terminal_config)
|
||||
self.terminal.restore()
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "integration"))]
|
||||
pub fn event_stream(&self) -> impl Stream<Item = std::io::Result<termina::Event>> + Unpin {
|
||||
use termina::Terminal as _;
|
||||
let reader = self.terminal.backend().terminal().event_reader();
|
||||
termina::EventStream::new(reader, |event| !event.is_escape())
|
||||
}
|
||||
|
||||
#[cfg(feature = "integration")]
|
||||
pub fn event_stream(&self) -> impl Stream<Item = std::io::Result<termina::Event>> + Unpin {
|
||||
use std::{
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
/// A dummy stream that never polls as ready.
|
||||
pub struct DummyEventStream;
|
||||
|
||||
impl Stream for DummyEventStream {
|
||||
type Item = std::io::Result<termina::Event>;
|
||||
|
||||
fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
Poll::Pending
|
||||
}
|
||||
}
|
||||
|
||||
DummyEventStream
|
||||
}
|
||||
|
||||
pub async fn run<S>(&mut self, input_stream: &mut S) -> Result<i32, Error>
|
||||
where
|
||||
S: Stream<Item = std::io::Result<crossterm::event::Event>> + Unpin,
|
||||
S: Stream<Item = std::io::Result<termina::Event>> + Unpin,
|
||||
{
|
||||
self.claim_term().await?;
|
||||
|
||||
// Exit the alternate screen and disable raw mode before panicking
|
||||
let hook = std::panic::take_hook();
|
||||
std::panic::set_hook(Box::new(move |info| {
|
||||
// We can't handle errors properly inside this closure. And it's
|
||||
// probably not a good idea to `unwrap()` inside a panic handler.
|
||||
// So we just ignore the `Result`.
|
||||
let _ = TerminalBackend::force_restore();
|
||||
hook(info);
|
||||
}));
|
||||
self.terminal.claim()?;
|
||||
|
||||
self.event_loop(input_stream).await;
|
||||
|
||||
|
@@ -1,5 +1,6 @@
|
||||
pub(crate) mod dap;
|
||||
pub(crate) mod lsp;
|
||||
pub(crate) mod syntax;
|
||||
pub(crate) mod typed;
|
||||
|
||||
pub use dap::*;
|
||||
@@ -11,6 +12,7 @@ use helix_stdx::{
|
||||
};
|
||||
use helix_vcs::{FileChange, Hunk};
|
||||
pub use lsp::*;
|
||||
pub use syntax::*;
|
||||
use tui::{
|
||||
text::{Span, Spans},
|
||||
widgets::Cell,
|
||||
@@ -20,7 +22,8 @@ pub use typed::*;
|
||||
use helix_core::{
|
||||
char_idx_at_visual_offset,
|
||||
chars::char_is_word,
|
||||
command_line, comment,
|
||||
command_line::{self, Args},
|
||||
comment,
|
||||
doc_formatter::TextFormat,
|
||||
encoding, find_workspace,
|
||||
graphemes::{self, next_grapheme_boundary},
|
||||
@@ -34,7 +37,7 @@ use helix_core::{
|
||||
regex::{self, Regex},
|
||||
search::{self, CharMatcher},
|
||||
selection, surround,
|
||||
syntax::{BlockCommentToken, LanguageServerFeature},
|
||||
syntax::config::{BlockCommentToken, LanguageServerFeature},
|
||||
text_annotations::{Overlay, TextAnnotations},
|
||||
textobject,
|
||||
unicode::width::UnicodeWidthChar,
|
||||
@@ -44,6 +47,7 @@ use helix_core::{
|
||||
use helix_view::{
|
||||
document::{FormatterError, Mode, SCRATCH_BUFFER_NAME},
|
||||
editor::Action,
|
||||
expansion,
|
||||
info::Info,
|
||||
input::KeyEvent,
|
||||
keyboard::KeyCode,
|
||||
@@ -405,9 +409,13 @@ impl MappableCommand {
|
||||
buffer_picker, "Open buffer picker",
|
||||
jumplist_picker, "Open jumplist picker",
|
||||
symbol_picker, "Open symbol picker",
|
||||
syntax_symbol_picker, "Open symbol picker from syntax information",
|
||||
lsp_or_syntax_symbol_picker, "Open symbol picker from LSP or syntax information",
|
||||
changed_file_picker, "Open changed file picker",
|
||||
select_references_to_symbol_under_cursor, "Select symbol references",
|
||||
workspace_symbol_picker, "Open workspace symbol picker",
|
||||
syntax_workspace_symbol_picker, "Open workspace symbol picker from syntax information",
|
||||
lsp_or_syntax_workspace_symbol_picker, "Open workspace symbol picker from LSP or syntax information",
|
||||
diagnostics_picker, "Open diagnostic picker",
|
||||
workspace_diagnostics_picker, "Open workspace diagnostic picker",
|
||||
last_picker, "Open last picker",
|
||||
@@ -466,6 +474,8 @@ impl MappableCommand {
|
||||
smart_tab, "Insert tab if all cursors have all whitespace to their left; otherwise, run a separate command.",
|
||||
insert_tab, "Insert tab char",
|
||||
insert_newline, "Insert newline char",
|
||||
insert_char_interactive, "Insert an interactively-chosen char",
|
||||
append_char_interactive, "Append an interactively-chosen char",
|
||||
delete_char_backward, "Delete previous char",
|
||||
delete_char_forward, "Delete next char",
|
||||
delete_word_backward, "Delete previous word",
|
||||
@@ -565,6 +575,8 @@ impl MappableCommand {
|
||||
goto_prev_comment, "Goto previous comment",
|
||||
goto_next_test, "Goto next test",
|
||||
goto_prev_test, "Goto previous test",
|
||||
goto_next_xml_element, "Goto next (X)HTML element",
|
||||
goto_prev_xml_element, "Goto previous (X)HTML element",
|
||||
goto_next_entry, "Goto next pairing",
|
||||
goto_prev_entry, "Goto previous pairing",
|
||||
goto_next_paragraph, "Goto next paragraph",
|
||||
@@ -599,8 +611,10 @@ impl MappableCommand {
|
||||
command_palette, "Open command palette",
|
||||
goto_word, "Jump to a two-character label",
|
||||
extend_to_word, "Extend to a two-character label",
|
||||
goto_next_tabstop, "goto next snippet placeholder",
|
||||
goto_prev_tabstop, "goto next snippet placeholder",
|
||||
goto_next_tabstop, "Goto next snippet placeholder",
|
||||
goto_prev_tabstop, "Goto next snippet placeholder",
|
||||
rotate_selections_first, "Make the first selection your primary one",
|
||||
rotate_selections_last, "Make the last selection your primary one",
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3187,9 +3201,11 @@ fn buffer_picker(cx: &mut Context) {
|
||||
.into()
|
||||
}),
|
||||
];
|
||||
let initial_cursor = if items.len() <= 1 { 0 } else { 1 };
|
||||
let picker = Picker::new(columns, 2, items, (), |cx, meta, action| {
|
||||
cx.editor.switch(meta.id, action);
|
||||
})
|
||||
.with_initial_cursor(initial_cursor)
|
||||
.with_preview(|editor, meta| {
|
||||
let doc = &editor.documents.get(&meta.id)?;
|
||||
let lines = doc.selections().values().next().map(|selection| {
|
||||
@@ -3503,12 +3519,12 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
|
||||
enter_insert_mode(cx);
|
||||
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let loader = cx.editor.syn_loader.load();
|
||||
|
||||
let text = doc.text().slice(..);
|
||||
let contents = doc.text();
|
||||
let selection = doc.selection(view.id);
|
||||
|
||||
let language_config = doc.language_config();
|
||||
let syntax = doc.syntax();
|
||||
let tab_width = doc.tab_width();
|
||||
|
||||
@@ -3524,7 +3540,7 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
|
||||
let line_end_index = cursor_line_start;
|
||||
|
||||
let indent = indent::indent_for_newline(
|
||||
language_config,
|
||||
&loader,
|
||||
syntax,
|
||||
&doc.config.load().indent_heuristic,
|
||||
&doc.indent_style,
|
||||
@@ -3634,6 +3650,7 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation)
|
||||
enter_insert_mode(cx);
|
||||
let config = cx.editor.config();
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let loader = cx.editor.syn_loader.load();
|
||||
|
||||
let text = doc.text().slice(..);
|
||||
let contents = doc.text();
|
||||
@@ -3683,7 +3700,7 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation)
|
||||
let indent = match line.first_non_whitespace_char() {
|
||||
Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
|
||||
_ => indent::indent_for_newline(
|
||||
doc.language_config(),
|
||||
&loader,
|
||||
doc.syntax(),
|
||||
&config.indent_heuristic,
|
||||
&doc.indent_style,
|
||||
@@ -3723,11 +3740,13 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation)
|
||||
.map(|token| token.len() + 1) // `+ 1` for the extra space added
|
||||
.unwrap_or_default();
|
||||
for i in 0..count {
|
||||
// pos -> beginning of reference line,
|
||||
// + (i * (1+indent_len + comment_len)) -> beginning of i'th line from pos (possibly including comment token)
|
||||
// pos -> beginning of reference line,
|
||||
// + (i * (line_ending_len + indent_len + comment_len)) -> beginning of i'th line from pos (possibly including comment token)
|
||||
// + indent_len + comment_len -> -> indent for i'th line
|
||||
ranges.push(Range::point(
|
||||
pos + (i * (1 + indent_len + comment_len)) + indent_len + comment_len,
|
||||
pos + (i * (doc.line_ending.len_chars() + indent_len + comment_len))
|
||||
+ indent_len
|
||||
+ comment_len,
|
||||
));
|
||||
}
|
||||
|
||||
@@ -3761,7 +3780,8 @@ fn normal_mode(cx: &mut Context) {
|
||||
}
|
||||
|
||||
// Store a jump on the jumplist.
|
||||
fn push_jump(view: &mut View, doc: &Document) {
|
||||
fn push_jump(view: &mut View, doc: &mut Document) {
|
||||
doc.append_changes_to_history(view);
|
||||
let jump = (doc.id(), doc.selection(view.id).clone());
|
||||
view.jumps.push(jump);
|
||||
}
|
||||
@@ -4090,7 +4110,7 @@ fn hunk_range(hunk: Hunk, text: RopeSlice) -> Range {
|
||||
}
|
||||
|
||||
pub mod insert {
|
||||
use crate::events::PostInsertChar;
|
||||
use crate::{events::PostInsertChar, key};
|
||||
|
||||
use super::*;
|
||||
pub type Hook = fn(&Rope, &Selection, char) -> Option<Transaction>;
|
||||
@@ -4169,11 +4189,15 @@ pub mod insert {
|
||||
}
|
||||
|
||||
pub fn insert_tab(cx: &mut Context) {
|
||||
insert_tab_impl(cx, 1)
|
||||
}
|
||||
|
||||
fn insert_tab_impl(cx: &mut Context, count: usize) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
// TODO: round out to nearest indentation level (for example a line with 3 spaces should
|
||||
// indent by one to reach 4 spaces).
|
||||
|
||||
let indent = Tendril::from(doc.indent_style.as_str());
|
||||
let indent = Tendril::from(doc.indent_style.as_str().repeat(count));
|
||||
let transaction = Transaction::insert(
|
||||
doc.text(),
|
||||
&doc.selection(view.id).clone().cursors(doc.text().slice(..)),
|
||||
@@ -4182,9 +4206,53 @@ pub mod insert {
|
||||
doc.apply(&transaction, view.id);
|
||||
}
|
||||
|
||||
pub fn append_char_interactive(cx: &mut Context) {
|
||||
// Save the current mode, so we can restore it later.
|
||||
let mode = cx.editor.mode;
|
||||
append_mode(cx);
|
||||
insert_selection_interactive(cx, mode);
|
||||
}
|
||||
|
||||
pub fn insert_char_interactive(cx: &mut Context) {
|
||||
let mode = cx.editor.mode;
|
||||
insert_mode(cx);
|
||||
insert_selection_interactive(cx, mode);
|
||||
}
|
||||
|
||||
fn insert_selection_interactive(cx: &mut Context, old_mode: Mode) {
|
||||
let count = cx.count();
|
||||
|
||||
// need to wait for next key
|
||||
cx.on_next_key(move |cx, event| {
|
||||
match event {
|
||||
KeyEvent {
|
||||
code: KeyCode::Char(ch),
|
||||
..
|
||||
} => {
|
||||
for _ in 0..count {
|
||||
insert::insert_char(cx, ch)
|
||||
}
|
||||
}
|
||||
key!(Enter) => {
|
||||
if count != 1 {
|
||||
cx.editor
|
||||
.set_error("inserting multiple newlines not yet supported");
|
||||
return;
|
||||
}
|
||||
insert_newline(cx)
|
||||
}
|
||||
key!(Tab) => insert_tab_impl(cx, count),
|
||||
_ => (),
|
||||
};
|
||||
// Restore the old mode.
|
||||
cx.editor.mode = old_mode;
|
||||
});
|
||||
}
|
||||
|
||||
pub fn insert_newline(cx: &mut Context) {
|
||||
let config = cx.editor.config();
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
let loader = cx.editor.syn_loader.load();
|
||||
let text = doc.text().slice(..);
|
||||
let line_ending = doc.line_ending.as_str();
|
||||
|
||||
@@ -4203,6 +4271,7 @@ pub mod insert {
|
||||
None
|
||||
};
|
||||
|
||||
let mut last_pos = 0;
|
||||
let mut transaction = Transaction::change_by_selection(contents, selection, |range| {
|
||||
// Tracks the number of trailing whitespace characters deleted by this selection.
|
||||
let mut chars_deleted = 0;
|
||||
@@ -4224,13 +4293,14 @@ pub mod insert {
|
||||
let (from, to, local_offs) = if let Some(idx) =
|
||||
text.slice(line_start..pos).last_non_whitespace_char()
|
||||
{
|
||||
let first_trailing_whitespace_char = (line_start + idx + 1).min(pos);
|
||||
let first_trailing_whitespace_char = (line_start + idx + 1).clamp(last_pos, pos);
|
||||
last_pos = pos;
|
||||
let line = text.line(current_line);
|
||||
|
||||
let indent = match line.first_non_whitespace_char() {
|
||||
Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
|
||||
_ => indent::indent_for_newline(
|
||||
doc.language_config(),
|
||||
&loader,
|
||||
doc.syntax(),
|
||||
&config.indent_heuristic,
|
||||
&doc.indent_style,
|
||||
@@ -5287,6 +5357,22 @@ fn rotate_selections_backward(cx: &mut Context) {
|
||||
rotate_selections(cx, Direction::Backward)
|
||||
}
|
||||
|
||||
fn rotate_selections_first(cx: &mut Context) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let mut selection = doc.selection(view.id).clone();
|
||||
selection.set_primary_index(0);
|
||||
doc.set_selection(view.id, selection);
|
||||
}
|
||||
|
||||
fn rotate_selections_last(cx: &mut Context) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let mut selection = doc.selection(view.id).clone();
|
||||
let len = selection.len();
|
||||
selection.set_primary_index(len - 1);
|
||||
doc.set_selection(view.id, selection);
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ReorderStrategy {
|
||||
RotateForward,
|
||||
RotateBackward,
|
||||
@@ -5299,34 +5385,50 @@ fn reorder_selection_contents(cx: &mut Context, strategy: ReorderStrategy) {
|
||||
let text = doc.text().slice(..);
|
||||
|
||||
let selection = doc.selection(view.id);
|
||||
let mut fragments: Vec<_> = selection
|
||||
|
||||
let mut ranges: Vec<_> = selection
|
||||
.slices(text)
|
||||
.map(|fragment| fragment.chunks().collect())
|
||||
.collect();
|
||||
|
||||
let group = count
|
||||
.map(|count| count.get())
|
||||
.unwrap_or(fragments.len()) // default to rotating everything as one group
|
||||
.min(fragments.len());
|
||||
let rotate_by = count.map_or(1, |count| count.get().min(ranges.len()));
|
||||
|
||||
for chunk in fragments.chunks_mut(group) {
|
||||
// TODO: also modify main index
|
||||
match strategy {
|
||||
ReorderStrategy::RotateForward => chunk.rotate_right(1),
|
||||
ReorderStrategy::RotateBackward => chunk.rotate_left(1),
|
||||
ReorderStrategy::Reverse => chunk.reverse(),
|
||||
};
|
||||
}
|
||||
let primary_index = match strategy {
|
||||
ReorderStrategy::RotateForward => {
|
||||
ranges.rotate_right(rotate_by);
|
||||
// Like `usize::wrapping_add`, but provide a custom range from `0` to `ranges.len()`
|
||||
(selection.primary_index() + ranges.len() + rotate_by) % ranges.len()
|
||||
}
|
||||
ReorderStrategy::RotateBackward => {
|
||||
ranges.rotate_left(rotate_by);
|
||||
// Like `usize::wrapping_sub`, but provide a custom range from `0` to `ranges.len()`
|
||||
(selection.primary_index() + ranges.len() - rotate_by) % ranges.len()
|
||||
}
|
||||
ReorderStrategy::Reverse => {
|
||||
if rotate_by % 2 == 0 {
|
||||
// nothing changed, if we reverse something an even
|
||||
// amount of times, the output will be the same
|
||||
return;
|
||||
}
|
||||
ranges.reverse();
|
||||
// -1 to turn 1-based len into 0-based index
|
||||
(ranges.len() - 1) - selection.primary_index()
|
||||
}
|
||||
};
|
||||
|
||||
let transaction = Transaction::change(
|
||||
doc.text(),
|
||||
selection
|
||||
.ranges()
|
||||
.iter()
|
||||
.zip(fragments)
|
||||
.zip(ranges)
|
||||
.map(|(range, fragment)| (range.from(), range.to(), Some(fragment))),
|
||||
);
|
||||
|
||||
doc.set_selection(
|
||||
view.id,
|
||||
Selection::new(selection.ranges().into(), primary_index),
|
||||
);
|
||||
doc.apply(&transaction, view.id);
|
||||
}
|
||||
|
||||
@@ -5787,19 +5889,14 @@ fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direct
|
||||
let count = cx.count();
|
||||
let motion = move |editor: &mut Editor| {
|
||||
let (view, doc) = current!(editor);
|
||||
if let Some((lang_config, syntax)) = doc.language_config().zip(doc.syntax()) {
|
||||
let loader = editor.syn_loader.load();
|
||||
if let Some(syntax) = doc.syntax() {
|
||||
let text = doc.text().slice(..);
|
||||
let root = syntax.tree().root_node();
|
||||
|
||||
let selection = doc.selection(view.id).clone().transform(|range| {
|
||||
let new_range = movement::goto_treesitter_object(
|
||||
text,
|
||||
range,
|
||||
object,
|
||||
direction,
|
||||
root,
|
||||
lang_config,
|
||||
count,
|
||||
text, range, object, direction, &root, syntax, &loader, count,
|
||||
);
|
||||
|
||||
if editor.mode == Mode::Select {
|
||||
@@ -5863,6 +5960,14 @@ fn goto_prev_test(cx: &mut Context) {
|
||||
goto_ts_object_impl(cx, "test", Direction::Backward)
|
||||
}
|
||||
|
||||
fn goto_next_xml_element(cx: &mut Context) {
|
||||
goto_ts_object_impl(cx, "xml-element", Direction::Forward)
|
||||
}
|
||||
|
||||
fn goto_prev_xml_element(cx: &mut Context) {
|
||||
goto_ts_object_impl(cx, "xml-element", Direction::Backward)
|
||||
}
|
||||
|
||||
fn goto_next_entry(cx: &mut Context) {
|
||||
goto_ts_object_impl(cx, "entry", Direction::Forward)
|
||||
}
|
||||
@@ -5887,21 +5992,15 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) {
|
||||
if let Some(ch) = event.char() {
|
||||
let textobject = move |editor: &mut Editor| {
|
||||
let (view, doc) = current!(editor);
|
||||
let loader = editor.syn_loader.load();
|
||||
let text = doc.text().slice(..);
|
||||
|
||||
let textobject_treesitter = |obj_name: &str, range: Range| -> Range {
|
||||
let (lang_config, syntax) = match doc.language_config().zip(doc.syntax()) {
|
||||
Some(t) => t,
|
||||
None => return range,
|
||||
let Some(syntax) = doc.syntax() else {
|
||||
return range;
|
||||
};
|
||||
textobject::textobject_treesitter(
|
||||
text,
|
||||
range,
|
||||
objtype,
|
||||
obj_name,
|
||||
syntax.tree().root_node(),
|
||||
lang_config,
|
||||
count,
|
||||
text, range, objtype, obj_name, syntax, &loader, count,
|
||||
)
|
||||
};
|
||||
|
||||
@@ -5936,6 +6035,7 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) {
|
||||
'c' => textobject_treesitter("comment", range),
|
||||
'T' => textobject_treesitter("test", range),
|
||||
'e' => textobject_treesitter("entry", range),
|
||||
'x' => textobject_treesitter("xml-element", range),
|
||||
'p' => textobject::textobject_paragraph(text, range, objtype, count),
|
||||
'm' => textobject::textobject_pair_surround_closest(
|
||||
doc.syntax(),
|
||||
@@ -5980,6 +6080,7 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) {
|
||||
("e", "Data structure entry (tree-sitter)"),
|
||||
("m", "Closest surrounding pair (tree-sitter)"),
|
||||
("g", "Change"),
|
||||
("x", "(X)HTML element (tree-sitter)"),
|
||||
(" ", "... or any character acting as a pair"),
|
||||
];
|
||||
|
||||
@@ -6157,64 +6258,52 @@ enum ShellBehavior {
|
||||
}
|
||||
|
||||
fn shell_pipe(cx: &mut Context) {
|
||||
shell_prompt(cx, "pipe:".into(), ShellBehavior::Replace);
|
||||
shell_prompt_for_behavior(cx, "pipe:".into(), ShellBehavior::Replace);
|
||||
}
|
||||
|
||||
fn shell_pipe_to(cx: &mut Context) {
|
||||
shell_prompt(cx, "pipe-to:".into(), ShellBehavior::Ignore);
|
||||
shell_prompt_for_behavior(cx, "pipe-to:".into(), ShellBehavior::Ignore);
|
||||
}
|
||||
|
||||
fn shell_insert_output(cx: &mut Context) {
|
||||
shell_prompt(cx, "insert-output:".into(), ShellBehavior::Insert);
|
||||
shell_prompt_for_behavior(cx, "insert-output:".into(), ShellBehavior::Insert);
|
||||
}
|
||||
|
||||
fn shell_append_output(cx: &mut Context) {
|
||||
shell_prompt(cx, "append-output:".into(), ShellBehavior::Append);
|
||||
shell_prompt_for_behavior(cx, "append-output:".into(), ShellBehavior::Append);
|
||||
}
|
||||
|
||||
fn shell_keep_pipe(cx: &mut Context) {
|
||||
ui::prompt(
|
||||
cx,
|
||||
"keep-pipe:".into(),
|
||||
Some('|'),
|
||||
ui::completers::none,
|
||||
move |cx, input: &str, event: PromptEvent| {
|
||||
let shell = &cx.editor.config().shell;
|
||||
if event != PromptEvent::Validate {
|
||||
return;
|
||||
}
|
||||
if input.is_empty() {
|
||||
return;
|
||||
}
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let selection = doc.selection(view.id);
|
||||
shell_prompt(cx, "keep-pipe:".into(), |cx, args| {
|
||||
let shell = &cx.editor.config().shell;
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let selection = doc.selection(view.id);
|
||||
|
||||
let mut ranges = SmallVec::with_capacity(selection.len());
|
||||
let old_index = selection.primary_index();
|
||||
let mut index: Option<usize> = None;
|
||||
let text = doc.text().slice(..);
|
||||
let mut ranges = SmallVec::with_capacity(selection.len());
|
||||
let old_index = selection.primary_index();
|
||||
let mut index: Option<usize> = None;
|
||||
let text = doc.text().slice(..);
|
||||
|
||||
for (i, range) in selection.ranges().iter().enumerate() {
|
||||
let fragment = range.slice(text);
|
||||
if let Err(err) = shell_impl(shell, input, Some(fragment.into())) {
|
||||
log::debug!("Shell command failed: {}", err);
|
||||
} else {
|
||||
ranges.push(*range);
|
||||
if i >= old_index && index.is_none() {
|
||||
index = Some(ranges.len() - 1);
|
||||
}
|
||||
for (i, range) in selection.ranges().iter().enumerate() {
|
||||
let fragment = range.slice(text);
|
||||
if let Err(err) = shell_impl(shell, args.join(" ").as_str(), Some(fragment.into())) {
|
||||
log::debug!("Shell command failed: {}", err);
|
||||
} else {
|
||||
ranges.push(*range);
|
||||
if i >= old_index && index.is_none() {
|
||||
index = Some(ranges.len() - 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ranges.is_empty() {
|
||||
cx.editor.set_error("No selections remaining");
|
||||
return;
|
||||
}
|
||||
if ranges.is_empty() {
|
||||
cx.editor.set_error("No selections remaining");
|
||||
return;
|
||||
}
|
||||
|
||||
let index = index.unwrap_or_else(|| ranges.len() - 1);
|
||||
doc.set_selection(view.id, Selection::new(ranges, index));
|
||||
},
|
||||
);
|
||||
let index = index.unwrap_or_else(|| ranges.len() - 1);
|
||||
doc.set_selection(view.id, Selection::new(ranges, index));
|
||||
});
|
||||
}
|
||||
|
||||
fn shell_impl(shell: &[String], cmd: &str, input: Option<Rope>) -> anyhow::Result<Tendril> {
|
||||
@@ -6369,25 +6458,35 @@ fn shell(cx: &mut compositor::Context, cmd: &str, behavior: &ShellBehavior) {
|
||||
view.ensure_cursor_in_view(doc, config.scrolloff);
|
||||
}
|
||||
|
||||
fn shell_prompt(cx: &mut Context, prompt: Cow<'static, str>, behavior: ShellBehavior) {
|
||||
fn shell_prompt<F>(cx: &mut Context, prompt: Cow<'static, str>, mut callback_fn: F)
|
||||
where
|
||||
F: FnMut(&mut compositor::Context, Args) + 'static,
|
||||
{
|
||||
ui::prompt(
|
||||
cx,
|
||||
prompt,
|
||||
Some('|'),
|
||||
ui::completers::shell,
|
||||
move |cx, input: &str, event: PromptEvent| {
|
||||
if event != PromptEvent::Validate {
|
||||
|editor, input| complete_command_args(editor, SHELL_SIGNATURE, &SHELL_COMPLETER, input, 0),
|
||||
move |cx, input, event| {
|
||||
if event != PromptEvent::Validate || input.is_empty() {
|
||||
return;
|
||||
}
|
||||
if input.is_empty() {
|
||||
return;
|
||||
match Args::parse(input, SHELL_SIGNATURE, true, |token| {
|
||||
expansion::expand(cx.editor, token).map_err(|err| err.into())
|
||||
}) {
|
||||
Ok(args) => callback_fn(cx, args),
|
||||
Err(err) => cx.editor.set_error(err.to_string()),
|
||||
}
|
||||
|
||||
shell(cx, input, &behavior);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn shell_prompt_for_behavior(cx: &mut Context, prompt: Cow<'static, str>, behavior: ShellBehavior) {
|
||||
shell_prompt(cx, prompt, move |cx, args| {
|
||||
shell(cx, args.join(" ").as_str(), &behavior)
|
||||
})
|
||||
}
|
||||
|
||||
fn suspend(_cx: &mut Context) {
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
@@ -6717,6 +6816,10 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
|
||||
// Calculate the jump candidates: ranges for any visible words with two or
|
||||
// more characters.
|
||||
let alphabet = &cx.editor.config().jump_label_alphabet;
|
||||
if alphabet.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let jump_label_limit = alphabet.len() * alphabet.len();
|
||||
let mut words = Vec::with_capacity(jump_label_limit);
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
@@ -6806,3 +6909,34 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
|
||||
}
|
||||
jump_to_label(cx, words, behaviour)
|
||||
}
|
||||
|
||||
fn lsp_or_syntax_symbol_picker(cx: &mut Context) {
|
||||
let doc = doc!(cx.editor);
|
||||
|
||||
if doc
|
||||
.language_servers_with_feature(LanguageServerFeature::DocumentSymbols)
|
||||
.next()
|
||||
.is_some()
|
||||
{
|
||||
lsp::symbol_picker(cx);
|
||||
} else if doc.syntax().is_some() {
|
||||
syntax_symbol_picker(cx);
|
||||
} else {
|
||||
cx.editor
|
||||
.set_error("No language server supporting document symbols or syntax info available");
|
||||
}
|
||||
}
|
||||
|
||||
fn lsp_or_syntax_workspace_symbol_picker(cx: &mut Context) {
|
||||
let doc = doc!(cx.editor);
|
||||
|
||||
if doc
|
||||
.language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols)
|
||||
.next()
|
||||
.is_some()
|
||||
{
|
||||
lsp::workspace_symbol_picker(cx);
|
||||
} else {
|
||||
syntax_workspace_symbol_picker(cx);
|
||||
}
|
||||
}
|
||||
|
@@ -5,13 +5,12 @@ use crate::{
|
||||
ui::{self, overlay::overlaid, Picker, Popup, Prompt, PromptEvent, Text},
|
||||
};
|
||||
use dap::{StackFrame, Thread, ThreadStates};
|
||||
use helix_core::syntax::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate};
|
||||
use helix_dap::{self as dap, Client};
|
||||
use helix_core::syntax::config::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate};
|
||||
use helix_dap::{self as dap, requests::TerminateArguments};
|
||||
use helix_lsp::block_on;
|
||||
use helix_view::editor::Breakpoint;
|
||||
|
||||
use serde_json::{to_value, Value};
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
use tui::text::Spans;
|
||||
|
||||
use std::collections::HashMap;
|
||||
@@ -59,7 +58,12 @@ fn thread_picker(
|
||||
move |cx, thread, _action| callback_fn(cx.editor, thread),
|
||||
)
|
||||
.with_preview(move |editor, thread| {
|
||||
let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?;
|
||||
let frames = editor
|
||||
.debug_adapters
|
||||
.get_active_client()
|
||||
.as_ref()?
|
||||
.stack_frames
|
||||
.get(&thread.id)?;
|
||||
let frame = frames.first()?;
|
||||
let path = frame.source.as_ref()?.path.as_ref()?.as_path();
|
||||
let pos = Some((
|
||||
@@ -116,34 +120,16 @@ pub fn dap_start_impl(
|
||||
params: Option<Vec<std::borrow::Cow<str>>>,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
let doc = doc!(cx.editor);
|
||||
|
||||
let config = doc
|
||||
.language_config()
|
||||
.and_then(|config| config.debugger.as_ref())
|
||||
.ok_or_else(|| anyhow!("No debug adapter available for language"))?;
|
||||
|
||||
let result = match socket {
|
||||
Some(socket) => block_on(Client::tcp(socket, 0)),
|
||||
None => block_on(Client::process(
|
||||
&config.transport,
|
||||
&config.command,
|
||||
config.args.iter().map(|arg| arg.as_str()).collect(),
|
||||
config.port_arg.as_deref(),
|
||||
0,
|
||||
)),
|
||||
};
|
||||
|
||||
let (mut debugger, events) = match result {
|
||||
Ok(r) => r,
|
||||
Err(e) => bail!("Failed to start debug session: {}", e),
|
||||
};
|
||||
|
||||
let request = debugger.initialize(config.name.clone());
|
||||
if let Err(e) = block_on(request) {
|
||||
bail!("Failed to initialize debug adapter: {}", e);
|
||||
}
|
||||
|
||||
debugger.quirks = config.quirks.clone();
|
||||
let id = cx
|
||||
.editor
|
||||
.debug_adapters
|
||||
.start_client(socket, config)
|
||||
.map_err(|e| anyhow!("Failed to start debug client: {}", e))?;
|
||||
|
||||
// TODO: avoid refetching all of this... pass a config in
|
||||
let template = match name {
|
||||
@@ -209,6 +195,13 @@ pub fn dap_start_impl(
|
||||
// }
|
||||
};
|
||||
|
||||
let debugger = match cx.editor.debug_adapters.get_client_mut(id) {
|
||||
Some(child) => child,
|
||||
None => {
|
||||
bail!("Failed to get child debugger.");
|
||||
}
|
||||
};
|
||||
|
||||
match &template.request[..] {
|
||||
"launch" => {
|
||||
let call = debugger.launch(args);
|
||||
@@ -222,14 +215,12 @@ pub fn dap_start_impl(
|
||||
};
|
||||
|
||||
// TODO: either await "initialized" or buffer commands until event is received
|
||||
cx.editor.debugger = Some(debugger);
|
||||
let stream = UnboundedReceiverStream::new(events);
|
||||
cx.editor.debugger_events.push(stream);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn dap_launch(cx: &mut Context) {
|
||||
if cx.editor.debugger.is_some() {
|
||||
// TODO: Now that we support multiple Clients, we could run multiple debuggers at once but for now keep this as is
|
||||
if cx.editor.debug_adapters.get_active_client().is_some() {
|
||||
cx.editor.set_error("Debugger is already running");
|
||||
return;
|
||||
}
|
||||
@@ -283,7 +274,7 @@ pub fn dap_launch(cx: &mut Context) {
|
||||
}
|
||||
|
||||
pub fn dap_restart(cx: &mut Context) {
|
||||
let debugger = match &cx.editor.debugger {
|
||||
let debugger = match cx.editor.debug_adapters.get_active_client() {
|
||||
Some(debugger) => debugger,
|
||||
None => {
|
||||
cx.editor.set_error("Debugger is not running");
|
||||
@@ -582,12 +573,17 @@ pub fn dap_variables(cx: &mut Context) {
|
||||
}
|
||||
|
||||
pub fn dap_terminate(cx: &mut Context) {
|
||||
cx.editor.set_status("Terminating debug session...");
|
||||
let debugger = debugger!(cx.editor);
|
||||
|
||||
let request = debugger.disconnect(None);
|
||||
let terminate_arguments = Some(TerminateArguments {
|
||||
restart: Some(false),
|
||||
});
|
||||
|
||||
let request = debugger.terminate(terminate_arguments);
|
||||
dap_callback(cx.jobs, request, |editor, _compositor, _response: ()| {
|
||||
// editor.set_error(format!("Failed to disconnect: {}", e));
|
||||
editor.debugger = None;
|
||||
editor.debug_adapters.unset_active_client();
|
||||
});
|
||||
}
|
||||
|
||||
|
@@ -14,7 +14,7 @@ use tui::{text::Span, widgets::Row};
|
||||
use super::{align_view, push_jump, Align, Context, Editor};
|
||||
|
||||
use helix_core::{
|
||||
diagnostic::DiagnosticProvider, syntax::LanguageServerFeature,
|
||||
diagnostic::DiagnosticProvider, syntax::config::LanguageServerFeature,
|
||||
text_annotations::InlineAnnotation, Selection, Uri,
|
||||
};
|
||||
use helix_stdx::path;
|
||||
@@ -46,7 +46,7 @@ macro_rules! language_server_with_feature {
|
||||
match language_server {
|
||||
Some(language_server) => language_server,
|
||||
None => {
|
||||
$editor.set_status(format!(
|
||||
$editor.set_error(format!(
|
||||
"No configured language server supports {}",
|
||||
$feature
|
||||
));
|
||||
@@ -231,6 +231,13 @@ fn diag_picker(
|
||||
}
|
||||
}
|
||||
|
||||
flat_diag.sort_by(|a, b| {
|
||||
a.diag
|
||||
.severity
|
||||
.unwrap_or(lsp::DiagnosticSeverity::HINT)
|
||||
.cmp(&b.diag.severity.unwrap_or(lsp::DiagnosticSeverity::HINT))
|
||||
});
|
||||
|
||||
let styles = DiagnosticStyles {
|
||||
hint: cx.editor.theme.get("hint"),
|
||||
info: cx.editor.theme.get("info"),
|
||||
@@ -252,6 +259,9 @@ fn diag_picker(
|
||||
.into()
|
||||
},
|
||||
),
|
||||
ui::PickerColumn::new("source", |item: &PickerDiagnostic, _| {
|
||||
item.diag.source.as_deref().unwrap_or("").into()
|
||||
}),
|
||||
ui::PickerColumn::new("code", |item: &PickerDiagnostic, _| {
|
||||
match item.diag.code.as_ref() {
|
||||
Some(NumberOrString::Number(n)) => n.to_string().into(),
|
||||
@@ -263,12 +273,12 @@ fn diag_picker(
|
||||
item.diag.message.as_str().into()
|
||||
}),
|
||||
];
|
||||
let mut primary_column = 2; // message
|
||||
let mut primary_column = 3; // message
|
||||
|
||||
if format == DiagnosticsFormat::ShowSourcePath {
|
||||
columns.insert(
|
||||
// between message code and message
|
||||
2,
|
||||
3,
|
||||
ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| {
|
||||
if let Some(path) = item.location.uri.as_path() {
|
||||
path::get_truncated_path(path)
|
||||
@@ -804,7 +814,9 @@ pub fn code_action(cx: &mut Context) {
|
||||
});
|
||||
picker.move_down(); // pre-select the first item
|
||||
|
||||
let popup = Popup::new("code-action", picker).with_scrollbar(false);
|
||||
let popup = Popup::new("code-action", picker)
|
||||
.with_scrollbar(false)
|
||||
.auto_close(true);
|
||||
|
||||
compositor.replace_or_push("code-action", popup);
|
||||
};
|
||||
@@ -923,7 +935,13 @@ where
|
||||
}
|
||||
let call = move |editor: &mut Editor, compositor: &mut Compositor| {
|
||||
if locations.is_empty() {
|
||||
editor.set_error("No definition found.");
|
||||
editor.set_error(match feature {
|
||||
LanguageServerFeature::GotoDeclaration => "No declaration found.",
|
||||
LanguageServerFeature::GotoDefinition => "No definition found.",
|
||||
LanguageServerFeature::GotoTypeDefinition => "No type definition found.",
|
||||
LanguageServerFeature::GotoImplementation => "No implementation found.",
|
||||
_ => "No location found.",
|
||||
});
|
||||
} else {
|
||||
goto_impl(editor, compositor, locations);
|
||||
}
|
||||
@@ -1357,6 +1375,7 @@ fn compute_inlay_hints_for_view(
|
||||
let mut padding_after_inlay_hints = Vec::new();
|
||||
|
||||
let doc_text = doc.text();
|
||||
let inlay_hints_length_limit = doc.config.load().lsp.inlay_hints_length_limit;
|
||||
|
||||
for hint in hints {
|
||||
let char_idx =
|
||||
@@ -1367,7 +1386,7 @@ fn compute_inlay_hints_for_view(
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let label = match hint.label {
|
||||
let mut label = match hint.label {
|
||||
lsp::InlayHintLabel::String(s) => s,
|
||||
lsp::InlayHintLabel::LabelParts(parts) => parts
|
||||
.into_iter()
|
||||
@@ -1375,6 +1394,31 @@ fn compute_inlay_hints_for_view(
|
||||
.collect::<Vec<_>>()
|
||||
.join(""),
|
||||
};
|
||||
// Truncate the hint if too long
|
||||
if let Some(limit) = inlay_hints_length_limit {
|
||||
// Limit on displayed width
|
||||
use helix_core::unicode::{
|
||||
segmentation::UnicodeSegmentation, width::UnicodeWidthStr,
|
||||
};
|
||||
|
||||
let width = label.width();
|
||||
let limit = limit.get().into();
|
||||
if width > limit {
|
||||
let mut floor_boundary = 0;
|
||||
let mut acc = 0;
|
||||
for (i, grapheme_cluster) in label.grapheme_indices(true) {
|
||||
acc += grapheme_cluster.width();
|
||||
|
||||
if acc > limit {
|
||||
floor_boundary = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
label.truncate(floor_boundary);
|
||||
label.push('…');
|
||||
}
|
||||
}
|
||||
|
||||
let inlay_hints_vec = match hint.kind {
|
||||
Some(lsp::InlayHintKind::TYPE) => &mut type_inlay_hints,
|
||||
|
446
helix-term/src/commands/syntax.rs
Normal file
446
helix-term/src/commands/syntax.rs
Normal file
@@ -0,0 +1,446 @@
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
iter,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use dashmap::DashMap;
|
||||
use futures_util::FutureExt;
|
||||
use grep_regex::RegexMatcherBuilder;
|
||||
use grep_searcher::{sinks, BinaryDetection, SearcherBuilder};
|
||||
use helix_core::{
|
||||
syntax::{Loader, QueryIterEvent},
|
||||
Rope, RopeSlice, Selection, Syntax, Uri,
|
||||
};
|
||||
use helix_stdx::{
|
||||
path,
|
||||
rope::{self, RopeSliceExt},
|
||||
};
|
||||
use helix_view::{
|
||||
align_view,
|
||||
document::{from_reader, SCRATCH_BUFFER_NAME},
|
||||
Align, Document, DocumentId, Editor,
|
||||
};
|
||||
use ignore::{DirEntry, WalkBuilder, WalkState};
|
||||
|
||||
use crate::{
|
||||
filter_picker_entry,
|
||||
ui::{
|
||||
overlay::overlaid,
|
||||
picker::{Injector, PathOrId},
|
||||
Picker, PickerColumn,
|
||||
},
|
||||
};
|
||||
|
||||
use super::Context;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum TagKind {
|
||||
Class,
|
||||
Constant,
|
||||
Function,
|
||||
Interface,
|
||||
Macro,
|
||||
Module,
|
||||
Struct,
|
||||
Type,
|
||||
}
|
||||
|
||||
impl TagKind {
|
||||
fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
Self::Class => "class",
|
||||
Self::Constant => "constant",
|
||||
Self::Function => "function",
|
||||
Self::Interface => "interface",
|
||||
Self::Macro => "macro",
|
||||
Self::Module => "module",
|
||||
Self::Struct => "struct",
|
||||
Self::Type => "type",
|
||||
}
|
||||
}
|
||||
|
||||
fn from_name(name: &str) -> Option<Self> {
|
||||
match name {
|
||||
"class" => Some(TagKind::Class),
|
||||
"constant" => Some(TagKind::Constant),
|
||||
"function" => Some(TagKind::Function),
|
||||
"interface" => Some(TagKind::Interface),
|
||||
"macro" => Some(TagKind::Macro),
|
||||
"module" => Some(TagKind::Module),
|
||||
"struct" => Some(TagKind::Struct),
|
||||
"type" => Some(TagKind::Type),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: Uri is cheap to clone and DocumentId is Copy
|
||||
#[derive(Debug, Clone)]
|
||||
enum UriOrDocumentId {
|
||||
Uri(Uri),
|
||||
Id(DocumentId),
|
||||
}
|
||||
|
||||
impl UriOrDocumentId {
|
||||
fn path_or_id(&self) -> Option<PathOrId<'_>> {
|
||||
match self {
|
||||
Self::Id(id) => Some(PathOrId::Id(*id)),
|
||||
Self::Uri(uri) => uri.as_path().map(PathOrId::Path),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Tag {
|
||||
kind: TagKind,
|
||||
name: String,
|
||||
start: usize,
|
||||
end: usize,
|
||||
start_line: usize,
|
||||
end_line: usize,
|
||||
doc: UriOrDocumentId,
|
||||
}
|
||||
|
||||
fn tags_iter<'a>(
|
||||
syntax: &'a Syntax,
|
||||
loader: &'a Loader,
|
||||
text: RopeSlice<'a>,
|
||||
doc: UriOrDocumentId,
|
||||
pattern: Option<&'a rope::Regex>,
|
||||
) -> impl Iterator<Item = Tag> + 'a {
|
||||
let mut tags_iter = syntax.tags(text, loader, ..);
|
||||
|
||||
iter::from_fn(move || loop {
|
||||
let QueryIterEvent::Match(mat) = tags_iter.next()? else {
|
||||
continue;
|
||||
};
|
||||
let query = &loader
|
||||
.tag_query(tags_iter.current_language())
|
||||
.expect("must have a tags query to emit matches")
|
||||
.query;
|
||||
let Some(kind) = query
|
||||
.capture_name(mat.capture)
|
||||
.strip_prefix("definition.")
|
||||
.and_then(TagKind::from_name)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let range = mat.node.byte_range();
|
||||
if pattern.is_some_and(|pattern| {
|
||||
!pattern.is_match(text.regex_input_at_bytes(range.start as usize..range.end as usize))
|
||||
}) {
|
||||
continue;
|
||||
}
|
||||
let start = text.byte_to_char(range.start as usize);
|
||||
let end = text.byte_to_char(range.end as usize);
|
||||
return Some(Tag {
|
||||
kind,
|
||||
name: text.slice(start..end).to_string(),
|
||||
start,
|
||||
end,
|
||||
start_line: text.char_to_line(start),
|
||||
end_line: text.char_to_line(end),
|
||||
doc: doc.clone(),
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
pub fn syntax_symbol_picker(cx: &mut Context) {
|
||||
let doc = doc!(cx.editor);
|
||||
let Some(syntax) = doc.syntax() else {
|
||||
cx.editor
|
||||
.set_error("Syntax tree is not available on this buffer");
|
||||
return;
|
||||
};
|
||||
let doc_id = doc.id();
|
||||
let text = doc.text().slice(..);
|
||||
let loader = cx.editor.syn_loader.load();
|
||||
let tags = tags_iter(syntax, &loader, text, UriOrDocumentId::Id(doc.id()), None);
|
||||
|
||||
let columns = vec![
|
||||
PickerColumn::new("kind", |tag: &Tag, _| tag.kind.as_str().into()),
|
||||
PickerColumn::new("name", |tag: &Tag, _| tag.name.as_str().into()),
|
||||
];
|
||||
|
||||
let picker = Picker::new(
|
||||
columns,
|
||||
1, // name
|
||||
tags,
|
||||
(),
|
||||
move |cx, tag, action| {
|
||||
cx.editor.switch(doc_id, action);
|
||||
let view = view_mut!(cx.editor);
|
||||
let doc = doc_mut!(cx.editor, &doc_id);
|
||||
doc.set_selection(view.id, Selection::single(tag.start, tag.end));
|
||||
if action.align_view(view, doc.id()) {
|
||||
align_view(doc, view, Align::Center)
|
||||
}
|
||||
},
|
||||
)
|
||||
.with_preview(|_editor, tag| {
|
||||
Some((tag.doc.path_or_id()?, Some((tag.start_line, tag.end_line))))
|
||||
})
|
||||
.truncate_start(false);
|
||||
|
||||
cx.push_layer(Box::new(overlaid(picker)));
|
||||
}
|
||||
|
||||
pub fn syntax_workspace_symbol_picker(cx: &mut Context) {
|
||||
#[derive(Debug)]
|
||||
struct SearchState {
|
||||
searcher_builder: SearcherBuilder,
|
||||
walk_builder: WalkBuilder,
|
||||
regex_matcher_builder: RegexMatcherBuilder,
|
||||
rope_regex_builder: rope::RegexBuilder,
|
||||
search_root: PathBuf,
|
||||
/// A cache of files that have been parsed in prior searches.
|
||||
syntax_cache: DashMap<PathBuf, Option<(Rope, Syntax)>>,
|
||||
}
|
||||
|
||||
let mut searcher_builder = SearcherBuilder::new();
|
||||
searcher_builder.binary_detection(BinaryDetection::quit(b'\x00'));
|
||||
|
||||
// Search from the workspace that the currently focused document is within. This behaves like global
|
||||
// search most of the time but helps when you have two projects open in splits.
|
||||
let search_root = if let Some(path) = doc!(cx.editor).path() {
|
||||
helix_loader::find_workspace_in(path).0
|
||||
} else {
|
||||
helix_loader::find_workspace().0
|
||||
};
|
||||
|
||||
let absolute_root = search_root
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| search_root.clone());
|
||||
|
||||
let config = cx.editor.config();
|
||||
let dedup_symlinks = config.file_picker.deduplicate_links;
|
||||
|
||||
let mut walk_builder = WalkBuilder::new(&search_root);
|
||||
walk_builder
|
||||
.hidden(config.file_picker.hidden)
|
||||
.parents(config.file_picker.parents)
|
||||
.ignore(config.file_picker.ignore)
|
||||
.follow_links(config.file_picker.follow_symlinks)
|
||||
.git_ignore(config.file_picker.git_ignore)
|
||||
.git_global(config.file_picker.git_global)
|
||||
.git_exclude(config.file_picker.git_exclude)
|
||||
.max_depth(config.file_picker.max_depth)
|
||||
.filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks))
|
||||
.add_custom_ignore_filename(helix_loader::config_dir().join("ignore"))
|
||||
.add_custom_ignore_filename(".helix/ignore");
|
||||
|
||||
let mut regex_matcher_builder = RegexMatcherBuilder::new();
|
||||
regex_matcher_builder.case_smart(config.search.smart_case);
|
||||
let mut rope_regex_builder = rope::RegexBuilder::new();
|
||||
rope_regex_builder.syntax(rope::Config::new().case_insensitive(config.search.smart_case));
|
||||
let state = SearchState {
|
||||
searcher_builder,
|
||||
walk_builder,
|
||||
regex_matcher_builder,
|
||||
rope_regex_builder,
|
||||
search_root,
|
||||
syntax_cache: DashMap::default(),
|
||||
};
|
||||
let reg = cx.register.unwrap_or('/');
|
||||
cx.editor.registers.last_search_register = reg;
|
||||
let columns = vec![
|
||||
PickerColumn::new("kind", |tag: &Tag, _| tag.kind.as_str().into()),
|
||||
PickerColumn::new("name", |tag: &Tag, _| tag.name.as_str().into()).without_filtering(),
|
||||
PickerColumn::new("path", |tag: &Tag, state: &SearchState| {
|
||||
match &tag.doc {
|
||||
UriOrDocumentId::Uri(uri) => {
|
||||
if let Some(path) = uri.as_path() {
|
||||
let path = if let Ok(stripped) = path.strip_prefix(&state.search_root) {
|
||||
stripped
|
||||
} else {
|
||||
path
|
||||
};
|
||||
path.to_string_lossy().into()
|
||||
} else {
|
||||
uri.to_string().into()
|
||||
}
|
||||
}
|
||||
// This picker only uses `Id` for scratch buffers for better display.
|
||||
UriOrDocumentId::Id(_) => SCRATCH_BUFFER_NAME.into(),
|
||||
}
|
||||
}),
|
||||
];
|
||||
|
||||
let get_tags = |query: &str,
|
||||
editor: &mut Editor,
|
||||
state: Arc<SearchState>,
|
||||
injector: &Injector<_, _>| {
|
||||
if query.len() < 3 {
|
||||
return async { Ok(()) }.boxed();
|
||||
}
|
||||
// Attempt to find the tag in any open documents.
|
||||
let pattern = match state.rope_regex_builder.build(query) {
|
||||
Ok(pattern) => pattern,
|
||||
Err(err) => return async { Err(anyhow::anyhow!(err)) }.boxed(),
|
||||
};
|
||||
let loader = editor.syn_loader.load();
|
||||
for doc in editor.documents() {
|
||||
let Some(syntax) = doc.syntax() else { continue };
|
||||
let text = doc.text().slice(..);
|
||||
let uri_or_id = doc
|
||||
.uri()
|
||||
.map(UriOrDocumentId::Uri)
|
||||
.unwrap_or_else(|| UriOrDocumentId::Id(doc.id()));
|
||||
for tag in tags_iter(syntax, &loader, text.slice(..), uri_or_id, Some(&pattern)) {
|
||||
if injector.push(tag).is_err() {
|
||||
return async { Ok(()) }.boxed();
|
||||
}
|
||||
}
|
||||
}
|
||||
if !state.search_root.exists() {
|
||||
return async { Err(anyhow::anyhow!("Current working directory does not exist")) }
|
||||
.boxed();
|
||||
}
|
||||
let matcher = match state.regex_matcher_builder.build(query) {
|
||||
Ok(matcher) => {
|
||||
// Clear any "Failed to compile regex" errors out of the statusline.
|
||||
editor.clear_status();
|
||||
matcher
|
||||
}
|
||||
Err(err) => {
|
||||
log::info!(
|
||||
"Failed to compile search pattern in workspace symbol search: {}",
|
||||
err
|
||||
);
|
||||
return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed();
|
||||
}
|
||||
};
|
||||
let pattern = Arc::new(pattern);
|
||||
let injector = injector.clone();
|
||||
let loader = editor.syn_loader.load();
|
||||
let documents: HashSet<_> = editor
|
||||
.documents()
|
||||
.filter_map(Document::path)
|
||||
.cloned()
|
||||
.collect();
|
||||
async move {
|
||||
let searcher = state.searcher_builder.build();
|
||||
state.walk_builder.build_parallel().run(|| {
|
||||
let mut searcher = searcher.clone();
|
||||
let matcher = matcher.clone();
|
||||
let injector = injector.clone();
|
||||
let loader = loader.clone();
|
||||
let documents = &documents;
|
||||
let pattern = pattern.clone();
|
||||
let syntax_cache = &state.syntax_cache;
|
||||
Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
|
||||
let entry = match entry {
|
||||
Ok(entry) => entry,
|
||||
Err(_) => return WalkState::Continue,
|
||||
};
|
||||
match entry.file_type() {
|
||||
Some(entry) if entry.is_file() => {}
|
||||
// skip everything else
|
||||
_ => return WalkState::Continue,
|
||||
};
|
||||
let path = entry.path();
|
||||
// If this document is open, skip it because we've already processed it above.
|
||||
if documents.contains(path) {
|
||||
return WalkState::Continue;
|
||||
};
|
||||
let mut quit = false;
|
||||
let sink = sinks::UTF8(|_line, _content| {
|
||||
if !syntax_cache.contains_key(path) {
|
||||
// Read the file into a Rope and attempt to recognize the language
|
||||
// and parse it with tree-sitter. Save the Rope and Syntax for future
|
||||
// queries.
|
||||
syntax_cache.insert(path.to_path_buf(), syntax_for_path(path, &loader));
|
||||
};
|
||||
let entry = syntax_cache.get(path).unwrap();
|
||||
let Some((text, syntax)) = entry.value() else {
|
||||
// If the file couldn't be parsed, move on.
|
||||
return Ok(false);
|
||||
};
|
||||
let uri = Uri::from(path::normalize(path));
|
||||
for tag in tags_iter(
|
||||
syntax,
|
||||
&loader,
|
||||
text.slice(..),
|
||||
UriOrDocumentId::Uri(uri),
|
||||
Some(&pattern),
|
||||
) {
|
||||
if injector.push(tag).is_err() {
|
||||
quit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Quit after seeing the first regex match. We only care to find files
|
||||
// that contain the pattern and then we run the tags query within
|
||||
// those. The location and contents of a match are irrelevant - it's
|
||||
// only important _if_ a file matches.
|
||||
Ok(false)
|
||||
});
|
||||
if let Err(err) = searcher.search_path(&matcher, path, sink) {
|
||||
log::info!("Workspace syntax search error: {}, {}", path.display(), err);
|
||||
}
|
||||
if quit {
|
||||
WalkState::Quit
|
||||
} else {
|
||||
WalkState::Continue
|
||||
}
|
||||
})
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
.boxed()
|
||||
};
|
||||
let picker = Picker::new(
|
||||
columns,
|
||||
1, // name
|
||||
[],
|
||||
state,
|
||||
move |cx, tag, action| {
|
||||
let doc_id = match &tag.doc {
|
||||
UriOrDocumentId::Id(id) => *id,
|
||||
UriOrDocumentId::Uri(uri) => match cx.editor.open(uri.as_path().expect(""), action) {
|
||||
Ok(id) => id,
|
||||
Err(e) => {
|
||||
cx.editor
|
||||
.set_error(format!("Failed to open file '{uri:?}': {e}"));
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
let doc = doc_mut!(cx.editor, &doc_id);
|
||||
let view = view_mut!(cx.editor);
|
||||
let len_chars = doc.text().len_chars();
|
||||
if tag.start >= len_chars || tag.end > len_chars {
|
||||
cx.editor.set_error("The location you jumped to does not exist anymore because the file has changed.");
|
||||
return;
|
||||
}
|
||||
doc.set_selection(view.id, Selection::single(tag.start, tag.end));
|
||||
if action.align_view(view, doc.id()) {
|
||||
align_view(doc, view, Align::Center)
|
||||
}
|
||||
},
|
||||
)
|
||||
.with_dynamic_query(get_tags, Some(275))
|
||||
.with_preview(move |_editor, tag| {
|
||||
Some((
|
||||
tag.doc.path_or_id()?,
|
||||
Some((tag.start_line, tag.end_line)),
|
||||
))
|
||||
})
|
||||
.truncate_start(false);
|
||||
cx.push_layer(Box::new(overlaid(picker)));
|
||||
}
|
||||
|
||||
/// Create a Rope and language config for a given existing path without creating a full Document.
|
||||
fn syntax_for_path(path: &Path, loader: &Loader) -> Option<(Rope, Syntax)> {
|
||||
let mut file = std::fs::File::open(path).ok()?;
|
||||
let (rope, _encoding, _has_bom) = from_reader(&mut file, None).ok()?;
|
||||
let text = rope.slice(..);
|
||||
let language = loader
|
||||
.language_for_filename(path)
|
||||
.or_else(|| loader.language_for_shebang(text))?;
|
||||
Syntax::new(text, language, loader)
|
||||
.ok()
|
||||
.map(|syntax| (rope, syntax))
|
||||
}
|
@@ -29,15 +29,6 @@ pub struct TypableCommand {
|
||||
pub signature: Signature,
|
||||
}
|
||||
|
||||
impl TypableCommand {
|
||||
fn completer_for_argument_number(&self, n: usize) -> &Completer {
|
||||
match self.completer.positional_args.get(n) {
|
||||
Some(completer) => completer,
|
||||
_ => &self.completer.var_args,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CommandCompleter {
|
||||
// Arguments with specific completion methods based on their position.
|
||||
@@ -68,6 +59,13 @@ impl CommandCompleter {
|
||||
var_args: completer,
|
||||
}
|
||||
}
|
||||
|
||||
fn for_argument_number(&self, n: usize) -> &Completer {
|
||||
match self.positional_args.get(n) {
|
||||
Some(completer) => completer,
|
||||
_ => &self.var_args,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn quit(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
|
||||
@@ -104,6 +102,10 @@ fn open(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow:
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
open_impl(cx, args, Action::Replace)
|
||||
}
|
||||
|
||||
fn open_impl(cx: &mut compositor::Context, args: Args, action: Action) -> anyhow::Result<()> {
|
||||
for arg in args {
|
||||
let (path, pos) = crate::args::parse_file(&arg);
|
||||
let path = helix_stdx::path::expand_tilde(path);
|
||||
@@ -113,7 +115,8 @@ fn open(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow:
|
||||
let callback = async move {
|
||||
let call: job::Callback = job::Callback::EditorCompositor(Box::new(
|
||||
move |editor: &mut Editor, compositor: &mut Compositor| {
|
||||
let picker = ui::file_picker(editor, path.into_owned());
|
||||
let picker =
|
||||
ui::file_picker(editor, path.into_owned()).with_default_action(action);
|
||||
compositor.push(Box::new(overlaid(picker)));
|
||||
},
|
||||
));
|
||||
@@ -122,7 +125,7 @@ fn open(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow:
|
||||
cx.jobs.callback(callback);
|
||||
} else {
|
||||
// Otherwise, just open the file
|
||||
let _ = cx.editor.open(&path, Action::Replace)?;
|
||||
let _ = cx.editor.open(&path, action)?;
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true));
|
||||
doc.set_selection(view.id, pos);
|
||||
@@ -230,38 +233,51 @@ fn force_buffer_close(
|
||||
buffer_close_by_ids_impl(cx, &document_ids, true)
|
||||
}
|
||||
|
||||
fn buffer_gather_others_impl(editor: &mut Editor) -> Vec<DocumentId> {
|
||||
let current_document = &doc!(editor).id();
|
||||
editor
|
||||
.documents()
|
||||
.map(|doc| doc.id())
|
||||
.filter(|doc_id| doc_id != current_document)
|
||||
.collect()
|
||||
fn buffer_gather_others_impl(editor: &mut Editor, skip_visible: bool) -> Vec<DocumentId> {
|
||||
if skip_visible {
|
||||
let visible_document_ids = editor
|
||||
.tree
|
||||
.views()
|
||||
.map(|view| &view.0.doc)
|
||||
.collect::<HashSet<_>>();
|
||||
editor
|
||||
.documents()
|
||||
.map(|doc| doc.id())
|
||||
.filter(|doc_id| !visible_document_ids.contains(doc_id))
|
||||
.collect()
|
||||
} else {
|
||||
let current_document = &doc!(editor).id();
|
||||
editor
|
||||
.documents()
|
||||
.map(|doc| doc.id())
|
||||
.filter(|doc_id| doc_id != current_document)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
fn buffer_close_others(
|
||||
cx: &mut compositor::Context,
|
||||
_args: Args,
|
||||
args: Args,
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let document_ids = buffer_gather_others_impl(cx.editor);
|
||||
let document_ids = buffer_gather_others_impl(cx.editor, args.has_flag("skip-visible"));
|
||||
buffer_close_by_ids_impl(cx, &document_ids, false)
|
||||
}
|
||||
|
||||
fn force_buffer_close_others(
|
||||
cx: &mut compositor::Context,
|
||||
_args: Args,
|
||||
args: Args,
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let document_ids = buffer_gather_others_impl(cx.editor);
|
||||
let document_ids = buffer_gather_others_impl(cx.editor, args.has_flag("skip-visible"));
|
||||
buffer_close_by_ids_impl(cx, &document_ids, true)
|
||||
}
|
||||
|
||||
@@ -321,7 +337,11 @@ fn buffer_previous(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_impl(cx: &mut compositor::Context, path: Option<&str>, force: bool) -> anyhow::Result<()> {
|
||||
fn write_impl(
|
||||
cx: &mut compositor::Context,
|
||||
path: Option<&str>,
|
||||
options: WriteOptions,
|
||||
) -> anyhow::Result<()> {
|
||||
let config = cx.editor.config();
|
||||
let jobs = &mut cx.jobs;
|
||||
let (view, doc) = current!(cx.editor);
|
||||
@@ -340,14 +360,14 @@ fn write_impl(cx: &mut compositor::Context, path: Option<&str>, force: bool) ->
|
||||
doc.append_changes_to_history(view);
|
||||
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
let fmt = if config.auto_format {
|
||||
let fmt = if config.auto_format && options.auto_format {
|
||||
doc.auto_format(cx.editor).map(|fmt| {
|
||||
let callback = make_format_callback(
|
||||
doc.id(),
|
||||
doc.version(),
|
||||
view.id,
|
||||
fmt,
|
||||
Some((path.map(Into::into), force)),
|
||||
Some((path.map(Into::into), options.force)),
|
||||
);
|
||||
|
||||
jobs.add(Job::with_callback(callback).wait_before_exiting());
|
||||
@@ -358,7 +378,7 @@ fn write_impl(cx: &mut compositor::Context, path: Option<&str>, force: bool) ->
|
||||
|
||||
if fmt.is_none() {
|
||||
let id = doc.id();
|
||||
cx.editor.save(id, path, force)?;
|
||||
cx.editor.save(id, path, options.force)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -423,12 +443,25 @@ fn insert_final_newline(doc: &mut Document, view_id: ViewId) {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct WriteOptions {
|
||||
pub force: bool,
|
||||
pub auto_format: bool,
|
||||
}
|
||||
|
||||
fn write(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
write_impl(cx, args.first(), false)
|
||||
write_impl(
|
||||
cx,
|
||||
args.first(),
|
||||
WriteOptions {
|
||||
force: false,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn force_write(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
|
||||
@@ -436,7 +469,14 @@ fn force_write(cx: &mut compositor::Context, args: Args, event: PromptEvent) ->
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
write_impl(cx, args.first(), true)
|
||||
write_impl(
|
||||
cx,
|
||||
args.first(),
|
||||
WriteOptions {
|
||||
force: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn write_buffer_close(
|
||||
@@ -448,7 +488,14 @@ fn write_buffer_close(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
write_impl(cx, args.first(), false)?;
|
||||
write_impl(
|
||||
cx,
|
||||
args.first(),
|
||||
WriteOptions {
|
||||
force: false,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)?;
|
||||
|
||||
let document_ids = buffer_gather_paths_impl(cx.editor, args);
|
||||
buffer_close_by_ids_impl(cx, &document_ids, false)
|
||||
@@ -463,7 +510,14 @@ fn force_write_buffer_close(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
write_impl(cx, args.first(), true)?;
|
||||
write_impl(
|
||||
cx,
|
||||
args.first(),
|
||||
WriteOptions {
|
||||
force: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)?;
|
||||
|
||||
let document_ids = buffer_gather_paths_impl(cx.editor, args);
|
||||
buffer_close_by_ids_impl(cx, &document_ids, false)
|
||||
@@ -644,7 +698,14 @@ fn write_quit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> a
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
write_impl(cx, args.first(), false)?;
|
||||
write_impl(
|
||||
cx,
|
||||
args.first(),
|
||||
WriteOptions {
|
||||
force: false,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)?;
|
||||
cx.block_try_flush_writes()?;
|
||||
quit(cx, Args::default(), event)
|
||||
}
|
||||
@@ -658,7 +719,14 @@ fn force_write_quit(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
write_impl(cx, args.first(), true)?;
|
||||
write_impl(
|
||||
cx,
|
||||
args.first(),
|
||||
WriteOptions {
|
||||
force: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)?;
|
||||
cx.block_try_flush_writes()?;
|
||||
force_quit(cx, Args::default(), event)
|
||||
}
|
||||
@@ -780,7 +848,7 @@ pub fn write_all_impl(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_all(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyhow::Result<()> {
|
||||
fn write_all(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -790,14 +858,14 @@ fn write_all(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> a
|
||||
WriteAllOptions {
|
||||
force: false,
|
||||
write_scratch: true,
|
||||
auto_format: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn force_write_all(
|
||||
cx: &mut compositor::Context,
|
||||
_args: Args,
|
||||
args: Args,
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
@@ -809,14 +877,14 @@ fn force_write_all(
|
||||
WriteAllOptions {
|
||||
force: true,
|
||||
write_scratch: true,
|
||||
auto_format: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn write_all_quit(
|
||||
cx: &mut compositor::Context,
|
||||
_args: Args,
|
||||
args: Args,
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
@@ -827,7 +895,7 @@ fn write_all_quit(
|
||||
WriteAllOptions {
|
||||
force: false,
|
||||
write_scratch: true,
|
||||
auto_format: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)?;
|
||||
quit_all_impl(cx, false)
|
||||
@@ -835,7 +903,7 @@ fn write_all_quit(
|
||||
|
||||
fn force_write_all_quit(
|
||||
cx: &mut compositor::Context,
|
||||
_args: Args,
|
||||
args: Args,
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
@@ -846,7 +914,7 @@ fn force_write_all_quit(
|
||||
WriteAllOptions {
|
||||
force: true,
|
||||
write_scratch: true,
|
||||
auto_format: true,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
);
|
||||
quit_all_impl(cx, true)
|
||||
@@ -1404,7 +1472,14 @@ fn update(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyho
|
||||
|
||||
let (_view, doc) = current!(cx.editor);
|
||||
if doc.is_modified() {
|
||||
write(cx, args, event)
|
||||
write_impl(
|
||||
cx,
|
||||
None,
|
||||
WriteOptions {
|
||||
force: false,
|
||||
auto_format: !args.has_flag(WRITE_NO_FORMAT_FLAG.name),
|
||||
},
|
||||
)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
@@ -1672,59 +1747,52 @@ fn tree_sitter_highlight_name(
|
||||
_args: Args,
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
fn find_highlight_at_cursor(
|
||||
cx: &mut compositor::Context<'_>,
|
||||
) -> Option<helix_core::syntax::Highlight> {
|
||||
use helix_core::syntax::HighlightEvent;
|
||||
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let syntax = doc.syntax()?;
|
||||
let text = doc.text().slice(..);
|
||||
let cursor = doc.selection(view.id).primary().cursor(text);
|
||||
let byte = text.char_to_byte(cursor);
|
||||
let node = syntax.descendant_for_byte_range(byte, byte)?;
|
||||
// Query the same range as the one used in syntax highlighting.
|
||||
let range = {
|
||||
// Calculate viewport byte ranges:
|
||||
let row = text.char_to_line(doc.view_offset(view.id).anchor.min(text.len_chars()));
|
||||
// Saturating subs to make it inclusive zero indexing.
|
||||
let last_line = text.len_lines().saturating_sub(1);
|
||||
let height = view.inner_area(doc).height;
|
||||
let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
|
||||
let start = text.line_to_byte(row.min(last_line));
|
||||
let end = text.line_to_byte(last_visible_line + 1);
|
||||
|
||||
start..end
|
||||
};
|
||||
|
||||
let mut highlight = None;
|
||||
|
||||
for event in syntax.highlight_iter(text, Some(range), None) {
|
||||
match event.unwrap() {
|
||||
HighlightEvent::Source { start, end }
|
||||
if start == node.start_byte() && end == node.end_byte() =>
|
||||
{
|
||||
return highlight;
|
||||
}
|
||||
HighlightEvent::HighlightStart(hl) => {
|
||||
highlight = Some(hl);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
if event != PromptEvent::Validate {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let Some(highlight) = find_highlight_at_cursor(cx) else {
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
let Some(syntax) = doc.syntax() else {
|
||||
return Ok(());
|
||||
};
|
||||
let text = doc.text().slice(..);
|
||||
let cursor = doc.selection(view.id).primary().cursor(text);
|
||||
let byte = text.char_to_byte(cursor) as u32;
|
||||
// Query the same range as the one used in syntax highlighting.
|
||||
let range = {
|
||||
// Calculate viewport byte ranges:
|
||||
let row = text.char_to_line(doc.view_offset(view.id).anchor.min(text.len_chars()));
|
||||
// Saturating subs to make it inclusive zero indexing.
|
||||
let last_line = text.len_lines().saturating_sub(1);
|
||||
let height = view.inner_area(doc).height;
|
||||
let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
|
||||
let start = text.line_to_byte(row.min(last_line)) as u32;
|
||||
let end = text.line_to_byte(last_visible_line + 1) as u32;
|
||||
|
||||
let content = cx.editor.theme.scope(highlight.0).to_string();
|
||||
start..end
|
||||
};
|
||||
|
||||
let loader = cx.editor.syn_loader.load();
|
||||
let mut highlighter = syntax.highlighter(text, &loader, range);
|
||||
let mut highlights = Vec::new();
|
||||
|
||||
while highlighter.next_event_offset() <= byte {
|
||||
let (event, new_highlights) = highlighter.advance();
|
||||
if event == helix_core::syntax::HighlightEvent::Refresh {
|
||||
highlights.clear();
|
||||
}
|
||||
highlights.extend(new_highlights);
|
||||
}
|
||||
|
||||
let content = highlights
|
||||
.into_iter()
|
||||
.fold(String::new(), |mut acc, highlight| {
|
||||
if !acc.is_empty() {
|
||||
acc.push_str(", ");
|
||||
}
|
||||
acc.push_str(cx.editor.theme.scope(highlight));
|
||||
acc
|
||||
});
|
||||
|
||||
let callback = async move {
|
||||
let call: job::Callback = Callback::EditorCompositor(Box::new(
|
||||
@@ -1750,10 +1818,7 @@ fn vsplit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyho
|
||||
if args.is_empty() {
|
||||
split(cx.editor, Action::VerticalSplit);
|
||||
} else {
|
||||
for arg in args {
|
||||
cx.editor
|
||||
.open(&PathBuf::from(arg.as_ref()), Action::VerticalSplit)?;
|
||||
}
|
||||
open_impl(cx, args, Action::VerticalSplit)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -1767,10 +1832,7 @@ fn hsplit(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyho
|
||||
if args.is_empty() {
|
||||
split(cx.editor, Action::HorizontalSplit);
|
||||
} else {
|
||||
for arg in args {
|
||||
cx.editor
|
||||
.open(&PathBuf::from(arg.as_ref()), Action::HorizontalSplit)?;
|
||||
}
|
||||
open_impl(cx, args, Action::HorizontalSplit)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -1801,7 +1863,7 @@ fn debug_eval(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> a
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(debugger) = cx.editor.debugger.as_mut() {
|
||||
if let Some(debugger) = cx.editor.debug_adapters.get_active_client() {
|
||||
let (frame, thread_id) = match (debugger.active_frame, debugger.thread_id) {
|
||||
(Some(frame), Some(thread_id)) => (frame, thread_id),
|
||||
_ => {
|
||||
@@ -2090,10 +2152,11 @@ fn language(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> any
|
||||
|
||||
let doc = doc_mut!(cx.editor);
|
||||
|
||||
let loader = cx.editor.syn_loader.load();
|
||||
if &args[0] == DEFAULT_LANGUAGE_NAME {
|
||||
doc.set_language(None, None)
|
||||
doc.set_language(None, &loader)
|
||||
} else {
|
||||
doc.set_language_by_language_id(&args[0], cx.editor.syn_loader.clone())?;
|
||||
doc.set_language_by_language_id(&args[0], &loader)?;
|
||||
}
|
||||
doc.detect_indent_and_line_ending();
|
||||
|
||||
@@ -2111,10 +2174,6 @@ fn sort(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow:
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
sort_impl(cx, args.has_flag("reverse"))
|
||||
}
|
||||
|
||||
fn sort_impl(cx: &mut compositor::Context, reverse: bool) -> anyhow::Result<()> {
|
||||
let scrolloff = cx.editor.config().scrolloff;
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let text = doc.text().slice(..);
|
||||
@@ -2130,10 +2189,14 @@ fn sort_impl(cx: &mut compositor::Context, reverse: bool) -> anyhow::Result<()>
|
||||
.map(|fragment| fragment.chunks().collect())
|
||||
.collect();
|
||||
|
||||
fragments.sort_by(match reverse {
|
||||
true => |a: &Tendril, b: &Tendril| b.cmp(a),
|
||||
false => |a: &Tendril, b: &Tendril| a.cmp(b),
|
||||
});
|
||||
fragments.sort_by(
|
||||
match (args.has_flag("insensitive"), args.has_flag("reverse")) {
|
||||
(true, true) => |a: &Tendril, b: &Tendril| b.to_lowercase().cmp(&a.to_lowercase()),
|
||||
(true, false) => |a: &Tendril, b: &Tendril| a.to_lowercase().cmp(&b.to_lowercase()),
|
||||
(false, true) => |a: &Tendril, b: &Tendril| b.cmp(a),
|
||||
(false, false) => |a: &Tendril, b: &Tendril| a.cmp(b),
|
||||
},
|
||||
);
|
||||
|
||||
let transaction = Transaction::change(
|
||||
doc.text(),
|
||||
@@ -2199,8 +2262,8 @@ fn tree_sitter_subtree(
|
||||
if let Some(syntax) = doc.syntax() {
|
||||
let primary_selection = doc.selection(view.id).primary();
|
||||
let text = doc.text();
|
||||
let from = text.char_to_byte(primary_selection.from());
|
||||
let to = text.char_to_byte(primary_selection.to());
|
||||
let from = text.char_to_byte(primary_selection.from()) as u32;
|
||||
let to = text.char_to_byte(primary_selection.to()) as u32;
|
||||
if let Some(selected_node) = syntax.descendant_for_byte_range(from, to) {
|
||||
let mut contents = String::from("```tsq\n");
|
||||
helix_core::syntax::pretty_print_tree(&mut contents, selected_node)?;
|
||||
@@ -2469,7 +2532,16 @@ fn move_buffer(cx: &mut compositor::Context, args: Args, event: PromptEvent) ->
|
||||
.path()
|
||||
.context("Scratch buffer cannot be moved. Use :write instead")?
|
||||
.clone();
|
||||
let new_path = args.first().unwrap().to_string();
|
||||
let new_path: PathBuf = args.first().unwrap().into();
|
||||
|
||||
// if new_path is a directory, append the original file name
|
||||
// to move the file into that directory.
|
||||
let new_path = old_path
|
||||
.file_name()
|
||||
.filter(|_| new_path.is_dir())
|
||||
.map(|old_file_name| new_path.join(old_file_name))
|
||||
.unwrap_or(new_path);
|
||||
|
||||
if let Err(err) = cx.editor.move_path(&old_path, new_path.as_ref()) {
|
||||
bail!("Could not move file: {err}");
|
||||
}
|
||||
@@ -2568,23 +2640,41 @@ fn noop(_cx: &mut compositor::Context, _args: Args, _event: PromptEvent) -> anyh
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// This command accepts a single boolean --skip-visible flag and no positionals.
|
||||
const BUFFER_CLOSE_OTHERS_SIGNATURE: Signature = Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[Flag {
|
||||
name: "skip-visible",
|
||||
alias: Some('s'),
|
||||
doc: "don't close buffers that are visible",
|
||||
..Flag::DEFAULT
|
||||
}],
|
||||
..Signature::DEFAULT
|
||||
};
|
||||
|
||||
// TODO: SHELL_SIGNATURE should specify var args for arguments, so that just completers::filename can be used,
|
||||
// but Signature does not yet allow for var args.
|
||||
|
||||
/// This command handles all of its input as-is with no quoting or flags.
|
||||
const SHELL_SIGNATURE: Signature = Signature {
|
||||
pub const SHELL_SIGNATURE: Signature = Signature {
|
||||
positionals: (1, Some(2)),
|
||||
raw_after: Some(1),
|
||||
..Signature::DEFAULT
|
||||
};
|
||||
|
||||
const SHELL_COMPLETER: CommandCompleter = CommandCompleter::positional(&[
|
||||
pub const SHELL_COMPLETER: CommandCompleter = CommandCompleter::positional(&[
|
||||
// Command name
|
||||
completers::program,
|
||||
// Shell argument(s)
|
||||
completers::repeating_filenames,
|
||||
]);
|
||||
|
||||
const WRITE_NO_FORMAT_FLAG: Flag = Flag {
|
||||
name: "no-format",
|
||||
doc: "skip auto-formatting",
|
||||
..Flag::DEFAULT
|
||||
};
|
||||
|
||||
pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
TypableCommand {
|
||||
name: "quit",
|
||||
@@ -2647,10 +2737,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
doc: "Close all buffers but the currently focused one.",
|
||||
fun: buffer_close_others,
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
signature: BUFFER_CLOSE_OTHERS_SIGNATURE,
|
||||
},
|
||||
TypableCommand {
|
||||
name: "buffer-close-others!",
|
||||
@@ -2658,10 +2745,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
doc: "Force close all buffers but the currently focused one.",
|
||||
fun: force_buffer_close_others,
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
signature: BUFFER_CLOSE_OTHERS_SIGNATURE,
|
||||
},
|
||||
TypableCommand {
|
||||
name: "buffer-close-all",
|
||||
@@ -2715,6 +2799,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::positional(&[completers::filename]),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(1)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2726,6 +2811,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::positional(&[completers::filename]),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(1)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2737,6 +2823,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::positional(&[completers::filename]),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(1)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2748,6 +2835,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::positional(&[completers::filename]),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(1)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2828,6 +2916,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::positional(&[completers::filename]),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(1)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2839,6 +2928,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::positional(&[completers::filename]),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(1)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2850,6 +2940,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2861,6 +2952,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2872,6 +2964,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -2883,6 +2976,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -3147,6 +3241,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
completer: CommandCompleter::none(),
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[WRITE_NO_FORMAT_FLAG],
|
||||
..Signature::DEFAULT
|
||||
},
|
||||
},
|
||||
@@ -3361,6 +3456,12 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
signature: Signature {
|
||||
positionals: (0, Some(0)),
|
||||
flags: &[
|
||||
Flag {
|
||||
name: "insensitive",
|
||||
alias: Some('i'),
|
||||
doc: "sort the ranges case-insensitively",
|
||||
..Flag::DEFAULT
|
||||
},
|
||||
Flag {
|
||||
name: "reverse",
|
||||
alias: Some('r'),
|
||||
@@ -3728,14 +3829,15 @@ fn complete_command_line(editor: &Editor, input: &str) -> Vec<ui::prompt::Comple
|
||||
.get(command)
|
||||
.map_or_else(Vec::new, |cmd| {
|
||||
let args_offset = command.len() + 1;
|
||||
complete_command_args(editor, cmd, rest, args_offset)
|
||||
complete_command_args(editor, cmd.signature, &cmd.completer, rest, args_offset)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn complete_command_args(
|
||||
pub fn complete_command_args(
|
||||
editor: &Editor,
|
||||
command: &TypableCommand,
|
||||
signature: Signature,
|
||||
completer: &CommandCompleter,
|
||||
input: &str,
|
||||
offset: usize,
|
||||
) -> Vec<ui::prompt::Completion> {
|
||||
@@ -3747,7 +3849,7 @@ fn complete_command_args(
|
||||
let cursor = input.len();
|
||||
let prefix = &input[..cursor];
|
||||
let mut tokenizer = Tokenizer::new(prefix, false);
|
||||
let mut args = Args::new(command.signature, false);
|
||||
let mut args = Args::new(signature, false);
|
||||
let mut final_token = None;
|
||||
let mut is_last_token = true;
|
||||
|
||||
@@ -3791,7 +3893,7 @@ fn complete_command_args(
|
||||
.len()
|
||||
.checked_sub(1)
|
||||
.expect("completion state to be positional");
|
||||
let completer = command.completer_for_argument_number(n);
|
||||
let completer = completer.for_argument_number(n);
|
||||
|
||||
completer(editor, &token.content)
|
||||
.into_iter()
|
||||
@@ -3800,7 +3902,7 @@ fn complete_command_args(
|
||||
}
|
||||
CompletionState::Flag(_) => fuzzy_match(
|
||||
token.content.trim_start_matches('-'),
|
||||
command.signature.flags.iter().map(|flag| flag.name),
|
||||
signature.flags.iter().map(|flag| flag.name),
|
||||
false,
|
||||
)
|
||||
.into_iter()
|
||||
@@ -3825,7 +3927,7 @@ fn complete_command_args(
|
||||
.len()
|
||||
.checked_sub(1)
|
||||
.expect("completion state to be positional");
|
||||
command.completer_for_argument_number(n)
|
||||
completer.for_argument_number(n)
|
||||
});
|
||||
complete_expand(editor, &token, arg_completer, offset + token.content_start)
|
||||
}
|
||||
@@ -3865,10 +3967,12 @@ fn quote_completion<'a>(
|
||||
span.content = Cow::Owned(format!(
|
||||
"'{}{}'",
|
||||
// Escape any inner single quotes by doubling them.
|
||||
replace(token.content.as_ref().into(), '\'', "''"),
|
||||
replace(token.content[..range.start].into(), '\'', "''"),
|
||||
replace(span.content, '\'', "''")
|
||||
));
|
||||
// Ignore `range.start` here since we're replacing the entire token.
|
||||
// Ignore `range.start` here since we're replacing the entire token. We used
|
||||
// `range.start` above to emulate the replacement that using `range.start` would have
|
||||
// done.
|
||||
((offset + token.content_start).., span)
|
||||
}
|
||||
TokenKind::Quoted(quote) => {
|
||||
|
@@ -1,8 +1,8 @@
|
||||
use helix_event::{events, register_event};
|
||||
use helix_view::document::Mode;
|
||||
use helix_view::events::{
|
||||
DiagnosticsDidChange, DocumentDidChange, DocumentDidClose, DocumentDidOpen, DocumentFocusLost,
|
||||
LanguageServerExited, LanguageServerInitialized, SelectionDidChange,
|
||||
ConfigDidChange, DiagnosticsDidChange, DocumentDidChange, DocumentDidClose, DocumentDidOpen,
|
||||
DocumentFocusLost, LanguageServerExited, LanguageServerInitialized, SelectionDidChange,
|
||||
};
|
||||
|
||||
use crate::commands;
|
||||
@@ -26,4 +26,5 @@ pub fn register() {
|
||||
register_event::<DiagnosticsDidChange>();
|
||||
register_event::<LanguageServerInitialized>();
|
||||
register_event::<LanguageServerExited>();
|
||||
register_event::<ConfigDidChange>();
|
||||
}
|
||||
|
@@ -8,7 +8,7 @@ use crate::events;
|
||||
use crate::handlers::auto_save::AutoSaveHandler;
|
||||
use crate::handlers::signature_help::SignatureHelpHandler;
|
||||
|
||||
pub use helix_view::handlers::Handlers;
|
||||
pub use helix_view::handlers::{word_index, Handlers};
|
||||
|
||||
use self::document_colors::DocumentColorsHandler;
|
||||
|
||||
@@ -26,12 +26,14 @@ pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
|
||||
let signature_hints = SignatureHelpHandler::new().spawn();
|
||||
let auto_save = AutoSaveHandler::new().spawn();
|
||||
let document_colors = DocumentColorsHandler::default().spawn();
|
||||
let word_index = word_index::Handler::spawn();
|
||||
|
||||
let handlers = Handlers {
|
||||
completions: helix_view::handlers::completion::CompletionHandler::new(event_tx),
|
||||
signature_hints,
|
||||
auto_save,
|
||||
document_colors,
|
||||
word_index,
|
||||
};
|
||||
|
||||
helix_view::handlers::register_hooks(&handlers);
|
||||
|
@@ -2,7 +2,7 @@ use std::collections::HashMap;
|
||||
|
||||
use helix_core::chars::char_is_word;
|
||||
use helix_core::completion::CompletionProvider;
|
||||
use helix_core::syntax::LanguageServerFeature;
|
||||
use helix_core::syntax::config::LanguageServerFeature;
|
||||
use helix_event::{register_hook, TaskHandle};
|
||||
use helix_lsp::lsp;
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
@@ -30,6 +30,7 @@ mod item;
|
||||
mod path;
|
||||
mod request;
|
||||
mod resolve;
|
||||
mod word;
|
||||
|
||||
async fn handle_response(
|
||||
requests: &mut JoinSet<CompletionResponse>,
|
||||
@@ -82,7 +83,7 @@ async fn replace_completions(
|
||||
fn show_completion(
|
||||
editor: &mut Editor,
|
||||
compositor: &mut Compositor,
|
||||
items: Vec<CompletionItem>,
|
||||
mut items: Vec<CompletionItem>,
|
||||
context: HashMap<CompletionProvider, ResponseContext>,
|
||||
trigger: Trigger,
|
||||
) {
|
||||
@@ -101,6 +102,7 @@ fn show_completion(
|
||||
if ui.completion.is_some() {
|
||||
return;
|
||||
}
|
||||
word::retain_valid_completions(trigger, doc, view.id, &mut items);
|
||||
editor.handlers.completions.active_completions = context;
|
||||
|
||||
let completion_area = ui.set_completion(editor, items, trigger.pos, size);
|
||||
|
@@ -5,7 +5,7 @@ use std::time::Duration;
|
||||
use arc_swap::ArcSwap;
|
||||
use futures_util::Future;
|
||||
use helix_core::completion::CompletionProvider;
|
||||
use helix_core::syntax::LanguageServerFeature;
|
||||
use helix_core::syntax::config::LanguageServerFeature;
|
||||
use helix_event::{cancelable_future, TaskController, TaskHandle};
|
||||
use helix_lsp::lsp;
|
||||
use helix_lsp::lsp::{CompletionContext, CompletionTriggerKind};
|
||||
@@ -28,6 +28,8 @@ use crate::job::{dispatch, dispatch_blocking};
|
||||
use crate::ui;
|
||||
use crate::ui::editor::InsertEvent;
|
||||
|
||||
use super::word;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub(super) enum TriggerKind {
|
||||
Auto,
|
||||
@@ -242,10 +244,15 @@ fn request_completions(
|
||||
doc.selection(view.id).clone(),
|
||||
doc,
|
||||
handle.clone(),
|
||||
savepoint,
|
||||
savepoint.clone(),
|
||||
) {
|
||||
requests.spawn_blocking(path_completion_request);
|
||||
}
|
||||
if let Some(word_completion_request) =
|
||||
word::completion(editor, trigger, handle.clone(), savepoint)
|
||||
{
|
||||
requests.spawn_blocking(word_completion_request);
|
||||
}
|
||||
|
||||
let ui = compositor.find::<ui::EditorView>().unwrap();
|
||||
ui.last_insert.1.push(InsertEvent::RequestCompletion);
|
||||
|
134
helix-term/src/handlers/completion/word.rs
Normal file
134
helix-term/src/handlers/completion/word.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
use std::{borrow::Cow, sync::Arc};
|
||||
|
||||
use helix_core::{
|
||||
self as core, chars::char_is_word, completion::CompletionProvider, movement, Transaction,
|
||||
};
|
||||
use helix_event::TaskHandle;
|
||||
use helix_stdx::rope::RopeSliceExt as _;
|
||||
use helix_view::{
|
||||
document::SavePoint, handlers::completion::ResponseContext, Document, Editor, ViewId,
|
||||
};
|
||||
|
||||
use super::{request::TriggerKind, CompletionItem, CompletionItems, CompletionResponse, Trigger};
|
||||
|
||||
const COMPLETION_KIND: &str = "word";
|
||||
|
||||
pub(super) fn completion(
|
||||
editor: &Editor,
|
||||
trigger: Trigger,
|
||||
handle: TaskHandle,
|
||||
savepoint: Arc<SavePoint>,
|
||||
) -> Option<impl FnOnce() -> CompletionResponse> {
|
||||
if !doc!(editor).word_completion_enabled() {
|
||||
return None;
|
||||
}
|
||||
let config = editor.config().word_completion;
|
||||
let doc_config = doc!(editor)
|
||||
.language_config()
|
||||
.and_then(|config| config.word_completion);
|
||||
let trigger_length = doc_config
|
||||
.and_then(|c| c.trigger_length)
|
||||
.unwrap_or(config.trigger_length)
|
||||
.get() as usize;
|
||||
|
||||
let (view, doc) = current_ref!(editor);
|
||||
let rope = doc.text().clone();
|
||||
let word_index = editor.handlers.word_index().clone();
|
||||
let text = doc.text().slice(..);
|
||||
let selection = doc.selection(view.id).clone();
|
||||
let pos = selection.primary().cursor(text);
|
||||
|
||||
let cursor = movement::move_prev_word_start(text, core::Range::point(pos), 1);
|
||||
if cursor.head == pos {
|
||||
return None;
|
||||
}
|
||||
if trigger.kind != TriggerKind::Manual
|
||||
&& text
|
||||
.slice(cursor.head..)
|
||||
.graphemes()
|
||||
.take(trigger_length)
|
||||
.take_while(|g| g.chars().all(char_is_word))
|
||||
.count()
|
||||
!= trigger_length
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let typed_word_range = cursor.head..pos;
|
||||
let typed_word = text.slice(typed_word_range.clone());
|
||||
let edit_diff = if typed_word
|
||||
.char(typed_word.len_chars().saturating_sub(1))
|
||||
.is_whitespace()
|
||||
{
|
||||
0
|
||||
} else {
|
||||
typed_word.len_chars()
|
||||
};
|
||||
|
||||
if handle.is_canceled() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let future = move || {
|
||||
let text = rope.slice(..);
|
||||
let typed_word: Cow<_> = text.slice(typed_word_range).into();
|
||||
let items = word_index
|
||||
.matches(&typed_word)
|
||||
.into_iter()
|
||||
.filter(|word| word.as_str() != typed_word.as_ref())
|
||||
.map(|word| {
|
||||
let transaction = Transaction::change_by_selection(&rope, &selection, |range| {
|
||||
let cursor = range.cursor(text);
|
||||
(cursor - edit_diff, cursor, Some((&word).into()))
|
||||
});
|
||||
CompletionItem::Other(core::CompletionItem {
|
||||
transaction,
|
||||
label: word.into(),
|
||||
kind: Cow::Borrowed(COMPLETION_KIND),
|
||||
documentation: None,
|
||||
provider: CompletionProvider::Word,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
CompletionResponse {
|
||||
items: CompletionItems::Other(items),
|
||||
provider: CompletionProvider::Word,
|
||||
context: ResponseContext {
|
||||
is_incomplete: false,
|
||||
priority: 0,
|
||||
savepoint,
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
Some(future)
|
||||
}
|
||||
|
||||
pub(super) fn retain_valid_completions(
|
||||
trigger: Trigger,
|
||||
doc: &Document,
|
||||
view_id: ViewId,
|
||||
items: &mut Vec<CompletionItem>,
|
||||
) {
|
||||
if trigger.kind == TriggerKind::Manual {
|
||||
return;
|
||||
}
|
||||
|
||||
let text = doc.text().slice(..);
|
||||
let cursor = doc.selection(view_id).primary().cursor(text);
|
||||
if text
|
||||
.get_char(cursor.saturating_sub(1))
|
||||
.is_some_and(|ch| ch.is_whitespace())
|
||||
{
|
||||
items.retain(|item| {
|
||||
!matches!(
|
||||
item,
|
||||
CompletionItem::Other(core::CompletionItem {
|
||||
provider: CompletionProvider::Word,
|
||||
..
|
||||
})
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
@@ -1,7 +1,7 @@
|
||||
use std::{collections::HashSet, time::Duration};
|
||||
|
||||
use futures_util::{stream::FuturesOrdered, StreamExt};
|
||||
use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation};
|
||||
use helix_core::{syntax::config::LanguageServerFeature, text_annotations::InlineAnnotation};
|
||||
use helix_event::{cancelable_future, register_hook};
|
||||
use helix_lsp::lsp;
|
||||
use helix_view::{
|
||||
@@ -81,6 +81,10 @@ fn request_document_colors(editor: &mut Editor, doc_id: DocumentId) {
|
||||
})
|
||||
.collect();
|
||||
|
||||
if futures.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut all_colors = Vec::new();
|
||||
loop {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use helix_core::syntax::LanguageServerFeature;
|
||||
use helix_core::syntax::config::LanguageServerFeature;
|
||||
use helix_event::{cancelable_future, register_hook, send_blocking, TaskController, TaskHandle};
|
||||
use helix_lsp::lsp::{self, SignatureInformation};
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
|
@@ -1,22 +1,33 @@
|
||||
use crate::config::{Config, ConfigLoadError};
|
||||
use crossterm::{
|
||||
style::{Color, StyledContent, Stylize},
|
||||
tty::IsTty,
|
||||
};
|
||||
use helix_core::config::{default_lang_config, user_lang_config};
|
||||
use helix_loader::grammar::load_runtime_file;
|
||||
use std::io::Write;
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
io::{IsTerminal, Write},
|
||||
};
|
||||
use termina::{
|
||||
style::{ColorSpec, StyleExt as _, Stylized},
|
||||
Terminal as _,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum TsFeature {
|
||||
Highlight,
|
||||
TextObject,
|
||||
AutoIndent,
|
||||
Tags,
|
||||
RainbowBracket,
|
||||
}
|
||||
|
||||
impl TsFeature {
|
||||
pub fn all() -> &'static [Self] {
|
||||
&[Self::Highlight, Self::TextObject, Self::AutoIndent]
|
||||
&[
|
||||
Self::Highlight,
|
||||
Self::TextObject,
|
||||
Self::AutoIndent,
|
||||
Self::Tags,
|
||||
Self::RainbowBracket,
|
||||
]
|
||||
}
|
||||
|
||||
pub fn runtime_filename(&self) -> &'static str {
|
||||
@@ -24,6 +35,8 @@ impl TsFeature {
|
||||
Self::Highlight => "highlights.scm",
|
||||
Self::TextObject => "textobjects.scm",
|
||||
Self::AutoIndent => "indents.scm",
|
||||
Self::Tags => "tags.scm",
|
||||
Self::RainbowBracket => "rainbows.scm",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +45,8 @@ impl TsFeature {
|
||||
Self::Highlight => "Syntax Highlighting",
|
||||
Self::TextObject => "Treesitter Textobjects",
|
||||
Self::AutoIndent => "Auto Indent",
|
||||
Self::Tags => "Code Navigation Tags",
|
||||
Self::RainbowBracket => "Rainbow Brackets",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,6 +55,8 @@ impl TsFeature {
|
||||
Self::Highlight => "Highlight",
|
||||
Self::TextObject => "Textobject",
|
||||
Self::AutoIndent => "Indent",
|
||||
Self::Tags => "Tags",
|
||||
Self::RainbowBracket => "Rainbow",
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -134,6 +151,15 @@ pub fn clipboard() -> std::io::Result<()> {
|
||||
}
|
||||
|
||||
pub fn languages_all() -> std::io::Result<()> {
|
||||
languages(None)
|
||||
}
|
||||
|
||||
pub fn languages_selection() -> std::io::Result<()> {
|
||||
let selection = helix_loader::grammar::get_grammar_names().unwrap_or_default();
|
||||
languages(selection)
|
||||
}
|
||||
|
||||
fn languages(selection: Option<HashSet<String>>) -> std::io::Result<()> {
|
||||
let stdout = std::io::stdout();
|
||||
let mut stdout = stdout.lock();
|
||||
|
||||
@@ -160,21 +186,24 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
headings.push(feat.short_title())
|
||||
}
|
||||
|
||||
let terminal_cols = crossterm::terminal::size().map(|(c, _)| c).unwrap_or(80);
|
||||
let terminal_cols = termina::PlatformTerminal::new()
|
||||
.and_then(|terminal| terminal.get_dimensions())
|
||||
.map(|size| size.cols)
|
||||
.unwrap_or(80);
|
||||
let column_width = terminal_cols as usize / headings.len();
|
||||
let is_terminal = std::io::stdout().is_tty();
|
||||
let is_terminal = std::io::stdout().is_terminal();
|
||||
|
||||
let fit = |s: &str| -> StyledContent<String> {
|
||||
let fit = |s: &str| -> Stylized<'static> {
|
||||
format!(
|
||||
"{:column_width$}",
|
||||
s.get(..column_width - 2)
|
||||
.map(|s| format!("{}…", s))
|
||||
.unwrap_or_else(|| s.to_string())
|
||||
)
|
||||
.stylize()
|
||||
.stylized()
|
||||
};
|
||||
let color = |s: StyledContent<String>, c: Color| if is_terminal { s.with(c) } else { s };
|
||||
let bold = |s: StyledContent<String>| if is_terminal { s.bold() } else { s };
|
||||
let color = |s: Stylized<'static>, c: ColorSpec| if is_terminal { s.foreground(c) } else { s };
|
||||
let bold = |s: Stylized<'static>| if is_terminal { s.bold() } else { s };
|
||||
|
||||
for heading in headings {
|
||||
write!(stdout, "{}", bold(fit(heading)))?;
|
||||
@@ -185,24 +214,33 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
.language
|
||||
.sort_unstable_by_key(|l| l.language_id.clone());
|
||||
|
||||
let check_binary = |cmd: Option<&str>| match cmd {
|
||||
Some(cmd) => match helix_stdx::env::which(cmd) {
|
||||
Ok(_) => color(fit(&format!("✓ {}", cmd)), Color::Green),
|
||||
Err(_) => color(fit(&format!("✘ {}", cmd)), Color::Red),
|
||||
let check_binary_with_name = |cmd: Option<(&str, &str)>| match cmd {
|
||||
Some((name, cmd)) => match helix_stdx::env::which(cmd) {
|
||||
Ok(_) => color(fit(&format!("✓ {}", name)), ColorSpec::BRIGHT_GREEN),
|
||||
Err(_) => color(fit(&format!("✘ {}", name)), ColorSpec::BRIGHT_RED),
|
||||
},
|
||||
None => color(fit("None"), Color::Yellow),
|
||||
None => color(fit("None"), ColorSpec::BRIGHT_YELLOW),
|
||||
};
|
||||
|
||||
let check_binary = |cmd: Option<&str>| check_binary_with_name(cmd.map(|cmd| (cmd, cmd)));
|
||||
|
||||
for lang in &syn_loader_conf.language {
|
||||
if selection
|
||||
.as_ref()
|
||||
.is_some_and(|s| !s.contains(&lang.language_id))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
write!(stdout, "{}", fit(&lang.language_id))?;
|
||||
|
||||
let mut cmds = lang.language_servers.iter().filter_map(|ls| {
|
||||
syn_loader_conf
|
||||
.language_server
|
||||
.get(&ls.name)
|
||||
.map(|config| config.command.as_str())
|
||||
.map(|config| (ls.name.as_str(), config.command.as_str()))
|
||||
});
|
||||
write!(stdout, "{}", check_binary(cmds.next()))?;
|
||||
write!(stdout, "{}", check_binary_with_name(cmds.next()))?;
|
||||
|
||||
let dap = lang.debugger.as_ref().map(|dap| dap.command.as_str());
|
||||
write!(stdout, "{}", check_binary(dap))?;
|
||||
@@ -215,8 +253,8 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
|
||||
for ts_feat in TsFeature::all() {
|
||||
match load_runtime_file(&lang.language_id, ts_feat.runtime_filename()).is_ok() {
|
||||
true => write!(stdout, "{}", color(fit("✓"), Color::Green))?,
|
||||
false => write!(stdout, "{}", color(fit("✘"), Color::Red))?,
|
||||
true => write!(stdout, "{}", color(fit("✓"), ColorSpec::BRIGHT_GREEN))?,
|
||||
false => write!(stdout, "{}", color(fit("✘"), ColorSpec::BRIGHT_RED))?,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,10 +262,18 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
|
||||
for cmd in cmds {
|
||||
write!(stdout, "{}", fit(""))?;
|
||||
writeln!(stdout, "{}", check_binary(Some(cmd)))?;
|
||||
writeln!(stdout, "{}", check_binary_with_name(Some(cmd)))?;
|
||||
}
|
||||
}
|
||||
|
||||
if selection.is_some() {
|
||||
writeln!(
|
||||
stdout,
|
||||
"\nThis list is filtered according to the 'use-grammars' option in languages.toml file.\n\
|
||||
To see the full list, use the '--health all' or '--health all-languages' option."
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -283,10 +329,12 @@ pub fn language(lang_str: String) -> std::io::Result<()> {
|
||||
|
||||
probe_protocols(
|
||||
"language server",
|
||||
lang.language_servers
|
||||
.iter()
|
||||
.filter_map(|ls| syn_loader_conf.language_server.get(&ls.name))
|
||||
.map(|config| config.command.as_str()),
|
||||
lang.language_servers.iter().filter_map(|ls| {
|
||||
syn_loader_conf
|
||||
.language_server
|
||||
.get(&ls.name)
|
||||
.map(|config| (ls.name.as_str(), config.command.as_str()))
|
||||
}),
|
||||
)?;
|
||||
|
||||
probe_protocol(
|
||||
@@ -323,7 +371,7 @@ fn probe_parser(grammar_name: &str) -> std::io::Result<()> {
|
||||
}
|
||||
|
||||
/// Display diagnostics about multiple LSPs and DAPs.
|
||||
fn probe_protocols<'a, I: Iterator<Item = &'a str> + 'a>(
|
||||
fn probe_protocols<'a, I: Iterator<Item = (&'a str, &'a str)> + 'a>(
|
||||
protocol_name: &str,
|
||||
server_cmds: I,
|
||||
) -> std::io::Result<()> {
|
||||
@@ -338,12 +386,12 @@ fn probe_protocols<'a, I: Iterator<Item = &'a str> + 'a>(
|
||||
}
|
||||
writeln!(stdout)?;
|
||||
|
||||
for cmd in server_cmds {
|
||||
let (path, icon) = match helix_stdx::env::which(cmd) {
|
||||
for (name, cmd) in server_cmds {
|
||||
let (diag, icon) = match helix_stdx::env::which(cmd) {
|
||||
Ok(path) => (path.display().to_string().green(), "✓".green()),
|
||||
Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()),
|
||||
};
|
||||
writeln!(stdout, " {} {}: {}", icon, cmd, path)?;
|
||||
writeln!(stdout, " {} {}: {}", icon, name, diag)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -354,19 +402,18 @@ fn probe_protocol(protocol_name: &str, server_cmd: Option<String>) -> std::io::R
|
||||
let stdout = std::io::stdout();
|
||||
let mut stdout = stdout.lock();
|
||||
|
||||
let cmd_name = match server_cmd {
|
||||
Some(ref cmd) => cmd.as_str().green(),
|
||||
None => "None".yellow(),
|
||||
write!(stdout, "Configured {}:", protocol_name)?;
|
||||
let Some(cmd) = server_cmd else {
|
||||
writeln!(stdout, "{}", " None".yellow())?;
|
||||
return Ok(());
|
||||
};
|
||||
writeln!(stdout, "Configured {}: {}", protocol_name, cmd_name)?;
|
||||
writeln!(stdout)?;
|
||||
|
||||
if let Some(cmd) = server_cmd {
|
||||
let path = match helix_stdx::env::which(&cmd) {
|
||||
Ok(path) => path.display().to_string().green(),
|
||||
Err(_) => format!("'{}' not found in $PATH", cmd).red(),
|
||||
};
|
||||
writeln!(stdout, "Binary for {}: {}", protocol_name, path)?;
|
||||
}
|
||||
let (diag, icon) = match helix_stdx::env::which(&cmd) {
|
||||
Ok(path) => (path.display().to_string().green(), "✓".green()),
|
||||
Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()),
|
||||
};
|
||||
writeln!(stdout, " {} {}", icon, diag)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -388,9 +435,16 @@ fn probe_treesitter_feature(lang: &str, feature: TsFeature) -> std::io::Result<(
|
||||
|
||||
pub fn print_health(health_arg: Option<String>) -> std::io::Result<()> {
|
||||
match health_arg.as_deref() {
|
||||
Some("languages") => languages_all()?,
|
||||
Some("languages") => languages_selection()?,
|
||||
Some("all-languages") => languages_all()?,
|
||||
Some("clipboard") => clipboard()?,
|
||||
None | Some("all") => {
|
||||
None => {
|
||||
general()?;
|
||||
clipboard()?;
|
||||
writeln!(std::io::stdout().lock())?;
|
||||
languages_selection()?;
|
||||
}
|
||||
Some("all") => {
|
||||
general()?;
|
||||
clipboard()?;
|
||||
writeln!(std::io::stdout().lock())?;
|
||||
|
@@ -120,6 +120,7 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
|
||||
"e" => goto_prev_entry,
|
||||
"T" => goto_prev_test,
|
||||
"p" => goto_prev_paragraph,
|
||||
"x" => goto_prev_xml_element,
|
||||
"space" => add_newline_above,
|
||||
},
|
||||
"]" => { "Right bracket"
|
||||
@@ -134,6 +135,7 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
|
||||
"e" => goto_next_entry,
|
||||
"T" => goto_next_test,
|
||||
"p" => goto_next_paragraph,
|
||||
"x" => goto_next_xml_element,
|
||||
"space" => add_newline_below,
|
||||
},
|
||||
|
||||
@@ -227,8 +229,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
|
||||
"E" => file_explorer_in_current_buffer_directory,
|
||||
"b" => buffer_picker,
|
||||
"j" => jumplist_picker,
|
||||
"s" => symbol_picker,
|
||||
"S" => workspace_symbol_picker,
|
||||
"s" => lsp_or_syntax_symbol_picker,
|
||||
"S" => lsp_or_syntax_workspace_symbol_picker,
|
||||
"d" => diagnostics_picker,
|
||||
"D" => workspace_diagnostics_picker,
|
||||
"g" => changed_file_picker,
|
||||
|
@@ -76,8 +76,7 @@ fn open_external_url_callback(
|
||||
let commands = open::commands(url.as_str());
|
||||
async {
|
||||
for cmd in commands {
|
||||
let mut command = tokio::process::Command::new(cmd.get_program());
|
||||
command.args(cmd.get_args());
|
||||
let mut command: tokio::process::Command = cmd.into();
|
||||
if command.output().await.is_ok() {
|
||||
return Ok(job::Callback::Editor(Box::new(|_| {})));
|
||||
}
|
||||
|
@@ -1,5 +1,4 @@
|
||||
use anyhow::{Context, Error, Result};
|
||||
use crossterm::event::EventStream;
|
||||
use helix_loader::VERSION_AND_GIT_HASH;
|
||||
use helix_term::application::Application;
|
||||
use helix_term::args::Args;
|
||||
@@ -63,8 +62,10 @@ FLAGS:
|
||||
-h, --help Prints help information
|
||||
--tutor Loads the tutorial
|
||||
--health [CATEGORY] Checks for potential errors in editor setup
|
||||
CATEGORY can be a language or one of 'clipboard', 'languages'
|
||||
or 'all'. 'all' is the default if not specified.
|
||||
CATEGORY can be a language or one of 'clipboard', 'languages',
|
||||
'all-languages' or 'all'. 'languages' is filtered according to
|
||||
user config, 'all-languages' and 'all' are not. If not specified,
|
||||
the default is the same as 'all', but with languages filtering.
|
||||
-g, --grammar {{fetch|build}} Fetches or builds tree-sitter grammars listed in languages.toml
|
||||
-c, --config <file> Specifies a file to use for configuration
|
||||
-v Increases logging verbosity each use for up to 3 times
|
||||
@@ -149,8 +150,9 @@ FLAGS:
|
||||
|
||||
// TODO: use the thread local executor to spawn the application task separately from the work pool
|
||||
let mut app = Application::new(args, config, lang_loader).context("unable to start Helix")?;
|
||||
let mut events = app.event_stream();
|
||||
|
||||
let exit_code = app.run(&mut EventStream::new()).await?;
|
||||
let exit_code = app.run(&mut events).await?;
|
||||
|
||||
Ok(exit_code)
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user