mirror of
https://github.com/helix-editor/helix.git
synced 2025-10-06 00:13:28 +02:00
Compare commits
565 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
7a51085e8a | ||
|
20a132e36f | ||
|
2b0835b295 | ||
|
0902ede7b1 | ||
|
9400d74307 | ||
|
be2b452a39 | ||
|
c0dbd6dc3f | ||
|
85492e587c | ||
|
1ac576f2b3 | ||
|
29d6a5a9b6 | ||
|
3d76fa0b81 | ||
|
43fc073cb3 | ||
|
c94c0d9f1c | ||
|
610ce93600 | ||
|
05161aa85e | ||
|
e8cc7ace75 | ||
|
6fdf5d0920 | ||
|
43997f1936 | ||
|
61828ea519 | ||
|
0712eb3e3b | ||
|
cf8f59ddd0 | ||
|
98851d1594 | ||
|
37fed4de80 | ||
|
5eb9a0167f | ||
|
94203a97e5 | ||
|
e01c53551d | ||
|
7044d7d804 | ||
|
37520f46ae | ||
|
b157c5a8a4 | ||
|
6fcab90d16 | ||
|
a229f405cc | ||
|
31b7596f09 | ||
|
4fc991fdec | ||
|
08ee949dcb | ||
|
db3470d973 | ||
|
8081e9f052 | ||
|
00b2d616eb | ||
|
8330f6af20 | ||
|
c1f677ff75 | ||
|
eeb3f8e963 | ||
|
c1f90a127b | ||
|
fbb98300df | ||
|
8d7a25b4d4 | ||
|
3f603b27f1 | ||
|
bfa533fe78 | ||
|
194b09fbc1 | ||
|
f31e85aca4 | ||
|
24352b2729 | ||
|
bde0307c87 | ||
|
970a111aa3 | ||
|
5a60989efe | ||
|
9a04064373 | ||
|
b67686d318 | ||
|
0d0165b76e | ||
|
1493ff7657 | ||
|
b0aaf08995 | ||
|
79caa7b72b | ||
|
19247ff0ec | ||
|
9bfb0caf1b | ||
|
7633c5acd3 | ||
|
f1e90ac2e3 | ||
|
d62ad8b595 | ||
|
fd02d1bf89 | ||
|
5f386fa355 | ||
|
5d14f56fa9 | ||
|
74a9dd51ff | ||
|
c484b08923 | ||
|
0062af6a19 | ||
|
737282d0e9 | ||
|
376d99a51d | ||
|
adf97e088e | ||
|
68bad148a5 | ||
|
78fba8683b | ||
|
0ff3e3ea38 | ||
|
c0b86afdc8 | ||
|
1c1aee74b4 | ||
|
5c810e5e52 | ||
|
86b1236b46 | ||
|
227e0108e9 | ||
|
a76e94848a | ||
|
49c5bc5934 | ||
|
14e2ced440 | ||
|
e83cdf3fd3 | ||
|
e6c36e82cf | ||
|
78d37fd332 | ||
|
59c691d2db | ||
|
b13d44156c | ||
|
bdbf423876 | ||
|
7bb1db3ab5 | ||
|
0846822371 | ||
|
f044059a2a | ||
|
c15996aff5 | ||
|
f9ad1cafdc | ||
|
39f7ba36e0 | ||
|
c1251aecc7 | ||
|
6a6a9ab2b3 | ||
|
78a6f77e99 | ||
|
66637be700 | ||
|
8e07e1b898 | ||
|
951fd1c80e | ||
|
93ec42d06e | ||
|
a494f47a5d | ||
|
b935fac957 | ||
|
9712bbb23b | ||
|
4526216139 | ||
|
f83843ceba | ||
|
40eb1268c7 | ||
|
e1a92fd399 | ||
|
806cc1c3b1 | ||
|
dd549e1729 | ||
|
77b1a4a768 | ||
|
7d2a77e53c | ||
|
24f86017a6 | ||
|
865881ba19 | ||
|
1ca6ba03ca | ||
|
d5ba0b5162 | ||
|
700058f433 | ||
|
c7b326be04 | ||
|
2af04325d8 | ||
|
a449156702 | ||
|
5af9136aec | ||
|
1cd710fe01 | ||
|
4e845409b6 | ||
|
c06155ace4 | ||
|
504d5ce8bd | ||
|
7b1d682fe5 | ||
|
4e1b3b12f3 | ||
|
a8cf0c6b90 | ||
|
368064e316 | ||
|
afec54485a | ||
|
24f90ba8d8 | ||
|
af21e2a5b4 | ||
|
e023a78919 | ||
|
6118486eb2 | ||
|
a1207fd768 | ||
|
d11b652139 | ||
|
fd0e4b1159 | ||
|
a629343476 | ||
|
ab2a0f325b | ||
|
989407f190 | ||
|
966fbc5984 | ||
|
1422449537 | ||
|
eeb9b39857 | ||
|
4429993842 | ||
|
23907a063c | ||
|
3a83a764e3 | ||
|
8a7aec6414 | ||
|
225484c26c | ||
|
4c424d5ee4 | ||
|
e267dc834a | ||
|
59acee308d | ||
|
97d4b2b5fe | ||
|
bd549d8a20 | ||
|
7083b98a38 | ||
|
a19a6ca01e | ||
|
1bcb624ae6 | ||
|
f88c077f99 | ||
|
fdb9a1677b | ||
|
59b5bf3178 | ||
|
fa83426011 | ||
|
bf773db451 | ||
|
5995568c1d | ||
|
547c3ecd0c | ||
|
e90276df0b | ||
|
f0cd02d5ef | ||
|
828d39e736 | ||
|
718d4ab0f0 | ||
|
23553bd37c | ||
|
ad62e1e129 | ||
|
e7f5ec5561 | ||
|
f5b95beef6 | ||
|
6c11708fb3 | ||
|
6ea477ab60 | ||
|
d3221b03a2 | ||
|
d6b6ad879e | ||
|
983a53bfb4 | ||
|
d090369404 | ||
|
05aeeaca0b | ||
|
36b975c4ce | ||
|
f10a06f4de | ||
|
094a0aa3f9 | ||
|
f7f55143a1 | ||
|
4c996f43df | ||
|
62561e9d23 | ||
|
5aead46f4b | ||
|
2a7ae963e1 | ||
|
e2833b5853 | ||
|
333c2949c2 | ||
|
48a0c80652 | ||
|
7bce91556a | ||
|
ed03be1450 | ||
|
5c007c2248 | ||
|
0ad7561135 | ||
|
1bcff796e5 | ||
|
a4fffaed9f | ||
|
d49e5323f9 | ||
|
4044c70eb2 | ||
|
759b850859 | ||
|
7d510429c5 | ||
|
f064894e57 | ||
|
e2d2f19fd0 | ||
|
a8e69e12f4 | ||
|
4080341977 | ||
|
7c9ebd05b8 | ||
|
80e920ba36 | ||
|
ac81b47a41 | ||
|
66a8612351 | ||
|
2302869836 | ||
|
add3be8528 | ||
|
df0d58e9f7 | ||
|
11c3ba9350 | ||
|
9d41113ae0 | ||
|
e22dbf102f | ||
|
2f4a9fea03 | ||
|
24314bd844 | ||
|
4b0205f690 | ||
|
9508684031 | ||
|
7315f6f3e4 | ||
|
8a53e34e66 | ||
|
72eb2ce1f1 | ||
|
5135fa37eb | ||
|
53d881f172 | ||
|
6728e44490 | ||
|
83bde1004d | ||
|
b8cafee9f5 | ||
|
1c747674b6 | ||
|
5c1a06d28e | ||
|
0b55b21f30 | ||
|
f453f8724d | ||
|
4563832318 | ||
|
392dfa0841 | ||
|
fd7080498e | ||
|
b2c8aa1ee7 | ||
|
440d4ae9df | ||
|
22b728d1eb | ||
|
89eb22525b | ||
|
ed45d380eb | ||
|
96d4ca5f73 | ||
|
56a9ce5d83 | ||
|
8ea5742b08 | ||
|
e7eab95b94 | ||
|
f5b0821860 | ||
|
9da0abaa5d | ||
|
22297d0b40 | ||
|
38ca8daa09 | ||
|
62c78c061c | ||
|
64d3e7b705 | ||
|
dd1f64d4dc | ||
|
a7b0cc730c | ||
|
3a34036310 | ||
|
97e6f2a38f | ||
|
6bfd001b48 | ||
|
ac6b2de0fd | ||
|
f80da7b4de | ||
|
85cf2648a2 | ||
|
a2fad4fcb0 | ||
|
f77dbc7c83 | ||
|
8d273a5613 | ||
|
ddbf03613d | ||
|
afc602d306 | ||
|
1a34a3ce57 | ||
|
e0a99ae51a | ||
|
b3b4e78585 | ||
|
97e12f5c5a | ||
|
05e5520ec0 | ||
|
939261fc07 | ||
|
5b45bdd80f | ||
|
1c6bc6d455 | ||
|
c238f20e1d | ||
|
9eacbc1887 | ||
|
41ee45ce54 | ||
|
b799b0d50e | ||
|
5e22694865 | ||
|
1af8dd9912 | ||
|
a8fd33ac01 | ||
|
449624965b | ||
|
2e02a1d6bc | ||
|
66afbc9fff | ||
|
3e4f81547c | ||
|
b18bda928f | ||
|
7767703979 | ||
|
bed9aced5f | ||
|
bd0d20a2b3 | ||
|
1bcae78f06 | ||
|
efaac6c5d3 | ||
|
c8794b30ee | ||
|
5b1a628e81 | ||
|
641255ccc8 | ||
|
7c9d3682db | ||
|
4d59f66b76 | ||
|
96935eb28d | ||
|
78967779bd | ||
|
61fe1dc9e8 | ||
|
dbaed0ba83 | ||
|
609f7363a1 | ||
|
ed97ecceb8 | ||
|
c0bbadcaaf | ||
|
d4fb1d0633 | ||
|
016640f4fb | ||
|
7ad8eaaef0 | ||
|
df3b88387b | ||
|
dac317e620 | ||
|
60c86eff89 | ||
|
d8351d35ab | ||
|
e98993d609 | ||
|
bf8437d098 | ||
|
dc8df7ba21 | ||
|
2b4de41bf0 | ||
|
d5d1a9b1ae | ||
|
5545f8ebb5 | ||
|
bcf70d8e67 | ||
|
43fbb6d965 | ||
|
032aaffa15 | ||
|
2dbf966293 | ||
|
0d73a4d23a | ||
|
d14ca05d6b | ||
|
de5e5863aa | ||
|
54f8e5c9c3 | ||
|
573cb39926 | ||
|
ffc89e483b | ||
|
dfd499f5a9 | ||
|
b4fd3148e3 | ||
|
96ae5897a1 | ||
|
84e939ef58 | ||
|
d906911417 | ||
|
30ac5869df | ||
|
8ffafb826f | ||
|
3633f85b38 | ||
|
9ed930b233 | ||
|
058796c18e | ||
|
17473b51d3 | ||
|
7961355ba1 | ||
|
72576822f3 | ||
|
85b4410703 | ||
|
177b6fcdc9 | ||
|
28fd704bce | ||
|
b55ca8fdb8 | ||
|
0eadeab8c7 | ||
|
5f329a22c4 | ||
|
3b3c396ca4 | ||
|
05d3ad4a0e | ||
|
d1854d8e6a | ||
|
8b85903116 | ||
|
09f5796537 | ||
|
c39d9f44a0 | ||
|
3042ff3e5a | ||
|
9963a5614d | ||
|
65868081fc | ||
|
4f2a01cc09 | ||
|
2bd8a9b39d | ||
|
31b431bfdd | ||
|
9dd17c46a2 | ||
|
757babb1b4 | ||
|
5803de2067 | ||
|
155c608237 | ||
|
9baddc825d | ||
|
fd9b826f2c | ||
|
5938ab1bf1 | ||
|
3b8d5102ac | ||
|
64bb1f7563 | ||
|
e2a23ac0b5 | ||
|
2e1aa5f15b | ||
|
a5ea61433c | ||
|
09d8c139af | ||
|
14a3502cf1 | ||
|
f2b709a3c3 | ||
|
f979bdc442 | ||
|
cde57dae35 | ||
|
6aa9838ea6 | ||
|
d6e8a44d85 | ||
|
bda05ec4bf | ||
|
83a8167402 | ||
|
ea59f77a6b | ||
|
0a6b60085a | ||
|
bc0084d071 | ||
|
48cb81eff1 | ||
|
814dcfa8d2 | ||
|
d943a51e3e | ||
|
0e51e5fbaf | ||
|
bf53aff27d | ||
|
413e477dc2 | ||
|
507a1f8dd6 | ||
|
c9cd06e904 | ||
|
bdd636d8ee | ||
|
9b8c5bdade | ||
|
0add0c5639 | ||
|
2d35b7b99c | ||
|
b6c58ea23e | ||
|
bb26c589b4 | ||
|
0e1e4edc5e | ||
|
8a609047c3 | ||
|
7bdead5b4b | ||
|
3b0ec750ff | ||
|
1befbd076c | ||
|
e36fc57fff | ||
|
698583c241 | ||
|
df0ea6674a | ||
|
430c80ff2a | ||
|
c6186ce600 | ||
|
cb31d20b46 | ||
|
9a1916ebfd | ||
|
00cccdc62a | ||
|
9939dbf119 | ||
|
cf7237d0b9 | ||
|
c63ad60c31 | ||
|
7b61c63ece | ||
|
b997d2cdeb | ||
|
289303a30d | ||
|
42f9718f55 | ||
|
27c1b3f98b | ||
|
5b920c53f0 | ||
|
4c410eef87 | ||
|
9c64650a26 | ||
|
2c89107349 | ||
|
e0180a4b88 | ||
|
5b20f6020a | ||
|
6265e196b7 | ||
|
0b0b1d850a | ||
|
2c7b75475f | ||
|
986828e75c | ||
|
03b2d81406 | ||
|
4d24a43651 | ||
|
81f51c13fa | ||
|
51328a4966 | ||
|
d6ccc150c7 | ||
|
ee2ba744a2 | ||
|
2a7e38a2b4 | ||
|
db7f693550 | ||
|
b42631942b | ||
|
f53d8411cb | ||
|
98fda6b8f0 | ||
|
9d2f2a9e32 | ||
|
2d42766a71 | ||
|
af657ef2ec | ||
|
94901b8677 | ||
|
ef155e62ef | ||
|
e315394631 | ||
|
8df6739759 | ||
|
5e4da09be2 | ||
|
890b51b568 | ||
|
3b87fce0ce | ||
|
2ad2838a27 | ||
|
c7759a5aa0 | ||
|
4ee66b8766 | ||
|
ba96f5d296 | ||
|
326293cb57 | ||
|
2c3e2b979b | ||
|
8cc6d68160 | ||
|
235a84d989 | ||
|
299da5a35b | ||
|
b001008a69 | ||
|
31212e133d | ||
|
1041a5bb07 | ||
|
0e779381a8 | ||
|
774ab6f8b6 | ||
|
c463142e5e | ||
|
34c6094604 | ||
|
2158366b24 | ||
|
fdad7d67aa | ||
|
c4085b4e88 | ||
|
5d3c69d565 | ||
|
ec599a1eac | ||
|
3a5e044c89 | ||
|
b3469df5bf | ||
|
c09b15197b | ||
|
09c994a97a | ||
|
b5b79e3656 | ||
|
e529f4eb21 | ||
|
56d00fa7f4 | ||
|
802ef20dbc | ||
|
b6b99b2487 | ||
|
839d210573 | ||
|
f55a012fb7 | ||
|
c5b210df59 | ||
|
dabec2d799 | ||
|
b78f70e602 | ||
|
53ee57f84a | ||
|
f247858055 | ||
|
3197c2536e | ||
|
838f69929d | ||
|
060a422c7e | ||
|
74102bfc6d | ||
|
d0b0c9b2ef | ||
|
132198323c | ||
|
be9dc5802a | ||
|
d93cd2a261 | ||
|
dfc70a12f3 | ||
|
28658836ee | ||
|
2aee5f02d0 | ||
|
7233ab2deb | ||
|
a964cbae65 | ||
|
91f2c60b36 | ||
|
89ad54a2e5 | ||
|
cc66475592 | ||
|
95ba4ff5bd | ||
|
f3e47bfee4 | ||
|
5230a2b669 | ||
|
66c035fa99 | ||
|
6709b4242f | ||
|
26dee49dc9 | ||
|
462c8a6ec8 | ||
|
afeaba1113 | ||
|
5a06263b78 | ||
|
bcab93c94e | ||
|
a938f5a87a | ||
|
3fc501c99f | ||
|
7087558918 | ||
|
56bddb12f8 | ||
|
5f5b383979 | ||
|
6458edecfd | ||
|
738e8a4dd3 | ||
|
9e22842d51 | ||
|
e2c74d26e0 | ||
|
c4970c617e | ||
|
a54b09e3fe | ||
|
94a1951d40 | ||
|
8759dc7e33 | ||
|
86102a651f | ||
|
2094ff1aaf | ||
|
d39baa3b4e | ||
|
0300dbdeb3 | ||
|
54dc2f8107 | ||
|
8fbda0abaf | ||
|
184abdc510 | ||
|
3a9e1c305b | ||
|
2d1ae2e44b | ||
|
6225401e84 | ||
|
3f62799656 | ||
|
b9797a7dd2 | ||
|
6c0f7eafc3 | ||
|
279db98d3c | ||
|
36fb8d1b1a | ||
|
f92fb966c0 | ||
|
ae32159247 | ||
|
4f2b8fb05a | ||
|
2a6210806b | ||
|
f5b1655eab | ||
|
e7f543fe66 | ||
|
eb0605c13d | ||
|
809990a3a4 | ||
|
d4c215b35d | ||
|
e388079a0b | ||
|
43c9eba037 | ||
|
c5492788a2 | ||
|
cc650c7f4f | ||
|
b3be6b269a | ||
|
0777948fc0 | ||
|
09390be6a5 | ||
|
c72475bc30 | ||
|
d6de5408b7 | ||
|
59d6b92e5b | ||
|
9678df1c62 | ||
|
541f7a0514 | ||
|
aac586b546 | ||
|
6bb653f820 | ||
|
3d64cf8356 | ||
|
fabee03983 | ||
|
26a55dcefd | ||
|
e11b67b0db | ||
|
0fa127b105 | ||
|
7d2d4ed4a8 | ||
|
5f3e806341 | ||
|
5fd0a2ddda | ||
|
fd709bc56d | ||
|
0f6e81b85b |
1
.envrc
1
.envrc
@@ -3,3 +3,4 @@ watch_file flake.lock
|
||||
|
||||
# try to use flakes, if it fails use normal nix (ie. shell.nix)
|
||||
use flake || use nix
|
||||
eval "$shellHook"
|
1
.github/ISSUE_TEMPLATE/bug_report.md
vendored
1
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -17,6 +17,7 @@ Please search on the issue tracker before creating one. -->
|
||||
### Environment
|
||||
|
||||
- Platform: <!-- macOS / Windows / Linux -->
|
||||
- Terminal emulator:
|
||||
- Helix version: <!-- 'hx -V' if using a release, 'git describe' if building from master -->
|
||||
|
||||
<details><summary>~/.cache/helix/helix.log</summary>
|
||||
|
41
.github/workflows/build.yml
vendored
41
.github/workflows/build.yml
vendored
@@ -13,9 +13,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -29,18 +27,21 @@ jobs:
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Run cargo check
|
||||
uses: actions-rs/cargo@v1
|
||||
@@ -52,9 +53,7 @@ jobs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -68,23 +67,37 @@ jobs:
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Copy minimal languages config
|
||||
run: cp .github/workflows/languages.toml ./languages.toml
|
||||
|
||||
- name: Cache test tree-sitter grammar
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: runtime/grammars
|
||||
key: ${{ runner.os }}-v2-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
|
||||
restore-keys: ${{ runner.os }}-v2-tree-sitter-grammars-
|
||||
|
||||
- name: Run cargo test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --workspace
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -96,9 +109,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -113,18 +124,21 @@ jobs:
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Run cargo fmt
|
||||
uses: actions-rs/cargo@v1
|
||||
@@ -136,16 +150,14 @@ jobs:
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: -- -D warnings
|
||||
args: --all-targets -- -D warnings
|
||||
|
||||
docs:
|
||||
name: Docs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -159,18 +171,21 @@ jobs:
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Generate docs
|
||||
uses: actions-rs/cargo@v1
|
||||
|
26
.github/workflows/cachix.yml
vendored
Normal file
26
.github/workflows/cachix.yml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
# Publish the Nix flake outputs to Cachix
|
||||
name: Cachix
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish Flake
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install nix
|
||||
uses: cachix/install-nix-action@v16
|
||||
|
||||
- name: Authenticate with Cachix
|
||||
uses: cachix/cachix-action@v10
|
||||
with:
|
||||
name: helix
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
|
||||
- name: Build nix flake
|
||||
run: nix build
|
18
.github/workflows/gh-pages.yml
vendored
18
.github/workflows/gh-pages.yml
vendored
@@ -4,12 +4,14 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup mdBook
|
||||
uses: peaceiris/actions-mdbook@v1
|
||||
@@ -18,10 +20,22 @@ jobs:
|
||||
# mdbook-version: '0.4.8'
|
||||
|
||||
- run: mdbook build book
|
||||
|
||||
- name: Set output directory
|
||||
run: |
|
||||
OUTDIR=$(basename ${{ github.ref }})
|
||||
echo "OUTDIR=$OUTDIR" >> $GITHUB_ENV
|
||||
|
||||
- name: Deploy
|
||||
uses: peaceiris/actions-gh-pages@v3
|
||||
if: github.ref == 'refs/heads/master'
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: ./book/book
|
||||
destination_dir: ./${{ env.OUTDIR }}
|
||||
|
||||
- name: Deploy stable
|
||||
uses: peaceiris/actions-gh-pages@v3
|
||||
if: startswith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: ./book/book
|
||||
|
26
.github/workflows/languages.toml
vendored
Normal file
26
.github/workflows/languages.toml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
# This languages.toml is used for testing in CI.
|
||||
|
||||
[[language]]
|
||||
name = "rust"
|
||||
scope = "source.rust"
|
||||
injection-regex = "rust"
|
||||
file-types = ["rs"]
|
||||
comment-token = "//"
|
||||
roots = ["Cargo.toml", "Cargo.lock"]
|
||||
indent = { tab-width = 4, unit = " " }
|
||||
|
||||
[[grammar]]
|
||||
name = "rust"
|
||||
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }
|
||||
|
||||
[[language]]
|
||||
name = "nix"
|
||||
scope = "source.nix"
|
||||
injection-regex = "nix"
|
||||
file-types = ["nix"]
|
||||
shebangs = []
|
||||
roots = []
|
||||
comment-token = "#"
|
||||
|
||||
# A grammar entry is not necessary for this language - it is only used for
|
||||
# testing TOML merging behavior.
|
16
.github/workflows/release.yml
vendored
16
.github/workflows/release.yml
vendored
@@ -52,9 +52,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install ${{ matrix.rust }} toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -69,7 +67,7 @@ jobs:
|
||||
with:
|
||||
use-cross: ${{ matrix.cross }}
|
||||
command: test
|
||||
args: --release --locked --target ${{ matrix.target }}
|
||||
args: --release --locked --target ${{ matrix.target }} --workspace
|
||||
|
||||
- name: Build release binary
|
||||
uses: actions-rs/cargo@v1
|
||||
@@ -102,7 +100,7 @@ jobs:
|
||||
fi
|
||||
cp -r runtime dist
|
||||
|
||||
- uses: actions/upload-artifact@v2.3.1
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bins-${{ matrix.build }}
|
||||
path: dist
|
||||
@@ -113,15 +111,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: false
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/download-artifact@v2
|
||||
# with:
|
||||
# path: dist
|
||||
# - run: ls -al ./dist
|
||||
- run: ls -al bins-*
|
||||
|
||||
- name: Calculate tag name
|
||||
run: |
|
||||
|
188
.gitmodules
vendored
188
.gitmodules
vendored
@@ -1,188 +0,0 @@
|
||||
[submodule "helix-syntax/languages/tree-sitter-cpp"]
|
||||
path = helix-syntax/languages/tree-sitter-cpp
|
||||
url = https://github.com/tree-sitter/tree-sitter-cpp
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-javascript"]
|
||||
path = helix-syntax/languages/tree-sitter-javascript
|
||||
url = https://github.com/tree-sitter/tree-sitter-javascript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-julia"]
|
||||
path = helix-syntax/languages/tree-sitter-julia
|
||||
url = https://github.com/tree-sitter/tree-sitter-julia
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-python"]
|
||||
path = helix-syntax/languages/tree-sitter-python
|
||||
url = https://github.com/tree-sitter/tree-sitter-python
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-typescript"]
|
||||
path = helix-syntax/languages/tree-sitter-typescript
|
||||
url = https://github.com/tree-sitter/tree-sitter-typescript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-agda"]
|
||||
path = helix-syntax/languages/tree-sitter-agda
|
||||
url = https://github.com/tree-sitter/tree-sitter-agda
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-go"]
|
||||
path = helix-syntax/languages/tree-sitter-go
|
||||
url = https://github.com/tree-sitter/tree-sitter-go
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ruby"]
|
||||
path = helix-syntax/languages/tree-sitter-ruby
|
||||
url = https://github.com/tree-sitter/tree-sitter-ruby
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-java"]
|
||||
path = helix-syntax/languages/tree-sitter-java
|
||||
url = https://github.com/tree-sitter/tree-sitter-java
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-php"]
|
||||
path = helix-syntax/languages/tree-sitter-php
|
||||
url = https://github.com/tree-sitter/tree-sitter-php
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-html"]
|
||||
path = helix-syntax/languages/tree-sitter-html
|
||||
url = https://github.com/tree-sitter/tree-sitter-html
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-scala"]
|
||||
path = helix-syntax/languages/tree-sitter-scala
|
||||
url = https://github.com/tree-sitter/tree-sitter-scala
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-bash"]
|
||||
path = helix-syntax/languages/tree-sitter-bash
|
||||
url = https://github.com/tree-sitter/tree-sitter-bash
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-rust"]
|
||||
path = helix-syntax/languages/tree-sitter-rust
|
||||
url = https://github.com/tree-sitter/tree-sitter-rust
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-json"]
|
||||
path = helix-syntax/languages/tree-sitter-json
|
||||
url = https://github.com/tree-sitter/tree-sitter-json
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-css"]
|
||||
path = helix-syntax/languages/tree-sitter-css
|
||||
url = https://github.com/tree-sitter/tree-sitter-css
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-c-sharp"]
|
||||
path = helix-syntax/languages/tree-sitter-c-sharp
|
||||
url = https://github.com/tree-sitter/tree-sitter-c-sharp
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-c"]
|
||||
path = helix-syntax/languages/tree-sitter-c
|
||||
url = https://github.com/tree-sitter/tree-sitter-c
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-haskell"]
|
||||
path = helix-syntax/languages/tree-sitter-haskell
|
||||
url = https://github.com/tree-sitter/tree-sitter-haskell
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-swift"]
|
||||
path = helix-syntax/languages/tree-sitter-swift
|
||||
url = https://github.com/tree-sitter/tree-sitter-swift
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-toml"]
|
||||
path = helix-syntax/languages/tree-sitter-toml
|
||||
url = https://github.com/ikatyang/tree-sitter-toml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-elixir"]
|
||||
path = helix-syntax/languages/tree-sitter-elixir
|
||||
url = https://github.com/elixir-lang/tree-sitter-elixir
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-nix"]
|
||||
path = helix-syntax/languages/tree-sitter-nix
|
||||
url = https://github.com/cstrahan/tree-sitter-nix
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-latex"]
|
||||
path = helix-syntax/languages/tree-sitter-latex
|
||||
url = https://github.com/latex-lsp/tree-sitter-latex
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ledger"]
|
||||
path = helix-syntax/languages/tree-sitter-ledger
|
||||
url = https://github.com/cbarrete/tree-sitter-ledger
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-protobuf"]
|
||||
path = helix-syntax/languages/tree-sitter-protobuf
|
||||
url = https://github.com/yusdacra/tree-sitter-protobuf.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ocaml"]
|
||||
path = helix-syntax/languages/tree-sitter-ocaml
|
||||
url = https://github.com/tree-sitter/tree-sitter-ocaml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-lua"]
|
||||
path = helix-syntax/languages/tree-sitter-lua
|
||||
url = https://github.com/nvim-treesitter/tree-sitter-lua
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-yaml"]
|
||||
path = helix-syntax/languages/tree-sitter-yaml
|
||||
url = https://github.com/ikatyang/tree-sitter-yaml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-zig"]
|
||||
path = helix-syntax/languages/tree-sitter-zig
|
||||
url = https://github.com/maxxnino/tree-sitter-zig
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-svelte"]
|
||||
path = helix-syntax/languages/tree-sitter-svelte
|
||||
url = https://github.com/Himujjal/tree-sitter-svelte
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-vue"]
|
||||
path = helix-syntax/languages/tree-sitter-vue
|
||||
url = https://github.com/ikatyang/tree-sitter-vue
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-tsq"]
|
||||
path = helix-syntax/languages/tree-sitter-tsq
|
||||
url = https://github.com/tree-sitter/tree-sitter-tsq
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-cmake"]
|
||||
path = helix-syntax/languages/tree-sitter-cmake
|
||||
url = https://github.com/uyha/tree-sitter-cmake
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-glsl"]
|
||||
path = helix-syntax/languages/tree-sitter-glsl
|
||||
url = https://github.com/theHamsta/tree-sitter-glsl.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-perl"]
|
||||
path = helix-syntax/languages/tree-sitter-perl
|
||||
url = https://github.com/ganezdragon/tree-sitter-perl
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-comment"]
|
||||
path = helix-syntax/languages/tree-sitter-comment
|
||||
url = https://github.com/stsewd/tree-sitter-comment
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-wgsl"]
|
||||
path = helix-syntax/languages/tree-sitter-wgsl
|
||||
url = https://github.com/szebniok/tree-sitter-wgsl
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-llvm"]
|
||||
path = helix-syntax/languages/tree-sitter-llvm
|
||||
url = https://github.com/benwilliamgraham/tree-sitter-llvm
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-markdown"]
|
||||
path = helix-syntax/languages/tree-sitter-markdown
|
||||
url = https://github.com/MDeiml/tree-sitter-markdown
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-dart"]
|
||||
path = helix-syntax/languages/tree-sitter-dart
|
||||
url = https://github.com/UserNobody14/tree-sitter-dart.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-dockerfile"]
|
||||
path = helix-syntax/languages/tree-sitter-dockerfile
|
||||
url = https://github.com/camdencheek/tree-sitter-dockerfile.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-fish"]
|
||||
path = helix-syntax/languages/tree-sitter-fish
|
||||
url = https://github.com/ram02z/tree-sitter-fish
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-commit"]
|
||||
path = helix-syntax/languages/tree-sitter-git-commit
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-commit.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-diff"]
|
||||
path = helix-syntax/languages/tree-sitter-git-diff
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-diff.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-tablegen"]
|
||||
path = helix-syntax/languages/tree-sitter-tablegen
|
||||
url = https://github.com/Flakebi/tree-sitter-tablegen
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-rebase"]
|
||||
path = helix-syntax/languages/tree-sitter-git-rebase
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-rebase.git
|
||||
shallow = true
|
119
CHANGELOG.md
119
CHANGELOG.md
@@ -1,4 +1,123 @@
|
||||
|
||||
# 0.6.0 (2022-01-04)
|
||||
|
||||
Happy new year and a big shout out to all the contributors! We had 55 contributors in this release.
|
||||
|
||||
Helix has popped up in DPorts and Fedora Linux via COPR ([#1270](https://github.com/helix-editor/helix/pull/1270))
|
||||
|
||||
As usual the following is a brief summary, refer to the git history for a full log:
|
||||
|
||||
Breaking changes:
|
||||
|
||||
- fix: Normalize backtab into shift-tab
|
||||
|
||||
Features:
|
||||
|
||||
- Macros ([#1234](https://github.com/helix-editor/helix/pull/1234))
|
||||
- Add reverse search functionality ([#958](https://github.com/helix-editor/helix/pull/958))
|
||||
- Allow keys to be mapped to sequences of commands ([#589](https://github.com/helix-editor/helix/pull/589))
|
||||
- Make it possible to keybind TypableCommands ([#1169](https://github.com/helix-editor/helix/pull/1169))
|
||||
- Detect workspace root using language markers ([#1370](https://github.com/helix-editor/helix/pull/1370))
|
||||
- Add WORD textobject ([#991](https://github.com/helix-editor/helix/pull/991))
|
||||
- Add LSP rename_symbol (space-r) ([#1011](https://github.com/helix-editor/helix/pull/1011))
|
||||
- Added workspace_symbol_picker ([#1041](https://github.com/helix-editor/helix/pull/1041))
|
||||
- Detect filetype from shebang line ([#1001](https://github.com/helix-editor/helix/pull/1001))
|
||||
- Allow piping from stdin into a buffer on startup ([#996](https://github.com/helix-editor/helix/pull/996))
|
||||
- Add auto pairs for same-char pairs ([#1219](https://github.com/helix-editor/helix/pull/1219))
|
||||
- Update settings at runtime ([#798](https://github.com/helix-editor/helix/pull/798))
|
||||
- Enable thin LTO (cccc194)
|
||||
|
||||
Commands:
|
||||
- :wonly -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057))
|
||||
- buffer-close (:bc, :bclose) ([#1035](https://github.com/helix-editor/helix/pull/1035))
|
||||
- Add :<line> and :goto <line> commands ([#1128](https://github.com/helix-editor/helix/pull/1128))
|
||||
- :sort command ([#1288](https://github.com/helix-editor/helix/pull/1288))
|
||||
- Add m textobject for pair under cursor ([#961](https://github.com/helix-editor/helix/pull/961))
|
||||
- Implement "Goto next buffer / Goto previous buffer" commands ([#950](https://github.com/helix-editor/helix/pull/950))
|
||||
- Implement "Goto last modification" command ([#1067](https://github.com/helix-editor/helix/pull/1067))
|
||||
- Add trim_selections command ([#1092](https://github.com/helix-editor/helix/pull/1092))
|
||||
- Add movement shortcut for history ([#1088](https://github.com/helix-editor/helix/pull/1088))
|
||||
- Add command to inc/dec number under cursor ([#1027](https://github.com/helix-editor/helix/pull/1027))
|
||||
- Add support for dates for increment/decrement
|
||||
- Align selections (&) ([#1101](https://github.com/helix-editor/helix/pull/1101))
|
||||
- Implement no-yank delete/change ([#1099](https://github.com/helix-editor/helix/pull/1099))
|
||||
- Implement black hole register ([#1165](https://github.com/helix-editor/helix/pull/1165))
|
||||
- gf as goto_file (gf) ([#1102](https://github.com/helix-editor/helix/pull/1102))
|
||||
- Add last modified file (gm) ([#1093](https://github.com/helix-editor/helix/pull/1093))
|
||||
- ensure_selections_forward ([#1393](https://github.com/helix-editor/helix/pull/1393))
|
||||
- Readline style insert mode ([#1039](https://github.com/helix-editor/helix/pull/1039))
|
||||
|
||||
Usability improvements and fixes:
|
||||
|
||||
- Detect filetype on :write ([#1141](https://github.com/helix-editor/helix/pull/1141))
|
||||
- Add single and double quotes to matching pairs ([#995](https://github.com/helix-editor/helix/pull/995))
|
||||
- Launch with defaults upon invalid config/theme (rather than panicking) ([#982](https://github.com/helix-editor/helix/pull/982))
|
||||
- If switching away from an empty scratch buffer, remove it ([#935](https://github.com/helix-editor/helix/pull/935))
|
||||
- Truncate the starts of file paths instead of the ends in picker ([#951](https://github.com/helix-editor/helix/pull/951))
|
||||
- Truncate the start of file paths in the StatusLine ([#1351](https://github.com/helix-editor/helix/pull/1351))
|
||||
- Prevent picker from previewing binaries or large file ([#939](https://github.com/helix-editor/helix/pull/939))
|
||||
- Inform when reaching undo/redo bounds ([#981](https://github.com/helix-editor/helix/pull/981))
|
||||
- search_impl will only align cursor center when it isn't in view ([#959](https://github.com/helix-editor/helix/pull/959))
|
||||
- Add <C-h>, <C-u>, <C-d>, Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034))
|
||||
- Restore screen position when aborting search ([#1047](https://github.com/helix-editor/helix/pull/1047))
|
||||
- Buffer picker: show is_modifier flag ([#1020](https://github.com/helix-editor/helix/pull/1020))
|
||||
- Add commit hash to version info, if present ([#957](https://github.com/helix-editor/helix/pull/957))
|
||||
- Implement indent-aware delete ([#1120](https://github.com/helix-editor/helix/pull/1120))
|
||||
- Jump to end char of surrounding pair from any cursor pos ([#1121](https://github.com/helix-editor/helix/pull/1121))
|
||||
- File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988))
|
||||
- Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183))
|
||||
- Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033))
|
||||
- Make kill_to_line_end behave like emacs ([#1235](https://github.com/helix-editor/helix/pull/1235))
|
||||
- Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241))
|
||||
- ui: popup: Don't allow scrolling past the end of content (3307f44c)
|
||||
- Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231))
|
||||
- Allow paste commands to take a count ([#1261](https://github.com/helix-editor/helix/pull/1261))
|
||||
- Auto pairs selection ([#1254](https://github.com/helix-editor/helix/pull/1254))
|
||||
- Use a fuzzy matcher for commands ([#1386](https://github.com/helix-editor/helix/pull/1386))
|
||||
- Add c-s to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831))
|
||||
- Fix :earlier/:later missing changeset update ([#1069](https://github.com/helix-editor/helix/pull/1069))
|
||||
- Support extend for multiple goto ([#909](https://github.com/helix-editor/helix/pull/909))
|
||||
- Add arrow-key bindings for window switching ([#933](https://github.com/helix-editor/helix/pull/933))
|
||||
- Implement key ordering for info box ([#952](https://github.com/helix-editor/helix/pull/952))
|
||||
|
||||
LSP:
|
||||
- Implement MarkedString rendering (e128a8702)
|
||||
- Don't panic if init fails (d31bef7)
|
||||
- Configurable diagnostic severity ([#1325](https://github.com/helix-editor/helix/pull/1325))
|
||||
- Resolve completion item ([#1315](https://github.com/helix-editor/helix/pull/1315))
|
||||
- Code action command support ([#1304](https://github.com/helix-editor/helix/pull/1304))
|
||||
|
||||
Grammars:
|
||||
|
||||
- Adds mint language server ([#974](https://github.com/helix-editor/helix/pull/974))
|
||||
- Perl ([#978](https://github.com/helix-editor/helix/pull/978)) ([#1280](https://github.com/helix-editor/helix/pull/1280))
|
||||
- GLSL ([#993](https://github.com/helix-editor/helix/pull/993))
|
||||
- Racket ([#1143](https://github.com/helix-editor/helix/pull/1143))
|
||||
- WGSL ([#1166](https://github.com/helix-editor/helix/pull/1166))
|
||||
- LLVM ([#1167](https://github.com/helix-editor/helix/pull/1167)) ([#1388](https://github.com/helix-editor/helix/pull/1388)) ([#1409](https://github.com/helix-editor/helix/pull/1409)) ([#1398](https://github.com/helix-editor/helix/pull/1398))
|
||||
- Markdown (49e06787)
|
||||
- Scala ([#1278](https://github.com/helix-editor/helix/pull/1278))
|
||||
- Dart ([#1250](https://github.com/helix-editor/helix/pull/1250))
|
||||
- Fish ([#1308](https://github.com/helix-editor/helix/pull/1308))
|
||||
- Dockerfile ([#1303](https://github.com/helix-editor/helix/pull/1303))
|
||||
- Git (commit, rebase, diff) ([#1338](https://github.com/helix-editor/helix/pull/1338)) ([#1402](https://github.com/helix-editor/helix/pull/1402)) ([#1373](https://github.com/helix-editor/helix/pull/1373))
|
||||
- tree-sitter-comment ([#1300](https://github.com/helix-editor/helix/pull/1300))
|
||||
- Highlight comments in c, cpp, cmake and llvm ([#1309](https://github.com/helix-editor/helix/pull/1309))
|
||||
- Improve yaml syntax highlighting highlighting ([#1294](https://github.com/helix-editor/helix/pull/1294))
|
||||
- Improve rust syntax highlighting ([#1295](https://github.com/helix-editor/helix/pull/1295))
|
||||
- Add textobjects and indents to cmake ([#1307](https://github.com/helix-editor/helix/pull/1307))
|
||||
- Add textobjects and indents to c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293))
|
||||
|
||||
New themes:
|
||||
|
||||
- Solarized dark ([#999](https://github.com/helix-editor/helix/pull/999))
|
||||
- Solarized light ([#1010](https://github.com/helix-editor/helix/pull/1010))
|
||||
- Spacebones light ([#1131](https://github.com/helix-editor/helix/pull/1131))
|
||||
- Monokai Pro ([#1206](https://github.com/helix-editor/helix/pull/1206))
|
||||
- Base16 Light and Terminal ([#1078](https://github.com/helix-editor/helix/pull/1078))
|
||||
- and a default 16 color theme, truecolor detection
|
||||
- Dracula ([#1258](https://github.com/helix-editor/helix/pull/1258))
|
||||
|
||||
# 0.5.0 (2021-10-28)
|
||||
|
||||
A big shout out to all the contributors! We had 46 contributors in this release.
|
||||
|
799
Cargo.lock
generated
799
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
17
Cargo.toml
17
Cargo.toml
@@ -4,17 +4,26 @@ members = [
|
||||
"helix-view",
|
||||
"helix-term",
|
||||
"helix-tui",
|
||||
"helix-syntax",
|
||||
"helix-lsp",
|
||||
"helix-dap",
|
||||
"helix-loader",
|
||||
"xtask",
|
||||
]
|
||||
|
||||
# Build helix-syntax in release mode to make the code path faster in development.
|
||||
# [profile.dev.package."helix-syntax"]
|
||||
# opt-level = 3
|
||||
default-members = [
|
||||
"helix-term"
|
||||
]
|
||||
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
|
||||
[profile.release]
|
||||
lto = "thin"
|
||||
# debug = true
|
||||
|
||||
[profile.opt]
|
||||
inherits = "release"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
# strip = "debuginfo" # TODO: or strip = true
|
||||
opt-level = 3
|
||||
|
@@ -36,16 +36,18 @@ We provide packaging for various distributions, but here's a quick method to
|
||||
build from source.
|
||||
|
||||
```
|
||||
git clone --recurse-submodules --shallow-submodules -j8 https://github.com/helix-editor/helix
|
||||
git clone https://github.com/helix-editor/helix
|
||||
cd helix
|
||||
cargo install --path helix-term
|
||||
hx --grammar fetch
|
||||
hx --grammar build
|
||||
```
|
||||
|
||||
This will install the `hx` binary to `$HOME/.cargo/bin`.
|
||||
This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars.
|
||||
|
||||
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
|
||||
config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
|
||||
This location can be overriden via the `HELIX_RUNTIME` environment variable.
|
||||
This location can be overridden via the `HELIX_RUNTIME` environment variable.
|
||||
|
||||
Packages already solve this for you by wrapping the `hx` binary with a wrapper
|
||||
that sets the variable to the install dir.
|
||||
@@ -56,6 +58,7 @@ that sets the variable to the install dir.
|
||||
[](https://repology.org/project/helix/versions)
|
||||
|
||||
## MacOS
|
||||
|
||||
Helix can be installed on MacOS through homebrew via:
|
||||
|
||||
```
|
||||
|
32
TODO.md
32
TODO.md
@@ -1,32 +0,0 @@
|
||||
|
||||
- tree sitter:
|
||||
- markdown
|
||||
- regex
|
||||
- kotlin
|
||||
- clojure
|
||||
- erlang
|
||||
|
||||
- [ ] completion isIncomplete support
|
||||
|
||||
1
|
||||
- [ ] respect view fullscreen flag
|
||||
- [ ] Implement marks (superset of Selection/Range)
|
||||
|
||||
- [ ] = for auto indent line/selection
|
||||
- [ ] :x for closing buffers
|
||||
- [ ] lsp: signature help
|
||||
|
||||
2
|
||||
- [ ] macro recording
|
||||
- [ ] extend selection (treesitter select parent node) (replaces viw, vi(, va( etc )
|
||||
- [ ] selection align
|
||||
- [ ] store some state between restarts: file positions, prompt history
|
||||
- [ ] highlight matched characters in picker
|
||||
|
||||
3
|
||||
- [ ] diff mode with highlighting?
|
||||
- [ ] snippet support (tab to jump between marks)
|
||||
- [ ] gamelisp/wasm scripting
|
||||
|
||||
X
|
||||
- [ ] rendering via skulpin/skia or raw wgpu
|
@@ -11,7 +11,7 @@
|
||||
"ui.statusline" = { fg = "black", bg = "white" }
|
||||
"ui.statusline.inactive" = { fg = "gray", bg = "white" }
|
||||
"ui.help" = { modifiers = ["reversed"] }
|
||||
"ui.cursor" = { modifiers = ["reversed"] }
|
||||
"ui.cursor" = { fg = "white", modifiers = ["reversed"] }
|
||||
"variable" = "red"
|
||||
"constant.numeric" = "yellow"
|
||||
"constant" = "yellow"
|
||||
@@ -29,6 +29,15 @@
|
||||
"namespace" = "magenta"
|
||||
"ui.help" = { fg = "white", bg = "black" }
|
||||
|
||||
"markup.heading" = "blue"
|
||||
"markup.list" = "red"
|
||||
"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
|
||||
"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
|
||||
"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
|
||||
"markup.link.text" = "red"
|
||||
"markup.quote" = "cyan"
|
||||
"markup.raw" = "green"
|
||||
|
||||
"diff.plus" = "green"
|
||||
"diff.delta" = "yellow"
|
||||
"diff.minus" = "red"
|
||||
|
@@ -1,5 +1,7 @@
|
||||
# Summary
|
||||
|
||||
[Helix](./title-page.md)
|
||||
|
||||
- [Installation](./install.md)
|
||||
- [Usage](./usage.md)
|
||||
- [Keymap](./keymap.md)
|
||||
|
@@ -5,9 +5,29 @@ To override global configuration parameters, create a `config.toml` file located
|
||||
* Linux and Mac: `~/.config/helix/config.toml`
|
||||
* Windows: `%AppData%\helix\config.toml`
|
||||
|
||||
> Note: You may use `hx --edit-config` to create and edit the `config.toml` file.
|
||||
|
||||
Example config:
|
||||
|
||||
```toml
|
||||
theme = "onedark"
|
||||
|
||||
[editor]
|
||||
line-number = "relative"
|
||||
mouse = false
|
||||
|
||||
[editor.cursor-shape]
|
||||
insert = "bar"
|
||||
normal = "block"
|
||||
select = "underline"
|
||||
|
||||
[editor.file-picker]
|
||||
hidden = false
|
||||
```
|
||||
|
||||
## Editor
|
||||
|
||||
`[editor]` section of the config.
|
||||
### `[editor]` Section
|
||||
|
||||
| Key | Description | Default |
|
||||
|--|--|---------|
|
||||
@@ -16,16 +36,35 @@ To override global configuration parameters, create a `config.toml` file located
|
||||
| `middle-click-paste` | Middle click paste support. | `true` |
|
||||
| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
|
||||
| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
|
||||
| `line-number` | Line number display (`absolute`, `relative`) | `absolute` |
|
||||
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
|
||||
| `auto-pairs` | Enable automatic insertion of pairs to parenthese, brackets, etc. | `true` |
|
||||
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
|
||||
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
|
||||
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` |
|
||||
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
|
||||
| `auto-info` | Whether to display infoboxes | `true` |
|
||||
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` |
|
||||
|
||||
`[editor.filepicker]` section of the config. Sets options for file picker and global search. All but the last key listed in the default file-picker configuration below are IgnoreOptions: whether hidden files and files listed within ignore files are ignored by (not visible in) the helix file picker and global search. There is also one other key, `max-depth` available, which is not defined by default.
|
||||
### `[editor.cursor-shape]` Section
|
||||
|
||||
Defines the shape of cursor in each mode. Note that due to limitations
|
||||
of the terminal environment, only the primary cursor can change shape.
|
||||
|
||||
| Key | Description | Default |
|
||||
| --- | ----------- | ------- |
|
||||
| `normal` | Cursor shape in [normal mode][normal mode] | `block` |
|
||||
| `insert` | Cursor shape in [insert mode][insert mode] | `block` |
|
||||
| `select` | Cursor shape in [select mode][select mode] | `block` |
|
||||
|
||||
[normal mode]: ./keymap.md#normal-mode
|
||||
[insert mode]: ./keymap.md#insert-mode
|
||||
[select mode]: ./keymap.md#select--extend-mode
|
||||
|
||||
### `[editor.file-picker]` Section
|
||||
|
||||
Sets options for file picker and global search. All but the last key listed in
|
||||
the default file-picker configuration below are IgnoreOptions: whether hidden
|
||||
files and files listed within ignore files are ignored by (not visible in) the
|
||||
helix file picker and global search. There is also one other key, `max-depth`
|
||||
available, which is not defined by default.
|
||||
|
||||
| Key | Description | Default |
|
||||
|--|--|---------|
|
||||
@@ -37,9 +76,62 @@ To override global configuration parameters, create a `config.toml` file located
|
||||
|`git-exclude` | Enables reading `.git/info/exclude` files. | true
|
||||
|`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`.
|
||||
|
||||
### `[editor.auto-pairs]` Section
|
||||
|
||||
Enable automatic insertion of pairs to parentheses, brackets, etc. Can be
|
||||
a simple boolean value, or a specific mapping of pairs of single characters.
|
||||
|
||||
| Key | Description |
|
||||
| --- | ----------- |
|
||||
| `false` | Completely disable auto pairing, regardless of language-specific settings
|
||||
| `true` | Use the default pairs: <code>(){}[]''""``</code>
|
||||
| Mapping of pairs | e.g. `{ "(" = ")", "{" = "}", ... }`
|
||||
|
||||
Example
|
||||
|
||||
```toml
|
||||
[editor.auto-pairs]
|
||||
'(' = ')'
|
||||
'{' = '}'
|
||||
'[' = ']'
|
||||
'"' = '"'
|
||||
'`' = '`'
|
||||
'<' = '>'
|
||||
```
|
||||
|
||||
Additionally, this setting can be used in a language config. Unless
|
||||
the editor setting is `false`, this will override the editor config in
|
||||
documents with this language.
|
||||
|
||||
Example `languages.toml` that adds <> and removes ''
|
||||
|
||||
```toml
|
||||
[[language]]
|
||||
name = "rust"
|
||||
|
||||
[language.auto-pairs]
|
||||
'(' = ')'
|
||||
'{' = '}'
|
||||
'[' = ']'
|
||||
'"' = '"'
|
||||
'`' = '`'
|
||||
'<' = '>'
|
||||
```
|
||||
|
||||
### `[editor.search]` Section
|
||||
|
||||
Search specific options.
|
||||
|
||||
| Key | Description | Default |
|
||||
|--|--|---------|
|
||||
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
|
||||
| `wrap-around`| Whether the search should wrap after depleting the matches | `true` |
|
||||
|
||||
|
||||
## LSP
|
||||
|
||||
To display all language server messages in the status line add the following to your `config.toml`:
|
||||
|
||||
```toml
|
||||
[lsp]
|
||||
display-messages = true
|
||||
|
@@ -1,41 +1,55 @@
|
||||
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| bash | ✓ | | | `bash-language-server` |
|
||||
| c | ✓ | | | `clangd` |
|
||||
| c-sharp | ✓ | | | |
|
||||
| c | ✓ | ✓ | ✓ | `clangd` |
|
||||
| c-sharp | ✓ | | | `OmniSharp` |
|
||||
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
|
||||
| comment | ✓ | | | |
|
||||
| cpp | ✓ | | | `clangd` |
|
||||
| cpp | ✓ | ✓ | ✓ | `clangd` |
|
||||
| css | ✓ | | | |
|
||||
| dart | ✓ | | ✓ | `dart` |
|
||||
| dockerfile | ✓ | | | `docker-langserver` |
|
||||
| elixir | ✓ | | | `elixir-ls` |
|
||||
| elm | ✓ | | | `elm-language-server` |
|
||||
| erlang | ✓ | | | |
|
||||
| fish | ✓ | ✓ | ✓ | |
|
||||
| git-commit | ✓ | | | |
|
||||
| git-config | ✓ | | | |
|
||||
| git-diff | ✓ | | | |
|
||||
| git-rebase | ✓ | | | |
|
||||
| glsl | ✓ | | ✓ | |
|
||||
| go | ✓ | ✓ | ✓ | `gopls` |
|
||||
| graphql | ✓ | | | |
|
||||
| haskell | ✓ | | | `haskell-language-server-wrapper` |
|
||||
| hcl | ✓ | | ✓ | `terraform-ls` |
|
||||
| html | ✓ | | | |
|
||||
| iex | ✓ | | | |
|
||||
| java | ✓ | | | |
|
||||
| javascript | ✓ | | ✓ | |
|
||||
| javascript | ✓ | | ✓ | `typescript-language-server` |
|
||||
| json | ✓ | | ✓ | |
|
||||
| julia | ✓ | | | `julia` |
|
||||
| kotlin | ✓ | | | `kotlin-language-server` |
|
||||
| latex | ✓ | | | |
|
||||
| lean | ✓ | | | `lean` |
|
||||
| ledger | ✓ | | | |
|
||||
| llvm | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir-yaml | ✓ | | ✓ | |
|
||||
| lua | ✓ | | ✓ | |
|
||||
| make | ✓ | | | |
|
||||
| markdown | ✓ | | | |
|
||||
| mint | | | | `mint` |
|
||||
| nix | ✓ | | ✓ | `rnix-lsp` |
|
||||
| ocaml | ✓ | | ✓ | |
|
||||
| ocaml-interface | ✓ | | | |
|
||||
| perl | ✓ | ✓ | ✓ | |
|
||||
| php | ✓ | | ✓ | |
|
||||
| php | ✓ | ✓ | ✓ | |
|
||||
| prolog | | | | `swipl` |
|
||||
| protobuf | ✓ | | ✓ | |
|
||||
| python | ✓ | ✓ | ✓ | `pylsp` |
|
||||
| racket | | | | `racket` |
|
||||
| regex | ✓ | | | |
|
||||
| rescript | ✓ | ✓ | | `rescript-language-server` |
|
||||
| ruby | ✓ | | ✓ | `solargraph` |
|
||||
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
|
||||
| scala | ✓ | | ✓ | `metals` |
|
||||
@@ -44,6 +58,7 @@
|
||||
| toml | ✓ | | | |
|
||||
| tsq | ✓ | | | |
|
||||
| tsx | ✓ | | | `typescript-language-server` |
|
||||
| twig | ✓ | | | |
|
||||
| typescript | ✓ | | ✓ | `typescript-language-server` |
|
||||
| vue | ✓ | | | |
|
||||
| wgsl | ✓ | | | |
|
||||
|
@@ -5,6 +5,10 @@
|
||||
| `:open`, `:o` | Open a file from disk into the current view. |
|
||||
| `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. |
|
||||
| `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully (ignoring unsaved changes). |
|
||||
| `:buffer-close-others`, `:bco`, `:bcloseother` | Close all buffers but the currently focused one. |
|
||||
| `:buffer-close-others!`, `:bco!`, `:bcloseother!` | Close all buffers but the currently focused one. |
|
||||
| `:buffer-close-all`, `:bca`, `:bcloseall` | Close all buffers, without quiting. |
|
||||
| `:buffer-close-all!`, `:bca!`, `:bcloseall!` | Close all buffers forcefully (ignoring unsaved changes), without quiting. |
|
||||
| `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) |
|
||||
| `:new`, `:n` | Create a new scratch buffer. |
|
||||
| `:format`, `:fmt` | Format the file using the LSP formatter. |
|
||||
@@ -20,6 +24,7 @@
|
||||
| `:quit-all`, `:qa` | Close all views. |
|
||||
| `:quit-all!`, `:qa!` | Close all views forcefully (ignoring unsaved changes). |
|
||||
| `:cquit`, `:cq` | Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2). |
|
||||
| `:cquit!`, `:cq!` | Quit with exit code (default 1) forcefully (ignoring unsaved changes). Accepts an optional integer exit code (:cq! 2). |
|
||||
| `:theme` | Change the editor theme. |
|
||||
| `:clipboard-yank` | Yank main selection into system clipboard. |
|
||||
| `:clipboard-yank-join` | Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline. |
|
||||
@@ -37,10 +42,16 @@
|
||||
| `:encoding` | Set encoding based on `https://encoding.spec.whatwg.org` |
|
||||
| `:reload` | Discard changes and reload from the source file. |
|
||||
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
|
||||
| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
|
||||
| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
|
||||
| `:debug-eval` | Evaluate expression in current debug context. |
|
||||
| `:vsplit`, `:vs` | Open the file in a vertical split. |
|
||||
| `:vsplit-new`, `:vnew` | Open a scratch buffer in a vertical split. |
|
||||
| `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. |
|
||||
| `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. |
|
||||
| `:tutor` | Open the tutorial. |
|
||||
| `:goto`, `:g` | Go to line number. |
|
||||
| `:set-option`, `:set` | Set a config option at runtime |
|
||||
| `:sort` | Sort ranges in selection. |
|
||||
| `:rsort` | Sort ranges in selection in reverse order. |
|
||||
| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |
|
||||
|
@@ -1,45 +1,68 @@
|
||||
# Adding languages
|
||||
|
||||
## Submodules
|
||||
## Language configuration
|
||||
|
||||
To add a new language, you should first add a tree-sitter submodule. To do this,
|
||||
you can run the command
|
||||
```sh
|
||||
git submodule add -f <repository> helix-syntax/languages/tree-sitter-<name>
|
||||
```
|
||||
For example, to add tree-sitter-ocaml you would run
|
||||
```sh
|
||||
git submodule add -f https://github.com/tree-sitter/tree-sitter-ocaml helix-syntax/languages/tree-sitter-ocaml
|
||||
```
|
||||
Make sure the submodule is shallow by doing
|
||||
```sh
|
||||
git config -f .gitmodules submodule.helix-syntax/languages/tree-sitter-<name>.shallow true
|
||||
```
|
||||
|
||||
or you can manually add `shallow = true` to `.gitmodules`.
|
||||
|
||||
## languages.toml
|
||||
|
||||
Next, you need to add the language to the [`languages.toml`][languages.toml] found in the root of
|
||||
the repository; this `languages.toml` file is included at compilation time, and
|
||||
is distinct from the `language.toml` file in the user's [configuration
|
||||
To add a new language, you need to add a `language` entry to the
|
||||
[`languages.toml`][languages.toml] found in the root of the repository;
|
||||
this `languages.toml` file is included at compilation time, and is
|
||||
distinct from the `languages.toml` file in the user's [configuration
|
||||
directory](../configuration.md).
|
||||
|
||||
```toml
|
||||
[[language]]
|
||||
name = "mylang"
|
||||
scope = "scope.mylang"
|
||||
injection-regex = "^mylang$"
|
||||
file-types = ["mylang", "myl"]
|
||||
comment-token = "#"
|
||||
indent = { tab-width = 2, unit = " " }
|
||||
```
|
||||
|
||||
These are the available keys and descriptions for the file.
|
||||
|
||||
| Key | Description |
|
||||
| ---- | ----------- |
|
||||
| name | The name of the language |
|
||||
| scope | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
|
||||
| injection-regex | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
|
||||
| file-types | The filetypes of the language, for example `["yml", "yaml"]` |
|
||||
| shebangs | The interpreters from the shebang line, for example `["sh", "bash"]` |
|
||||
| roots | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
|
||||
| auto-format | Whether to autoformat this language when saving |
|
||||
| diagnostic-severity | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
|
||||
| comment-token | The token to use as a comment-token |
|
||||
| indent | The indent to use. Has sub keys `tab-width` and `unit` |
|
||||
| config | Language server configuration |
|
||||
| Key | Description |
|
||||
| ---- | ----------- |
|
||||
| `name` | The name of the language |
|
||||
| `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
|
||||
| `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
|
||||
| `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. Extensions and full file names are supported. |
|
||||
| `shebangs` | The interpreters from the shebang line, for example `["sh", "bash"]` |
|
||||
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
|
||||
| `auto-format` | Whether to autoformat this language when saving |
|
||||
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
|
||||
| `comment-token` | The token to use as a comment-token |
|
||||
| `indent` | The indent to use. Has sub keys `tab-width` and `unit` |
|
||||
| `config` | Language server configuration |
|
||||
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
|
||||
|
||||
## Grammar configuration
|
||||
|
||||
If a tree-sitter grammar is available for the language, add a new `grammar`
|
||||
entry to `languages.toml`.
|
||||
|
||||
```toml
|
||||
[[grammar]]
|
||||
name = "mylang"
|
||||
source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
|
||||
```
|
||||
|
||||
Grammar configuration takes these keys:
|
||||
|
||||
| Key | Description |
|
||||
| --- | ----------- |
|
||||
| `name` | The name of the tree-sitter grammar |
|
||||
| `source` | The method of fetching the grammar - a table with a schema defined below |
|
||||
|
||||
Where `source` is a table with either these keys when using a grammar from a
|
||||
git repository:
|
||||
|
||||
| Key | Description |
|
||||
| --- | ----------- |
|
||||
| `git` | A git remote URL from which the grammar should be cloned |
|
||||
| `rev` | The revision (commit hash or tag) which should be fetched |
|
||||
| `subpath` | A path within the grammar directory which should be built. Some grammar repositories host multiple grammars (for example `tree-sitter-typescript` and `tree-sitter-ocaml`) in subdirectories. This key is used to point `hx --grammar build` to the correct path for compilation. When omitted, the root of repository is used |
|
||||
|
||||
Or a `path` key with an absolute path to a locally available grammar directory.
|
||||
|
||||
## Queries
|
||||
|
||||
@@ -51,18 +74,14 @@ gives more info on how to write queries.
|
||||
|
||||
> NOTE: When evaluating queries, the first matching query takes
|
||||
precedence, which is different from other editors like neovim where
|
||||
the last matching query supercedes the ones before it. See
|
||||
the last matching query supersedes the ones before it. See
|
||||
[this issue][neovim-query-precedence] for an example.
|
||||
|
||||
## Common Issues
|
||||
|
||||
- If you get errors when building after switching branches, you may have to remove or update tree-sitter submodules. You can update submodules by running
|
||||
```sh
|
||||
git submodule sync; git submodule update --init
|
||||
```
|
||||
- Make sure to not use the `--remote` flag. To remove submodules look inside the `.gitmodules` and remove directories that are not present inside of it.
|
||||
- If you get errors when running after switching branches, you may have to update the tree-sitter grammars. Run `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars.
|
||||
|
||||
- If a parser is segfaulting or you want to remove the parser, make sure to remove the submodule *and* the compiled parser in `runtime/grammar/<name>.so`
|
||||
- If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/<name>.so`
|
||||
|
||||
- The indents query is `indents.toml`, *not* `indents.scm`. See [this](https://github.com/helix-editor/helix/issues/114) issue for more information.
|
||||
|
||||
|
@@ -21,10 +21,27 @@ The following [captures][tree-sitter-captures] are recognized:
|
||||
| `class.inside` |
|
||||
| `class.around` |
|
||||
| `parameter.inside` |
|
||||
| `comment.inside` |
|
||||
| `comment.around` |
|
||||
|
||||
[Example query files][textobject-examples] can be found in the helix GitHub repository.
|
||||
|
||||
## Queries for Textobject Based Navigation
|
||||
|
||||
[Tree-sitter based navigation][textobjects-nav] is done using captures in the
|
||||
following order:
|
||||
|
||||
- `object.movement`
|
||||
- `object.around`
|
||||
- `object.inside`
|
||||
|
||||
For example if a `function.around` capture has been already defined for a language
|
||||
in its `textobjects.scm` file, function navigation should also work automatically.
|
||||
`function.movement` should be defined only if the node captured by `function.around`
|
||||
doesn't make sense in a navigation context.
|
||||
|
||||
[textobjects]: ../usage.md#textobjects
|
||||
[textobjects-nav]: ../usage.md#tree-sitter-textobject-based-navigation
|
||||
[tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax
|
||||
[tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes
|
||||
[textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l=
|
||||
|
@@ -19,7 +19,12 @@ brew install helix
|
||||
|
||||
A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in
|
||||
the project root. The flake can also be used to spin up a reproducible development
|
||||
shell for working on Helix.
|
||||
shell for working on Helix with `nix develop`.
|
||||
|
||||
Flake outputs are cached for each push to master using
|
||||
[Cachix](https://www.cachix.org/). With Cachix
|
||||
[installed](https://docs.cachix.org/installation), `cachix use helix` will
|
||||
configure Nix to use cached outputs when possible.
|
||||
|
||||
### Arch Linux
|
||||
|
||||
@@ -39,7 +44,7 @@ sudo dnf install helix
|
||||
## Build from source
|
||||
|
||||
```
|
||||
git clone --recurse-submodules --shallow-submodules -j8 https://github.com/helix-editor/helix
|
||||
git clone https://github.com/helix-editor/helix
|
||||
cd helix
|
||||
cargo install --path helix-term
|
||||
```
|
||||
@@ -49,3 +54,9 @@ This will install the `hx` binary to `$HOME/.cargo/bin`.
|
||||
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
|
||||
config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden
|
||||
via the `HELIX_RUNTIME` environment variable.
|
||||
|
||||
## Building tree-sitter grammars
|
||||
|
||||
Tree-sitter grammars must be fetched and compiled if not pre-packaged.
|
||||
Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them
|
||||
with `hx --grammar build` (requires a C compiler).
|
||||
|
@@ -9,40 +9,33 @@
|
||||
|
||||
> NOTE: Unlike vim, `f`, `F`, `t` and `T` are not confined to the current line.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `h`/`Left` | Move left | `move_char_left` |
|
||||
| `j`/`Down` | Move down | `move_line_down` |
|
||||
| `k`/`Up` | Move up | `move_line_up` |
|
||||
| `l`/`Right` | Move right | `move_char_right` |
|
||||
| `w` | Move next word start | `move_next_word_start` |
|
||||
| `b` | Move previous word start | `move_prev_word_start` |
|
||||
| `e` | Move next word end | `move_next_word_end` |
|
||||
| `W` | Move next WORD start | `move_next_long_word_start` |
|
||||
| `B` | Move previous WORD start | `move_prev_long_word_start` |
|
||||
| `E` | Move next WORD end | `move_next_long_word_end` |
|
||||
| `t` | Find 'till next char | `find_till_char` |
|
||||
| `f` | Find next char | `find_next_char` |
|
||||
| `T` | Find 'till previous char | `till_prev_char` |
|
||||
| `F` | Find previous char | `find_prev_char` |
|
||||
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
|
||||
| `Home` | Move to the start of the line | `goto_line_start` |
|
||||
| `End` | Move to the end of the line | `goto_line_end` |
|
||||
| `PageUp` | Move page up | `page_up` |
|
||||
| `PageDown` | Move page down | `page_down` |
|
||||
| `Ctrl-u` | Move half page up | `half_page_up` |
|
||||
| `Ctrl-d` | Move half page down | `half_page_down` |
|
||||
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
|
||||
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
|
||||
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
|
||||
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
|
||||
| `g` | Enter [goto mode](#goto-mode) | N/A |
|
||||
| `m` | Enter [match mode](#match-mode) | N/A |
|
||||
| `:` | Enter command mode | `command_mode` |
|
||||
| `z` | Enter [view mode](#view-mode) | N/A |
|
||||
| `Z` | Enter sticky [view mode](#view-mode) | N/A |
|
||||
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
|
||||
| `Space` | Enter [space mode](#space-mode) | N/A |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `h`, `Left` | Move left | `move_char_left` |
|
||||
| `j`, `Down` | Move down | `move_line_down` |
|
||||
| `k`, `Up` | Move up | `move_line_up` |
|
||||
| `l`, `Right` | Move right | `move_char_right` |
|
||||
| `w` | Move next word start | `move_next_word_start` |
|
||||
| `b` | Move previous word start | `move_prev_word_start` |
|
||||
| `e` | Move next word end | `move_next_word_end` |
|
||||
| `W` | Move next WORD start | `move_next_long_word_start` |
|
||||
| `B` | Move previous WORD start | `move_prev_long_word_start` |
|
||||
| `E` | Move next WORD end | `move_next_long_word_end` |
|
||||
| `t` | Find 'till next char | `find_till_char` |
|
||||
| `f` | Find next char | `find_next_char` |
|
||||
| `T` | Find 'till previous char | `till_prev_char` |
|
||||
| `F` | Find previous char | `find_prev_char` |
|
||||
| `G` | Go to line number `<n>` | `goto_line` |
|
||||
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
|
||||
| `Home` | Move to the start of the line | `goto_line_start` |
|
||||
| `End` | Move to the end of the line | `goto_line_end` |
|
||||
| `Ctrl-b`, `PageUp` | Move page up | `page_up` |
|
||||
| `Ctrl-f`, `PageDown` | Move page down | `page_down` |
|
||||
| `Ctrl-u` | Move half page up | `half_page_up` |
|
||||
| `Ctrl-d` | Move half page down | `half_page_down` |
|
||||
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
|
||||
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
|
||||
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
|
||||
|
||||
### Changes
|
||||
|
||||
@@ -82,45 +75,50 @@
|
||||
|
||||
#### Shell
|
||||
|
||||
| Key | Description | Command |
|
||||
| ------ | ----------- | ------- |
|
||||
| <code>|</code> | Pipe each selection through shell command, replacing with output | `shell_pipe` |
|
||||
| <code>Alt-|</code> | Pipe each selection into shell command, ignoring output | `shell_pipe_to` |
|
||||
| `!` | Run shell command, inserting output before each selection | `shell_insert_output` |
|
||||
| `Alt-!` | Run shell command, appending output after each selection | `shell_append_output` |
|
||||
| Key | Description | Command |
|
||||
| ------ | ----------- | ------- |
|
||||
| <code>|</code> | Pipe each selection through shell command, replacing with output | `shell_pipe` |
|
||||
| <code>Alt-|</code> | Pipe each selection into shell command, ignoring output | `shell_pipe_to` |
|
||||
| `!` | Run shell command, inserting output before each selection | `shell_insert_output` |
|
||||
| `Alt-!` | Run shell command, appending output after each selection | `shell_append_output` |
|
||||
| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` |
|
||||
|
||||
|
||||
### Selection manipulation
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `s` | Select all regex matches inside selections | `select_regex` |
|
||||
| `S` | Split selection into subselections on regex matches | `split_selection` |
|
||||
| `Alt-s` | Split selection on newlines | `split_selection_on_newline` |
|
||||
| `&` | Align selection in columns | `align_selections` |
|
||||
| `_` | Trim whitespace from the selection | `trim_selections` |
|
||||
| `;` | Collapse selection onto a single cursor | `collapse_selection` |
|
||||
| `Alt-;` | Flip selection cursor and anchor | `flip_selections` |
|
||||
| `,` | Keep only the primary selection | `keep_primary_selection` |
|
||||
| `Alt-,` | Remove the primary selection | `remove_primary_selection` |
|
||||
| `C` | Copy selection onto the next line (Add cursor below) | `copy_selection_on_next_line` |
|
||||
| `Alt-C` | Copy selection onto the previous line (Add cursor above) | `copy_selection_on_prev_line` |
|
||||
| `(` | Rotate main selection backward | `rotate_selections_backward` |
|
||||
| `)` | Rotate main selection forward | `rotate_selections_forward` |
|
||||
| `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` |
|
||||
| `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` |
|
||||
| `%` | Select entire file | `select_all` |
|
||||
| `x` | Select current line, if already selected, extend to next line | `extend_line` |
|
||||
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
|
||||
| | Expand selection to parent syntax node TODO: pick a key (**TS**) | `expand_selection` |
|
||||
| `J` | Join lines inside selection | `join_selections` |
|
||||
| `K` | Keep selections matching the regex | `keep_selections` |
|
||||
| `Alt-K` | Remove selections matching the regex | `remove_selections` |
|
||||
| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` |
|
||||
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `s` | Select all regex matches inside selections | `select_regex` |
|
||||
| `S` | Split selection into subselections on regex matches | `split_selection` |
|
||||
| `Alt-s` | Split selection on newlines | `split_selection_on_newline` |
|
||||
| `&` | Align selection in columns | `align_selections` |
|
||||
| `_` | Trim whitespace from the selection | `trim_selections` |
|
||||
| `;` | Collapse selection onto a single cursor | `collapse_selection` |
|
||||
| `Alt-;` | Flip selection cursor and anchor | `flip_selections` |
|
||||
| `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` |
|
||||
| `,` | Keep only the primary selection | `keep_primary_selection` |
|
||||
| `Alt-,` | Remove the primary selection | `remove_primary_selection` |
|
||||
| `C` | Copy selection onto the next line (Add cursor below) | `copy_selection_on_next_line` |
|
||||
| `Alt-C` | Copy selection onto the previous line (Add cursor above) | `copy_selection_on_prev_line` |
|
||||
| `(` | Rotate main selection backward | `rotate_selections_backward` |
|
||||
| `)` | Rotate main selection forward | `rotate_selections_forward` |
|
||||
| `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` |
|
||||
| `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` |
|
||||
| `%` | Select entire file | `select_all` |
|
||||
| `x` | Select current line, if already selected, extend to next line | `extend_line` |
|
||||
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
|
||||
| `J` | Join lines inside selection | `join_selections` |
|
||||
| `K` | Keep selections matching the regex | `keep_selections` |
|
||||
| `Alt-K` | Remove selections matching the regex | `remove_selections` |
|
||||
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
|
||||
| `Alt-k`, `Alt-up` | Expand selection to parent syntax node (**TS**) | `expand_selection` |
|
||||
| `Alt-j`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
|
||||
| `Alt-h`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
|
||||
| `Alt-l`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
|
||||
|
||||
### Search
|
||||
|
||||
Search commands all operate on the `/` register by default. Use `"<char>` to operate on a different one.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
@@ -134,6 +132,17 @@
|
||||
|
||||
These sub-modes are accessible from normal mode and typically switch back to normal mode after a command.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
|
||||
| `g` | Enter [goto mode](#goto-mode) | N/A |
|
||||
| `m` | Enter [match mode](#match-mode) | N/A |
|
||||
| `:` | Enter command mode | `command_mode` |
|
||||
| `z` | Enter [view mode](#view-mode) | N/A |
|
||||
| `Z` | Enter sticky [view mode](#view-mode) | N/A |
|
||||
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
|
||||
| `Space` | Enter [space mode](#space-mode) | N/A |
|
||||
|
||||
#### View mode
|
||||
|
||||
View mode is intended for scrolling and manipulating the view without changing
|
||||
@@ -142,18 +151,18 @@ key to return to normal mode after usage (useful when you're simply looking
|
||||
over text and not actively editing it).
|
||||
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `z` , `c` | Vertically center the line | `align_view_center` |
|
||||
| `t` | Align the line to the top of the screen | `align_view_top` |
|
||||
| `b` | Align the line to the bottom of the screen | `align_view_bottom` |
|
||||
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` |
|
||||
| `j` , `down` | Scroll the view downwards | `scroll_down` |
|
||||
| `k` , `up` | Scroll the view upwards | `scroll_up` |
|
||||
| `f` | Move page down | `page_down` |
|
||||
| `b` | Move page up | `page_up` |
|
||||
| `d` | Move half page down | `half_page_down` |
|
||||
| `u` | Move half page up | `half_page_up` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `z`, `c` | Vertically center the line | `align_view_center` |
|
||||
| `t` | Align the line to the top of the screen | `align_view_top` |
|
||||
| `b` | Align the line to the bottom of the screen | `align_view_bottom` |
|
||||
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` |
|
||||
| `j`, `down` | Scroll the view downwards | `scroll_down` |
|
||||
| `k`, `up` | Scroll the view upwards | `scroll_up` |
|
||||
| `Ctrl-f`, `PageDown` | Move page down | `page_down` |
|
||||
| `Ctrl-b`, `PageUp` | Move page up | `page_up` |
|
||||
| `Ctrl-d` | Move half page down | `half_page_down` |
|
||||
| `Ctrl-u` | Move half page up | `half_page_up` |
|
||||
|
||||
#### Goto mode
|
||||
|
||||
@@ -201,19 +210,19 @@ TODO: Mappings for selecting syntax nodes (a superset of `[`).
|
||||
|
||||
This layer is similar to vim keybindings as kakoune does not support windows.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ------------- | ------- |
|
||||
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` |
|
||||
| `v`, `Ctrl-v` | Vertical right split | `vsplit` |
|
||||
| `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` |
|
||||
| `h`, `Ctrl-h`, `left` | Move to left split | `jump_view_left` |
|
||||
| `f` | Go to files in the selection in horizontal splits | `goto_file` |
|
||||
| `F` | Go to files in the selection in vertical splits | `goto_file` |
|
||||
| `j`, `Ctrl-j`, `down` | Move to split below | `jump_view_down` |
|
||||
| `k`, `Ctrl-k`, `up` | Move to split above | `jump_view_up` |
|
||||
| `l`, `Ctrl-l`, `right` | Move to right split | `jump_view_right` |
|
||||
| `q`, `Ctrl-q` | Close current window | `wclose` |
|
||||
| `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ------------- | ------- |
|
||||
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` |
|
||||
| `v`, `Ctrl-v` | Vertical right split | `vsplit` |
|
||||
| `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` |
|
||||
| `f` | Go to files in the selection in horizontal splits | `goto_file` |
|
||||
| `F` | Go to files in the selection in vertical splits | `goto_file` |
|
||||
| `h`, `Ctrl-h`, `Left` | Move to left split | `jump_view_left` |
|
||||
| `j`, `Ctrl-j`, `Down` | Move to split below | `jump_view_down` |
|
||||
| `k`, `Ctrl-k`, `Up` | Move to split above | `jump_view_up` |
|
||||
| `l`, `Ctrl-l`, `Right` | Move to right split | `jump_view_right` |
|
||||
| `q`, `Ctrl-q` | Close current window | `wclose` |
|
||||
| `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` |
|
||||
|
||||
#### Space mode
|
||||
|
||||
@@ -237,6 +246,7 @@ This layer is a kludge of mappings, mostly pickers.
|
||||
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
|
||||
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
|
||||
| `/` | Global search in workspace folder | `global_search` |
|
||||
| `?` | Open command palette | `command_palette` |
|
||||
|
||||
> TIP: Global search displays results in a fuzzy picker, use `space + '` to bring it back up after opening a file.
|
||||
|
||||
@@ -253,44 +263,67 @@ Displays documentation for item under cursor.
|
||||
|
||||
Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired).
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
|
||||
| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` |
|
||||
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
|
||||
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
|
||||
| `[space` | Add newline above | `add_newline_above` |
|
||||
| `]space` | Add newline below | `add_newline_below` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
|
||||
| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` |
|
||||
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
|
||||
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
|
||||
| `]f` | Go to next function (**TS**) | `goto_next_function` |
|
||||
| `[f` | Go to previous function (**TS**) | `goto_prev_function` |
|
||||
| `]c` | Go to next class (**TS**) | `goto_next_class` |
|
||||
| `[c` | Go to previous class (**TS**) | `goto_prev_class` |
|
||||
| `]a` | Go to next argument/parameter (**TS**) | `goto_next_parameter` |
|
||||
| `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` |
|
||||
| `]o` | Go to next comment (**TS**) | `goto_next_comment` |
|
||||
| `[o` | Go to previous comment (**TS**) | `goto_prev_comment` |
|
||||
| `[space` | Add newline above | `add_newline_above` |
|
||||
| `]space` | Add newline below | `add_newline_below` |
|
||||
|
||||
## Insert Mode
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `Escape` | Switch to normal mode | `normal_mode` |
|
||||
| `Ctrl-x` | Autocomplete | `completion` |
|
||||
| `Ctrl-r` | Insert a register content | `insert_register` |
|
||||
| `Ctrl-w` | Delete previous word | `delete_word_backward` |
|
||||
| `Alt-d` | Delete next word | `delete_word_forward` |
|
||||
| `Alt-b`, `Alt-Left` | Backward a word | `move_prev_word_end` |
|
||||
| `Ctrl-b`, `Left` | Backward a char | `move_char_left` |
|
||||
| `Alt-f`, `Alt-Right` | Forward a word | `move_next_word_start` |
|
||||
| `Ctrl-f`, `Right` | Forward a char | `move_char_right` |
|
||||
| `Ctrl-e`, `End` | move to line end | `goto_line_end_newline` |
|
||||
| `Ctrl-a`, `Home` | move to line start | `goto_line_start` |
|
||||
| `Ctrl-u` | delete to start of line | `kill_to_line_start` |
|
||||
| `Ctrl-k` | delete to end of line | `kill_to_line_end` |
|
||||
| `backspace`, `Ctrl-h` | delete previous char | `delete_char_backward` |
|
||||
| `delete`, `Ctrl-d` | delete next char | `delete_char_forward` |
|
||||
| `Ctrl-p`, `Up` | move to previous line | `move_line_up` |
|
||||
| `Ctrl-n`, `Down` | move to next line | `move_line_down` |
|
||||
We support many readline/emacs style bindings in insert mode for
|
||||
convenience. These can be helpful for making simple modifications
|
||||
without escaping to normal mode, but beware that you will not have an
|
||||
undo-able "save point" until you return to normal mode.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `Escape` | Switch to normal mode | `normal_mode` |
|
||||
| `Ctrl-x` | Autocomplete | `completion` |
|
||||
| `Ctrl-r` | Insert a register content | `insert_register` |
|
||||
| `Ctrl-w`, `Alt-Backspace` | Delete previous word | `delete_word_backward` |
|
||||
| `Alt-d` | Delete next word | `delete_word_forward` |
|
||||
| `Alt-b`, `Alt-Left` | Backward a word | `move_prev_word_end` |
|
||||
| `Ctrl-b`, `Left` | Backward a char | `move_char_left` |
|
||||
| `Alt-f`, `Alt-Right` | Forward a word | `move_next_word_start` |
|
||||
| `Ctrl-f`, `Right` | Forward a char | `move_char_right` |
|
||||
| `Ctrl-e`, `End` | Move to line end | `goto_line_end_newline` |
|
||||
| `Ctrl-a`, `Home` | Move to line start | `goto_line_start` |
|
||||
| `Ctrl-u` | Delete to start of line | `kill_to_line_start` |
|
||||
| `Ctrl-k` | Delete to end of line | `kill_to_line_end` |
|
||||
| `Ctrl-j`, `Enter` | Insert new line | `insert_newline` |
|
||||
| `Backspace`, `Ctrl-h` | Delete previous char | `delete_char_backward` |
|
||||
| `Delete`, `Ctrl-d` | Delete next char | `delete_char_forward` |
|
||||
| `Ctrl-p`, `Up` | Move to previous line | `move_line_up` |
|
||||
| `Ctrl-n`, `Down` | Move to next line | `move_line_down` |
|
||||
| `PageUp` | Move one page up | `page_up` |
|
||||
| `PageDown` | Move one page down | `page_down` |
|
||||
| `Alt->` | Go to end of buffer | `goto_file_end` |
|
||||
| `Alt-<` | Go to start of buffer | `goto_file_start` |
|
||||
|
||||
## Select / extend mode
|
||||
|
||||
I'm still pondering whether to keep this mode or not. It changes movement
|
||||
commands (including goto) to extend the existing selection instead of replacing it.
|
||||
This mode echoes Normal mode, but changes any movements to extend
|
||||
selections rather than replace them. Goto motions are also changed to
|
||||
extend, so that `vgl` for example extends the selection to the end of
|
||||
the line.
|
||||
|
||||
> NOTE: It's a bit confusing at the moment because extend hasn't been
|
||||
> implemented for all movement commands yet.
|
||||
Search is also affected. By default, `n` and `N` will remove the current
|
||||
selection and select the next instance of the search term. Toggling this
|
||||
mode before pressing `n` or `N` makes it possible to keep the current
|
||||
selection. Toggling it on and off during your iterative searching allows
|
||||
you to selectively add search terms to your selections.
|
||||
|
||||
# Picker
|
||||
|
||||
@@ -299,7 +332,11 @@ Keys to use within picker. Remapping currently not supported.
|
||||
| Key | Description |
|
||||
| ----- | ------------- |
|
||||
| `Up`, `Ctrl-k`, `Ctrl-p` | Previous entry |
|
||||
| `PageUp`, `Ctrl-b` | Page up |
|
||||
| `Down`, `Ctrl-j`, `Ctrl-n` | Next entry |
|
||||
| `PageDown`, `Ctrl-f` | Page down |
|
||||
| `Home` | Go to first entry |
|
||||
| `End` | Go to last entry |
|
||||
| `Ctrl-space` | Filter options |
|
||||
| `Enter` | Open selected |
|
||||
| `Ctrl-s` | Open horizontally |
|
||||
|
@@ -4,10 +4,37 @@ Language-specific settings and settings for particular language servers can be c
|
||||
|
||||
Changes made to the `languages.toml` file in a user's [configuration directory](./configuration.md) are merged with helix's defaults on start-up, such that a user's settings will take precedence over defaults in the event of a collision. For example, the default `languages.toml` sets rust's `auto-format` to `true`. If a user wants to disable auto-format, they can change the `languages.toml` in their [configuration directory](./configuration.md) to make the rust entry read like the example below; the new key/value pair `auto-format = false` will override the default when the two sets of settings are merged on start-up:
|
||||
|
||||
```
|
||||
```toml
|
||||
# in <config_dir>/helix/languages.toml
|
||||
|
||||
[[language]]
|
||||
name = "rust"
|
||||
auto-format = false
|
||||
```
|
||||
|
||||
## Tree-sitter grammars
|
||||
|
||||
Tree-sitter grammars can also be configured in `languages.toml`:
|
||||
|
||||
```toml
|
||||
# in <config_dir>/helix/languages.toml
|
||||
|
||||
[[grammar]]
|
||||
name = "rust"
|
||||
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
|
||||
|
||||
[[grammar]]
|
||||
name = "c"
|
||||
source = { path = "/path/to/tree-sitter-c" }
|
||||
```
|
||||
|
||||
You may use a top-level `use-grammars` key to control which grammars are fetched and built.
|
||||
|
||||
```toml
|
||||
# Note: this key must come **before** the [[language]] and [[grammar]] sections
|
||||
use-grammars = { only = [ "rust", "c", "cpp" ] }
|
||||
# or
|
||||
use-grammars = { except = [ "yaml", "json" ] }
|
||||
```
|
||||
|
||||
When omitted, all grammars are fetched and built.
|
||||
|
@@ -1,14 +1,14 @@
|
||||
# Themes
|
||||
|
||||
First you'll need to place selected themes in your `themes` directory (i.e `~/.config/helix/themes`), the directory might have to be created beforehand.
|
||||
|
||||
To use a custom theme add `theme = <name>` to your [`config.toml`](./configuration.md) or override it during runtime using `:theme <name>`.
|
||||
|
||||
The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
|
||||
To use a theme add `theme = "<name>"` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme <name>`.
|
||||
|
||||
## Creating a theme
|
||||
|
||||
First create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`).
|
||||
Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). The directory might have to be created beforehand.
|
||||
|
||||
The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes.
|
||||
|
||||
The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
|
||||
|
||||
Each line in the theme file is specified as below:
|
||||
|
||||
@@ -147,6 +147,7 @@ We use a similar set of scopes as
|
||||
- `repeat` - `for`, `while`, `loop`
|
||||
- `import` - `import`, `export`
|
||||
- `return`
|
||||
- `exception`
|
||||
- `operator` - `or`, `in`
|
||||
- `directive` - Preprocessor directives (`#if` in C)
|
||||
- `function` - `fn`, `func`
|
||||
@@ -165,14 +166,17 @@ We use a similar set of scopes as
|
||||
|
||||
- `markup`
|
||||
- `heading`
|
||||
- `marker`
|
||||
- `1`, `2`, `3`, `4`, `5`, `6` - heading text for h1 through h6
|
||||
- `list`
|
||||
- `unnumbered`
|
||||
- `numbered`
|
||||
- `bold`
|
||||
- `italic`
|
||||
- `link`
|
||||
- `url`
|
||||
- `label`
|
||||
- `url` - urls pointed to by links
|
||||
- `label` - non-url link references
|
||||
- `text` - url and image descriptions in links
|
||||
- `quote`
|
||||
- `raw`
|
||||
- `inline`
|
||||
@@ -188,6 +192,18 @@ We use a similar set of scopes as
|
||||
|
||||
These scopes are used for theming the editor interface.
|
||||
|
||||
- `markup`
|
||||
- `normal`
|
||||
- `completion` - for completion doc popup ui
|
||||
- `hover` - for hover popup ui
|
||||
- `heading`
|
||||
- `completion` - for completion doc popup ui
|
||||
- `hover` - for hover popup ui
|
||||
- `raw`
|
||||
- `inline`
|
||||
- `completion` - for completion doc popup ui
|
||||
- `hover` - for hover popup ui
|
||||
|
||||
|
||||
| Key | Notes |
|
||||
| --- | --- |
|
||||
@@ -202,12 +218,12 @@ These scopes are used for theming the editor interface.
|
||||
| `ui.statusline` | Statusline |
|
||||
| `ui.statusline.inactive` | Statusline (unfocused document) |
|
||||
| `ui.popup` | |
|
||||
| `ui.popup.info` | |
|
||||
| `ui.window` | |
|
||||
| `ui.help` | |
|
||||
| `ui.text` | |
|
||||
| `ui.text.focus` | |
|
||||
| `ui.info` | |
|
||||
| `ui.info.text` | |
|
||||
| `ui.text.info` | |
|
||||
| `ui.menu` | |
|
||||
| `ui.menu.selected` | |
|
||||
| `ui.selection` | For selections in the editing area |
|
||||
|
15
book/src/title-page.md
Normal file
15
book/src/title-page.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# Helix
|
||||
|
||||
Docs for bleeding edge master can be found at
|
||||
[https://docs.helix-editor.com/master](https://docs.helix-editor.com/master).
|
||||
|
||||
See the [usage] section for a quick overview of the editor, [keymap]
|
||||
section for all available keybindings and the [configuration] section
|
||||
for defining custom keybindings, setting themes, etc.
|
||||
|
||||
Refer the [FAQ] for common questions.
|
||||
|
||||
[FAQ]: https://github.com/helix-editor/helix/wiki/FAQ
|
||||
[usage]: ./usage.md
|
||||
[keymap]: ./keymap.md
|
||||
[configuration]: ./configuration.md
|
@@ -42,7 +42,7 @@ helix. The keymappings have been inspired from [vim-sandwich](https://github.com
|
||||
`ms` acts on a selection, so select the text first and use `ms<char>`. `mr` and `md` work
|
||||
on the closest pairs found and selections are not required; use counts to act in outer pairs.
|
||||
|
||||
It can also act on multiple seletions (yay!). For example, to change every occurance of `(use)` to `[use]`:
|
||||
It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`:
|
||||
|
||||
- `%` to select the whole file
|
||||
- `s` to split the selections on a search term
|
||||
@@ -68,9 +68,29 @@ Currently supported: `word`, `surround`, `function`, `class`, `parameter`.
|
||||
| `(`, `[`, `'`, etc | Specified surround pairs |
|
||||
| `f` | Function |
|
||||
| `c` | Class |
|
||||
| `p` | Parameter |
|
||||
| `a` | Argument/parameter |
|
||||
| `o` | Comment |
|
||||
|
||||
Note: `f`, `c`, etc need a tree-sitter grammar active for the current
|
||||
> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current
|
||||
document and a special tree-sitter query file to work properly. [Only
|
||||
some grammars](https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l=)
|
||||
currently have the query file implemented. Contributions are welcome !
|
||||
some grammars][lang-support] currently have the query file implemented.
|
||||
Contributions are welcome!
|
||||
|
||||
## Tree-sitter Textobject Based Navigation
|
||||
|
||||
Navigating between functions, classes, parameters, etc is made
|
||||
possible by leveraging tree-sitter and textobjects queries. For
|
||||
example to move to the next function use `]f`, to move to previous
|
||||
class use `[c`, and so on.
|
||||
|
||||
![tree-sitter-nav-demo][tree-sitter-nav-demo]
|
||||
|
||||
See the [unimpaired][unimpaired-keybinds] section of the keybind
|
||||
documentation for the full reference.
|
||||
|
||||
> NOTE: This feature is dependent on tree-sitter based textobjects
|
||||
and therefore requires the corresponding query file to work properly.
|
||||
|
||||
[lang-support]: ./lang-support.md
|
||||
[unimpaired-keybinds]: ./keymap.md#unimpaired
|
||||
[tree-sitter-nav-demo]: https://user-images.githubusercontent.com/23398472/152332550-7dfff043-36a2-4aec-b8f2-77c13eb56d6f.gif
|
||||
|
@@ -1,12 +1,13 @@
|
||||
|
||||
| Crate | Description |
|
||||
| ----------- | ----------- |
|
||||
| helix-core | Core editing primitives, functional. |
|
||||
| helix-syntax | Tree-sitter grammars |
|
||||
| helix-lsp | Language server client |
|
||||
| helix-view | UI abstractions for use in backends, imperative shell. |
|
||||
| helix-term | Terminal UI |
|
||||
| helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive |
|
||||
| Crate | Description |
|
||||
| ----------- | ----------- |
|
||||
| helix-core | Core editing primitives, functional. |
|
||||
| helix-lsp | Language server client |
|
||||
| helix-dap | Debug Adapter Protocol (DAP) client |
|
||||
| helix-loader | Functions for building, fetching, and loading external resources |
|
||||
| helix-view | UI abstractions for use in backends, imperative shell. |
|
||||
| helix-term | Terminal UI |
|
||||
| helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive |
|
||||
|
||||
|
||||
This document contains a high-level overview of Helix internals.
|
||||
@@ -54,15 +55,40 @@ A `Document` ties together the `Rope`, `Selection`(s), `Syntax`, document
|
||||
file.
|
||||
|
||||
A `View` represents an open split in the UI. It holds the currently open
|
||||
document ID and other related state.
|
||||
document ID and other related state. Views encapsulate the gutter, status line,
|
||||
diagnostics, and the inner area where the code is displayed.
|
||||
|
||||
> NOTE: Multiple views are able to display the same document, so the document
|
||||
> contains selections for each view. To retrieve, `document.selection()` takes
|
||||
> a `ViewId`.
|
||||
|
||||
`Info` is the autoinfo box that shows hints when awaiting another key with bindings
|
||||
like `g` and `m`. It is attached to the viewport as a whole.
|
||||
|
||||
`Surface` is like a buffer to which widgets draw themselves to, and the
|
||||
surface is then rendered on the screen on each cycle.
|
||||
|
||||
`Rect`s are areas (simply an x and y coordinate with the origin at the
|
||||
screen top left and then a height and width) which are part of a
|
||||
`Surface`. They can be used to limit the area to which a `Component` can
|
||||
render. For example if we wrap a `Markdown` component in a `Popup`
|
||||
(think the documentation popup with space+k), Markdown's render method
|
||||
will get a Rect that is the exact size of the popup.
|
||||
|
||||
Widgets are called `Component`s internally, and you can see most of them
|
||||
in `helix-term/src/ui`. Some components like `Popup` and `Overlay` can take
|
||||
other components as children.
|
||||
|
||||
`Layer`s are how multiple components are displayed, and is simply a
|
||||
`Vec<Component>`. Layers are managed by the `Compositor`. On each top
|
||||
level render call, the compositor renders each component in the order
|
||||
they were pushed into the stack. This makes multiple components "layer"
|
||||
on top of one another. Hence we get a file picker displayed over the
|
||||
editor, etc.
|
||||
|
||||
The `Editor` holds the global state: all the open documents, a tree
|
||||
representation of all the view splits, and a registry of language servers. To
|
||||
open or close files, interact with the editor.
|
||||
representation of all the view splits, the configuration, and a registry of
|
||||
language servers. To open or close files, interact with the editor.
|
||||
|
||||
## LSP
|
||||
|
||||
|
138
flake.lock
generated
138
flake.lock
generated
@@ -1,12 +1,35 @@
|
||||
{
|
||||
"nodes": {
|
||||
"devshell": {
|
||||
"crane": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1639692811,
|
||||
"narHash": "sha256-wOOBH0fVsfNqw/5ZWRoKspyesoXBgiwEOUBH4c7JKEo=",
|
||||
"lastModified": 1644785799,
|
||||
"narHash": "sha256-VpAJO1L0XeBvtCuNGK4IDKp6ENHIpTrlaZT7yfBCvwo=",
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"rev": "fc7a94f841347c88f2cb44217b2a3faa93e2a0b2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"devshell": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1646667754,
|
||||
"narHash": "sha256-LahZHvCC3UVzGQ55iWDRZkuDssXl1rYgqgScrPV9S38=",
|
||||
"owner": "numtide",
|
||||
"repo": "devshell",
|
||||
"rev": "d3a1f5bec3632b33346865b1c165bf2420bb2f52",
|
||||
"rev": "59fbe1dfc0de8c3332957c16998a7d16dff365d8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -15,7 +38,73 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"dream2nix": {
|
||||
"inputs": {
|
||||
"alejandra": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"crane": "crane",
|
||||
"flake-utils-pre-commit": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"gomod2nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"mach-nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"nixpkgs": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"node2nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"poetry2nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"pre-commit-hooks": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1646710334,
|
||||
"narHash": "sha256-eLBcDgcbOUfeH4k6SEW5a5v0PTp2KNCn+5ZXIoWGYww=",
|
||||
"owner": "nix-community",
|
||||
"repo": "dream2nix",
|
||||
"rev": "5dcfbfd3b60ce0208b894c1bdea00e2bdf80ca6a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"ref": "main",
|
||||
"repo": "dream2nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"locked": {
|
||||
"lastModified": 1642700792,
|
||||
"narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "846b2ae0fc4cc943637d3d1def4454213e203cba",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_2": {
|
||||
"locked": {
|
||||
"lastModified": 1637014545,
|
||||
"narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=",
|
||||
@@ -33,6 +122,7 @@
|
||||
"nixCargoIntegration": {
|
||||
"inputs": {
|
||||
"devshell": "devshell",
|
||||
"dream2nix": "dream2nix",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
@@ -41,11 +131,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1639807801,
|
||||
"narHash": "sha256-y32tMq1LTRVbMW3QN5i98iOQjQt2QSsif3ayUkD1o3g=",
|
||||
"lastModified": 1646766572,
|
||||
"narHash": "sha256-DV3+zxvAIKsMHsHedJKYFsracvFyLKpFQqurUBR86oY=",
|
||||
"owner": "yusdacra",
|
||||
"repo": "nix-cargo-integration",
|
||||
"rev": "b5bbaa4f5239e6f0619846f9a5380f07baa853d3",
|
||||
"rev": "3a3f47f43ba486b7554164a698c8dfc5a38624ce",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -56,11 +146,11 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1639699734,
|
||||
"narHash": "sha256-tlX6WebGmiHb2Hmniff+ltYp+7dRfdsBxw9YczLsP60=",
|
||||
"lastModified": 1646497237,
|
||||
"narHash": "sha256-Ccpot1h/rV8MgcngDp5OrdmLTMaUTbStZTR5/sI7zW0=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "03ec468b14067729a285c2c7cfa7b9434a04816c",
|
||||
"rev": "062a0c5437b68f950b081bbfc8a699d57a4ee026",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -70,22 +160,6 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1637453606,
|
||||
"narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "8afc4e543663ca0a6a4f496262cd05233737e732",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixCargoIntegration": "nixCargoIntegration",
|
||||
@@ -95,15 +169,17 @@
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs_2"
|
||||
"flake-utils": "flake-utils_2",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1639880499,
|
||||
"narHash": "sha256-/BibDmFwgWuuTUkNVO6YlvuTSWM9dpBvlZoTAPs7ORI=",
|
||||
"lastModified": 1646792695,
|
||||
"narHash": "sha256-2drCXIKIQnJMlTZbcCfuHZAh+iPcdlRkCqtZnA6MHLY=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "c6c83589ae048af20d93d01eb07a4176012093d0",
|
||||
"rev": "7f599870402c8d2a5806086c8ee0f2d92b175c54",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
78
flake.nix
78
flake.nix
@@ -3,7 +3,10 @@
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||
rust-overlay.url = "github:oxalica/rust-overlay";
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
nixCargoIntegration = {
|
||||
url = "github:yusdacra/nix-cargo-integration";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
@@ -11,59 +14,50 @@
|
||||
};
|
||||
};
|
||||
|
||||
outputs = inputs@{ self, nixCargoIntegration, ... }:
|
||||
outputs = inputs@{ nixCargoIntegration, ... }:
|
||||
nixCargoIntegration.lib.makeOutputs {
|
||||
root = ./.;
|
||||
buildPlatform = "crate2nix";
|
||||
renameOutputs = { "helix-term" = "helix"; };
|
||||
# Set default app to hx (binary is from helix-term release build)
|
||||
# Set default package to helix-term release build
|
||||
defaultOutputs = { app = "hx"; package = "helix"; };
|
||||
defaultOutputs = {
|
||||
app = "hx";
|
||||
package = "helix";
|
||||
};
|
||||
overrides = {
|
||||
crateOverrides = common: _: {
|
||||
helix-term = prev: {
|
||||
# link languages and theme toml files since helix-term expects them (for tests)
|
||||
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} ..";
|
||||
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
|
||||
};
|
||||
# link languages and theme toml files since helix-view expects them
|
||||
helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} .."; };
|
||||
helix-syntax = _prev: {
|
||||
preConfigure = "mkdir -p ../runtime/grammars";
|
||||
postInstall = "cp -r ../runtime $out/runtime";
|
||||
};
|
||||
};
|
||||
mainBuild = common: prev:
|
||||
let
|
||||
inherit (common) pkgs lib;
|
||||
helixSyntax = lib.buildCrate {
|
||||
root = self;
|
||||
memberName = "helix-syntax";
|
||||
defaultCrateOverrides = {
|
||||
helix-syntax = common.crateOverrides.helix-syntax;
|
||||
};
|
||||
release = false;
|
||||
helix-term = prev:
|
||||
let
|
||||
inherit (common) pkgs;
|
||||
grammars = pkgs.callPackage ./grammars.nix { };
|
||||
runtimeDir = pkgs.runCommand "helix-runtime" { } ''
|
||||
mkdir -p $out
|
||||
ln -s ${common.root}/runtime/* $out
|
||||
rm -r $out/grammars
|
||||
ln -s ${grammars} $out/grammars
|
||||
'';
|
||||
in
|
||||
{
|
||||
# disable fetching and building of tree-sitter grammars in the helix-term build.rs
|
||||
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
|
||||
# link languages and theme toml files since helix-term expects them (for tests)
|
||||
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} ..";
|
||||
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
|
||||
nativeBuildInputs = [ pkgs.makeWrapper ];
|
||||
|
||||
postFixup = ''
|
||||
if [ -f "$out/bin/hx" ]; then
|
||||
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
|
||||
fi
|
||||
'';
|
||||
};
|
||||
runtimeDir = pkgs.runCommand "helix-runtime" { } ''
|
||||
mkdir -p $out
|
||||
ln -s ${common.root}/runtime/* $out
|
||||
ln -sf ${helixSyntax}/runtime/grammars $out
|
||||
'';
|
||||
in
|
||||
lib.optionalAttrs (common.memberName == "helix-term") {
|
||||
nativeBuildInputs = [ pkgs.makeWrapper ];
|
||||
postFixup = ''
|
||||
if [ -f "$out/bin/hx" ]; then
|
||||
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
|
||||
fi
|
||||
'';
|
||||
};
|
||||
};
|
||||
shell = common: prev: {
|
||||
packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin ]);
|
||||
packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin cargo-flamegraph ]);
|
||||
env = prev.env ++ [
|
||||
{ name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; }
|
||||
{ name = "RUST_BACKTRACE"; value = "1"; }
|
||||
{ name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native"; }
|
||||
{ name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"; }
|
||||
];
|
||||
};
|
||||
};
|
||||
|
89
grammars.nix
Normal file
89
grammars.nix
Normal file
@@ -0,0 +1,89 @@
|
||||
{ stdenv, lib, runCommand, yj }:
|
||||
let
|
||||
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
|
||||
# before parsing
|
||||
languages-json = runCommand "languages-toml-to-json" { } ''
|
||||
${yj}/bin/yj -t < ${./languages.toml} > $out
|
||||
'';
|
||||
languagesConfig =
|
||||
builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
|
||||
isGitGrammar = (grammar:
|
||||
builtins.hasAttr "source" grammar && builtins.hasAttr "git" grammar.source
|
||||
&& builtins.hasAttr "rev" grammar.source);
|
||||
gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar;
|
||||
buildGrammar = grammar:
|
||||
let
|
||||
source = builtins.fetchTree {
|
||||
type = "git";
|
||||
url = grammar.source.git;
|
||||
rev = grammar.source.rev;
|
||||
ref = grammar.source.ref or "HEAD";
|
||||
shallow = true;
|
||||
};
|
||||
in stdenv.mkDerivation rec {
|
||||
# see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix
|
||||
|
||||
pname = "helix-tree-sitter-${grammar.name}";
|
||||
version = grammar.source.rev;
|
||||
|
||||
src = if builtins.hasAttr "subpath" grammar.source then
|
||||
"${source}/${grammar.source.subpath}"
|
||||
else
|
||||
source;
|
||||
|
||||
dontUnpack = true;
|
||||
dontConfigure = true;
|
||||
|
||||
FLAGS = [
|
||||
"-I${src}/src"
|
||||
"-g"
|
||||
"-O3"
|
||||
"-fPIC"
|
||||
"-fno-exceptions"
|
||||
"-Wl,-z,relro,-z,now"
|
||||
];
|
||||
|
||||
NAME = grammar.name;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
if [[ -e "$src/src/scanner.cc" ]]; then
|
||||
$CXX -c "$src/src/scanner.cc" -o scanner.o $FLAGS
|
||||
elif [[ -e "$src/src/scanner.c" ]]; then
|
||||
$CC -c "$src/src/scanner.c" -o scanner.o $FLAGS
|
||||
fi
|
||||
|
||||
$CC -c "$src/src/parser.c" -o parser.o $FLAGS
|
||||
$CXX -shared -o $NAME.so *.o
|
||||
|
||||
ls -al
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
mkdir $out
|
||||
mv $NAME.so $out/
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
# Strip failed on darwin: strip: error: symbols referenced by indirect symbol table entries that can't be stripped
|
||||
fixupPhase = lib.optionalString stdenv.isLinux ''
|
||||
runHook preFixup
|
||||
$STRIP $out/$NAME.so
|
||||
runHook postFixup
|
||||
'';
|
||||
};
|
||||
builtGrammars = builtins.map (grammar: {
|
||||
inherit (grammar) name;
|
||||
artifact = buildGrammar grammar;
|
||||
}) gitGrammars;
|
||||
grammarLinks = builtins.map (grammar:
|
||||
"ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so")
|
||||
builtGrammars;
|
||||
in runCommand "consolidated-helix-grammars" { } ''
|
||||
mkdir -p $out
|
||||
${builtins.concatStringsSep "\n" grammarLinks}
|
||||
''
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "helix-core"
|
||||
version = "0.5.0"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
@@ -13,17 +13,18 @@ include = ["src/**/*", "README.md"]
|
||||
[features]
|
||||
|
||||
[dependencies]
|
||||
helix-syntax = { version = "0.5", path = "../helix-syntax" }
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
|
||||
ropey = "1.3"
|
||||
smallvec = "1.7"
|
||||
tendril = "0.4.2"
|
||||
unicode-segmentation = "1.8"
|
||||
smallvec = "1.8"
|
||||
smartstring = "1.0.0"
|
||||
unicode-segmentation = "1.9"
|
||||
unicode-width = "0.1"
|
||||
unicode-general-category = "0.4"
|
||||
unicode-general-category = "0.5"
|
||||
# slab = "0.4.2"
|
||||
slotmap = "1.0"
|
||||
tree-sitter = "0.20"
|
||||
once_cell = "1.9"
|
||||
once_cell = "1.10"
|
||||
arc-swap = "1"
|
||||
regex = "1"
|
||||
|
||||
@@ -34,10 +35,11 @@ toml = "0.5"
|
||||
|
||||
similar = "2.1"
|
||||
|
||||
etcetera = "0.3"
|
||||
encoding_rs = "0.8"
|
||||
|
||||
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
|
||||
|
||||
etcetera = "0.3"
|
||||
|
||||
[dev-dependencies]
|
||||
quickcheck = { version = "1", default-features = false }
|
||||
|
@@ -1,13 +1,17 @@
|
||||
//! When typing the opening character of one of the possible pairs defined below,
|
||||
//! this module provides the functionality to insert the paired closing character.
|
||||
|
||||
use crate::{movement::Direction, Range, Rope, Selection, Tendril, Transaction};
|
||||
use crate::{
|
||||
graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
|
||||
use log::debug;
|
||||
use smallvec::SmallVec;
|
||||
|
||||
// Heavily based on https://github.com/codemirror/closebrackets/
|
||||
|
||||
pub const PAIRS: &[(char, char)] = &[
|
||||
pub const DEFAULT_PAIRS: &[(char, char)] = &[
|
||||
('(', ')'),
|
||||
('{', '}'),
|
||||
('[', ']'),
|
||||
@@ -16,9 +20,95 @@ pub const PAIRS: &[(char, char)] = &[
|
||||
('`', '`'),
|
||||
];
|
||||
|
||||
// [TODO] build this dynamically in language config. see #992
|
||||
const OPEN_BEFORE: &str = "([{'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
|
||||
const CLOSE_BEFORE: &str = ")]}'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; // includes space and newlines
|
||||
/// The type that represents the collection of auto pairs,
|
||||
/// keyed by the opener.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AutoPairs(HashMap<char, Pair>);
|
||||
|
||||
/// Represents the config for a particular pairing.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Pair {
|
||||
pub open: char,
|
||||
pub close: char,
|
||||
}
|
||||
|
||||
impl Pair {
|
||||
/// true if open == close
|
||||
pub fn same(&self) -> bool {
|
||||
self.open == self.close
|
||||
}
|
||||
|
||||
/// true if all of the pair's conditions hold for the given document and range
|
||||
pub fn should_close(&self, doc: &Rope, range: &Range) -> bool {
|
||||
let mut should_close = Self::next_is_not_alpha(doc, range);
|
||||
|
||||
if self.same() {
|
||||
should_close &= Self::prev_is_not_alpha(doc, range);
|
||||
}
|
||||
|
||||
should_close
|
||||
}
|
||||
|
||||
pub fn next_is_not_alpha(doc: &Rope, range: &Range) -> bool {
|
||||
let cursor = range.cursor(doc.slice(..));
|
||||
let next_char = doc.get_char(cursor);
|
||||
next_char.map(|c| !c.is_alphanumeric()).unwrap_or(true)
|
||||
}
|
||||
|
||||
pub fn prev_is_not_alpha(doc: &Rope, range: &Range) -> bool {
|
||||
let cursor = range.cursor(doc.slice(..));
|
||||
let prev_char = prev_char(doc, cursor);
|
||||
prev_char.map(|c| !c.is_alphanumeric()).unwrap_or(true)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&(char, char)> for Pair {
|
||||
fn from(&(open, close): &(char, char)) -> Self {
|
||||
Self { open, close }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(&char, &char)> for Pair {
|
||||
fn from((open, close): (&char, &char)) -> Self {
|
||||
Self {
|
||||
open: *open,
|
||||
close: *close,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AutoPairs {
|
||||
/// Make a new AutoPairs set with the given pairs and default conditions.
|
||||
pub fn new<'a, V: 'a, A>(pairs: V) -> Self
|
||||
where
|
||||
V: IntoIterator<Item = A>,
|
||||
A: Into<Pair>,
|
||||
{
|
||||
let mut auto_pairs = HashMap::new();
|
||||
|
||||
for pair in pairs.into_iter() {
|
||||
let auto_pair = pair.into();
|
||||
|
||||
auto_pairs.insert(auto_pair.open, auto_pair);
|
||||
|
||||
if auto_pair.open != auto_pair.close {
|
||||
auto_pairs.insert(auto_pair.close, auto_pair);
|
||||
}
|
||||
}
|
||||
|
||||
Self(auto_pairs)
|
||||
}
|
||||
|
||||
pub fn get(&self, ch: char) -> Option<&Pair> {
|
||||
self.0.get(&ch)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AutoPairs {
|
||||
fn default() -> Self {
|
||||
AutoPairs::new(DEFAULT_PAIRS.iter())
|
||||
}
|
||||
}
|
||||
|
||||
// insert hook:
|
||||
// Fn(doc, selection, char) => Option<Transaction>
|
||||
@@ -34,21 +124,17 @@ const CLOSE_BEFORE: &str = ")]}'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{20
|
||||
// middle of triple quotes, and more exotic pairs like Jinja's {% %}
|
||||
|
||||
#[must_use]
|
||||
pub fn hook(doc: &Rope, selection: &Selection, ch: char) -> Option<Transaction> {
|
||||
pub fn hook(doc: &Rope, selection: &Selection, ch: char, pairs: &AutoPairs) -> Option<Transaction> {
|
||||
debug!("autopairs hook selection: {:#?}", selection);
|
||||
|
||||
for &(open, close) in PAIRS {
|
||||
if open == ch {
|
||||
if open == close {
|
||||
return Some(handle_same(doc, selection, open, CLOSE_BEFORE, OPEN_BEFORE));
|
||||
} else {
|
||||
return Some(handle_open(doc, selection, open, close, CLOSE_BEFORE));
|
||||
}
|
||||
}
|
||||
|
||||
if close == ch {
|
||||
if let Some(pair) = pairs.get(ch) {
|
||||
if pair.same() {
|
||||
return Some(handle_same(doc, selection, pair));
|
||||
} else if pair.open == ch {
|
||||
return Some(handle_open(doc, selection, pair));
|
||||
} else if pair.close == ch {
|
||||
// && char_at pos == close
|
||||
return Some(handle_close(doc, selection, open, close));
|
||||
return Some(handle_close(doc, selection, pair));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,43 +149,138 @@ fn prev_char(doc: &Rope, pos: usize) -> Option<char> {
|
||||
doc.get_char(pos - 1)
|
||||
}
|
||||
|
||||
fn is_single_grapheme(doc: &Rope, range: &Range) -> bool {
|
||||
let mut graphemes = RopeGraphemes::new(doc.slice(range.from()..range.to()));
|
||||
let first = graphemes.next();
|
||||
let second = graphemes.next();
|
||||
debug!("first: {:#?}, second: {:#?}", first, second);
|
||||
first.is_some() && second.is_none()
|
||||
}
|
||||
|
||||
/// calculate what the resulting range should be for an auto pair insertion
|
||||
fn get_next_range(
|
||||
doc: &Rope,
|
||||
start_range: &Range,
|
||||
offset: usize,
|
||||
typed_char: char,
|
||||
len_inserted: usize,
|
||||
) -> Range {
|
||||
let end_head = start_range.head + offset + typed_char.len_utf8();
|
||||
// When the character under the cursor changes due to complete pair
|
||||
// insertion, we must look backward a grapheme and then add the length
|
||||
// of the insertion to put the resulting cursor in the right place, e.g.
|
||||
//
|
||||
// foo[\r\n] - anchor: 3, head: 5
|
||||
// foo([)]\r\n - anchor: 4, head: 5
|
||||
//
|
||||
// foo[\r\n] - anchor: 3, head: 5
|
||||
// foo'[\r\n] - anchor: 4, head: 6
|
||||
//
|
||||
// foo([)]\r\n - anchor: 4, head: 5
|
||||
// foo()[\r\n] - anchor: 5, head: 7
|
||||
//
|
||||
// [foo]\r\n - anchor: 0, head: 3
|
||||
// [foo(])\r\n - anchor: 0, head: 5
|
||||
|
||||
// inserting at the very end of the document after the last newline
|
||||
if start_range.head == doc.len_chars() && start_range.anchor == doc.len_chars() {
|
||||
return Range::new(
|
||||
start_range.anchor + offset + typed_char.len_utf8(),
|
||||
start_range.head + offset + typed_char.len_utf8(),
|
||||
);
|
||||
}
|
||||
|
||||
let single_grapheme = is_single_grapheme(doc, start_range);
|
||||
let doc_slice = doc.slice(..);
|
||||
|
||||
// just skip over graphemes
|
||||
if len_inserted == 0 {
|
||||
let end_anchor = if single_grapheme {
|
||||
graphemes::next_grapheme_boundary(doc_slice, start_range.anchor) + offset
|
||||
|
||||
// even for backward inserts with multiple grapheme selections,
|
||||
// we want the anchor to stay where it is so that the relative
|
||||
// selection does not change, e.g.:
|
||||
//
|
||||
// foo([) wor]d -> insert ) -> foo()[ wor]d
|
||||
} else {
|
||||
start_range.anchor + offset
|
||||
};
|
||||
|
||||
return Range::new(
|
||||
end_anchor,
|
||||
graphemes::next_grapheme_boundary(doc_slice, start_range.head) + offset,
|
||||
);
|
||||
}
|
||||
|
||||
// trivial case: only inserted a single-char opener, just move the selection
|
||||
if len_inserted == 1 {
|
||||
let end_anchor = if single_grapheme || start_range.direction() == Direction::Backward {
|
||||
start_range.anchor + offset + typed_char.len_utf8()
|
||||
} else {
|
||||
start_range.anchor + offset
|
||||
};
|
||||
|
||||
return Range::new(
|
||||
end_anchor,
|
||||
start_range.head + offset + typed_char.len_utf8(),
|
||||
);
|
||||
}
|
||||
|
||||
// If the head = 0, then we must be in insert mode with a backward
|
||||
// cursor, which implies the head will just move
|
||||
let end_head = if start_range.head == 0 || start_range.direction() == Direction::Backward {
|
||||
start_range.head + offset + typed_char.len_utf8()
|
||||
} else {
|
||||
// We must have a forward cursor, which means we must move to the
|
||||
// other end of the grapheme to get to where the new characters
|
||||
// are inserted, then move the head to where it should be
|
||||
let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head);
|
||||
debug!(
|
||||
"prev_bound: {}, offset: {}, len_inserted: {}",
|
||||
prev_bound, offset, len_inserted
|
||||
);
|
||||
prev_bound + offset + len_inserted
|
||||
};
|
||||
|
||||
let end_anchor = match (start_range.len(), start_range.direction()) {
|
||||
// if we have a zero width cursor, it shifts to the same number
|
||||
(0, _) => end_head,
|
||||
|
||||
// if we are inserting for a regular one-width cursor, the anchor
|
||||
// moves with the head
|
||||
// If we are inserting for a regular one-width cursor, the anchor
|
||||
// moves with the head. This is the fast path for ASCII.
|
||||
(1, Direction::Forward) => end_head - 1,
|
||||
(1, Direction::Backward) => end_head + 1,
|
||||
|
||||
// if we are appending, the anchor stays where it is; only offset
|
||||
// for multiple range insertions
|
||||
(_, Direction::Forward) => start_range.anchor + offset,
|
||||
(_, Direction::Forward) => {
|
||||
if single_grapheme {
|
||||
graphemes::prev_grapheme_boundary(doc.slice(..), start_range.head)
|
||||
+ typed_char.len_utf8()
|
||||
|
||||
// when we are inserting in front of a selection, we need to move
|
||||
// the anchor over by however many characters were inserted overall
|
||||
(_, Direction::Backward) => start_range.anchor + offset + len_inserted,
|
||||
// if we are appending, the anchor stays where it is; only offset
|
||||
// for multiple range insertions
|
||||
} else {
|
||||
start_range.anchor + offset
|
||||
}
|
||||
}
|
||||
|
||||
(_, Direction::Backward) => {
|
||||
if single_grapheme {
|
||||
// if we're backward, then the head is at the first char
|
||||
// of the typed char, so we need to add the length of
|
||||
// the closing char
|
||||
graphemes::prev_grapheme_boundary(doc.slice(..), start_range.anchor) + len_inserted
|
||||
} else {
|
||||
// when we are inserting in front of a selection, we need to move
|
||||
// the anchor over by however many characters were inserted overall
|
||||
start_range.anchor + offset + len_inserted
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Range::new(end_anchor, end_head)
|
||||
}
|
||||
|
||||
fn handle_open(
|
||||
doc: &Rope,
|
||||
selection: &Selection,
|
||||
open: char,
|
||||
close: char,
|
||||
close_before: &str,
|
||||
) -> Transaction {
|
||||
fn handle_open(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
|
||||
let mut end_ranges = SmallVec::with_capacity(selection.len());
|
||||
let mut offs = 0;
|
||||
|
||||
@@ -109,20 +290,21 @@ fn handle_open(
|
||||
let len_inserted;
|
||||
|
||||
let change = match next_char {
|
||||
Some(ch) if !close_before.contains(ch) => {
|
||||
len_inserted = open.len_utf8();
|
||||
(cursor, cursor, Some(Tendril::from_char(open)))
|
||||
Some(_) if !pair.should_close(doc, start_range) => {
|
||||
len_inserted = pair.open.len_utf8();
|
||||
let mut tendril = Tendril::new();
|
||||
tendril.push(pair.open);
|
||||
(cursor, cursor, Some(tendril))
|
||||
}
|
||||
// None | Some(ch) if close_before.contains(ch) => {}
|
||||
_ => {
|
||||
// insert open & close
|
||||
let pair = Tendril::from_iter([open, close]);
|
||||
len_inserted = open.len_utf8() + close.len_utf8();
|
||||
(cursor, cursor, Some(pair))
|
||||
let pair_str = Tendril::from_iter([pair.open, pair.close]);
|
||||
len_inserted = pair.open.len_utf8() + pair.close.len_utf8();
|
||||
(cursor, cursor, Some(pair_str))
|
||||
}
|
||||
};
|
||||
|
||||
let next_range = get_next_range(start_range, offs, open, len_inserted);
|
||||
let next_range = get_next_range(doc, start_range, offs, pair.open, len_inserted);
|
||||
end_ranges.push(next_range);
|
||||
offs += len_inserted;
|
||||
|
||||
@@ -134,7 +316,7 @@ fn handle_open(
|
||||
t
|
||||
}
|
||||
|
||||
fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) -> Transaction {
|
||||
fn handle_close(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
|
||||
let mut end_ranges = SmallVec::with_capacity(selection.len());
|
||||
|
||||
let mut offs = 0;
|
||||
@@ -144,15 +326,17 @@ fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) ->
|
||||
let next_char = doc.get_char(cursor);
|
||||
let mut len_inserted = 0;
|
||||
|
||||
let change = if next_char == Some(close) {
|
||||
let change = if next_char == Some(pair.close) {
|
||||
// return transaction that moves past close
|
||||
(cursor, cursor, None) // no-op
|
||||
} else {
|
||||
len_inserted += close.len_utf8();
|
||||
(cursor, cursor, Some(Tendril::from_char(close)))
|
||||
len_inserted += pair.close.len_utf8();
|
||||
let mut tendril = Tendril::new();
|
||||
tendril.push(pair.close);
|
||||
(cursor, cursor, Some(tendril))
|
||||
};
|
||||
|
||||
let next_range = get_next_range(start_range, offs, close, len_inserted);
|
||||
let next_range = get_next_range(doc, start_range, offs, pair.close, len_inserted);
|
||||
end_ranges.push(next_range);
|
||||
offs += len_inserted;
|
||||
|
||||
@@ -165,13 +349,7 @@ fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) ->
|
||||
}
|
||||
|
||||
/// handle cases where open and close is the same, or in triples ("""docstring""")
|
||||
fn handle_same(
|
||||
doc: &Rope,
|
||||
selection: &Selection,
|
||||
token: char,
|
||||
close_before: &str,
|
||||
open_before: &str,
|
||||
) -> Transaction {
|
||||
fn handle_same(doc: &Rope, selection: &Selection, pair: &Pair) -> Transaction {
|
||||
let mut end_ranges = SmallVec::with_capacity(selection.len());
|
||||
|
||||
let mut offs = 0;
|
||||
@@ -179,30 +357,26 @@ fn handle_same(
|
||||
let transaction = Transaction::change_by_selection(doc, selection, |start_range| {
|
||||
let cursor = start_range.cursor(doc.slice(..));
|
||||
let mut len_inserted = 0;
|
||||
|
||||
let next_char = doc.get_char(cursor);
|
||||
let prev_char = prev_char(doc, cursor);
|
||||
|
||||
let change = if next_char == Some(token) {
|
||||
let change = if next_char == Some(pair.open) {
|
||||
// return transaction that moves past close
|
||||
(cursor, cursor, None) // no-op
|
||||
} else {
|
||||
let mut pair = Tendril::with_capacity(2 * token.len_utf8() as u32);
|
||||
pair.push_char(token);
|
||||
let mut pair_str = Tendril::new();
|
||||
pair_str.push(pair.open);
|
||||
|
||||
// for equal pairs, don't insert both open and close if either
|
||||
// side has a non-pair char
|
||||
if (next_char.is_none() || close_before.contains(next_char.unwrap()))
|
||||
&& (prev_char.is_none() || open_before.contains(prev_char.unwrap()))
|
||||
{
|
||||
pair.push_char(token);
|
||||
if pair.should_close(doc, start_range) {
|
||||
pair_str.push(pair.close);
|
||||
}
|
||||
|
||||
len_inserted += pair.len();
|
||||
(cursor, cursor, Some(pair))
|
||||
len_inserted += pair_str.len();
|
||||
(cursor, cursor, Some(pair_str))
|
||||
};
|
||||
|
||||
let next_range = get_next_range(start_range, offs, token, len_inserted);
|
||||
let next_range = get_next_range(doc, start_range, offs, pair.open, len_inserted);
|
||||
end_ranges.push(next_range);
|
||||
offs += len_inserted;
|
||||
|
||||
@@ -219,22 +393,26 @@ mod test {
|
||||
use super::*;
|
||||
use smallvec::smallvec;
|
||||
|
||||
const LINE_END: &str = crate::DEFAULT_LINE_ENDING.as_str();
|
||||
|
||||
fn differing_pairs() -> impl Iterator<Item = &'static (char, char)> {
|
||||
PAIRS.iter().filter(|(open, close)| open != close)
|
||||
DEFAULT_PAIRS.iter().filter(|(open, close)| open != close)
|
||||
}
|
||||
|
||||
fn matching_pairs() -> impl Iterator<Item = &'static (char, char)> {
|
||||
PAIRS.iter().filter(|(open, close)| open == close)
|
||||
DEFAULT_PAIRS.iter().filter(|(open, close)| open == close)
|
||||
}
|
||||
|
||||
fn test_hooks(
|
||||
in_doc: &Rope,
|
||||
in_sel: &Selection,
|
||||
ch: char,
|
||||
pairs: &[(char, char)],
|
||||
expected_doc: &Rope,
|
||||
expected_sel: &Selection,
|
||||
) {
|
||||
let trans = hook(&in_doc, &in_sel, ch).unwrap();
|
||||
let pairs = AutoPairs::new(pairs.iter());
|
||||
let trans = hook(in_doc, in_sel, ch, &pairs).unwrap();
|
||||
let mut actual_doc = in_doc.clone();
|
||||
assert!(trans.apply(&mut actual_doc));
|
||||
assert_eq!(expected_doc, &actual_doc);
|
||||
@@ -244,7 +422,8 @@ mod test {
|
||||
fn test_hooks_with_pairs<I, F, R>(
|
||||
in_doc: &Rope,
|
||||
in_sel: &Selection,
|
||||
pairs: I,
|
||||
test_pairs: I,
|
||||
pairs: &[(char, char)],
|
||||
get_expected_doc: F,
|
||||
actual_sel: &Selection,
|
||||
) where
|
||||
@@ -253,11 +432,12 @@ mod test {
|
||||
R: Into<Rope>,
|
||||
Rope: From<R>,
|
||||
{
|
||||
pairs.into_iter().for_each(|(open, close)| {
|
||||
test_pairs.into_iter().for_each(|(open, close)| {
|
||||
test_hooks(
|
||||
in_doc,
|
||||
in_sel,
|
||||
*open,
|
||||
pairs,
|
||||
&Rope::from(get_expected_doc(*open, *close)),
|
||||
actual_sel,
|
||||
)
|
||||
@@ -270,12 +450,66 @@ mod test {
|
||||
#[test]
|
||||
fn test_insert_blank() {
|
||||
test_hooks_with_pairs(
|
||||
&Rope::new(),
|
||||
&Rope::from(LINE_END),
|
||||
&Selection::single(1, 0),
|
||||
PAIRS,
|
||||
|open, close| format!("{}{}", open, close),
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("{}{}{}", open, close, LINE_END),
|
||||
&Selection::single(2, 1),
|
||||
);
|
||||
|
||||
let empty_doc = Rope::from(format!("{line_end}{line_end}", line_end = LINE_END));
|
||||
|
||||
test_hooks_with_pairs(
|
||||
&empty_doc,
|
||||
&Selection::single(empty_doc.len_chars(), LINE_END.len()),
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| {
|
||||
format!(
|
||||
"{line_end}{open}{close}{line_end}",
|
||||
open = open,
|
||||
close = close,
|
||||
line_end = LINE_END
|
||||
)
|
||||
},
|
||||
&Selection::single(LINE_END.len() + 2, LINE_END.len() + 1),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_before_multi_code_point_graphemes() {
|
||||
for (_, close) in differing_pairs() {
|
||||
test_hooks(
|
||||
&Rope::from(format!("hello 👨👩👧👦 goodbye{}", LINE_END)),
|
||||
&Selection::single(13, 6),
|
||||
*close,
|
||||
DEFAULT_PAIRS,
|
||||
&Rope::from(format!("hello {}👨👩👧👦 goodbye{}", close, LINE_END)),
|
||||
&Selection::single(14, 7),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_at_end_of_document() {
|
||||
test_hooks_with_pairs(
|
||||
&Rope::from(LINE_END),
|
||||
&Selection::single(LINE_END.len(), LINE_END.len()),
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("{}{}{}", LINE_END, open, close),
|
||||
&Selection::single(LINE_END.len() + 1, LINE_END.len() + 1),
|
||||
);
|
||||
|
||||
test_hooks_with_pairs(
|
||||
&Rope::from(format!("foo{}", LINE_END)),
|
||||
&Selection::single(3 + LINE_END.len(), 3 + LINE_END.len()),
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("foo{}{}{}", LINE_END, open, close),
|
||||
&Selection::single(LINE_END.len() + 4, LINE_END.len() + 4),
|
||||
);
|
||||
}
|
||||
|
||||
/// [] -> append ( -> ([])
|
||||
@@ -283,11 +517,21 @@ mod test {
|
||||
fn test_append_blank() {
|
||||
test_hooks_with_pairs(
|
||||
// this is what happens when you have a totally blank document and then append
|
||||
&Rope::from("\n\n"),
|
||||
&Selection::single(0, 2),
|
||||
PAIRS,
|
||||
|open, close| format!("\n{}{}\n", open, close),
|
||||
&Selection::single(0, 3),
|
||||
&Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)),
|
||||
// before inserting the pair, the cursor covers all of both empty lines
|
||||
&Selection::single(0, LINE_END.len() * 2),
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| {
|
||||
format!(
|
||||
"{line_end}{open}{close}{line_end}",
|
||||
line_end = LINE_END,
|
||||
open = open,
|
||||
close = close
|
||||
)
|
||||
},
|
||||
// after inserting pair, the cursor covers the first new line and the open char
|
||||
&Selection::single(0, LINE_END.len() + 2),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -302,7 +546,8 @@ mod test {
|
||||
smallvec!(Range::new(1, 0), Range::new(2, 1), Range::new(3, 2),),
|
||||
0,
|
||||
),
|
||||
PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| {
|
||||
format!(
|
||||
"{open}{close}\n{open}{close}\n{open}{close}\n",
|
||||
@@ -324,11 +569,25 @@ mod test {
|
||||
&Rope::from("foo\n"),
|
||||
&Selection::single(2, 4),
|
||||
differing_pairs(),
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("foo{}{}\n", open, close),
|
||||
&Selection::single(2, 5),
|
||||
);
|
||||
}
|
||||
|
||||
/// foo[] -> append to end of line ( -> foo([])
|
||||
#[test]
|
||||
fn test_append_single_cursor() {
|
||||
test_hooks_with_pairs(
|
||||
&Rope::from(format!("foo{}", LINE_END)),
|
||||
&Selection::single(3, 3 + LINE_END.len()),
|
||||
differing_pairs(),
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("foo{}{}{}", open, close, LINE_END),
|
||||
&Selection::single(4, 5),
|
||||
);
|
||||
}
|
||||
|
||||
/// fo[o] fo[o(])
|
||||
/// fo[o] -> append ( -> fo[o(])
|
||||
/// fo[o] fo[o(])
|
||||
@@ -341,6 +600,7 @@ mod test {
|
||||
0,
|
||||
),
|
||||
differing_pairs(),
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| {
|
||||
format!(
|
||||
"foo{open}{close}\nfoo{open}{close}\nfoo{open}{close}\n",
|
||||
@@ -355,18 +615,19 @@ mod test {
|
||||
);
|
||||
}
|
||||
|
||||
/// ([]) -> insert ) -> ()[]
|
||||
/// ([)] -> insert ) -> ()[]
|
||||
#[test]
|
||||
fn test_insert_close_inside_pair() {
|
||||
for (open, close) in PAIRS {
|
||||
let doc = Rope::from(format!("{}{}", open, close));
|
||||
for (open, close) in DEFAULT_PAIRS {
|
||||
let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
|
||||
|
||||
test_hooks(
|
||||
&doc,
|
||||
&Selection::single(2, 1),
|
||||
*close,
|
||||
DEFAULT_PAIRS,
|
||||
&doc,
|
||||
&Selection::single(3, 2),
|
||||
&Selection::single(2 + LINE_END.len(), 2),
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -374,15 +635,16 @@ mod test {
|
||||
/// [(]) -> append ) -> [()]
|
||||
#[test]
|
||||
fn test_append_close_inside_pair() {
|
||||
for (open, close) in PAIRS {
|
||||
let doc = Rope::from(format!("{}{}\n", open, close));
|
||||
for (open, close) in DEFAULT_PAIRS {
|
||||
let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
|
||||
|
||||
test_hooks(
|
||||
&doc,
|
||||
&Selection::single(0, 2),
|
||||
*close,
|
||||
DEFAULT_PAIRS,
|
||||
&doc,
|
||||
&Selection::single(0, 3),
|
||||
&Selection::single(0, 2 + LINE_END.len()),
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -402,14 +664,14 @@ mod test {
|
||||
0,
|
||||
);
|
||||
|
||||
for (open, close) in PAIRS {
|
||||
for (open, close) in DEFAULT_PAIRS {
|
||||
let doc = Rope::from(format!(
|
||||
"{open}{close}\n{open}{close}\n{open}{close}\n",
|
||||
open = open,
|
||||
close = close
|
||||
));
|
||||
|
||||
test_hooks(&doc, &sel, *close, &doc, &expected_sel);
|
||||
test_hooks(&doc, &sel, *close, DEFAULT_PAIRS, &doc, &expected_sel);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -428,14 +690,14 @@ mod test {
|
||||
0,
|
||||
);
|
||||
|
||||
for (open, close) in PAIRS {
|
||||
for (open, close) in DEFAULT_PAIRS {
|
||||
let doc = Rope::from(format!(
|
||||
"{open}{close}\n{open}{close}\n{open}{close}\n",
|
||||
open = open,
|
||||
close = close
|
||||
));
|
||||
|
||||
test_hooks(&doc, &sel, *close, &doc, &expected_sel);
|
||||
test_hooks(&doc, &sel, *close, DEFAULT_PAIRS, &doc, &expected_sel);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -453,7 +715,14 @@ mod test {
|
||||
close = close
|
||||
));
|
||||
|
||||
test_hooks(&doc, &sel, *open, &expected_doc, &expected_sel);
|
||||
test_hooks(
|
||||
&doc,
|
||||
&sel,
|
||||
*open,
|
||||
DEFAULT_PAIRS,
|
||||
&expected_doc,
|
||||
&expected_sel,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -471,7 +740,14 @@ mod test {
|
||||
close = close
|
||||
));
|
||||
|
||||
test_hooks(&doc, &sel, *open, &expected_doc, &expected_sel);
|
||||
test_hooks(
|
||||
&doc,
|
||||
&sel,
|
||||
*open,
|
||||
DEFAULT_PAIRS,
|
||||
&expected_doc,
|
||||
&expected_sel,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -490,7 +766,14 @@ mod test {
|
||||
outer_open, inner_open, inner_close, outer_close
|
||||
));
|
||||
|
||||
test_hooks(&doc, &sel, *inner_open, &expected_doc, &expected_sel);
|
||||
test_hooks(
|
||||
&doc,
|
||||
&sel,
|
||||
*inner_open,
|
||||
DEFAULT_PAIRS,
|
||||
&expected_doc,
|
||||
&expected_sel,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -510,7 +793,14 @@ mod test {
|
||||
outer_open, inner_open, inner_close, outer_close
|
||||
));
|
||||
|
||||
test_hooks(&doc, &sel, *inner_open, &expected_doc, &expected_sel);
|
||||
test_hooks(
|
||||
&doc,
|
||||
&sel,
|
||||
*inner_open,
|
||||
DEFAULT_PAIRS,
|
||||
&expected_doc,
|
||||
&expected_sel,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -521,7 +811,8 @@ mod test {
|
||||
test_hooks_with_pairs(
|
||||
&Rope::from("word"),
|
||||
&Selection::single(1, 0),
|
||||
PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, _| format!("{}word", open),
|
||||
&Selection::single(2, 1),
|
||||
)
|
||||
@@ -533,7 +824,8 @@ mod test {
|
||||
test_hooks_with_pairs(
|
||||
&Rope::from("word"),
|
||||
&Selection::single(3, 0),
|
||||
PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
DEFAULT_PAIRS,
|
||||
|open, _| format!("{}word", open),
|
||||
&Selection::single(4, 1),
|
||||
)
|
||||
@@ -545,10 +837,17 @@ mod test {
|
||||
let sel = Selection::single(0, 4);
|
||||
let expected_sel = Selection::single(0, 5);
|
||||
|
||||
for (_, close) in PAIRS {
|
||||
for (_, close) in DEFAULT_PAIRS {
|
||||
let doc = Rope::from("word");
|
||||
let expected_doc = Rope::from(format!("wor{}d", close));
|
||||
test_hooks(&doc, &sel, *close, &expected_doc, &expected_sel);
|
||||
test_hooks(
|
||||
&doc,
|
||||
&sel,
|
||||
*close,
|
||||
DEFAULT_PAIRS,
|
||||
&expected_doc,
|
||||
&expected_sel,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -559,11 +858,27 @@ mod test {
|
||||
&Rope::from("foo word"),
|
||||
&Selection::single(7, 3),
|
||||
differing_pairs(),
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("foo{}{} word", open, close),
|
||||
&Selection::single(9, 4),
|
||||
)
|
||||
}
|
||||
|
||||
/// foo([) wor]d -> insert ) -> foo()[ wor]d
|
||||
#[test]
|
||||
fn test_insert_close_inside_pair_trailing_word_with_selection() {
|
||||
for (open, close) in differing_pairs() {
|
||||
test_hooks(
|
||||
&Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
|
||||
&Selection::single(9, 4),
|
||||
*close,
|
||||
DEFAULT_PAIRS,
|
||||
&Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
|
||||
&Selection::single(9, 5),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// we want pairs that are *not* the same char to be inserted after
|
||||
/// a non-pair char, for cases like functions, but for pairs that are
|
||||
/// the same char, we want to *not* insert a pair to handle cases like "I'm"
|
||||
@@ -572,7 +887,7 @@ mod test {
|
||||
/// word[] -> insert ' -> word'[]
|
||||
#[test]
|
||||
fn test_insert_open_after_non_pair() {
|
||||
let doc = Rope::from("word");
|
||||
let doc = Rope::from(format!("word{}", LINE_END));
|
||||
let sel = Selection::single(5, 4);
|
||||
let expected_sel = Selection::single(6, 5);
|
||||
|
||||
@@ -580,7 +895,8 @@ mod test {
|
||||
&doc,
|
||||
&sel,
|
||||
differing_pairs(),
|
||||
|open, close| format!("word{}{}", open, close),
|
||||
DEFAULT_PAIRS,
|
||||
|open, close| format!("word{}{}{}", open, close, LINE_END),
|
||||
&expected_sel,
|
||||
);
|
||||
|
||||
@@ -588,22 +904,34 @@ mod test {
|
||||
&doc,
|
||||
&sel,
|
||||
matching_pairs(),
|
||||
|open, _| format!("word{}", open),
|
||||
DEFAULT_PAIRS,
|
||||
|open, _| format!("word{}{}", open, LINE_END),
|
||||
&expected_sel,
|
||||
);
|
||||
}
|
||||
|
||||
/// appending with only a cursor should stay a cursor
|
||||
///
|
||||
/// [] -> append to end "foo -> "foo[]"
|
||||
#[test]
|
||||
fn test_append_single_cursor() {
|
||||
fn test_configured_pairs() {
|
||||
let test_pairs = &[('`', ':'), ('+', '-')];
|
||||
|
||||
test_hooks_with_pairs(
|
||||
&Rope::from("\n"),
|
||||
&Selection::single(0, 1),
|
||||
PAIRS,
|
||||
|open, close| format!("{}{}\n", open, close),
|
||||
&Selection::single(1, 2),
|
||||
&Rope::from(LINE_END),
|
||||
&Selection::single(1, 0),
|
||||
test_pairs,
|
||||
test_pairs,
|
||||
|open, close| format!("{}{}{}", open, close, LINE_END),
|
||||
&Selection::single(2, 1),
|
||||
);
|
||||
|
||||
let doc = Rope::from(format!("foo`: word{}", LINE_END));
|
||||
|
||||
test_hooks(
|
||||
&doc,
|
||||
&Selection::single(9, 4),
|
||||
':',
|
||||
test_pairs,
|
||||
&doc,
|
||||
&Selection::single(9, 5),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@@ -91,12 +91,11 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_categorize() {
|
||||
const EOL_TEST_CASE: &'static str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
|
||||
const WORD_TEST_CASE: &'static str =
|
||||
"_hello_world_あいうえおー12345678901234567890";
|
||||
const PUNCTUATION_TEST_CASE: &'static str =
|
||||
const EOL_TEST_CASE: &str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
|
||||
const WORD_TEST_CASE: &str = "_hello_world_あいうえおー12345678901234567890";
|
||||
const PUNCTUATION_TEST_CASE: &str =
|
||||
"!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~";
|
||||
const WHITESPACE_TEST_CASE: &'static str = " ";
|
||||
const WHITESPACE_TEST_CASE: &str = " ";
|
||||
|
||||
for ch in EOL_TEST_CASE.chars() {
|
||||
assert_eq!(CharCategory::Eol, categorize_char(ch));
|
||||
|
10
helix-core/src/config.rs
Normal file
10
helix-core/src/config.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
/// Syntax configuration loader based on built-in languages.toml.
|
||||
pub fn default_syntax_loader() -> crate::syntax::Configuration {
|
||||
helix_loader::default_lang_config()
|
||||
.try_into()
|
||||
.expect("Could not serialize built-in languages.toml")
|
||||
}
|
||||
/// Syntax configuration loader based on user configured languages.toml.
|
||||
pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> {
|
||||
helix_loader::user_lang_config()?.try_into()
|
||||
}
|
@@ -11,10 +11,6 @@ pub fn compare_ropes(old: &Rope, new: &Rope) -> Transaction {
|
||||
// A timeout is set so after 1 seconds, the algorithm will start
|
||||
// approximating. This is especially important for big `Rope`s or
|
||||
// `Rope`s that are extremely dissimilar to each other.
|
||||
//
|
||||
// Note: Ignore the clippy warning, as the trait bounds of
|
||||
// `Transaction::change()` require an iterator implementing
|
||||
// `ExactIterator`.
|
||||
let mut config = similar::TextDiff::configure();
|
||||
config.timeout(std::time::Duration::from_secs(1));
|
||||
|
||||
@@ -62,7 +58,7 @@ mod tests {
|
||||
let mut old = Rope::from(a);
|
||||
let new = Rope::from(b);
|
||||
compare_ropes(&old, &new).apply(&mut old);
|
||||
old.to_string() == new.to_string()
|
||||
old == new
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -120,6 +120,43 @@ pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -
|
||||
chunk_char_idx + tmp
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn nth_next_grapheme_boundary_byte(slice: RopeSlice, mut byte_idx: usize, n: usize) -> usize {
|
||||
// Bounds check
|
||||
debug_assert!(byte_idx <= slice.len_bytes());
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (mut chunk, mut chunk_byte_idx, mut _chunk_char_idx, _) = slice.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
|
||||
|
||||
// Find the nth next grapheme cluster boundary.
|
||||
for _ in 0..n {
|
||||
loop {
|
||||
match gc.next_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(None) => return slice.len_bytes(),
|
||||
Ok(Some(n)) => {
|
||||
byte_idx = n;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::NextChunk) => {
|
||||
chunk_byte_idx += chunk.len();
|
||||
let (a, _, _c, _) = slice.chunk_at_byte(chunk_byte_idx);
|
||||
chunk = a;
|
||||
// chunk_char_idx = c;
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let ctx_chunk = slice.chunk_at_byte(n - 1).0;
|
||||
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
byte_idx
|
||||
}
|
||||
|
||||
/// Finds the next grapheme boundary after the given char position.
|
||||
#[must_use]
|
||||
#[inline(always)]
|
||||
@@ -127,6 +164,13 @@ pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize {
|
||||
nth_next_grapheme_boundary(slice, char_idx, 1)
|
||||
}
|
||||
|
||||
/// Finds the next grapheme boundary after the given byte position.
|
||||
#[must_use]
|
||||
#[inline(always)]
|
||||
pub fn next_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> usize {
|
||||
nth_next_grapheme_boundary_byte(slice, byte_idx, 1)
|
||||
}
|
||||
|
||||
/// Returns the passed char index if it's already a grapheme boundary,
|
||||
/// or the next grapheme boundary char index if not.
|
||||
#[must_use]
|
||||
@@ -151,6 +195,23 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the passed byte index if it's already a grapheme boundary,
|
||||
/// or the next grapheme boundary byte index if not.
|
||||
#[must_use]
|
||||
#[inline]
|
||||
pub fn ensure_grapheme_boundary_next_byte(slice: RopeSlice, byte_idx: usize) -> usize {
|
||||
if byte_idx == 0 {
|
||||
byte_idx
|
||||
} else {
|
||||
// TODO: optimize so we're not constructing grapheme cursor twice
|
||||
if is_grapheme_boundary_byte(slice, byte_idx) {
|
||||
byte_idx
|
||||
} else {
|
||||
next_grapheme_boundary_byte(slice, byte_idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the given char position is a grapheme boundary.
|
||||
#[must_use]
|
||||
pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
|
||||
@@ -179,6 +240,31 @@ pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the given byte position is a grapheme boundary.
|
||||
#[must_use]
|
||||
pub fn is_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> bool {
|
||||
// Bounds check
|
||||
debug_assert!(byte_idx <= slice.len_bytes());
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
|
||||
|
||||
// Determine if the given position is a grapheme cluster boundary.
|
||||
loop {
|
||||
match gc.is_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(n) => return n,
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
|
||||
gc.provide_context(ctx_chunk, ctx_byte_start);
|
||||
}
|
||||
Err(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the graphemes of a `RopeSlice`.
|
||||
#[derive(Clone)]
|
||||
pub struct RopeGraphemes<'a> {
|
||||
|
@@ -448,8 +448,8 @@ mod test {
|
||||
change: crate::transaction::Change,
|
||||
instant: Instant,
|
||||
) {
|
||||
let txn = Transaction::change(&state.doc, vec![change.clone()].into_iter());
|
||||
history.commit_revision_at_timestamp(&txn, &state, instant);
|
||||
let txn = Transaction::change(&state.doc, vec![change].into_iter());
|
||||
history.commit_revision_at_timestamp(&txn, state, instant);
|
||||
txn.apply(&mut state.doc);
|
||||
}
|
||||
|
||||
|
@@ -195,82 +195,82 @@ struct DateField {
|
||||
impl DateField {
|
||||
fn from_specifier(specifier: &str) -> Option<Self> {
|
||||
match specifier {
|
||||
"Y" => Some(DateField {
|
||||
"Y" => Some(Self {
|
||||
regex: r"\d{4}",
|
||||
unit: DateUnit::Years,
|
||||
max_len: 5,
|
||||
}),
|
||||
"y" => Some(DateField {
|
||||
"y" => Some(Self {
|
||||
regex: r"\d\d",
|
||||
unit: DateUnit::Years,
|
||||
max_len: 2,
|
||||
}),
|
||||
"m" => Some(DateField {
|
||||
"m" => Some(Self {
|
||||
regex: r"[0-1]\d",
|
||||
unit: DateUnit::Months,
|
||||
max_len: 2,
|
||||
}),
|
||||
"d" => Some(DateField {
|
||||
"d" => Some(Self {
|
||||
regex: r"[0-3]\d",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 2,
|
||||
}),
|
||||
"-d" => Some(DateField {
|
||||
"-d" => Some(Self {
|
||||
regex: r"[1-3]?\d",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 2,
|
||||
}),
|
||||
"a" => Some(DateField {
|
||||
"a" => Some(Self {
|
||||
regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 3,
|
||||
}),
|
||||
"A" => Some(DateField {
|
||||
"A" => Some(Self {
|
||||
regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 9,
|
||||
}),
|
||||
"b" | "h" => Some(DateField {
|
||||
"b" | "h" => Some(Self {
|
||||
regex: r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec",
|
||||
unit: DateUnit::Months,
|
||||
max_len: 3,
|
||||
}),
|
||||
"B" => Some(DateField {
|
||||
"B" => Some(Self {
|
||||
regex: r"January|February|March|April|May|June|July|August|September|October|November|December",
|
||||
unit: DateUnit::Months,
|
||||
max_len: 9,
|
||||
}),
|
||||
"H" => Some(DateField {
|
||||
"H" => Some(Self {
|
||||
regex: r"[0-2]\d",
|
||||
unit: DateUnit::Hours,
|
||||
max_len: 2,
|
||||
}),
|
||||
"M" => Some(DateField {
|
||||
"M" => Some(Self {
|
||||
regex: r"[0-5]\d",
|
||||
unit: DateUnit::Minutes,
|
||||
max_len: 2,
|
||||
}),
|
||||
"S" => Some(DateField {
|
||||
"S" => Some(Self {
|
||||
regex: r"[0-5]\d",
|
||||
unit: DateUnit::Seconds,
|
||||
max_len: 2,
|
||||
}),
|
||||
"I" => Some(DateField {
|
||||
"I" => Some(Self {
|
||||
regex: r"[0-1]\d",
|
||||
unit: DateUnit::Hours,
|
||||
max_len: 2,
|
||||
}),
|
||||
"-I" => Some(DateField {
|
||||
"-I" => Some(Self {
|
||||
regex: r"1?\d",
|
||||
unit: DateUnit::Hours,
|
||||
max_len: 2,
|
||||
}),
|
||||
"P" => Some(DateField {
|
||||
"P" => Some(Self {
|
||||
regex: r"am|pm",
|
||||
unit: DateUnit::AmPm,
|
||||
max_len: 2,
|
||||
}),
|
||||
"p" => Some(DateField {
|
||||
"p" => Some(Self {
|
||||
regex: r"AM|PM",
|
||||
unit: DateUnit::AmPm,
|
||||
max_len: 2,
|
||||
@@ -451,7 +451,7 @@ mod test {
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
expected.into()
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -371,7 +371,7 @@ mod test {
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
expected.into()
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -398,7 +398,7 @@ mod test {
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
expected.into()
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -426,7 +426,7 @@ mod test {
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
expected.into()
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -472,7 +472,7 @@ mod test {
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
expected.into()
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -500,7 +500,7 @@ mod test {
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
expected.into()
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -192,10 +192,7 @@ fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option<Nod
|
||||
let tree = syntax.tree();
|
||||
|
||||
// named_descendant
|
||||
let mut node = match tree.root_node().descendant_for_byte_range(pos, pos) {
|
||||
Some(node) => node,
|
||||
None => return None,
|
||||
};
|
||||
let mut node = tree.root_node().descendant_for_byte_range(pos, pos)?;
|
||||
|
||||
while let Some(parent) = node.parent() {
|
||||
if parent.start_byte() == node.start_byte() {
|
||||
@@ -416,7 +413,7 @@ where
|
||||
",
|
||||
);
|
||||
|
||||
let doc = Rope::from(doc);
|
||||
let doc = doc;
|
||||
use crate::diagnostic::Severity;
|
||||
use crate::syntax::{
|
||||
Configuration, IndentationConfiguration, LanguageConfiguration, Loader,
|
||||
@@ -436,6 +433,7 @@ where
|
||||
comment_token: None,
|
||||
auto_format: false,
|
||||
diagnostic_severity: Severity::Warning,
|
||||
grammar: None,
|
||||
language_server: None,
|
||||
indent: Some(IndentationConfiguration {
|
||||
tab_width: 4,
|
||||
@@ -443,6 +441,8 @@ where
|
||||
}),
|
||||
indent_query: OnceCell::new(),
|
||||
textobject_query: OnceCell::new(),
|
||||
debugger: None,
|
||||
auto_pairs: None,
|
||||
}],
|
||||
});
|
||||
|
||||
@@ -453,7 +453,7 @@ where
|
||||
|
||||
let language_config = loader.language_config_for_scope("source.rust").unwrap();
|
||||
let highlight_config = language_config.highlight_config(&[]).unwrap();
|
||||
let syntax = Syntax::new(&doc, highlight_config.clone());
|
||||
let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader));
|
||||
let text = doc.slice(..);
|
||||
let tab_width = 4;
|
||||
|
||||
|
@@ -3,6 +3,7 @@ pub use encoding_rs as encoding;
|
||||
pub mod auto_pairs;
|
||||
pub mod chars;
|
||||
pub mod comment;
|
||||
pub mod config;
|
||||
pub mod diagnostic;
|
||||
pub mod diff;
|
||||
pub mod graphemes;
|
||||
@@ -32,9 +33,6 @@ pub mod unicode {
|
||||
pub use unicode_width as width;
|
||||
}
|
||||
|
||||
static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
|
||||
once_cell::sync::Lazy::new(runtime_dir);
|
||||
|
||||
pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
|
||||
line.chars().position(|ch| !ch.is_whitespace())
|
||||
}
|
||||
@@ -84,135 +82,12 @@ pub fn find_root(root: Option<&str>, root_markers: &[String]) -> Option<std::pat
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runtime_dir() -> std::path::PathBuf {
|
||||
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
|
||||
return dir.into();
|
||||
}
|
||||
|
||||
const RT_DIR: &str = "runtime";
|
||||
let conf_dir = config_dir().join(RT_DIR);
|
||||
if conf_dir.exists() {
|
||||
return conf_dir;
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
|
||||
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
|
||||
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
|
||||
}
|
||||
|
||||
// fallback to location of the executable being run
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub fn config_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.config_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn cache_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.cache_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
// right overrides left
|
||||
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
|
||||
use toml::Value;
|
||||
|
||||
fn get_name(v: &Value) -> Option<&str> {
|
||||
v.get("name").and_then(Value::as_str)
|
||||
}
|
||||
|
||||
match (left, right) {
|
||||
(Value::Array(mut left_items), Value::Array(right_items)) => {
|
||||
left_items.reserve(right_items.len());
|
||||
for rvalue in right_items {
|
||||
let lvalue = get_name(&rvalue)
|
||||
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
|
||||
.map(|lpos| left_items.remove(lpos));
|
||||
let mvalue = match lvalue {
|
||||
Some(lvalue) => merge_toml_values(lvalue, rvalue),
|
||||
None => rvalue,
|
||||
};
|
||||
left_items.push(mvalue);
|
||||
}
|
||||
Value::Array(left_items)
|
||||
}
|
||||
(Value::Table(mut left_map), Value::Table(right_map)) => {
|
||||
for (rname, rvalue) in right_map {
|
||||
match left_map.remove(&rname) {
|
||||
Some(lvalue) => {
|
||||
let merged_value = merge_toml_values(lvalue, rvalue);
|
||||
left_map.insert(rname, merged_value);
|
||||
}
|
||||
None => {
|
||||
left_map.insert(rname, rvalue);
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Table(left_map)
|
||||
}
|
||||
// Catch everything else we didn't handle, and use the right value
|
||||
(_, value) => value,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod merge_toml_tests {
|
||||
use super::merge_toml_values;
|
||||
|
||||
#[test]
|
||||
fn language_tomls() {
|
||||
use toml::Value;
|
||||
|
||||
const USER: &str = "
|
||||
[[language]]
|
||||
name = \"nix\"
|
||||
test = \"bbb\"
|
||||
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
|
||||
";
|
||||
|
||||
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Couldn't parse built-in languages config");
|
||||
let user: Value = toml::from_str(USER).unwrap();
|
||||
|
||||
let merged = merge_toml_values(base, user);
|
||||
let languages = merged.get("language").unwrap().as_array().unwrap();
|
||||
let nix = languages
|
||||
.iter()
|
||||
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
|
||||
.unwrap();
|
||||
let nix_indent = nix.get("indent").unwrap();
|
||||
|
||||
// We changed tab-width and unit in indent so check them if they are the new values
|
||||
assert_eq!(
|
||||
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
|
||||
4
|
||||
);
|
||||
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
|
||||
// We added a new keys, so check them
|
||||
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
|
||||
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
|
||||
// We didn't change comment-token so it should be same
|
||||
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
|
||||
}
|
||||
}
|
||||
|
||||
pub use etcetera::home_dir;
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
|
||||
pub use ropey::{Rope, RopeBuilder, RopeSlice};
|
||||
|
||||
pub use tendril::StrTendril as Tendril;
|
||||
// pub use tendril::StrTendril as Tendril;
|
||||
pub use smartstring::SmartString;
|
||||
|
||||
pub type Tendril = SmartString<smartstring::LazyCompact>;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use {regex, tree_sitter};
|
||||
@@ -220,7 +95,7 @@ pub use {regex, tree_sitter};
|
||||
pub use graphemes::RopeGraphemes;
|
||||
pub use position::{coords_at_pos, pos_at_coords, visual_coords_at_pos, Position};
|
||||
pub use selection::{Range, Selection};
|
||||
pub use smallvec::SmallVec;
|
||||
pub use smallvec::{smallvec, SmallVec};
|
||||
pub use syntax::Syntax;
|
||||
|
||||
pub use diagnostic::Diagnostic;
|
||||
|
@@ -250,7 +250,7 @@ mod line_ending_tests {
|
||||
assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR));
|
||||
assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF));
|
||||
assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf));
|
||||
assert_eq!(get_line_ending_of_str(&text[..]), None);
|
||||
assert_eq!(get_line_ending_of_str(text), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@@ -1,6 +1,7 @@
|
||||
use std::iter;
|
||||
|
||||
use ropey::iter::Chars;
|
||||
use tree_sitter::{Node, QueryCursor};
|
||||
|
||||
use crate::{
|
||||
chars::{categorize_char, char_is_line_ending, CharCategory},
|
||||
@@ -9,7 +10,10 @@ use crate::{
|
||||
next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary,
|
||||
prev_grapheme_boundary,
|
||||
},
|
||||
pos_at_coords, Position, Range, RopeSlice,
|
||||
pos_at_coords,
|
||||
syntax::LanguageConfiguration,
|
||||
textobject::TextObject,
|
||||
Position, Range, RopeSlice,
|
||||
};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
@@ -305,6 +309,56 @@ fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> boo
|
||||
}
|
||||
}
|
||||
|
||||
pub fn goto_treesitter_object(
|
||||
slice: RopeSlice,
|
||||
range: Range,
|
||||
object_name: &str,
|
||||
dir: Direction,
|
||||
slice_tree: Node,
|
||||
lang_config: &LanguageConfiguration,
|
||||
_count: usize,
|
||||
) -> Range {
|
||||
let get_range = move || -> Option<Range> {
|
||||
let byte_pos = slice.char_to_byte(range.cursor(slice));
|
||||
|
||||
let cap_name = |t: TextObject| format!("{}.{}", object_name, t);
|
||||
let mut cursor = QueryCursor::new();
|
||||
let nodes = lang_config.textobject_query()?.capture_nodes_any(
|
||||
&[
|
||||
&cap_name(TextObject::Movement),
|
||||
&cap_name(TextObject::Around),
|
||||
&cap_name(TextObject::Inside),
|
||||
],
|
||||
slice_tree,
|
||||
slice,
|
||||
&mut cursor,
|
||||
)?;
|
||||
|
||||
let node = match dir {
|
||||
Direction::Forward => nodes
|
||||
.filter(|n| n.start_byte() > byte_pos)
|
||||
.min_by_key(|n| n.start_byte())?,
|
||||
Direction::Backward => nodes
|
||||
.filter(|n| n.start_byte() < byte_pos)
|
||||
.max_by_key(|n| n.start_byte())?,
|
||||
};
|
||||
|
||||
let len = slice.len_bytes();
|
||||
let start_byte = node.start_byte();
|
||||
let end_byte = node.end_byte();
|
||||
if start_byte >= len || end_byte >= len {
|
||||
return None;
|
||||
}
|
||||
|
||||
let start_char = slice.byte_to_char(start_byte);
|
||||
let end_char = slice.byte_to_char(end_byte);
|
||||
|
||||
// head of range should be at beginning
|
||||
Some(Range::new(end_char, start_char))
|
||||
};
|
||||
get_range().unwrap_or(range)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use ropey::Rope;
|
||||
|
@@ -1,31 +1,72 @@
|
||||
use crate::{Range, RopeSlice, Selection, Syntax};
|
||||
use tree_sitter::Node;
|
||||
|
||||
// TODO: to contract_selection we'd need to store the previous ranges before expand.
|
||||
// Maybe just contract to the first child node?
|
||||
pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: &Selection) -> Selection {
|
||||
pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
|
||||
select_node_impl(syntax, text, selection, |descendant, from, to| {
|
||||
if descendant.start_byte() == from && descendant.end_byte() == to {
|
||||
descendant.parent()
|
||||
} else {
|
||||
Some(descendant)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
|
||||
select_node_impl(syntax, text, selection, |descendant, _from, _to| {
|
||||
descendant.child(0).or(Some(descendant))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn select_sibling<F>(
|
||||
syntax: &Syntax,
|
||||
text: RopeSlice,
|
||||
selection: Selection,
|
||||
sibling_fn: &F,
|
||||
) -> Selection
|
||||
where
|
||||
F: Fn(Node) -> Option<Node>,
|
||||
{
|
||||
select_node_impl(syntax, text, selection, |descendant, _from, _to| {
|
||||
find_sibling_recursive(descendant, sibling_fn)
|
||||
})
|
||||
}
|
||||
|
||||
fn find_sibling_recursive<F>(node: Node, sibling_fn: F) -> Option<Node>
|
||||
where
|
||||
F: Fn(Node) -> Option<Node>,
|
||||
{
|
||||
sibling_fn(node).or_else(|| {
|
||||
node.parent()
|
||||
.and_then(|node| find_sibling_recursive(node, sibling_fn))
|
||||
})
|
||||
}
|
||||
|
||||
fn select_node_impl<F>(
|
||||
syntax: &Syntax,
|
||||
text: RopeSlice,
|
||||
selection: Selection,
|
||||
select_fn: F,
|
||||
) -> Selection
|
||||
where
|
||||
F: Fn(Node, usize, usize) -> Option<Node>,
|
||||
{
|
||||
let tree = syntax.tree();
|
||||
|
||||
selection.clone().transform(|range| {
|
||||
selection.transform(|range| {
|
||||
let from = text.char_to_byte(range.from());
|
||||
let to = text.char_to_byte(range.to());
|
||||
|
||||
// find parent of a descendant that matches the range
|
||||
let parent = match tree
|
||||
let node = match tree
|
||||
.root_node()
|
||||
.descendant_for_byte_range(from, to)
|
||||
.and_then(|node| {
|
||||
if node.child_count() == 0 || (node.start_byte() == from && node.end_byte() == to) {
|
||||
node.parent()
|
||||
} else {
|
||||
Some(node)
|
||||
}
|
||||
}) {
|
||||
Some(parent) => parent,
|
||||
.and_then(|node| select_fn(node, from, to))
|
||||
{
|
||||
Some(node) => node,
|
||||
None => return range,
|
||||
};
|
||||
|
||||
let from = text.byte_to_char(parent.start_byte());
|
||||
let to = text.byte_to_char(parent.end_byte());
|
||||
let from = text.byte_to_char(node.start_byte());
|
||||
let to = text.byte_to_char(node.end_byte());
|
||||
|
||||
if range.head < range.anchor {
|
||||
Range::new(to, from)
|
||||
|
@@ -1,9 +1,10 @@
|
||||
use etcetera::home_dir;
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
|
||||
/// Replaces users home directory from `path` with tilde `~` if the directory
|
||||
/// is available, otherwise returns the path unchanged.
|
||||
pub fn fold_home_dir(path: &Path) -> PathBuf {
|
||||
if let Ok(home) = super::home_dir() {
|
||||
if let Ok(home) = home_dir() {
|
||||
if path.starts_with(&home) {
|
||||
// it's ok to unwrap, the path starts with home dir
|
||||
return PathBuf::from("~").join(path.strip_prefix(&home).unwrap());
|
||||
@@ -20,7 +21,7 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
|
||||
let mut components = path.components().peekable();
|
||||
if let Some(Component::Normal(c)) = components.peek() {
|
||||
if c == &"~" {
|
||||
if let Ok(home) = super::home_dir() {
|
||||
if let Ok(home) = home_dir() {
|
||||
// it's ok to unwrap, the path starts with `~`
|
||||
return home.join(path.strip_prefix("~").unwrap());
|
||||
}
|
||||
|
@@ -1,8 +1,9 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::{
|
||||
chars::char_is_line_ending,
|
||||
graphemes::{ensure_grapheme_boundary_prev, RopeGraphemes},
|
||||
graphemes::{ensure_grapheme_boundary_prev, grapheme_width, RopeGraphemes},
|
||||
line_ending::line_end_char_index,
|
||||
unicode::width::UnicodeWidthChar,
|
||||
RopeSlice,
|
||||
};
|
||||
|
||||
@@ -77,14 +78,17 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po
|
||||
|
||||
let line_start = text.line_to_char(line);
|
||||
let pos = ensure_grapheme_boundary_prev(text, pos);
|
||||
let col = text
|
||||
.slice(line_start..pos)
|
||||
.chars()
|
||||
.flat_map(|c| match c {
|
||||
'\t' => Some(tab_width),
|
||||
c => UnicodeWidthChar::width(c),
|
||||
})
|
||||
.sum();
|
||||
|
||||
let mut col = 0;
|
||||
|
||||
for grapheme in RopeGraphemes::new(text.slice(line_start..pos)) {
|
||||
if grapheme == "\t" {
|
||||
col += tab_width - (col % tab_width);
|
||||
} else {
|
||||
let grapheme = Cow::from(grapheme);
|
||||
col += grapheme_width(&grapheme);
|
||||
}
|
||||
}
|
||||
|
||||
Position::new(line, col)
|
||||
}
|
||||
@@ -109,7 +113,10 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Po
|
||||
/// TODO: this should be changed to work in terms of visual row/column, not
|
||||
/// graphemes.
|
||||
pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize {
|
||||
let Position { row, col } = coords;
|
||||
let Position { mut row, col } = coords;
|
||||
if limit_before_line_ending {
|
||||
row = row.min(text.len_lines() - 1);
|
||||
};
|
||||
let line_start = text.line_to_char(row);
|
||||
let line_end = if limit_before_line_ending {
|
||||
line_end_char_index(&text, row)
|
||||
@@ -290,5 +297,12 @@ mod test {
|
||||
assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0);
|
||||
assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 1);
|
||||
assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 2);
|
||||
|
||||
// Test out of bounds.
|
||||
let text = Rope::new();
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(pos_at_coords(slice, (10, 0).into(), true), 0);
|
||||
assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0);
|
||||
assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0);
|
||||
}
|
||||
}
|
||||
|
@@ -68,4 +68,8 @@ impl Registers {
|
||||
pub fn read(&self, name: char) -> Option<&[String]> {
|
||||
self.get(name).map(|reg| reg.read())
|
||||
}
|
||||
|
||||
pub fn inner(&self) -> &HashMap<char, Register> {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
@@ -140,6 +140,11 @@ impl Range {
|
||||
self.from() == other.from() || (self.to() > other.from() && other.to() > self.from())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn contains_range(&self, other: &Self) -> bool {
|
||||
self.from() <= other.from() && self.to() >= other.to()
|
||||
}
|
||||
|
||||
pub fn contains(&self, pos: usize) -> bool {
|
||||
self.from() <= pos && pos < self.to()
|
||||
}
|
||||
@@ -544,6 +549,39 @@ impl Selection {
|
||||
pub fn len(&self) -> usize {
|
||||
self.ranges.len()
|
||||
}
|
||||
|
||||
// returns true if self ⊇ other
|
||||
pub fn contains(&self, other: &Selection) -> bool {
|
||||
// can't contain other if it is larger
|
||||
if other.len() > self.len() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let (mut iter_self, mut iter_other) = (self.iter(), other.iter());
|
||||
let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next());
|
||||
|
||||
loop {
|
||||
match (ele_self, ele_other) {
|
||||
(Some(ra), Some(rb)) => {
|
||||
if !ra.contains_range(rb) {
|
||||
// `self` doesn't contain next element from `other`, advance `self`, we need to match all from `other`
|
||||
ele_self = iter_self.next();
|
||||
} else {
|
||||
// matched element from `other`, advance `other`
|
||||
ele_other = iter_other.next();
|
||||
};
|
||||
}
|
||||
(None, Some(_)) => {
|
||||
// exhausted `self`, we can't match the reminder of `other`
|
||||
return false;
|
||||
}
|
||||
(_, None) => {
|
||||
// no elements from `other` left to match, `self` contains `other`
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Selection {
|
||||
@@ -728,16 +766,16 @@ mod test {
|
||||
fn test_contains() {
|
||||
let range = Range::new(10, 12);
|
||||
|
||||
assert_eq!(range.contains(9), false);
|
||||
assert_eq!(range.contains(10), true);
|
||||
assert_eq!(range.contains(11), true);
|
||||
assert_eq!(range.contains(12), false);
|
||||
assert_eq!(range.contains(13), false);
|
||||
assert!(!range.contains(9));
|
||||
assert!(range.contains(10));
|
||||
assert!(range.contains(11));
|
||||
assert!(!range.contains(12));
|
||||
assert!(!range.contains(13));
|
||||
|
||||
let range = Range::new(9, 6);
|
||||
assert_eq!(range.contains(9), false);
|
||||
assert_eq!(range.contains(7), true);
|
||||
assert_eq!(range.contains(6), true);
|
||||
assert!(!range.contains(9));
|
||||
assert!(range.contains(7));
|
||||
assert!(range.contains(6));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -982,4 +1020,30 @@ mod test {
|
||||
&["", "abcd", "efg", "rs", "xyz"]
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn test_selection_contains() {
|
||||
fn contains(a: Vec<(usize, usize)>, b: Vec<(usize, usize)>) -> bool {
|
||||
let sela = Selection::new(a.iter().map(|a| Range::new(a.0, a.1)).collect(), 0);
|
||||
let selb = Selection::new(b.iter().map(|b| Range::new(b.0, b.1)).collect(), 0);
|
||||
sela.contains(&selb)
|
||||
}
|
||||
|
||||
// exact match
|
||||
assert!(contains(vec!((1, 1)), vec!((1, 1))));
|
||||
|
||||
// larger set contains smaller
|
||||
assert!(contains(vec!((1, 1), (2, 2), (3, 3)), vec!((2, 2))));
|
||||
|
||||
// multiple matches
|
||||
assert!(contains(vec!((1, 1), (2, 2)), vec!((1, 1), (2, 2))));
|
||||
|
||||
// smaller set can't contain bigger
|
||||
assert!(!contains(vec!((1, 1)), vec!((1, 1), (2, 2))));
|
||||
|
||||
assert!(contains(
|
||||
vec!((1, 1), (2, 4), (5, 6), (7, 9), (10, 13)),
|
||||
vec!((3, 4), (7, 9))
|
||||
));
|
||||
assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6))));
|
||||
}
|
||||
}
|
||||
|
@@ -1,3 +1,5 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use crate::{search, Range, Selection};
|
||||
use ropey::RopeSlice;
|
||||
|
||||
@@ -11,6 +13,27 @@ pub const PAIRS: &[(char, char)] = &[
|
||||
('(', ')'),
|
||||
];
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum Error {
|
||||
PairNotFound,
|
||||
CursorOverlap,
|
||||
RangeExceedsText,
|
||||
CursorOnAmbiguousPair,
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(match *self {
|
||||
Error::PairNotFound => "Surround pair not found around all cursors",
|
||||
Error::CursorOverlap => "Cursors overlap for a single surround pair range",
|
||||
Error::RangeExceedsText => "Cursor range exceeds text length",
|
||||
Error::CursorOnAmbiguousPair => "Cursor on ambiguous surround pair",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
/// Given any char in [PAIRS], return the open and closing chars. If not found in
|
||||
/// [PAIRS] return (ch, ch).
|
||||
///
|
||||
@@ -37,31 +60,36 @@ pub fn find_nth_pairs_pos(
|
||||
ch: char,
|
||||
range: Range,
|
||||
n: usize,
|
||||
) -> Option<(usize, usize)> {
|
||||
if text.len_chars() < 2 || range.to() >= text.len_chars() {
|
||||
return None;
|
||||
) -> Result<(usize, usize)> {
|
||||
if text.len_chars() < 2 {
|
||||
return Err(Error::PairNotFound);
|
||||
}
|
||||
if range.to() >= text.len_chars() {
|
||||
return Err(Error::RangeExceedsText);
|
||||
}
|
||||
|
||||
let (open, close) = get_pair(ch);
|
||||
let pos = range.cursor(text);
|
||||
|
||||
if open == close {
|
||||
let (open, close) = if open == close {
|
||||
if Some(open) == text.get_char(pos) {
|
||||
// Cursor is directly on match char. We return no match
|
||||
// because there's no way to know which side of the char
|
||||
// we should be searching on.
|
||||
return None;
|
||||
return Err(Error::CursorOnAmbiguousPair);
|
||||
}
|
||||
Some((
|
||||
search::find_nth_prev(text, open, pos, n)?,
|
||||
search::find_nth_next(text, close, pos, n)?,
|
||||
))
|
||||
(
|
||||
search::find_nth_prev(text, open, pos, n),
|
||||
search::find_nth_next(text, close, pos, n),
|
||||
)
|
||||
} else {
|
||||
Some((
|
||||
find_nth_open_pair(text, open, close, pos, n)?,
|
||||
find_nth_close_pair(text, open, close, pos, n)?,
|
||||
))
|
||||
}
|
||||
(
|
||||
find_nth_open_pair(text, open, close, pos, n),
|
||||
find_nth_close_pair(text, open, close, pos, n),
|
||||
)
|
||||
};
|
||||
|
||||
Option::zip(open, close).ok_or(Error::PairNotFound)
|
||||
}
|
||||
|
||||
fn find_nth_open_pair(
|
||||
@@ -151,17 +179,17 @@ pub fn get_surround_pos(
|
||||
selection: &Selection,
|
||||
ch: char,
|
||||
skip: usize,
|
||||
) -> Option<Vec<usize>> {
|
||||
) -> Result<Vec<usize>> {
|
||||
let mut change_pos = Vec::new();
|
||||
|
||||
for &range in selection {
|
||||
let (open_pos, close_pos) = find_nth_pairs_pos(text, ch, range, skip)?;
|
||||
if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) {
|
||||
return None;
|
||||
return Err(Error::CursorOverlap);
|
||||
}
|
||||
change_pos.extend_from_slice(&[open_pos, close_pos]);
|
||||
}
|
||||
Some(change_pos)
|
||||
Ok(change_pos)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -172,9 +200,10 @@ mod test {
|
||||
use ropey::Rope;
|
||||
use smallvec::SmallVec;
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn check_find_nth_pair_pos(
|
||||
text: &str,
|
||||
cases: Vec<(usize, char, usize, Option<(usize, usize)>)>,
|
||||
cases: Vec<(usize, char, usize, Result<(usize, usize)>)>,
|
||||
) {
|
||||
let doc = Rope::from(text);
|
||||
let slice = doc.slice(..);
|
||||
@@ -195,13 +224,13 @@ mod test {
|
||||
"some (text) here",
|
||||
vec![
|
||||
// cursor on [t]ext
|
||||
(6, '(', 1, Some((5, 10))),
|
||||
(6, ')', 1, Some((5, 10))),
|
||||
(6, '(', 1, Ok((5, 10))),
|
||||
(6, ')', 1, Ok((5, 10))),
|
||||
// cursor on so[m]e
|
||||
(2, '(', 1, None),
|
||||
(2, '(', 1, Err(Error::PairNotFound)),
|
||||
// cursor on bracket itself
|
||||
(5, '(', 1, Some((5, 10))),
|
||||
(10, '(', 1, Some((5, 10))),
|
||||
(5, '(', 1, Ok((5, 10))),
|
||||
(10, '(', 1, Ok((5, 10))),
|
||||
],
|
||||
);
|
||||
}
|
||||
@@ -212,9 +241,9 @@ mod test {
|
||||
"(so (many (good) text) here)",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(13, '(', 1, Some((10, 15))),
|
||||
(13, '(', 2, Some((4, 21))),
|
||||
(13, '(', 3, Some((0, 27))),
|
||||
(13, '(', 1, Ok((10, 15))),
|
||||
(13, '(', 2, Ok((4, 21))),
|
||||
(13, '(', 3, Ok((0, 27))),
|
||||
],
|
||||
);
|
||||
}
|
||||
@@ -225,11 +254,11 @@ mod test {
|
||||
"'so 'many 'good' text' here'",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(13, '\'', 1, Some((10, 15))),
|
||||
(13, '\'', 2, Some((4, 21))),
|
||||
(13, '\'', 3, Some((0, 27))),
|
||||
(13, '\'', 1, Ok((10, 15))),
|
||||
(13, '\'', 2, Ok((4, 21))),
|
||||
(13, '\'', 3, Ok((0, 27))),
|
||||
// cursor on the quotes
|
||||
(10, '\'', 1, None),
|
||||
(10, '\'', 1, Err(Error::CursorOnAmbiguousPair)),
|
||||
],
|
||||
)
|
||||
}
|
||||
@@ -240,8 +269,8 @@ mod test {
|
||||
"((so)((many) good (text))(here))",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(15, '(', 1, Some((5, 24))),
|
||||
(15, '(', 2, Some((0, 31))),
|
||||
(15, '(', 1, Ok((5, 24))),
|
||||
(15, '(', 2, Ok((0, 31))),
|
||||
],
|
||||
)
|
||||
}
|
||||
@@ -252,9 +281,9 @@ mod test {
|
||||
"(so [many {good} text] here)",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(13, '{', 1, Some((10, 15))),
|
||||
(13, '[', 1, Some((4, 21))),
|
||||
(13, '(', 1, Some((0, 27))),
|
||||
(13, '{', 1, Ok((10, 15))),
|
||||
(13, '[', 1, Ok((4, 21))),
|
||||
(13, '(', 1, Ok((0, 27))),
|
||||
],
|
||||
)
|
||||
}
|
||||
@@ -284,11 +313,10 @@ mod test {
|
||||
|
||||
let selection =
|
||||
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0);
|
||||
|
||||
// cursor on s[o]me, c[h]ars
|
||||
assert_eq!(
|
||||
get_surround_pos(slice, &selection, '(', 1),
|
||||
None // different surround chars
|
||||
Err(Error::PairNotFound) // different surround chars
|
||||
);
|
||||
|
||||
let selection = Selection::new(
|
||||
@@ -298,7 +326,15 @@ mod test {
|
||||
// cursor on [x]x, newli[n]e
|
||||
assert_eq!(
|
||||
get_surround_pos(slice, &selection, '(', 1),
|
||||
None // overlapping surround chars
|
||||
Err(Error::PairNotFound) // overlapping surround chars
|
||||
);
|
||||
|
||||
let selection =
|
||||
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0);
|
||||
// cursor on s[o][m]e
|
||||
assert_eq!(
|
||||
get_surround_pos(slice, &selection, '[', 1),
|
||||
Err(Error::CursorOverlap)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -53,6 +53,8 @@ fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction, lo
|
||||
pub enum TextObject {
|
||||
Around,
|
||||
Inside,
|
||||
/// Used for moving between objects.
|
||||
Movement,
|
||||
}
|
||||
|
||||
impl Display for TextObject {
|
||||
@@ -60,6 +62,7 @@ impl Display for TextObject {
|
||||
f.write_str(match self {
|
||||
Self::Around => "around",
|
||||
Self::Inside => "inside",
|
||||
Self::Movement => "movement",
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -104,6 +107,7 @@ pub fn textobject_word(
|
||||
Range::new(word_start - whitespace_count_left, word_end)
|
||||
}
|
||||
}
|
||||
TextObject::Movement => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,6 +122,7 @@ pub fn textobject_surround(
|
||||
.map(|(anchor, head)| match textobject {
|
||||
TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head),
|
||||
TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)),
|
||||
TextObject::Movement => unreachable!(),
|
||||
})
|
||||
.unwrap_or(range)
|
||||
}
|
||||
|
@@ -21,7 +21,6 @@ pub enum Assoc {
|
||||
After,
|
||||
}
|
||||
|
||||
// ChangeSpec = Change | ChangeSet | Vec<Change>
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
pub struct ChangeSet {
|
||||
pub(crate) changes: Vec<Operation>,
|
||||
@@ -50,7 +49,6 @@ impl ChangeSet {
|
||||
}
|
||||
|
||||
// TODO: from iter
|
||||
//
|
||||
|
||||
#[doc(hidden)] // used by lsp to convert to LSP changes
|
||||
pub fn changes(&self) -> &[Operation] {
|
||||
@@ -85,7 +83,7 @@ impl ChangeSet {
|
||||
|
||||
let new_last = match self.changes.as_mut_slice() {
|
||||
[.., Insert(prev)] | [.., Insert(prev), Delete(_)] => {
|
||||
prev.push_tendril(&fragment);
|
||||
prev.push_str(&fragment);
|
||||
return;
|
||||
}
|
||||
[.., last @ Delete(_)] => std::mem::replace(last, Insert(fragment)),
|
||||
@@ -189,7 +187,7 @@ impl ChangeSet {
|
||||
// TODO: cover this with a test
|
||||
// figure out the byte index of the truncated string end
|
||||
let (pos, _) = s.char_indices().nth(j).unwrap();
|
||||
s.pop_front(pos as u32);
|
||||
s.replace_range(0..pos, "");
|
||||
head_a = Some(Insert(s));
|
||||
head_b = changes_b.next();
|
||||
}
|
||||
@@ -211,9 +209,11 @@ impl ChangeSet {
|
||||
Ordering::Greater => {
|
||||
// figure out the byte index of the truncated string end
|
||||
let (pos, _) = s.char_indices().nth(j).unwrap();
|
||||
let pos = pos as u32;
|
||||
changes.insert(s.subtendril(0, pos));
|
||||
head_a = Some(Insert(s.subtendril(pos, s.len() as u32 - pos)));
|
||||
let mut before = s;
|
||||
let after = before.split_off(pos);
|
||||
|
||||
changes.insert(before);
|
||||
head_a = Some(Insert(after));
|
||||
head_b = changes_b.next();
|
||||
}
|
||||
}
|
||||
@@ -277,7 +277,7 @@ impl ChangeSet {
|
||||
}
|
||||
Delete(n) => {
|
||||
let text = Cow::from(original_doc.slice(pos..pos + *n));
|
||||
changes.insert(Tendril::from_slice(&text));
|
||||
changes.insert(Tendril::from(text.as_ref()));
|
||||
pos += n;
|
||||
}
|
||||
Insert(s) => {
|
||||
@@ -413,8 +413,6 @@ impl ChangeSet {
|
||||
pub struct Transaction {
|
||||
changes: ChangeSet,
|
||||
selection: Option<Selection>,
|
||||
// effects, annotations
|
||||
// scroll_into_view
|
||||
}
|
||||
|
||||
impl Transaction {
|
||||
@@ -438,14 +436,12 @@ impl Transaction {
|
||||
|
||||
/// Returns true if applied successfully.
|
||||
pub fn apply(&self, doc: &mut Rope) -> bool {
|
||||
if !self.changes.is_empty() {
|
||||
// apply changes to the document
|
||||
if !self.changes.apply(doc) {
|
||||
return false;
|
||||
}
|
||||
if self.changes.is_empty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
true
|
||||
// apply changes to the document
|
||||
self.changes.apply(doc)
|
||||
}
|
||||
|
||||
/// Generate a transaction that reverts this one.
|
||||
@@ -473,7 +469,7 @@ impl Transaction {
|
||||
/// Generate a transaction from a set of changes.
|
||||
pub fn change<I>(doc: &Rope, changes: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = Change> + Iterator,
|
||||
I: Iterator<Item = Change>,
|
||||
{
|
||||
let len = doc.len_chars();
|
||||
|
||||
@@ -481,12 +477,11 @@ impl Transaction {
|
||||
let size = upper.unwrap_or(lower);
|
||||
let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate
|
||||
|
||||
// TODO: verify ranges are ordered and not overlapping or change will panic.
|
||||
|
||||
// TODO: test for (pos, pos, None) to factor out as nothing
|
||||
|
||||
let mut last = 0;
|
||||
for (from, to, tendril) in changes {
|
||||
// Verify ranges are ordered and not overlapping
|
||||
debug_assert!(last <= from);
|
||||
|
||||
// Retain from last "to" to current "from"
|
||||
changeset.retain(from - last);
|
||||
let span = to - from;
|
||||
@@ -692,7 +687,7 @@ mod test {
|
||||
let mut doc = Rope::from("hello world!\ntest 123");
|
||||
let transaction = Transaction::change(
|
||||
&doc,
|
||||
// (1, 1, None) is a useless 0-width delete
|
||||
// (1, 1, None) is a useless 0-width delete that gets factored out
|
||||
vec![(1, 1, None), (6, 11, Some("void".into())), (12, 17, None)].into_iter(),
|
||||
);
|
||||
transaction.apply(&mut doc);
|
||||
@@ -710,19 +705,19 @@ mod test {
|
||||
#[test]
|
||||
fn optimized_composition() {
|
||||
let mut state = State::new("".into());
|
||||
let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('h'));
|
||||
let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from("h"));
|
||||
t1.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t1.changes());
|
||||
let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('e'));
|
||||
let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from("e"));
|
||||
t2.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t2.changes());
|
||||
let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l'));
|
||||
let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l"));
|
||||
t3.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t3.changes());
|
||||
let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l'));
|
||||
let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l"));
|
||||
t4.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t4.changes());
|
||||
let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('o'));
|
||||
let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from("o"));
|
||||
t5.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t5.changes());
|
||||
|
||||
@@ -761,7 +756,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn combine_with_utf8() {
|
||||
const TEST_CASE: &'static str = "Hello, これはヘリックスエディターです!";
|
||||
const TEST_CASE: &str = "Hello, これはヘリックスエディターです!";
|
||||
|
||||
let empty = Rope::from("");
|
||||
let a = ChangeSet::new(&empty);
|
||||
|
25
helix-dap/Cargo.toml
Normal file
25
helix-dap/Cargo.toml
Normal file
@@ -0,0 +1,25 @@
|
||||
[package]
|
||||
name = "helix-dap"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2018"
|
||||
license = "MPL-2.0"
|
||||
description = "DAP client implementation for Helix project"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
anyhow = "1.0"
|
||||
log = "0.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
|
||||
which = "4.2"
|
||||
|
||||
[dev-dependencies]
|
||||
fern = "0.6"
|
480
helix-dap/src/client.rs
Normal file
480
helix-dap/src/client.rs
Normal file
@@ -0,0 +1,480 @@
|
||||
use crate::{
|
||||
transport::{Payload, Request, Response, Transport},
|
||||
types::*,
|
||||
Error, Result, ThreadId,
|
||||
};
|
||||
use helix_core::syntax::DebuggerQuirks;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use anyhow::anyhow;
|
||||
pub use log::{error, info};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
future::Future,
|
||||
net::{IpAddr, Ipv4Addr, SocketAddr},
|
||||
path::PathBuf,
|
||||
process::Stdio,
|
||||
sync::atomic::{AtomicU64, Ordering},
|
||||
};
|
||||
use tokio::{
|
||||
io::{AsyncBufRead, AsyncWrite, BufReader, BufWriter},
|
||||
net::TcpStream,
|
||||
process::{Child, Command},
|
||||
sync::mpsc::{channel, unbounded_channel, UnboundedReceiver, UnboundedSender},
|
||||
time,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Client {
|
||||
id: usize,
|
||||
_process: Option<Child>,
|
||||
server_tx: UnboundedSender<Payload>,
|
||||
request_counter: AtomicU64,
|
||||
pub caps: Option<DebuggerCapabilities>,
|
||||
// thread_id -> frames
|
||||
pub stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
|
||||
pub thread_states: HashMap<ThreadId, String>,
|
||||
pub thread_id: Option<ThreadId>,
|
||||
/// Currently active frame for the current thread.
|
||||
pub active_frame: Option<usize>,
|
||||
pub quirks: DebuggerQuirks,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
// Spawn a process and communicate with it by either TCP or stdio
|
||||
pub async fn process(
|
||||
transport: &str,
|
||||
command: &str,
|
||||
args: Vec<&str>,
|
||||
port_arg: Option<&str>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
if command.is_empty() {
|
||||
return Result::Err(Error::Other(anyhow!("Command not provided")));
|
||||
}
|
||||
if transport == "tcp" && port_arg.is_some() {
|
||||
Self::tcp_process(command, args, port_arg.unwrap(), id).await
|
||||
} else if transport == "stdio" {
|
||||
Self::stdio(command, args, id)
|
||||
} else {
|
||||
Result::Err(Error::Other(anyhow!("Incorrect transport {}", transport)))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn streams(
|
||||
rx: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
tx: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
err: Option<Box<dyn AsyncBufRead + Unpin + Send>>,
|
||||
id: usize,
|
||||
process: Option<Child>,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
let (server_rx, server_tx) = Transport::start(rx, tx, err, id);
|
||||
let (client_rx, client_tx) = unbounded_channel();
|
||||
|
||||
let client = Self {
|
||||
id,
|
||||
_process: process,
|
||||
server_tx,
|
||||
request_counter: AtomicU64::new(0),
|
||||
caps: None,
|
||||
//
|
||||
stack_frames: HashMap::new(),
|
||||
thread_states: HashMap::new(),
|
||||
thread_id: None,
|
||||
active_frame: None,
|
||||
quirks: DebuggerQuirks::default(),
|
||||
};
|
||||
|
||||
tokio::spawn(Self::recv(server_rx, client_rx));
|
||||
|
||||
Ok((client, client_tx))
|
||||
}
|
||||
|
||||
pub async fn tcp(
|
||||
addr: std::net::SocketAddr,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
let stream = TcpStream::connect(addr).await?;
|
||||
let (rx, tx) = stream.into_split();
|
||||
Self::streams(Box::new(BufReader::new(rx)), Box::new(tx), None, id, None)
|
||||
}
|
||||
|
||||
pub fn stdio(
|
||||
cmd: &str,
|
||||
args: Vec<&str>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
// Resolve path to the binary
|
||||
let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.args(args)
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
// make sure the process is reaped on drop
|
||||
.kill_on_drop(true)
|
||||
.spawn();
|
||||
|
||||
let mut process = process?;
|
||||
|
||||
// TODO: do we need bufreader/writer here? or do we use async wrappers on unblock?
|
||||
let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin"));
|
||||
let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout"));
|
||||
let errors = process.stderr.take().map(BufReader::new);
|
||||
|
||||
Self::streams(
|
||||
Box::new(BufReader::new(reader)),
|
||||
Box::new(writer),
|
||||
// errors.map(|errors| Box::new(BufReader::new(errors))),
|
||||
match errors {
|
||||
Some(errors) => Some(Box::new(BufReader::new(errors))),
|
||||
None => None,
|
||||
},
|
||||
id,
|
||||
Some(process),
|
||||
)
|
||||
}
|
||||
|
||||
async fn get_port() -> Option<u16> {
|
||||
Some(
|
||||
tokio::net::TcpListener::bind(SocketAddr::new(
|
||||
IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
|
||||
0,
|
||||
))
|
||||
.await
|
||||
.ok()?
|
||||
.local_addr()
|
||||
.ok()?
|
||||
.port(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn tcp_process(
|
||||
cmd: &str,
|
||||
args: Vec<&str>,
|
||||
port_format: &str,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
let port = Self::get_port().await.unwrap();
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.args(args)
|
||||
.args(port_format.replace("{}", &port.to_string()).split(' '))
|
||||
// silence messages
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
// Do not kill debug adapter when leaving, it should exit automatically
|
||||
.spawn()?;
|
||||
|
||||
// Wait for adapter to become ready for connection
|
||||
time::sleep(time::Duration::from_millis(500)).await;
|
||||
|
||||
let stream = TcpStream::connect(SocketAddr::new(
|
||||
IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
|
||||
port,
|
||||
))
|
||||
.await?;
|
||||
|
||||
let (rx, tx) = stream.into_split();
|
||||
Self::streams(
|
||||
Box::new(BufReader::new(rx)),
|
||||
Box::new(tx),
|
||||
None,
|
||||
id,
|
||||
Some(process),
|
||||
)
|
||||
}
|
||||
|
||||
async fn recv(mut server_rx: UnboundedReceiver<Payload>, client_tx: UnboundedSender<Payload>) {
|
||||
while let Some(msg) = server_rx.recv().await {
|
||||
match msg {
|
||||
Payload::Event(ev) => {
|
||||
client_tx.send(Payload::Event(ev)).expect("Failed to send");
|
||||
}
|
||||
Payload::Response(_) => unreachable!(),
|
||||
Payload::Request(req) => {
|
||||
client_tx
|
||||
.send(Payload::Request(req))
|
||||
.expect("Failed to send");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> usize {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn next_request_id(&self) -> u64 {
|
||||
self.request_counter.fetch_add(1, Ordering::Relaxed)
|
||||
}
|
||||
|
||||
// Internal, called by specific DAP commands when resuming
|
||||
pub fn resume_application(&mut self) {
|
||||
if let Some(thread_id) = self.thread_id {
|
||||
self.thread_states.insert(thread_id, "running".to_string());
|
||||
self.stack_frames.remove(&thread_id);
|
||||
}
|
||||
self.active_frame = None;
|
||||
self.thread_id = None;
|
||||
}
|
||||
|
||||
/// Execute a RPC request on the debugger.
|
||||
/// Execute a RPC request on the debugger.
///
/// Returns a future resolving to the raw JSON body of the response (an
/// empty `Value` when the adapter sent no body). The future clones what it
/// needs up front, so it does not borrow `self`.
pub fn call<R: crate::types::Request>(
    &self,
    arguments: R::Arguments,
) -> impl Future<Output = Result<Value>>
where
    R::Arguments: serde::Serialize,
{
    let server_tx = self.server_tx.clone();
    let id = self.next_request_id();

    async move {
        use std::time::Duration;
        use tokio::time::timeout;

        let arguments = Some(serde_json::to_value(arguments)?);

        // Bounded channel of size 1: the transport delivers the matching
        // response through `back_ch` exactly once.
        let (callback_tx, mut callback_rx) = channel(1);

        let req = Request {
            back_ch: Some(callback_tx),
            seq: id,
            command: R::COMMAND.to_string(),
            arguments,
        };

        server_tx
            .send(Payload::Request(req))
            .map_err(|e| Error::Other(e.into()))?;

        // TODO: specifiable timeout, delay other calls until initialize success
        timeout(Duration::from_secs(20), callback_rx.recv())
            .await
            .map_err(|_| Error::Timeout)? // return Timeout
            .ok_or(Error::StreamClosed)?
            .map(|response| response.body.unwrap_or_default())
        // TODO: check response.success
    }
}
|
||||
|
||||
pub async fn request<R: crate::types::Request>(&self, params: R::Arguments) -> Result<R::Result>
|
||||
where
|
||||
R::Arguments: serde::Serialize,
|
||||
R::Result: core::fmt::Debug, // TODO: temporary
|
||||
{
|
||||
// a future that resolves into the response
|
||||
let json = self.call::<R>(params).await?;
|
||||
let response = serde_json::from_value(json)?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub fn reply(
|
||||
&self,
|
||||
request_seq: u64,
|
||||
command: &str,
|
||||
result: core::result::Result<Value, Error>,
|
||||
) -> impl Future<Output = Result<()>> {
|
||||
let server_tx = self.server_tx.clone();
|
||||
let command = command.to_string();
|
||||
|
||||
async move {
|
||||
let response = match result {
|
||||
Ok(result) => Response {
|
||||
request_seq,
|
||||
command,
|
||||
success: true,
|
||||
message: None,
|
||||
body: Some(result),
|
||||
},
|
||||
Err(error) => Response {
|
||||
request_seq,
|
||||
command,
|
||||
success: false,
|
||||
message: Some(error.to_string()),
|
||||
body: None,
|
||||
},
|
||||
};
|
||||
|
||||
server_tx
|
||||
.send(Payload::Response(response))
|
||||
.map_err(|e| Error::Other(e.into()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn capabilities(&self) -> &DebuggerCapabilities {
|
||||
self.caps.as_ref().expect("debugger not yet initialized!")
|
||||
}
|
||||
|
||||
pub async fn initialize(&mut self, adapter_id: String) -> Result<()> {
|
||||
let args = requests::InitializeArguments {
|
||||
client_id: Some("hx".to_owned()),
|
||||
client_name: Some("helix".to_owned()),
|
||||
adapter_id,
|
||||
locale: Some("en-us".to_owned()),
|
||||
lines_start_at_one: Some(true),
|
||||
columns_start_at_one: Some(true),
|
||||
path_format: Some("path".to_owned()),
|
||||
supports_variable_type: Some(true),
|
||||
supports_variable_paging: Some(false),
|
||||
supports_run_in_terminal_request: Some(true),
|
||||
supports_memory_references: Some(false),
|
||||
supports_progress_reporting: Some(false),
|
||||
supports_invalidated_event: Some(false),
|
||||
};
|
||||
|
||||
let response = self.request::<requests::Initialize>(args).await?;
|
||||
self.caps = Some(response);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn disconnect(&self) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Disconnect>(())
|
||||
}
|
||||
|
||||
pub fn launch(&self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Launch>(args)
|
||||
}
|
||||
|
||||
pub fn attach(&self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Attach>(args)
|
||||
}
|
||||
|
||||
pub async fn set_breakpoints(
|
||||
&self,
|
||||
file: PathBuf,
|
||||
breakpoints: Vec<SourceBreakpoint>,
|
||||
) -> Result<Option<Vec<Breakpoint>>> {
|
||||
let args = requests::SetBreakpointsArguments {
|
||||
source: Source {
|
||||
path: Some(file),
|
||||
name: None,
|
||||
source_reference: None,
|
||||
presentation_hint: None,
|
||||
origin: None,
|
||||
sources: None,
|
||||
adapter_data: None,
|
||||
checksums: None,
|
||||
},
|
||||
breakpoints: Some(breakpoints),
|
||||
source_modified: Some(false),
|
||||
};
|
||||
|
||||
let response = self.request::<requests::SetBreakpoints>(args).await?;
|
||||
|
||||
Ok(response.breakpoints)
|
||||
}
|
||||
|
||||
pub async fn configuration_done(&self) -> Result<()> {
|
||||
self.request::<requests::ConfigurationDone>(()).await
|
||||
}
|
||||
|
||||
pub fn continue_thread(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::ContinueArguments { thread_id };
|
||||
|
||||
self.call::<requests::Continue>(args)
|
||||
}
|
||||
|
||||
pub async fn stack_trace(
|
||||
&self,
|
||||
thread_id: ThreadId,
|
||||
) -> Result<(Vec<StackFrame>, Option<usize>)> {
|
||||
let args = requests::StackTraceArguments {
|
||||
thread_id,
|
||||
start_frame: None,
|
||||
levels: None,
|
||||
format: None,
|
||||
};
|
||||
|
||||
let response = self.request::<requests::StackTrace>(args).await?;
|
||||
Ok((response.stack_frames, response.total_frames))
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Threads>(())
|
||||
}
|
||||
|
||||
pub async fn scopes(&self, frame_id: usize) -> Result<Vec<Scope>> {
|
||||
let args = requests::ScopesArguments { frame_id };
|
||||
|
||||
let response = self.request::<requests::Scopes>(args).await?;
|
||||
Ok(response.scopes)
|
||||
}
|
||||
|
||||
pub async fn variables(&self, variables_reference: usize) -> Result<Vec<Variable>> {
|
||||
let args = requests::VariablesArguments {
|
||||
variables_reference,
|
||||
filter: None,
|
||||
start: None,
|
||||
count: None,
|
||||
format: None,
|
||||
};
|
||||
|
||||
let response = self.request::<requests::Variables>(args).await?;
|
||||
Ok(response.variables)
|
||||
}
|
||||
|
||||
pub fn step_in(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::StepInArguments {
|
||||
thread_id,
|
||||
target_id: None,
|
||||
granularity: None,
|
||||
};
|
||||
|
||||
self.call::<requests::StepIn>(args)
|
||||
}
|
||||
|
||||
pub fn step_out(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::StepOutArguments {
|
||||
thread_id,
|
||||
granularity: None,
|
||||
};
|
||||
|
||||
self.call::<requests::StepOut>(args)
|
||||
}
|
||||
|
||||
pub fn next(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::NextArguments {
|
||||
thread_id,
|
||||
granularity: None,
|
||||
};
|
||||
|
||||
self.call::<requests::Next>(args)
|
||||
}
|
||||
|
||||
pub fn pause(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::PauseArguments { thread_id };
|
||||
|
||||
self.call::<requests::Pause>(args)
|
||||
}
|
||||
|
||||
pub async fn eval(
|
||||
&self,
|
||||
expression: String,
|
||||
frame_id: Option<usize>,
|
||||
) -> Result<requests::EvaluateResponse> {
|
||||
let args = requests::EvaluateArguments {
|
||||
expression,
|
||||
frame_id,
|
||||
context: None,
|
||||
format: None,
|
||||
};
|
||||
|
||||
self.request::<requests::Evaluate>(args).await
|
||||
}
|
||||
|
||||
pub fn set_exception_breakpoints(
|
||||
&self,
|
||||
filters: Vec<String>,
|
||||
) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::SetExceptionBreakpointsArguments { filters };
|
||||
|
||||
self.call::<requests::SetExceptionBreakpoints>(args)
|
||||
}
|
||||
}
|
24
helix-dap/src/lib.rs
Normal file
24
helix-dap/src/lib.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
mod client;
|
||||
mod transport;
|
||||
mod types;
|
||||
|
||||
pub use client::Client;
|
||||
pub use events::Event;
|
||||
pub use transport::{Payload, Response, Transport};
|
||||
pub use types::*;
|
||||
|
||||
use thiserror::Error;
|
||||
/// Errors a DAP client operation can produce.
#[derive(Error, Debug)]
pub enum Error {
    /// The adapter sent JSON that could not be decoded.
    #[error("failed to parse: {0}")]
    Parse(#[from] serde_json::Error),
    /// I/O failure on the underlying process or socket.
    #[error("IO Error: {0}")]
    IO(#[from] std::io::Error),
    /// No response arrived within the request timeout window.
    #[error("request timed out")]
    Timeout,
    /// The adapter closed its output stream.
    #[error("server closed the stream")]
    StreamClosed,
    /// Any other failure, carried through `anyhow`.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
/// Convenience alias used throughout this crate.
pub type Result<T> = core::result::Result<T, Error>;
|
280
helix-dap/src/transport.rs
Normal file
280
helix-dap/src/transport.rs
Normal file
@@ -0,0 +1,280 @@
|
||||
use crate::{Error, Event, Result};
|
||||
use anyhow::Context;
|
||||
use log::{error, info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::{
|
||||
io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWrite, AsyncWriteExt},
|
||||
sync::{
|
||||
mpsc::{unbounded_channel, Sender, UnboundedReceiver, UnboundedSender},
|
||||
Mutex,
|
||||
},
|
||||
};
|
||||
|
||||
/// An outgoing DAP request.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Request {
    // Channel the transport uses to hand the matching response back to the
    // caller; skipped during (de)serialization, never sent on the wire.
    #[serde(skip)]
    pub back_ch: Option<Sender<Result<Response>>>,
    pub seq: u64,
    pub command: String,
    pub arguments: Option<Value>,
}

/// A DAP response, matched to its request via `request_seq`.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
pub struct Response {
    // seq is omitted as unused and is not sent by some implementations
    pub request_seq: u64,
    pub success: bool,
    pub command: String,
    // Error description when `success` is false.
    pub message: Option<String>,
    pub body: Option<Value>,
}

/// Any message on the wire, discriminated by the JSON `"type"` field.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum Payload {
    // type = "event"
    Event(Box<Event>),
    // type = "response"
    Response(Response),
    // type = "request"
    Request(Request),
}

/// The framing/IO layer: reads and writes DAP messages and routes
/// responses back to the requests waiting on them.
#[derive(Debug)]
pub struct Transport {
    #[allow(unused)]
    id: usize,
    // seq -> callback channel of the in-flight request.
    pending_requests: Mutex<HashMap<u64, Sender<Result<Response>>>>,
}
|
||||
|
||||
impl Transport {
|
||||
pub fn start(
|
||||
server_stdout: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
server_stdin: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
server_stderr: Option<Box<dyn AsyncBufRead + Unpin + Send>>,
|
||||
id: usize,
|
||||
) -> (UnboundedReceiver<Payload>, UnboundedSender<Payload>) {
|
||||
let (client_tx, rx) = unbounded_channel();
|
||||
let (tx, client_rx) = unbounded_channel();
|
||||
|
||||
let transport = Self {
|
||||
id,
|
||||
pending_requests: Mutex::new(HashMap::default()),
|
||||
};
|
||||
|
||||
let transport = Arc::new(transport);
|
||||
|
||||
tokio::spawn(Self::recv(transport.clone(), server_stdout, client_tx));
|
||||
tokio::spawn(Self::send(transport, server_stdin, client_rx));
|
||||
if let Some(stderr) = server_stderr {
|
||||
tokio::spawn(Self::err(stderr));
|
||||
}
|
||||
|
||||
(rx, tx)
|
||||
}
|
||||
|
||||
/// Read one framed DAP message: header lines terminated by an empty CRLF
/// line, then a body of exactly `Content-Length` bytes, parsed as JSON.
///
/// Returns `Error::StreamClosed` on EOF.
async fn recv_server_message(
    reader: &mut Box<dyn AsyncBufRead + Unpin + Send>,
    buffer: &mut String,
) -> Result<Payload> {
    let mut content_length = None;
    loop {
        buffer.truncate(0);
        // read_line returning 0 bytes means EOF.
        if reader.read_line(buffer).await? == 0 {
            return Err(Error::StreamClosed);
        };

        if buffer == "\r\n" {
            // look for an empty CRLF line
            break;
        }

        let header = buffer.trim();
        let parts = header.split_once(": ");

        match parts {
            Some(("Content-Length", value)) => {
                content_length = Some(value.parse().context("invalid content length")?);
            }
            // Other headers are recognized but ignored.
            Some((_, _)) => {}
            None => {
                // Workaround: Some non-conformant language servers will output logging and other garbage
                // into the same stream as JSON-RPC messages. This can also happen from shell scripts that spawn
                // the server. Skip such lines and log a warning.

                // warn!("Failed to parse header: {:?}", header);
            }
        }
    }

    let content_length = content_length.context("missing content length")?;

    //TODO: reuse vector
    let mut content = vec![0; content_length];
    reader.read_exact(&mut content).await?;
    let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?;

    info!("<- DAP {}", msg);

    // try parsing as output (server response) or call (server request)
    let output: serde_json::Result<Payload> = serde_json::from_str(msg);

    Ok(output?)
}
|
||||
|
||||
async fn recv_server_error(
|
||||
err: &mut (impl AsyncBufRead + Unpin + Send),
|
||||
buffer: &mut String,
|
||||
) -> Result<()> {
|
||||
buffer.truncate(0);
|
||||
if err.read_line(buffer).await? == 0 {
|
||||
return Err(Error::StreamClosed);
|
||||
};
|
||||
error!("err <- {}", buffer);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_payload_to_server(
|
||||
&self,
|
||||
server_stdin: &mut Box<dyn AsyncWrite + Unpin + Send>,
|
||||
mut payload: Payload,
|
||||
) -> Result<()> {
|
||||
if let Payload::Request(request) = &mut payload {
|
||||
if let Some(back) = request.back_ch.take() {
|
||||
self.pending_requests.lock().await.insert(request.seq, back);
|
||||
}
|
||||
}
|
||||
let json = serde_json::to_string(&payload)?;
|
||||
self.send_string_to_server(server_stdin, json).await
|
||||
}
|
||||
|
||||
async fn send_string_to_server(
|
||||
&self,
|
||||
server_stdin: &mut Box<dyn AsyncWrite + Unpin + Send>,
|
||||
request: String,
|
||||
) -> Result<()> {
|
||||
info!("-> DAP {}", request);
|
||||
|
||||
// send the headers
|
||||
server_stdin
|
||||
.write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes())
|
||||
.await?;
|
||||
|
||||
// send the body
|
||||
server_stdin.write_all(request.as_bytes()).await?;
|
||||
|
||||
server_stdin.flush().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_response(res: Response) -> Result<Response> {
|
||||
if res.success {
|
||||
info!("<- DAP success in response to {}", res.request_seq);
|
||||
|
||||
Ok(res)
|
||||
} else {
|
||||
error!(
|
||||
"<- DAP error {:?} ({:?}) for command #{} {}",
|
||||
res.message, res.body, res.request_seq, res.command
|
||||
);
|
||||
|
||||
Err(Error::Other(anyhow::format_err!("{:?}", res.body)))
|
||||
}
|
||||
}
|
||||
|
||||
/// Route one incoming message: a response resolves its pending request,
/// while events and reverse requests are forwarded to the client channel.
async fn process_server_message(
    &self,
    client_tx: &UnboundedSender<Payload>,
    msg: Payload,
) -> Result<()> {
    match msg {
        Payload::Response(res) => {
            let request_seq = res.request_seq;
            // `remove` (not `get`) so each request is answered at most once.
            let tx = self.pending_requests.lock().await.remove(&request_seq);

            match tx {
                Some(tx) => match tx.send(Self::process_response(res)).await {
                    Ok(_) => (),
                    Err(_) => error!(
                        "Tried sending response into a closed channel (id={:?}), original request likely timed out",
                        request_seq
                    ),
                }
                None => {
                    // No caller is waiting; forward it so it isn't lost.
                    warn!("Response to nonexistent request #{}", res.request_seq);
                    client_tx.send(Payload::Response(res)).expect("Failed to send");
                }
            }

            Ok(())
        }
        Payload::Request(Request {
            ref command,
            ref seq,
            ..
        }) => {
            info!("<- DAP request {} #{}", command, seq);
            client_tx.send(msg).expect("Failed to send");
            Ok(())
        }
        Payload::Event(ref event) => {
            info!("<- DAP event {:?}", event);
            client_tx.send(msg).expect("Failed to send");
            Ok(())
        }
    }
}
|
||||
|
||||
async fn recv(
|
||||
transport: Arc<Self>,
|
||||
mut server_stdout: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
client_tx: UnboundedSender<Payload>,
|
||||
) {
|
||||
let mut recv_buffer = String::new();
|
||||
loop {
|
||||
match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await {
|
||||
Ok(msg) => {
|
||||
transport
|
||||
.process_server_message(&client_tx, msg)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
Err(err) => {
|
||||
error!("err: <- {:?}", err);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn send(
|
||||
transport: Arc<Self>,
|
||||
mut server_stdin: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
mut client_rx: UnboundedReceiver<Payload>,
|
||||
) {
|
||||
while let Some(payload) = client_rx.recv().await {
|
||||
transport
|
||||
.send_payload_to_server(&mut server_stdin, payload)
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
async fn err(mut server_stderr: Box<dyn AsyncBufRead + Unpin + Send>) {
|
||||
let mut recv_buffer = String::new();
|
||||
loop {
|
||||
match Self::recv_server_error(&mut server_stderr, &mut recv_buffer).await {
|
||||
Ok(_) => {}
|
||||
Err(err) => {
|
||||
error!("err: <- {:?}", err);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
707
helix-dap/src/types.rs
Normal file
707
helix-dap/src/types.rs
Normal file
@@ -0,0 +1,707 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(
|
||||
Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize,
|
||||
)]
|
||||
pub struct ThreadId(isize);
|
||||
|
||||
impl std::fmt::Display for ThreadId {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
/// A typed DAP request: pairs a wire-protocol `COMMAND` name with the Rust
/// types of its arguments and result.
pub trait Request {
    type Arguments: serde::de::DeserializeOwned + serde::Serialize;
    type Result: serde::de::DeserializeOwned + serde::Serialize;
    const COMMAND: &'static str;
}
|
||||
|
||||
/// Describes one column an adapter wants shown in a modules view.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ColumnDescriptor {
    pub attribute_name: String,
    pub label: String,
    pub format: Option<String>,
    // `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub ty: Option<String>,
    pub width: Option<usize>,
}

/// An exception-filter option the adapter offers for breaking on thrown
/// exceptions.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ExceptionBreakpointsFilter {
    pub filter: String,
    pub label: String,
    pub description: Option<String>,
    pub default: Option<bool>,
    pub supports_condition: Option<bool>,
    pub condition_description: Option<String>,
}
|
||||
|
||||
/// Feature flags the adapter reports in its `initialize` response; each
/// `None` means the adapter did not state the capability.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DebuggerCapabilities {
    pub supports_configuration_done_request: Option<bool>,
    pub supports_function_breakpoints: Option<bool>,
    pub supports_conditional_breakpoints: Option<bool>,
    pub supports_hit_conditional_breakpoints: Option<bool>,
    pub supports_evaluate_for_hovers: Option<bool>,
    pub supports_step_back: Option<bool>,
    pub supports_set_variable: Option<bool>,
    pub supports_restart_frame: Option<bool>,
    pub supports_goto_targets_request: Option<bool>,
    pub supports_step_in_targets_request: Option<bool>,
    pub supports_completions_request: Option<bool>,
    pub supports_modules_request: Option<bool>,
    pub supports_restart_request: Option<bool>,
    pub supports_exception_options: Option<bool>,
    pub supports_value_formatting_options: Option<bool>,
    pub supports_exception_info_request: Option<bool>,
    pub support_terminate_debuggee: Option<bool>,
    pub support_suspend_debuggee: Option<bool>,
    pub supports_delayed_stack_trace_loading: Option<bool>,
    pub supports_loaded_sources_request: Option<bool>,
    pub supports_log_points: Option<bool>,
    pub supports_terminate_threads_request: Option<bool>,
    pub supports_set_expression: Option<bool>,
    pub supports_terminate_request: Option<bool>,
    pub supports_data_breakpoints: Option<bool>,
    pub supports_read_memory_request: Option<bool>,
    pub supports_write_memory_request: Option<bool>,
    pub supports_disassemble_request: Option<bool>,
    pub supports_cancel_request: Option<bool>,
    pub supports_breakpoint_locations_request: Option<bool>,
    pub supports_clipboard_context: Option<bool>,
    pub supports_stepping_granularity: Option<bool>,
    pub supports_instruction_breakpoints: Option<bool>,
    pub supports_exception_filter_options: Option<bool>,
    // Filters offered for exception breakpoints (see `ExceptionBreakpointsFilter`).
    pub exception_breakpoint_filters: Option<Vec<ExceptionBreakpointsFilter>>,
    pub completion_trigger_characters: Option<Vec<String>>,
    pub additional_module_columns: Option<Vec<ColumnDescriptor>>,
    pub supported_checksum_algorithms: Option<Vec<String>>,
}
|
||||
|
||||
/// A checksum of a source file, tagged with its algorithm.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Checksum {
    pub algorithm: String,
    pub checksum: String,
}

/// A source file (or source reference) as described by the adapter.
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Source {
    pub name: Option<String>,
    pub path: Option<PathBuf>,
    // Adapter-assigned handle used when no file path exists.
    pub source_reference: Option<usize>,
    pub presentation_hint: Option<String>,
    pub origin: Option<String>,
    pub sources: Option<Vec<Source>>,
    pub adapter_data: Option<Value>,
    pub checksums: Option<Vec<Checksum>>,
}

/// A breakpoint as requested by the client, positioned within one source.
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SourceBreakpoint {
    pub line: usize,
    pub column: Option<usize>,
    pub condition: Option<String>,
    pub hit_condition: Option<String>,
    pub log_message: Option<String>,
}
|
||||
|
||||
/// A breakpoint as acknowledged by the adapter (verification status and
/// possibly adjusted position).
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Breakpoint {
    pub id: Option<usize>,
    pub verified: bool,
    pub message: Option<String>,
    pub source: Option<Source>,
    pub line: Option<usize>,
    pub column: Option<usize>,
    pub end_line: Option<usize>,
    pub end_column: Option<usize>,
    pub instruction_reference: Option<String>,
    pub offset: Option<usize>,
}

/// Formatting options for how stack frames should be rendered.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct StackFrameFormat {
    pub parameters: Option<bool>,
    pub parameter_types: Option<bool>,
    pub parameter_names: Option<bool>,
    pub parameter_values: Option<bool>,
    pub line: Option<bool>,
    pub module: Option<bool>,
    pub include_all: Option<bool>,
}

/// One frame of a thread's call stack.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct StackFrame {
    pub id: usize,
    pub name: String,
    pub source: Option<Source>,
    pub line: usize,
    pub column: usize,
    pub end_line: Option<usize>,
    pub end_column: Option<usize>,
    pub can_restart: Option<bool>,
    pub instruction_pointer_reference: Option<String>,
    pub module_id: Option<Value>,
    pub presentation_hint: Option<String>,
}
|
||||
|
||||
/// A debuggee thread.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Thread {
    pub id: ThreadId,
    pub name: String,
}

/// A named group of variables (e.g. locals, arguments) within a frame.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Scope {
    pub name: String,
    pub presentation_hint: Option<String>,
    // Handle passed to the `variables` request to enumerate contents.
    pub variables_reference: usize,
    pub named_variables: Option<usize>,
    pub indexed_variables: Option<usize>,
    // When true, fetching the scope's variables may be costly.
    pub expensive: bool,
    pub source: Option<Source>,
    pub line: Option<usize>,
    pub column: Option<usize>,
    pub end_line: Option<usize>,
    pub end_column: Option<usize>,
}

/// Formatting options for rendered values.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ValueFormat {
    pub hex: Option<bool>,
}

/// Hints about how a variable should be presented in the UI.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VariablePresentationHint {
    pub kind: Option<String>,
    pub attributes: Option<Vec<String>>,
    pub visibility: Option<String>,
}
|
||||
|
||||
/// A named value inside a scope or structured parent variable.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Variable {
    pub name: String,
    pub value: String,
    // `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    pub ty: Option<String>,
    pub presentation_hint: Option<VariablePresentationHint>,
    pub evaluate_name: Option<String>,
    // Non-zero means the variable is structured and its children can be
    // fetched with another `variables` request.
    pub variables_reference: usize,
    pub named_variables: Option<usize>,
    pub indexed_variables: Option<usize>,
    pub memory_reference: Option<String>,
}

/// A module (library/binary) loaded by the debuggee.
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Module {
    pub id: String, // TODO: || number
    pub name: String,
    pub path: Option<PathBuf>,
    pub is_optimized: Option<bool>,
    pub is_user_code: Option<bool>,
    pub version: Option<String>,
    pub symbol_status: Option<String>,
    pub symbol_file_path: Option<String>,
    pub date_time_stamp: Option<String>,
    pub address_range: Option<String>,
}
|
||||
|
||||
pub mod requests {
|
||||
use super::*;
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeArguments {
|
||||
#[serde(rename = "clientID")]
|
||||
pub client_id: Option<String>,
|
||||
pub client_name: Option<String>,
|
||||
#[serde(rename = "adapterID")]
|
||||
pub adapter_id: String,
|
||||
pub locale: Option<String>,
|
||||
#[serde(rename = "linesStartAt1")]
|
||||
pub lines_start_at_one: Option<bool>,
|
||||
#[serde(rename = "columnsStartAt1")]
|
||||
pub columns_start_at_one: Option<bool>,
|
||||
pub path_format: Option<String>,
|
||||
pub supports_variable_type: Option<bool>,
|
||||
pub supports_variable_paging: Option<bool>,
|
||||
pub supports_run_in_terminal_request: Option<bool>,
|
||||
pub supports_memory_references: Option<bool>,
|
||||
pub supports_progress_reporting: Option<bool>,
|
||||
pub supports_invalidated_event: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Initialize {}
|
||||
|
||||
impl Request for Initialize {
|
||||
type Arguments = InitializeArguments;
|
||||
type Result = DebuggerCapabilities;
|
||||
const COMMAND: &'static str = "initialize";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Launch {}
|
||||
|
||||
impl Request for Launch {
|
||||
type Arguments = Value;
|
||||
type Result = Value;
|
||||
const COMMAND: &'static str = "launch";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Attach {}
|
||||
|
||||
impl Request for Attach {
|
||||
type Arguments = Value;
|
||||
type Result = Value;
|
||||
const COMMAND: &'static str = "attach";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Disconnect {}
|
||||
|
||||
impl Request for Disconnect {
|
||||
type Arguments = ();
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "disconnect";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ConfigurationDone {}
|
||||
|
||||
impl Request for ConfigurationDone {
|
||||
type Arguments = ();
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "configurationDone";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetBreakpointsArguments {
|
||||
pub source: Source,
|
||||
pub breakpoints: Option<Vec<SourceBreakpoint>>,
|
||||
// lines is deprecated
|
||||
pub source_modified: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetBreakpointsResponse {
|
||||
pub breakpoints: Option<Vec<Breakpoint>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetBreakpoints {}
|
||||
|
||||
impl Request for SetBreakpoints {
|
||||
type Arguments = SetBreakpointsArguments;
|
||||
type Result = SetBreakpointsResponse;
|
||||
const COMMAND: &'static str = "setBreakpoints";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ContinueArguments {
|
||||
pub thread_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ContinueResponse {
|
||||
pub all_threads_continued: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Continue {}
|
||||
|
||||
impl Request for Continue {
|
||||
type Arguments = ContinueArguments;
|
||||
type Result = ContinueResponse;
|
||||
const COMMAND: &'static str = "continue";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StackTraceArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub start_frame: Option<usize>,
|
||||
pub levels: Option<usize>,
|
||||
pub format: Option<StackFrameFormat>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StackTraceResponse {
|
||||
pub total_frames: Option<usize>,
|
||||
pub stack_frames: Vec<StackFrame>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StackTrace {}
|
||||
|
||||
impl Request for StackTrace {
|
||||
type Arguments = StackTraceArguments;
|
||||
type Result = StackTraceResponse;
|
||||
const COMMAND: &'static str = "stackTrace";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ThreadsResponse {
|
||||
pub threads: Vec<Thread>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Threads {}
|
||||
|
||||
impl Request for Threads {
|
||||
type Arguments = ();
|
||||
type Result = ThreadsResponse;
|
||||
const COMMAND: &'static str = "threads";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ScopesArguments {
|
||||
pub frame_id: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ScopesResponse {
|
||||
pub scopes: Vec<Scope>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Scopes {}
|
||||
|
||||
impl Request for Scopes {
|
||||
type Arguments = ScopesArguments;
|
||||
type Result = ScopesResponse;
|
||||
const COMMAND: &'static str = "scopes";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct VariablesArguments {
|
||||
pub variables_reference: usize,
|
||||
pub filter: Option<String>,
|
||||
pub start: Option<usize>,
|
||||
pub count: Option<usize>,
|
||||
pub format: Option<ValueFormat>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct VariablesResponse {
|
||||
pub variables: Vec<Variable>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Variables {}
|
||||
|
||||
impl Request for Variables {
|
||||
type Arguments = VariablesArguments;
|
||||
type Result = VariablesResponse;
|
||||
const COMMAND: &'static str = "variables";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StepInArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub target_id: Option<usize>,
|
||||
pub granularity: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StepIn {}
|
||||
|
||||
impl Request for StepIn {
|
||||
type Arguments = StepInArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "stepIn";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StepOutArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub granularity: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StepOut {}
|
||||
|
||||
impl Request for StepOut {
|
||||
type Arguments = StepOutArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "stepOut";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NextArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub granularity: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Next {}
|
||||
|
||||
impl Request for Next {
|
||||
type Arguments = NextArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "next";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PauseArguments {
|
||||
pub thread_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Pause {}
|
||||
|
||||
impl Request for Pause {
|
||||
type Arguments = PauseArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "pause";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct EvaluateArguments {
|
||||
pub expression: String,
|
||||
pub frame_id: Option<usize>,
|
||||
pub context: Option<String>,
|
||||
pub format: Option<ValueFormat>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct EvaluateResponse {
|
||||
pub result: String,
|
||||
#[serde(rename = "type")]
|
||||
pub ty: Option<String>,
|
||||
pub presentation_hint: Option<VariablePresentationHint>,
|
||||
pub variables_reference: usize,
|
||||
pub named_variables: Option<usize>,
|
||||
pub indexed_variables: Option<usize>,
|
||||
pub memory_reference: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Evaluate {}
|
||||
|
||||
impl Request for Evaluate {
|
||||
type Arguments = EvaluateArguments;
|
||||
type Result = EvaluateResponse;
|
||||
const COMMAND: &'static str = "evaluate";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetExceptionBreakpointsArguments {
|
||||
pub filters: Vec<String>,
|
||||
// pub filterOptions: Option<Vec<ExceptionFilterOptions>>, // needs capability
|
||||
// pub exceptionOptions: Option<Vec<ExceptionOptions>>, // needs capability
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetExceptionBreakpointsResponse {
|
||||
pub breakpoints: Option<Vec<Breakpoint>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetExceptionBreakpoints {}
|
||||
|
||||
impl Request for SetExceptionBreakpoints {
|
||||
type Arguments = SetExceptionBreakpointsArguments;
|
||||
type Result = SetExceptionBreakpointsResponse;
|
||||
const COMMAND: &'static str = "setExceptionBreakpoints";
|
||||
}
|
||||
|
||||
// Reverse Requests
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RunInTerminalResponse {
|
||||
pub process_id: Option<u32>,
|
||||
pub shell_process_id: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RunInTerminalArguments {
|
||||
pub kind: Option<String>,
|
||||
pub title: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub args: Vec<String>,
|
||||
pub env: Option<HashMap<String, Option<String>>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum RunInTerminal {}
|
||||
|
||||
impl Request for RunInTerminal {
|
||||
type Arguments = RunInTerminalArguments;
|
||||
type Result = RunInTerminalResponse;
|
||||
const COMMAND: &'static str = "runInTerminal";
|
||||
}
|
||||
}
|
||||
|
||||
// Events
|
||||
|
||||
pub mod events {
|
||||
use super::*;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "event", content = "body")]
|
||||
// seq is omitted as unused and is not sent by some implementations
|
||||
pub enum Event {
|
||||
Initialized,
|
||||
Stopped(Stopped),
|
||||
Continued(Continued),
|
||||
Exited(Exited),
|
||||
Terminated(Option<Terminated>),
|
||||
Thread(Thread),
|
||||
Output(Output),
|
||||
Breakpoint(Breakpoint),
|
||||
Module(Module),
|
||||
LoadedSource(LoadedSource),
|
||||
Process(Process),
|
||||
Capabilities(Capabilities),
|
||||
// ProgressStart(),
|
||||
// ProgressUpdate(),
|
||||
// ProgressEnd(),
|
||||
// Invalidated(),
|
||||
Memory(Memory),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Stopped {
|
||||
pub reason: String,
|
||||
pub description: Option<String>,
|
||||
pub thread_id: Option<ThreadId>,
|
||||
pub preserve_focus_hint: Option<bool>,
|
||||
pub text: Option<String>,
|
||||
pub all_threads_stopped: Option<bool>,
|
||||
pub hit_breakpoint_ids: Option<Vec<usize>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Continued {
|
||||
pub thread_id: ThreadId,
|
||||
pub all_threads_continued: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Exited {
|
||||
pub exit_code: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Terminated {
|
||||
pub restart: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Thread {
|
||||
pub reason: String,
|
||||
pub thread_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Output {
|
||||
pub output: String,
|
||||
pub category: Option<String>,
|
||||
pub group: Option<String>,
|
||||
pub line: Option<usize>,
|
||||
pub column: Option<usize>,
|
||||
pub variables_reference: Option<usize>,
|
||||
pub source: Option<Source>,
|
||||
pub data: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Breakpoint {
|
||||
pub reason: String,
|
||||
pub breakpoint: super::Breakpoint,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Module {
|
||||
pub reason: String,
|
||||
pub module: super::Module,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoadedSource {
|
||||
pub reason: String,
|
||||
pub source: super::Source,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Process {
|
||||
pub name: String,
|
||||
pub system_process_id: Option<usize>,
|
||||
pub is_local_process: Option<bool>,
|
||||
pub start_method: Option<String>, // TODO: use enum
|
||||
pub pointer_size: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Capabilities {
|
||||
pub capabilities: super::DebuggerCapabilities,
|
||||
}
|
||||
|
||||
// #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
// #[serde(rename_all = "camelCase")]
|
||||
// pub struct Invalidated {
|
||||
// pub areas: Vec<InvalidatedArea>,
|
||||
// pub thread_id: Option<ThreadId>,
|
||||
// pub stack_frame_id: Option<usize>,
|
||||
// }
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Memory {
|
||||
pub memory_reference: String,
|
||||
pub offset: usize,
|
||||
pub count: usize,
|
||||
}
|
||||
}
|
@@ -1,21 +1,23 @@
|
||||
[package]
|
||||
name = "helix-syntax"
|
||||
version = "0.5.0"
|
||||
name = "helix-loader"
|
||||
version = "0.6.0"
|
||||
description = "A post-modern text editor."
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
description = "Tree-sitter grammars support"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
include = ["src/**/*", "languages/**/*", "build.rs", "!**/docs/**/*", "!**/test/**/*", "!**/examples/**/*", "!**/build/**/*"]
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
toml = "0.5"
|
||||
etcetera = "0.3"
|
||||
tree-sitter = "0.20"
|
||||
libloading = "0.7"
|
||||
anyhow = "1"
|
||||
once_cell = "1.9"
|
||||
|
||||
[build-dependencies]
|
||||
# cloning/compiling tree-sitter grammars
|
||||
cc = { version = "1" }
|
||||
threadpool = { version = "1.0" }
|
||||
anyhow = "1"
|
6
helix-loader/build.rs
Normal file
6
helix-loader/build.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
fn main() {
|
||||
println!(
|
||||
"cargo:rustc-env=BUILD_TARGET={}",
|
||||
std::env::var("TARGET").unwrap()
|
||||
);
|
||||
}
|
387
helix-loader/src/grammar.rs
Normal file
387
helix-loader/src/grammar.rs
Normal file
@@ -0,0 +1,387 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use libloading::{Library, Symbol};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::mpsc::channel,
|
||||
};
|
||||
use tree_sitter::Language;
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
|
||||
#[cfg(windows)]
|
||||
const DYLIB_EXTENSION: &str = "dll";
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Configuration {
|
||||
#[serde(rename = "use-grammars")]
|
||||
pub grammar_selection: Option<GrammarSelection>,
|
||||
pub grammar: Vec<GrammarConfiguration>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase", untagged)]
|
||||
pub enum GrammarSelection {
|
||||
Only(HashSet<String>),
|
||||
Except(HashSet<String>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct GrammarConfiguration {
|
||||
#[serde(rename = "name")]
|
||||
pub grammar_id: String,
|
||||
pub source: GrammarSource,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase", untagged)]
|
||||
pub enum GrammarSource {
|
||||
Local {
|
||||
path: String,
|
||||
},
|
||||
Git {
|
||||
#[serde(rename = "git")]
|
||||
remote: String,
|
||||
#[serde(rename = "rev")]
|
||||
revision: String,
|
||||
subpath: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
const BUILD_TARGET: &str = env!("BUILD_TARGET");
|
||||
const REMOTE_NAME: &str = "origin";
|
||||
|
||||
pub fn get_language(name: &str) -> Result<Language> {
|
||||
let name = name.to_ascii_lowercase();
|
||||
let mut library_path = crate::runtime_dir().join("grammars").join(&name);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let library = unsafe { Library::new(&library_path) }
|
||||
.with_context(|| format!("Error opening dynamic library {library_path:?}"))?;
|
||||
let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_"));
|
||||
let language = unsafe {
|
||||
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
|
||||
.get(language_fn_name.as_bytes())
|
||||
.with_context(|| format!("Failed to load symbol {language_fn_name}"))?;
|
||||
language_fn()
|
||||
};
|
||||
std::mem::forget(library);
|
||||
Ok(language)
|
||||
}
|
||||
|
||||
pub fn fetch_grammars() -> Result<()> {
|
||||
// We do not need to fetch local grammars.
|
||||
let mut grammars = get_grammar_configs()?;
|
||||
grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. }));
|
||||
|
||||
run_parallel(grammars, fetch_grammar, "fetch")
|
||||
}
|
||||
|
||||
pub fn build_grammars() -> Result<()> {
|
||||
run_parallel(get_grammar_configs()?, build_grammar, "build")
|
||||
}
|
||||
|
||||
// Returns the set of grammar configurations the user requests.
|
||||
// Grammars are configured in the default and user `languages.toml` and are
|
||||
// merged. The `grammar_selection` key of the config is then used to filter
|
||||
// down all grammars into a subset of the user's choosing.
|
||||
fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
|
||||
let config: Configuration = crate::user_lang_config()
|
||||
.context("Could not parse languages.toml")?
|
||||
.try_into()?;
|
||||
|
||||
let grammars = match config.grammar_selection {
|
||||
Some(GrammarSelection::Only(selections)) => config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.filter(|grammar| selections.contains(&grammar.grammar_id))
|
||||
.collect(),
|
||||
Some(GrammarSelection::Except(rejections)) => config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.filter(|grammar| !rejections.contains(&grammar.grammar_id))
|
||||
.collect(),
|
||||
None => config.grammar,
|
||||
};
|
||||
|
||||
Ok(grammars)
|
||||
}
|
||||
|
||||
fn run_parallel<F>(grammars: Vec<GrammarConfiguration>, job: F, action: &'static str) -> Result<()>
|
||||
where
|
||||
F: Fn(GrammarConfiguration) -> Result<()> + std::marker::Send + 'static + Copy,
|
||||
{
|
||||
let pool = threadpool::Builder::new().build();
|
||||
let (tx, rx) = channel();
|
||||
|
||||
for grammar in grammars {
|
||||
let tx = tx.clone();
|
||||
|
||||
pool.execute(move || {
|
||||
tx.send(job(grammar)).unwrap();
|
||||
});
|
||||
}
|
||||
|
||||
drop(tx);
|
||||
|
||||
// TODO: print all failures instead of the first one found.
|
||||
rx.iter()
|
||||
.find(|result| result.is_err())
|
||||
.map(|err| err.with_context(|| format!("Failed to {action} some grammar(s)")))
|
||||
.unwrap_or(Ok(()))
|
||||
}
|
||||
|
||||
fn fetch_grammar(grammar: GrammarConfiguration) -> Result<()> {
|
||||
if let GrammarSource::Git {
|
||||
remote, revision, ..
|
||||
} = grammar.source
|
||||
{
|
||||
let grammar_dir = crate::runtime_dir()
|
||||
.join("grammars/sources")
|
||||
.join(&grammar.grammar_id);
|
||||
|
||||
fs::create_dir_all(&grammar_dir).context(format!(
|
||||
"Could not create grammar directory {:?}",
|
||||
grammar_dir
|
||||
))?;
|
||||
|
||||
// create the grammar dir contains a git directory
|
||||
if !grammar_dir.join(".git").is_dir() {
|
||||
git(&grammar_dir, ["init"])?;
|
||||
}
|
||||
|
||||
// ensure the remote matches the configured remote
|
||||
if get_remote_url(&grammar_dir).map_or(true, |s| s != remote) {
|
||||
set_remote(&grammar_dir, &remote)?;
|
||||
}
|
||||
|
||||
// ensure the revision matches the configured revision
|
||||
if get_revision(&grammar_dir).map_or(true, |s| s != revision) {
|
||||
// Fetch the exact revision from the remote.
|
||||
// Supported by server-side git since v2.5.0 (July 2015),
|
||||
// enabled by default on major git hosts.
|
||||
git(
|
||||
&grammar_dir,
|
||||
["fetch", "--depth", "1", REMOTE_NAME, &revision],
|
||||
)?;
|
||||
git(&grammar_dir, ["checkout", &revision])?;
|
||||
|
||||
println!(
|
||||
"Grammar '{}' checked out at '{}'.",
|
||||
grammar.grammar_id, revision
|
||||
);
|
||||
} else {
|
||||
println!("Grammar '{}' is already up to date.", grammar.grammar_id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Sets the remote for a repository to the given URL, creating the remote if
|
||||
// it does not yet exist.
|
||||
fn set_remote(repository_dir: &Path, remote_url: &str) -> Result<String> {
|
||||
git(
|
||||
repository_dir,
|
||||
["remote", "set-url", REMOTE_NAME, remote_url],
|
||||
)
|
||||
.or_else(|_| git(repository_dir, ["remote", "add", REMOTE_NAME, remote_url]))
|
||||
}
|
||||
|
||||
fn get_remote_url(repository_dir: &Path) -> Option<String> {
|
||||
git(repository_dir, ["remote", "get-url", REMOTE_NAME]).ok()
|
||||
}
|
||||
|
||||
fn get_revision(repository_dir: &Path) -> Option<String> {
|
||||
git(repository_dir, ["rev-parse", "HEAD"]).ok()
|
||||
}
|
||||
|
||||
// A wrapper around 'git' commands which returns stdout in success and a
|
||||
// helpful error message showing the command, stdout, and stderr in error.
|
||||
fn git<I, S>(repository_dir: &Path, args: I) -> Result<String>
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<std::ffi::OsStr>,
|
||||
{
|
||||
let output = Command::new("git")
|
||||
.args(args)
|
||||
.current_dir(repository_dir)
|
||||
.output()?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout)
|
||||
.trim_end()
|
||||
.to_owned())
|
||||
} else {
|
||||
// TODO: figure out how to display the git command using `args`
|
||||
Err(anyhow!(
|
||||
"Git command failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn build_grammar(grammar: GrammarConfiguration) -> Result<()> {
|
||||
let grammar_dir = if let GrammarSource::Local { path } = &grammar.source {
|
||||
PathBuf::from(&path)
|
||||
} else {
|
||||
crate::runtime_dir()
|
||||
.join("grammars/sources")
|
||||
.join(&grammar.grammar_id)
|
||||
};
|
||||
|
||||
let grammar_dir_entries = grammar_dir.read_dir().with_context(|| {
|
||||
format!("Failed to read directory {grammar_dir:?}. Did you use 'hx --grammar fetch'?")
|
||||
})?;
|
||||
|
||||
if grammar_dir_entries.count() == 0 {
|
||||
return Err(anyhow!(
|
||||
"Directory {grammar_dir:?} is empty. Did you use 'hx --grammar fetch'?"
|
||||
));
|
||||
};
|
||||
|
||||
let path = match &grammar.source {
|
||||
GrammarSource::Git {
|
||||
subpath: Some(subpath),
|
||||
..
|
||||
} => grammar_dir.join(subpath),
|
||||
_ => grammar_dir,
|
||||
}
|
||||
.join("src");
|
||||
|
||||
build_tree_sitter_library(&path, grammar)
|
||||
}
|
||||
|
||||
fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) -> Result<()> {
|
||||
let header_path = src_path;
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let mut scanner_path = src_path.join("scanner.c");
|
||||
|
||||
let scanner_path = if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
scanner_path.set_extension("cc");
|
||||
if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
let parser_lib_path = crate::runtime_dir().join("grammars");
|
||||
let mut library_path = parser_lib_path.join(&grammar.grammar_id);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
|
||||
.context("Failed to compare source and binary timestamps")?;
|
||||
|
||||
if !recompile {
|
||||
println!("Grammar '{}' is already built.", grammar.grammar_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("Building grammar '{}'", grammar.grammar_id);
|
||||
|
||||
let mut config = cc::Build::new();
|
||||
config
|
||||
.cpp(true)
|
||||
.opt_level(3)
|
||||
.cargo_metadata(false)
|
||||
.host(BUILD_TARGET)
|
||||
.target(BUILD_TARGET);
|
||||
let compiler = config.get_compiler();
|
||||
let mut command = Command::new(compiler.path());
|
||||
command.current_dir(src_path);
|
||||
for (key, value) in compiler.env() {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
command
|
||||
.args(&["/nologo", "/LD", "/I"])
|
||||
.arg(header_path)
|
||||
.arg("/Od")
|
||||
.arg("/utf-8");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
|
||||
command
|
||||
.arg(parser_path)
|
||||
.arg("/link")
|
||||
.arg(format!("/out:{}", library_path.to_str().unwrap()));
|
||||
} else {
|
||||
command
|
||||
.arg("-shared")
|
||||
.arg("-fPIC")
|
||||
.arg("-fno-exceptions")
|
||||
.arg("-g")
|
||||
.arg("-I")
|
||||
.arg(header_path)
|
||||
.arg("-o")
|
||||
.arg(&library_path)
|
||||
.arg("-O3");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
if scanner_path.extension() == Some("c".as_ref()) {
|
||||
command.arg("-xc").arg("-std=c99").arg(scanner_path);
|
||||
} else {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
}
|
||||
command.arg("-xc").arg(parser_path);
|
||||
if cfg!(all(unix, not(target_os = "macos"))) {
|
||||
command.arg("-Wl,-z,relro,-z,now");
|
||||
}
|
||||
}
|
||||
|
||||
let output = command.output().context("Failed to execute C compiler")?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn needs_recompile(
|
||||
lib_path: &Path,
|
||||
parser_c_path: &Path,
|
||||
scanner_path: &Option<PathBuf>,
|
||||
) -> Result<bool> {
|
||||
if !lib_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
let lib_mtime = mtime(lib_path)?;
|
||||
if mtime(parser_c_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(scanner_path) = scanner_path {
|
||||
if mtime(scanner_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn mtime(path: &Path) -> Result<SystemTime> {
|
||||
Ok(fs::metadata(path)?.modified()?)
|
||||
}
|
||||
|
||||
/// Gives the contents of a file from a language's `runtime/queries/<lang>`
|
||||
/// directory
|
||||
pub fn load_runtime_file(language: &str, filename: &str) -> Result<String, std::io::Error> {
|
||||
let path = crate::RUNTIME_DIR
|
||||
.join("queries")
|
||||
.join(language)
|
||||
.join(filename);
|
||||
std::fs::read_to_string(&path)
|
||||
}
|
161
helix-loader/src/lib.rs
Normal file
161
helix-loader/src/lib.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
pub mod grammar;
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
|
||||
pub static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
|
||||
once_cell::sync::Lazy::new(runtime_dir);
|
||||
|
||||
pub fn runtime_dir() -> std::path::PathBuf {
|
||||
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
|
||||
return dir.into();
|
||||
}
|
||||
|
||||
const RT_DIR: &str = "runtime";
|
||||
let conf_dir = config_dir().join(RT_DIR);
|
||||
if conf_dir.exists() {
|
||||
return conf_dir;
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
|
||||
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
|
||||
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
|
||||
}
|
||||
|
||||
// fallback to location of the executable being run
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub fn config_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.config_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn cache_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.cache_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn config_file() -> std::path::PathBuf {
|
||||
config_dir().join("config.toml")
|
||||
}
|
||||
|
||||
pub fn lang_config_file() -> std::path::PathBuf {
|
||||
config_dir().join("languages.toml")
|
||||
}
|
||||
|
||||
pub fn log_file() -> std::path::PathBuf {
|
||||
cache_dir().join("helix.log")
|
||||
}
|
||||
|
||||
/// Default bultin-in languages.toml.
|
||||
pub fn default_lang_config() -> toml::Value {
|
||||
toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Could not parse bultin-in languages.toml to valid toml")
|
||||
}
|
||||
|
||||
/// User configured languages.toml file, merged with the default config.
|
||||
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
|
||||
let def_lang_conf = default_lang_config();
|
||||
let data = std::fs::read(crate::config_dir().join("languages.toml"));
|
||||
let user_lang_conf = match data {
|
||||
Ok(raw) => {
|
||||
let value = toml::from_slice(&raw)?;
|
||||
merge_toml_values(def_lang_conf, value)
|
||||
}
|
||||
Err(_) => def_lang_conf,
|
||||
};
|
||||
|
||||
Ok(user_lang_conf)
|
||||
}
|
||||
|
||||
// right overrides left
|
||||
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
|
||||
use toml::Value;
|
||||
|
||||
fn get_name(v: &Value) -> Option<&str> {
|
||||
v.get("name").and_then(Value::as_str)
|
||||
}
|
||||
|
||||
match (left, right) {
|
||||
(Value::Array(mut left_items), Value::Array(right_items)) => {
|
||||
left_items.reserve(right_items.len());
|
||||
for rvalue in right_items {
|
||||
let lvalue = get_name(&rvalue)
|
||||
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
|
||||
.map(|lpos| left_items.remove(lpos));
|
||||
let mvalue = match lvalue {
|
||||
Some(lvalue) => merge_toml_values(lvalue, rvalue),
|
||||
None => rvalue,
|
||||
};
|
||||
left_items.push(mvalue);
|
||||
}
|
||||
Value::Array(left_items)
|
||||
}
|
||||
(Value::Table(mut left_map), Value::Table(right_map)) => {
|
||||
for (rname, rvalue) in right_map {
|
||||
match left_map.remove(&rname) {
|
||||
Some(lvalue) => {
|
||||
let merged_value = merge_toml_values(lvalue, rvalue);
|
||||
left_map.insert(rname, merged_value);
|
||||
}
|
||||
None => {
|
||||
left_map.insert(rname, rvalue);
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Table(left_map)
|
||||
}
|
||||
// Catch everything else we didn't handle, and use the right value
|
||||
(_, value) => value,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod merge_toml_tests {
|
||||
use super::merge_toml_values;
|
||||
|
||||
#[test]
|
||||
fn language_tomls() {
|
||||
use toml::Value;
|
||||
|
||||
const USER: &str = "
|
||||
[[language]]
|
||||
name = \"nix\"
|
||||
test = \"bbb\"
|
||||
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
|
||||
";
|
||||
|
||||
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Couldn't parse built-in languages config");
|
||||
let user: Value = toml::from_str(USER).unwrap();
|
||||
|
||||
let merged = merge_toml_values(base, user);
|
||||
let languages = merged.get("language").unwrap().as_array().unwrap();
|
||||
let nix = languages
|
||||
.iter()
|
||||
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
|
||||
.unwrap();
|
||||
let nix_indent = nix.get("indent").unwrap();
|
||||
|
||||
// We changed tab-width and unit in indent so check them if they are the new values
|
||||
assert_eq!(
|
||||
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
|
||||
4
|
||||
);
|
||||
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
|
||||
// We added a new keys, so check them
|
||||
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
|
||||
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
|
||||
// We didn't change comment-token so it should be same
|
||||
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
|
||||
}
|
||||
}
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "helix-lsp"
|
||||
version = "0.5.0"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
@@ -12,16 +12,17 @@ homepage = "https://helix-editor.com"
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.5", path = "../helix-core" }
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
|
||||
anyhow = "1.0"
|
||||
futures-executor = "0.3"
|
||||
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
|
||||
jsonrpc-core = { version = "18.0", default-features = false } # don't pull in all of futures
|
||||
log = "0.4"
|
||||
lsp-types = { version = "0.91", features = ["proposed"] }
|
||||
lsp-types = { version = "0.92", features = ["proposed"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1.15", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
|
||||
tokio = { version = "1.17", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio-stream = "0.1.8"
|
||||
which = "4.2"
|
||||
|
@@ -43,6 +43,9 @@ impl Client {
|
||||
root_markers: Vec<String>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> {
|
||||
// Resolve path to the binary
|
||||
let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.args(args)
|
||||
.stdin(Stdio::piped())
|
||||
@@ -110,6 +113,10 @@ impl Client {
|
||||
self.offset_encoding
|
||||
}
|
||||
|
||||
pub fn config(&self) -> Option<&Value> {
|
||||
self.config.as_ref()
|
||||
}
|
||||
|
||||
/// Execute a RPC request on the language server.
|
||||
async fn request<R: lsp::request::Request>(&self, params: R::Params) -> Result<R::Result>
|
||||
where
|
||||
@@ -243,6 +250,13 @@ impl Client {
|
||||
root_uri: root,
|
||||
initialization_options: self.config.clone(),
|
||||
capabilities: lsp::ClientCapabilities {
|
||||
workspace: Some(lsp::WorkspaceClientCapabilities {
|
||||
configuration: Some(true),
|
||||
did_change_configuration: Some(lsp::DynamicRegistrationClientCapabilities {
|
||||
dynamic_registration: Some(false),
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
text_document: Some(lsp::TextDocumentClientCapabilities {
|
||||
completion: Some(lsp::CompletionClientCapabilities {
|
||||
completion_item: Some(lsp::CompletionItemCapability {
|
||||
@@ -327,6 +341,16 @@ impl Client {
|
||||
self.exit().await
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// Workspace
|
||||
// -------------------------------------------------------------------------------------------
|
||||
|
||||
pub fn did_change_configuration(&self, settings: Value) -> impl Future<Output = Result<()>> {
|
||||
self.notify::<lsp::notification::DidChangeConfiguration>(
|
||||
lsp::DidChangeConfigurationParams { settings },
|
||||
)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// Text document
|
||||
// -------------------------------------------------------------------------------------------
|
||||
@@ -438,7 +462,7 @@ impl Client {
|
||||
|
||||
changes.push(lsp::TextDocumentContentChangeEvent {
|
||||
range: Some(lsp::Range::new(start, end)),
|
||||
text: s.into(),
|
||||
text: s.to_string(),
|
||||
range_length: None,
|
||||
});
|
||||
}
|
||||
|
@@ -191,6 +191,7 @@ pub mod util {
|
||||
pub enum MethodCall {
|
||||
WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams),
|
||||
ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
|
||||
WorkspaceConfiguration(lsp::ConfigurationParams),
|
||||
}
|
||||
|
||||
impl MethodCall {
|
||||
@@ -209,6 +210,12 @@ impl MethodCall {
|
||||
.expect("Failed to parse ApplyWorkspaceEdit params");
|
||||
Self::ApplyWorkspaceEdit(params)
|
||||
}
|
||||
lsp::request::WorkspaceConfiguration::METHOD => {
|
||||
let params: lsp::ConfigurationParams = params
|
||||
.parse()
|
||||
.expect("Failed to parse WorkspaceConfiguration params");
|
||||
Self::WorkspaceConfiguration(params)
|
||||
}
|
||||
_ => {
|
||||
log::warn!("unhandled lsp request: {}", method);
|
||||
return None;
|
||||
|
@@ -1,13 +0,0 @@
|
||||
helix-syntax
|
||||
============
|
||||
|
||||
Syntax highlighting for helix, (shallow) submodules resides here.
|
||||
|
||||
Differences from nvim-treesitter
|
||||
--------------------------------
|
||||
|
||||
As the syntax are commonly ported from
|
||||
<https://github.com/nvim-treesitter/nvim-treesitter>.
|
||||
|
||||
Note that we do not support the custom `#any-of` predicate which is
|
||||
supported by neovim so one needs to change it to `#match` with regex.
|
@@ -1,207 +0,0 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use std::fs;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
fn collect_tree_sitter_dirs(ignore: &[String]) -> Result<Vec<String>> {
|
||||
let mut dirs = Vec::new();
|
||||
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("languages");
|
||||
|
||||
for entry in fs::read_dir(path)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if !entry.file_type()?.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let dir = path.file_name().unwrap().to_str().unwrap().to_string();
|
||||
|
||||
// filter ignores
|
||||
if ignore.contains(&dir) {
|
||||
continue;
|
||||
}
|
||||
dirs.push(dir)
|
||||
}
|
||||
|
||||
Ok(dirs)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
|
||||
#[cfg(windows)]
|
||||
const DYLIB_EXTENSION: &str = "dll";
|
||||
|
||||
fn build_library(src_path: &Path, language: &str) -> Result<()> {
|
||||
let header_path = src_path;
|
||||
// let grammar_path = src_path.join("grammar.json");
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let mut scanner_path = src_path.join("scanner.c");
|
||||
|
||||
let scanner_path = if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
scanner_path.set_extension("cc");
|
||||
if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
let parser_lib_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../runtime/grammars");
|
||||
let mut library_path = parser_lib_path.join(language);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
|
||||
.with_context(|| "Failed to compare source and binary timestamps")?;
|
||||
|
||||
if !recompile {
|
||||
return Ok(());
|
||||
}
|
||||
let mut config = cc::Build::new();
|
||||
config.cpp(true).opt_level(2).cargo_metadata(false);
|
||||
let compiler = config.get_compiler();
|
||||
let mut command = Command::new(compiler.path());
|
||||
command.current_dir(src_path);
|
||||
for (key, value) in compiler.env() {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
command
|
||||
.args(&["/nologo", "/LD", "/I"])
|
||||
.arg(header_path)
|
||||
.arg("/Od")
|
||||
.arg("/utf-8");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
|
||||
command
|
||||
.arg(parser_path)
|
||||
.arg("/link")
|
||||
.arg(format!("/out:{}", library_path.to_str().unwrap()));
|
||||
} else {
|
||||
command
|
||||
.arg("-shared")
|
||||
.arg("-fPIC")
|
||||
.arg("-fno-exceptions")
|
||||
.arg("-g")
|
||||
.arg("-I")
|
||||
.arg(header_path)
|
||||
.arg("-o")
|
||||
.arg(&library_path)
|
||||
.arg("-O2");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
if scanner_path.extension() == Some("c".as_ref()) {
|
||||
command.arg("-xc").arg("-std=c99").arg(scanner_path);
|
||||
} else {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
}
|
||||
command.arg("-xc").arg(parser_path);
|
||||
if cfg!(all(unix, not(target_os = "macos"))) {
|
||||
command.arg("-Wl,-z,relro,-z,now");
|
||||
}
|
||||
}
|
||||
|
||||
let output = command
|
||||
.output()
|
||||
.with_context(|| "Failed to execute C compiler")?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
fn needs_recompile(
|
||||
lib_path: &Path,
|
||||
parser_c_path: &Path,
|
||||
scanner_path: &Option<PathBuf>,
|
||||
) -> Result<bool> {
|
||||
if !lib_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
let lib_mtime = mtime(lib_path)?;
|
||||
if mtime(parser_c_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(scanner_path) = scanner_path {
|
||||
if mtime(scanner_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn mtime(path: &Path) -> Result<SystemTime> {
|
||||
Ok(fs::metadata(path)?.modified()?)
|
||||
}
|
||||
|
||||
fn build_dir(dir: &str, language: &str) {
|
||||
println!("Build language {}", language);
|
||||
if PathBuf::from("languages")
|
||||
.join(dir)
|
||||
.read_dir()
|
||||
.unwrap()
|
||||
.next()
|
||||
.is_none()
|
||||
{
|
||||
eprintln!(
|
||||
"The directory {} is empty, you probably need to use 'git submodule update --init --recursive'?",
|
||||
dir
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("languages")
|
||||
.join(dir)
|
||||
.join("src");
|
||||
|
||||
build_library(&path, language).unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let ignore = vec![
|
||||
"tree-sitter-typescript".to_string(),
|
||||
"tree-sitter-haskell".to_string(), // aarch64 failures: https://github.com/tree-sitter/tree-sitter-haskell/issues/34
|
||||
"tree-sitter-ocaml".to_string(),
|
||||
];
|
||||
let dirs = collect_tree_sitter_dirs(&ignore).unwrap();
|
||||
|
||||
let mut n_jobs = 0;
|
||||
let pool = threadpool::Builder::new().build(); // by going through the builder, it'll use num_cpus
|
||||
let (tx, rx) = channel();
|
||||
|
||||
for dir in dirs {
|
||||
let tx = tx.clone();
|
||||
n_jobs += 1;
|
||||
|
||||
pool.execute(move || {
|
||||
let language = &dir.strip_prefix("tree-sitter-").unwrap();
|
||||
build_dir(&dir, language);
|
||||
|
||||
// report progress
|
||||
tx.send(1).unwrap();
|
||||
});
|
||||
}
|
||||
pool.join();
|
||||
// drop(tx);
|
||||
assert_eq!(rx.try_iter().sum::<usize>(), n_jobs);
|
||||
|
||||
build_dir("tree-sitter-typescript/tsx", "tsx");
|
||||
build_dir("tree-sitter-typescript/typescript", "typescript");
|
||||
build_dir("tree-sitter-ocaml/ocaml", "ocaml");
|
||||
build_dir("tree-sitter-ocaml/interface", "ocaml-interface")
|
||||
}
|
Submodule helix-syntax/languages/tree-sitter-agda deleted from ca69cdf485
Submodule helix-syntax/languages/tree-sitter-bash deleted from a8eb5cb57c
Submodule helix-syntax/languages/tree-sitter-c deleted from f05e279aed
Submodule helix-syntax/languages/tree-sitter-c-sharp deleted from 53a65a9081
Submodule helix-syntax/languages/tree-sitter-cmake deleted from f6616f1e41
Submodule helix-syntax/languages/tree-sitter-comment deleted from 5dd3c62f1b
Submodule helix-syntax/languages/tree-sitter-cpp deleted from e8dcc9d2b4
Submodule helix-syntax/languages/tree-sitter-css deleted from 94e1023093
Submodule helix-syntax/languages/tree-sitter-dart deleted from 6a25376685
Submodule helix-syntax/languages/tree-sitter-dockerfile deleted from 7af32bc04a
Submodule helix-syntax/languages/tree-sitter-elixir deleted from f5d7bda543
Submodule helix-syntax/languages/tree-sitter-fish deleted from 04e54ab658
Submodule helix-syntax/languages/tree-sitter-git-commit deleted from 066e395e11
Submodule helix-syntax/languages/tree-sitter-git-diff deleted from c12e6ecb54
Submodule helix-syntax/languages/tree-sitter-git-rebase deleted from 332dc528f2
Submodule helix-syntax/languages/tree-sitter-glsl deleted from 88408ffc5e
Submodule helix-syntax/languages/tree-sitter-go deleted from 2a83dfdd75
Submodule helix-syntax/languages/tree-sitter-haskell deleted from 237f4eb441
Submodule helix-syntax/languages/tree-sitter-html deleted from d93af487cc
Submodule helix-syntax/languages/tree-sitter-java deleted from bd6186c24d
Submodule helix-syntax/languages/tree-sitter-javascript deleted from 4a95461c47
Submodule helix-syntax/languages/tree-sitter-json deleted from 65bceef69c
Submodule helix-syntax/languages/tree-sitter-julia deleted from 12ea597262
Submodule helix-syntax/languages/tree-sitter-latex deleted from 7f720661de
Submodule helix-syntax/languages/tree-sitter-ledger deleted from 0cdeb0e514
Submodule helix-syntax/languages/tree-sitter-llvm deleted from 3b213925b9
Submodule helix-syntax/languages/tree-sitter-lua deleted from 6f5d40190e
Submodule helix-syntax/languages/tree-sitter-markdown deleted from ad8c32917a
Submodule helix-syntax/languages/tree-sitter-nix deleted from 50f38ceab6
Submodule helix-syntax/languages/tree-sitter-ocaml deleted from 23d419ba45
Submodule helix-syntax/languages/tree-sitter-perl deleted from 0ac2c6da56
Submodule helix-syntax/languages/tree-sitter-php deleted from 0d63eaf94e
Submodule helix-syntax/languages/tree-sitter-protobuf deleted from 19c211a014
Submodule helix-syntax/languages/tree-sitter-python deleted from d6210ceab1
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user