mirror of
https://github.com/helix-editor/helix.git
synced 2025-10-06 00:13:28 +02:00
Compare commits
893 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
7a51085e8a | ||
|
20a132e36f | ||
|
2b0835b295 | ||
|
0902ede7b1 | ||
|
9400d74307 | ||
|
be2b452a39 | ||
|
c0dbd6dc3f | ||
|
85492e587c | ||
|
1ac576f2b3 | ||
|
29d6a5a9b6 | ||
|
3d76fa0b81 | ||
|
43fc073cb3 | ||
|
c94c0d9f1c | ||
|
610ce93600 | ||
|
05161aa85e | ||
|
e8cc7ace75 | ||
|
6fdf5d0920 | ||
|
43997f1936 | ||
|
61828ea519 | ||
|
0712eb3e3b | ||
|
cf8f59ddd0 | ||
|
98851d1594 | ||
|
37fed4de80 | ||
|
5eb9a0167f | ||
|
94203a97e5 | ||
|
e01c53551d | ||
|
7044d7d804 | ||
|
37520f46ae | ||
|
b157c5a8a4 | ||
|
6fcab90d16 | ||
|
a229f405cc | ||
|
31b7596f09 | ||
|
4fc991fdec | ||
|
08ee949dcb | ||
|
db3470d973 | ||
|
8081e9f052 | ||
|
00b2d616eb | ||
|
8330f6af20 | ||
|
c1f677ff75 | ||
|
eeb3f8e963 | ||
|
c1f90a127b | ||
|
fbb98300df | ||
|
8d7a25b4d4 | ||
|
3f603b27f1 | ||
|
bfa533fe78 | ||
|
194b09fbc1 | ||
|
f31e85aca4 | ||
|
24352b2729 | ||
|
bde0307c87 | ||
|
970a111aa3 | ||
|
5a60989efe | ||
|
9a04064373 | ||
|
b67686d318 | ||
|
0d0165b76e | ||
|
1493ff7657 | ||
|
b0aaf08995 | ||
|
79caa7b72b | ||
|
19247ff0ec | ||
|
9bfb0caf1b | ||
|
7633c5acd3 | ||
|
f1e90ac2e3 | ||
|
d62ad8b595 | ||
|
fd02d1bf89 | ||
|
5f386fa355 | ||
|
5d14f56fa9 | ||
|
74a9dd51ff | ||
|
c484b08923 | ||
|
0062af6a19 | ||
|
737282d0e9 | ||
|
376d99a51d | ||
|
adf97e088e | ||
|
68bad148a5 | ||
|
78fba8683b | ||
|
0ff3e3ea38 | ||
|
c0b86afdc8 | ||
|
1c1aee74b4 | ||
|
5c810e5e52 | ||
|
86b1236b46 | ||
|
227e0108e9 | ||
|
a76e94848a | ||
|
49c5bc5934 | ||
|
14e2ced440 | ||
|
e83cdf3fd3 | ||
|
e6c36e82cf | ||
|
78d37fd332 | ||
|
59c691d2db | ||
|
b13d44156c | ||
|
bdbf423876 | ||
|
7bb1db3ab5 | ||
|
0846822371 | ||
|
f044059a2a | ||
|
c15996aff5 | ||
|
f9ad1cafdc | ||
|
39f7ba36e0 | ||
|
c1251aecc7 | ||
|
6a6a9ab2b3 | ||
|
78a6f77e99 | ||
|
66637be700 | ||
|
8e07e1b898 | ||
|
951fd1c80e | ||
|
93ec42d06e | ||
|
a494f47a5d | ||
|
b935fac957 | ||
|
9712bbb23b | ||
|
4526216139 | ||
|
f83843ceba | ||
|
40eb1268c7 | ||
|
e1a92fd399 | ||
|
806cc1c3b1 | ||
|
dd549e1729 | ||
|
77b1a4a768 | ||
|
7d2a77e53c | ||
|
24f86017a6 | ||
|
865881ba19 | ||
|
1ca6ba03ca | ||
|
d5ba0b5162 | ||
|
700058f433 | ||
|
c7b326be04 | ||
|
2af04325d8 | ||
|
a449156702 | ||
|
5af9136aec | ||
|
1cd710fe01 | ||
|
4e845409b6 | ||
|
c06155ace4 | ||
|
504d5ce8bd | ||
|
7b1d682fe5 | ||
|
4e1b3b12f3 | ||
|
a8cf0c6b90 | ||
|
368064e316 | ||
|
afec54485a | ||
|
24f90ba8d8 | ||
|
af21e2a5b4 | ||
|
e023a78919 | ||
|
6118486eb2 | ||
|
a1207fd768 | ||
|
d11b652139 | ||
|
fd0e4b1159 | ||
|
a629343476 | ||
|
ab2a0f325b | ||
|
989407f190 | ||
|
966fbc5984 | ||
|
1422449537 | ||
|
eeb9b39857 | ||
|
4429993842 | ||
|
23907a063c | ||
|
3a83a764e3 | ||
|
8a7aec6414 | ||
|
225484c26c | ||
|
4c424d5ee4 | ||
|
e267dc834a | ||
|
59acee308d | ||
|
97d4b2b5fe | ||
|
bd549d8a20 | ||
|
7083b98a38 | ||
|
a19a6ca01e | ||
|
1bcb624ae6 | ||
|
f88c077f99 | ||
|
fdb9a1677b | ||
|
59b5bf3178 | ||
|
fa83426011 | ||
|
bf773db451 | ||
|
5995568c1d | ||
|
547c3ecd0c | ||
|
e90276df0b | ||
|
f0cd02d5ef | ||
|
828d39e736 | ||
|
718d4ab0f0 | ||
|
23553bd37c | ||
|
ad62e1e129 | ||
|
e7f5ec5561 | ||
|
f5b95beef6 | ||
|
6c11708fb3 | ||
|
6ea477ab60 | ||
|
d3221b03a2 | ||
|
d6b6ad879e | ||
|
983a53bfb4 | ||
|
d090369404 | ||
|
05aeeaca0b | ||
|
36b975c4ce | ||
|
f10a06f4de | ||
|
094a0aa3f9 | ||
|
f7f55143a1 | ||
|
4c996f43df | ||
|
62561e9d23 | ||
|
5aead46f4b | ||
|
2a7ae963e1 | ||
|
e2833b5853 | ||
|
333c2949c2 | ||
|
48a0c80652 | ||
|
7bce91556a | ||
|
ed03be1450 | ||
|
5c007c2248 | ||
|
0ad7561135 | ||
|
1bcff796e5 | ||
|
a4fffaed9f | ||
|
d49e5323f9 | ||
|
4044c70eb2 | ||
|
759b850859 | ||
|
7d510429c5 | ||
|
f064894e57 | ||
|
e2d2f19fd0 | ||
|
a8e69e12f4 | ||
|
4080341977 | ||
|
7c9ebd05b8 | ||
|
80e920ba36 | ||
|
ac81b47a41 | ||
|
66a8612351 | ||
|
2302869836 | ||
|
add3be8528 | ||
|
df0d58e9f7 | ||
|
11c3ba9350 | ||
|
9d41113ae0 | ||
|
e22dbf102f | ||
|
2f4a9fea03 | ||
|
24314bd844 | ||
|
4b0205f690 | ||
|
9508684031 | ||
|
7315f6f3e4 | ||
|
8a53e34e66 | ||
|
72eb2ce1f1 | ||
|
5135fa37eb | ||
|
53d881f172 | ||
|
6728e44490 | ||
|
83bde1004d | ||
|
b8cafee9f5 | ||
|
1c747674b6 | ||
|
5c1a06d28e | ||
|
0b55b21f30 | ||
|
f453f8724d | ||
|
4563832318 | ||
|
392dfa0841 | ||
|
fd7080498e | ||
|
b2c8aa1ee7 | ||
|
440d4ae9df | ||
|
22b728d1eb | ||
|
89eb22525b | ||
|
ed45d380eb | ||
|
96d4ca5f73 | ||
|
56a9ce5d83 | ||
|
8ea5742b08 | ||
|
e7eab95b94 | ||
|
f5b0821860 | ||
|
9da0abaa5d | ||
|
22297d0b40 | ||
|
38ca8daa09 | ||
|
62c78c061c | ||
|
64d3e7b705 | ||
|
dd1f64d4dc | ||
|
a7b0cc730c | ||
|
3a34036310 | ||
|
97e6f2a38f | ||
|
6bfd001b48 | ||
|
ac6b2de0fd | ||
|
f80da7b4de | ||
|
85cf2648a2 | ||
|
a2fad4fcb0 | ||
|
f77dbc7c83 | ||
|
8d273a5613 | ||
|
ddbf03613d | ||
|
afc602d306 | ||
|
1a34a3ce57 | ||
|
e0a99ae51a | ||
|
b3b4e78585 | ||
|
97e12f5c5a | ||
|
05e5520ec0 | ||
|
939261fc07 | ||
|
5b45bdd80f | ||
|
1c6bc6d455 | ||
|
c238f20e1d | ||
|
9eacbc1887 | ||
|
41ee45ce54 | ||
|
b799b0d50e | ||
|
5e22694865 | ||
|
1af8dd9912 | ||
|
a8fd33ac01 | ||
|
449624965b | ||
|
2e02a1d6bc | ||
|
66afbc9fff | ||
|
3e4f81547c | ||
|
b18bda928f | ||
|
7767703979 | ||
|
bed9aced5f | ||
|
bd0d20a2b3 | ||
|
1bcae78f06 | ||
|
efaac6c5d3 | ||
|
c8794b30ee | ||
|
5b1a628e81 | ||
|
641255ccc8 | ||
|
7c9d3682db | ||
|
4d59f66b76 | ||
|
96935eb28d | ||
|
78967779bd | ||
|
61fe1dc9e8 | ||
|
dbaed0ba83 | ||
|
609f7363a1 | ||
|
ed97ecceb8 | ||
|
ea095ca5fb | ||
|
4da050b4bb | ||
|
8f2af71340 | ||
|
93a948d889 | ||
|
aaa42e1a69 | ||
|
0dab6c8c17 | ||
|
8a019b423f | ||
|
8fda87af2b | ||
|
a066f59dc8 | ||
|
bcf3808e97 | ||
|
49444f9c05 | ||
|
8c29b76bcc | ||
|
f1ed042c84 | ||
|
34db33e1dc | ||
|
bd2ab5be43 | ||
|
dc1faa35cb | ||
|
1d2009e4f0 | ||
|
5b69e9b466 | ||
|
a4641a8613 | ||
|
7001665342 | ||
|
8340d73545 | ||
|
2d4bc0aec7 | ||
|
ee3eb4057a | ||
|
b9cb3930e2 | ||
|
9a32617b30 | ||
|
5326a05117 | ||
|
c7a59e24e6 | ||
|
a306a1052a | ||
|
6af0d51dc5 | ||
|
c3fb86cbaa | ||
|
28c9afdd0e | ||
|
c1f4c0e67a | ||
|
3b800025af | ||
|
78f93239b5 | ||
|
fd31662b70 | ||
|
4b0b1a5657 | ||
|
ec878e4011 | ||
|
5d7b5db8ab | ||
|
0e7d757869 | ||
|
60f3225c7f | ||
|
8aa0b8eacf | ||
|
13d804418f | ||
|
b2f8f2ba77 | ||
|
02f24e1214 | ||
|
bb684a2b42 | ||
|
c0bbadcaaf | ||
|
a8618cf111 | ||
|
34766e242a | ||
|
dba22c60ed | ||
|
75a8b789d2 | ||
|
600ce70cf6 | ||
|
5b4540fc2d | ||
|
1c082cb4ef | ||
|
176fbe760a | ||
|
205dc8776b | ||
|
f16651b590 | ||
|
5dfdc95f6f | ||
|
e02c0d1573 | ||
|
692ed7500f | ||
|
73f97fbb96 | ||
|
25691061a2 | ||
|
65b7acea75 | ||
|
614e0e0026 | ||
|
7438db66ae | ||
|
f1c634326b | ||
|
23091c9d29 | ||
|
e72786df8e | ||
|
7c01d92653 | ||
|
8208d22601 | ||
|
d52eda5d1b | ||
|
6d183b2154 | ||
|
f174d27d0d | ||
|
02fc52f6d5 | ||
|
a66833590c | ||
|
edf3c70c30 | ||
|
0683f0a20a | ||
|
d4fb1d0633 | ||
|
016640f4fb | ||
|
3ef115d420 | ||
|
5d91335d6b | ||
|
9c484e88cf | ||
|
78b6155292 | ||
|
a8060c06d1 | ||
|
0c447151cf | ||
|
ac4b72fcc8 | ||
|
40969ad452 | ||
|
9bfb701c94 | ||
|
49e0678741 | ||
|
6da2174e14 | ||
|
4527d63a65 | ||
|
e188926138 | ||
|
10ad25b95b | ||
|
5404a3f01b | ||
|
3820258c57 | ||
|
b711687e19 | ||
|
ca0c5fb08c | ||
|
94535fa013 | ||
|
730d3be201 | ||
|
c7ace15fd4 | ||
|
cce34feb4e | ||
|
42e6d96a75 | ||
|
d9727868dd | ||
|
3080be8268 | ||
|
a9a9d498e8 | ||
|
cff5344a13 | ||
|
98ce2a301d | ||
|
43d17c482c | ||
|
7ad8eaaef0 | ||
|
b25d453f64 | ||
|
e91d357fae | ||
|
3156577fbf | ||
|
3307f44ce2 | ||
|
b66d3d3d9d | ||
|
44681c5057 | ||
|
df3b88387b | ||
|
dac317e620 | ||
|
60c86eff89 | ||
|
a1e64815cb | ||
|
29c053e84e | ||
|
d08bdfa838 | ||
|
70c989e122 | ||
|
a78b789406 | ||
|
71292f9f11 | ||
|
9bdbafa075 | ||
|
d8351d35ab | ||
|
e98993d609 | ||
|
178cd5ecfc | ||
|
93e276cd9d | ||
|
35ac815409 | ||
|
a2b22ec152 | ||
|
cab09093dd | ||
|
bf8437d098 | ||
|
dc8df7ba21 | ||
|
2b4de41bf0 | ||
|
d5d1a9b1ae | ||
|
461cd20563 | ||
|
539c27e3f5 | ||
|
0b7911d921 | ||
|
31ed91dc2e | ||
|
584a31cd90 | ||
|
c74cd48f38 | ||
|
febee2dc0c | ||
|
37e484ee38 | ||
|
c9641fcced | ||
|
2a0c685a78 | ||
|
64afd54654 | ||
|
cc04fabe40 | ||
|
57a8e79940 | ||
|
95cfeed2fa | ||
|
c1f6167e37 | ||
|
11a2f9ac31 | ||
|
a06871a689 | ||
|
70c62530ee | ||
|
038a6ce22c | ||
|
cf40e61b0a | ||
|
5545f8ebb5 | ||
|
bcf70d8e67 | ||
|
43fbb6d965 | ||
|
371c84f70b | ||
|
34f46e7502 | ||
|
032aaffa15 | ||
|
2dbf966293 | ||
|
0d73a4d23a | ||
|
d31bef7fea | ||
|
01f7a312d0 | ||
|
a45df12699 | ||
|
119dee2980 | ||
|
27ffc79c44 | ||
|
e2b428cc2d | ||
|
418b833d2b | ||
|
d14ca05d6b | ||
|
de5e5863aa | ||
|
54f8e5c9c3 | ||
|
573cb39926 | ||
|
ffc89e483b | ||
|
dfd499f5a9 | ||
|
c955eaa6cd | ||
|
662ecf0cd4 | ||
|
259678585c | ||
|
7bbf4c5b06 | ||
|
d562e13e1f | ||
|
b4fd3148e3 | ||
|
3e15aead4a | ||
|
96ae5897a1 | ||
|
84e939ef58 | ||
|
d906911417 | ||
|
30ac5869df | ||
|
8ffafb826f | ||
|
3633f85b38 | ||
|
c08d2fae58 | ||
|
9ed930b233 | ||
|
94296229e7 | ||
|
5313d0f04e | ||
|
fcd39adcaa | ||
|
987c4ad97f | ||
|
82889d7c41 | ||
|
0dd2303f64 | ||
|
6204c38556 | ||
|
058796c18e | ||
|
225e8ccf31 | ||
|
27c1a84f05 | ||
|
ba45db84d4 | ||
|
c71c9f69e2 | ||
|
30171416cb | ||
|
42fde95223 | ||
|
4f9390a435 | ||
|
6f1a7b1220 | ||
|
dc53e65b9e | ||
|
1d773bcefb | ||
|
103b5125e4 | ||
|
3b2b7341a5 | ||
|
4ec20eaeff | ||
|
6e62c3de47 | ||
|
17473b51d3 | ||
|
67bf4250ca | ||
|
e8f800a141 | ||
|
95f392b18d | ||
|
1eecd9a2ac | ||
|
72f606ee19 | ||
|
7961355ba1 | ||
|
57c14d4a93 | ||
|
21143e8d22 | ||
|
f24e5a3c41 | ||
|
2cc19bd8e4 | ||
|
a2f301ee4f | ||
|
8fec8c0791 | ||
|
4238a843f7 | ||
|
72576822f3 | ||
|
85b4410703 | ||
|
177b6fcdc9 | ||
|
28fd704bce | ||
|
b55ca8fdb8 | ||
|
0eadeab8c7 | ||
|
5f329a22c4 | ||
|
3b3c396ca4 | ||
|
1724930765 | ||
|
05d3ad4a0e | ||
|
d1854d8e6a | ||
|
6a4d9693ba | ||
|
05c6cb1d0b | ||
|
a3a3b0b517 | ||
|
b95c9470de | ||
|
ed76cdf238 | ||
|
2b7c086653 | ||
|
f2b4ff23ba | ||
|
5959356a24 | ||
|
bd56dde6e2 | ||
|
e9dc658de4 | ||
|
9dcccb45bb | ||
|
27ceeb83bb | ||
|
fa4c59df46 | ||
|
90fd09f2cc | ||
|
1132b7088a | ||
|
8db6fffe90 | ||
|
335ed7fa69 | ||
|
f1d3d97004 | ||
|
39479949fc | ||
|
bc31d998de | ||
|
411f522e5d | ||
|
7e4418468a | ||
|
225e7904ec | ||
|
c95cb2be28 | ||
|
dd98727bad | ||
|
6cb35d28a8 | ||
|
46d9ae2b62 | ||
|
c638b6b60e | ||
|
cccc1949eb | ||
|
87e61a0894 | ||
|
f5e070e808 | ||
|
e128a8702e | ||
|
8f7ada12ac | ||
|
b7c3877e94 | ||
|
6fa76d9fe7 | ||
|
edc976b6bb | ||
|
1817b7f581 | ||
|
35c974c9c4 | ||
|
0949a0de7f | ||
|
b74912ea78 | ||
|
b824e091a9 | ||
|
6d4409c00f | ||
|
d3def16584 | ||
|
fa0cb010e1 | ||
|
187197afb1 | ||
|
bf95a9ed04 | ||
|
9d591427be | ||
|
d131a9dd0e | ||
|
bf70cfd050 | ||
|
ebc14d9d20 | ||
|
4d22454386 | ||
|
c7cb7527be | ||
|
e0540fbcc4 | ||
|
efc2b4c77b | ||
|
e863e3b62d | ||
|
f9e9efb3ec | ||
|
80036b8bd3 | ||
|
5654909135 | ||
|
cf831b1a65 | ||
|
68224232af | ||
|
92d23430c0 | ||
|
97893cca64 | ||
|
7c9f620236 | ||
|
490919df4f | ||
|
a69caff450 | ||
|
f96be0fcbc | ||
|
a424ef4e20 | ||
|
eb68cd3767 | ||
|
81015266d9 | ||
|
e18198aeb2 | ||
|
f804ed3192 | ||
|
41fc326325 | ||
|
3f0345ff58 | ||
|
549cdee561 | ||
|
77dbbc73f9 | ||
|
29e6849413 | ||
|
bf4c70e027 | ||
|
30744646cb | ||
|
cdc2107bca | ||
|
82ff5b0ab6 | ||
|
8b85903116 | ||
|
4010b327e2 | ||
|
1e793c2bbf | ||
|
a252ecd8c8 | ||
|
e0e227d172 | ||
|
4304b52ff8 | ||
|
09f5796537 | ||
|
c39d9f44a0 | ||
|
3042ff3e5a | ||
|
9963a5614d | ||
|
65868081fc | ||
|
4f2a01cc09 | ||
|
2bd8a9b39d | ||
|
31b431bfdd | ||
|
9dd17c46a2 | ||
|
757babb1b4 | ||
|
5803de2067 | ||
|
155c608237 | ||
|
9baddc825d | ||
|
fd9b826f2c | ||
|
5938ab1bf1 | ||
|
3b8d5102ac | ||
|
64bb1f7563 | ||
|
29fe504398 | ||
|
acced82be6 | ||
|
ed23057ff8 | ||
|
e2a23ac0b5 | ||
|
2e1aa5f15b | ||
|
a5ea61433c | ||
|
09d8c139af | ||
|
14a3502cf1 | ||
|
f2b709a3c3 | ||
|
f979bdc442 | ||
|
2c1313c064 | ||
|
1a1685acf7 | ||
|
e80708eba7 | ||
|
0f4cd73000 | ||
|
f659e1178a | ||
|
4c1321b3b6 | ||
|
b81a554424 | ||
|
6431b26a6a | ||
|
911b9b3276 | ||
|
cfc8285867 | ||
|
51b4d35dce | ||
|
aa4d0b4646 | ||
|
7b65a6d687 | ||
|
78c68fae91 | ||
|
e2560f427e | ||
|
39584cbccd | ||
|
70d21a903f | ||
|
5b5d1b9dff | ||
|
e39cfa40df | ||
|
253bd6b3a8 | ||
|
3eb829e233 | ||
|
ee889aaa85 | ||
|
7a0c4322ea | ||
|
eb8745db09 | ||
|
e505bf2b48 | ||
|
9e247bf6ee | ||
|
924b7d3b19 | ||
|
1720b98760 | ||
|
44ff597841 | ||
|
0c381adcb1 | ||
|
a13af476c1 | ||
|
2f8ad7f890 | ||
|
592fba1100 | ||
|
ea452bec80 | ||
|
f140a2a00e | ||
|
e5de103728 | ||
|
68697cb332 | ||
|
a1c7e55e3b | ||
|
49f6c2623f | ||
|
cec0cfdaec | ||
|
bc6a34d97e | ||
|
21d535565b | ||
|
befecc8a9a | ||
|
f1d339919f | ||
|
45fadf6151 | ||
|
58b8100751 | ||
|
c02534d261 | ||
|
8af6d713cd | ||
|
cde57dae35 | ||
|
6aa9838ea6 | ||
|
d6e8a44d85 | ||
|
bda05ec4bf | ||
|
83a8167402 | ||
|
ea59f77a6b | ||
|
0a6b60085a | ||
|
bc0084d071 | ||
|
48cb81eff1 | ||
|
814dcfa8d2 | ||
|
d943a51e3e | ||
|
0e51e5fbaf | ||
|
bf53aff27d | ||
|
413e477dc2 | ||
|
507a1f8dd6 | ||
|
c9cd06e904 | ||
|
bdd636d8ee | ||
|
9b8c5bdade | ||
|
0add0c5639 | ||
|
2d35b7b99c | ||
|
b6c58ea23e | ||
|
bb26c589b4 | ||
|
0e1e4edc5e | ||
|
8a609047c3 | ||
|
7bdead5b4b | ||
|
3b0ec750ff | ||
|
1befbd076c | ||
|
e36fc57fff | ||
|
698583c241 | ||
|
df0ea6674a | ||
|
430c80ff2a | ||
|
c6186ce600 | ||
|
cb31d20b46 | ||
|
9a1916ebfd | ||
|
00cccdc62a | ||
|
9939dbf119 | ||
|
cf7237d0b9 | ||
|
c63ad60c31 | ||
|
7b61c63ece | ||
|
b997d2cdeb | ||
|
289303a30d | ||
|
42f9718f55 | ||
|
27c1b3f98b | ||
|
5b920c53f0 | ||
|
4c410eef87 | ||
|
9c64650a26 | ||
|
2c89107349 | ||
|
e0180a4b88 | ||
|
5b20f6020a | ||
|
6265e196b7 | ||
|
0b0b1d850a | ||
|
2c7b75475f | ||
|
986828e75c | ||
|
03b2d81406 | ||
|
4d24a43651 | ||
|
81f51c13fa | ||
|
51328a4966 | ||
|
d6ccc150c7 | ||
|
ee2ba744a2 | ||
|
2a7e38a2b4 | ||
|
db7f693550 | ||
|
b42631942b | ||
|
f53d8411cb | ||
|
98fda6b8f0 | ||
|
9d2f2a9e32 | ||
|
2d42766a71 | ||
|
af657ef2ec | ||
|
94901b8677 | ||
|
ef155e62ef | ||
|
e315394631 | ||
|
8df6739759 | ||
|
5e4da09be2 | ||
|
890b51b568 | ||
|
3b87fce0ce | ||
|
2ad2838a27 | ||
|
c7759a5aa0 | ||
|
4ee66b8766 | ||
|
ba96f5d296 | ||
|
326293cb57 | ||
|
2c3e2b979b | ||
|
8cc6d68160 | ||
|
235a84d989 | ||
|
299da5a35b | ||
|
b001008a69 | ||
|
31212e133d | ||
|
1041a5bb07 | ||
|
0e779381a8 | ||
|
774ab6f8b6 | ||
|
c463142e5e | ||
|
34c6094604 | ||
|
2158366b24 | ||
|
fdad7d67aa | ||
|
c4085b4e88 | ||
|
5d3c69d565 | ||
|
ec599a1eac | ||
|
3a5e044c89 | ||
|
b3469df5bf | ||
|
c09b15197b | ||
|
09c994a97a | ||
|
b5b79e3656 | ||
|
e529f4eb21 | ||
|
56d00fa7f4 | ||
|
802ef20dbc | ||
|
b6b99b2487 | ||
|
839d210573 | ||
|
f55a012fb7 | ||
|
c5b210df59 | ||
|
dabec2d799 | ||
|
b78f70e602 | ||
|
53ee57f84a | ||
|
f247858055 | ||
|
3197c2536e | ||
|
838f69929d | ||
|
060a422c7e | ||
|
74102bfc6d | ||
|
d0b0c9b2ef | ||
|
132198323c | ||
|
be9dc5802a | ||
|
d93cd2a261 | ||
|
dfc70a12f3 | ||
|
28658836ee | ||
|
2aee5f02d0 | ||
|
7233ab2deb | ||
|
a964cbae65 | ||
|
91f2c60b36 | ||
|
89ad54a2e5 | ||
|
cc66475592 | ||
|
95ba4ff5bd | ||
|
f3e47bfee4 | ||
|
5230a2b669 | ||
|
66c035fa99 | ||
|
6709b4242f | ||
|
26dee49dc9 | ||
|
462c8a6ec8 | ||
|
afeaba1113 | ||
|
5a06263b78 | ||
|
bcab93c94e | ||
|
a938f5a87a | ||
|
3fc501c99f | ||
|
7087558918 | ||
|
56bddb12f8 | ||
|
5f5b383979 | ||
|
6458edecfd | ||
|
738e8a4dd3 | ||
|
9e22842d51 | ||
|
e2c74d26e0 | ||
|
c4970c617e | ||
|
a54b09e3fe | ||
|
94a1951d40 | ||
|
8759dc7e33 | ||
|
86102a651f | ||
|
2094ff1aaf | ||
|
d39baa3b4e | ||
|
0300dbdeb3 | ||
|
54dc2f8107 | ||
|
8fbda0abaf | ||
|
184abdc510 | ||
|
3a9e1c305b | ||
|
2d1ae2e44b | ||
|
6225401e84 | ||
|
3f62799656 | ||
|
b9797a7dd2 | ||
|
6c0f7eafc3 | ||
|
279db98d3c | ||
|
36fb8d1b1a | ||
|
f92fb966c0 | ||
|
ae32159247 | ||
|
4f2b8fb05a | ||
|
2a6210806b | ||
|
f5b1655eab | ||
|
e7f543fe66 | ||
|
eb0605c13d | ||
|
809990a3a4 | ||
|
d4c215b35d | ||
|
e388079a0b | ||
|
43c9eba037 | ||
|
c5492788a2 | ||
|
cc650c7f4f | ||
|
b3be6b269a | ||
|
0777948fc0 | ||
|
09390be6a5 | ||
|
c72475bc30 | ||
|
d6de5408b7 | ||
|
59d6b92e5b | ||
|
9678df1c62 | ||
|
541f7a0514 | ||
|
aac586b546 | ||
|
6bb653f820 | ||
|
3d64cf8356 | ||
|
fabee03983 | ||
|
26a55dcefd | ||
|
e11b67b0db | ||
|
0fa127b105 | ||
|
7d2d4ed4a8 | ||
|
5f3e806341 | ||
|
5fd0a2ddda | ||
|
fd709bc56d | ||
|
0f6e81b85b |
2
.cargo/config
Normal file
2
.cargo/config
Normal file
@@ -0,0 +1,2 @@
|
||||
[alias]
|
||||
xtask = "run --package xtask --"
|
1
.envrc
1
.envrc
@@ -3,3 +3,4 @@ watch_file flake.lock
|
||||
|
||||
# try to use flakes, if it fails use normal nix (ie. shell.nix)
|
||||
use flake || use nix
|
||||
eval "$shellHook"
|
3
.github/ISSUE_TEMPLATE/bug_report.md
vendored
3
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -17,7 +17,8 @@ Please search on the issue tracker before creating one. -->
|
||||
### Environment
|
||||
|
||||
- Platform: <!-- macOS / Windows / Linux -->
|
||||
- Helix version: <!-- 'hx -v' if using a release, 'git describe' if building from master -->
|
||||
- Terminal emulator:
|
||||
- Helix version: <!-- 'hx -V' if using a release, 'git describe' if building from master -->
|
||||
|
||||
<details><summary>~/.cache/helix/helix.log</summary>
|
||||
|
||||
|
101
.github/workflows/build.yml
vendored
101
.github/workflows/build.yml
vendored
@@ -13,9 +13,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -25,22 +23,25 @@ jobs:
|
||||
override: true
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Run cargo check
|
||||
uses: actions-rs/cargo@v1
|
||||
@@ -52,9 +53,7 @@ jobs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -64,27 +63,41 @@ jobs:
|
||||
override: true
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Copy minimal languages config
|
||||
run: cp .github/workflows/languages.toml ./languages.toml
|
||||
|
||||
- name: Cache test tree-sitter grammar
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: runtime/grammars
|
||||
key: ${{ runner.os }}-v2-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
|
||||
restore-keys: ${{ runner.os }}-v2-tree-sitter-grammars-
|
||||
|
||||
- name: Run cargo test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --workspace
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -96,9 +109,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -109,22 +120,25 @@ jobs:
|
||||
components: rustfmt, clippy
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.6
|
||||
uses: actions/cache@v2.1.7
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Run cargo fmt
|
||||
uses: actions-rs/cargo@v1
|
||||
@@ -136,4 +150,53 @@ jobs:
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: -- -D warnings
|
||||
args: --all-targets -- -D warnings
|
||||
|
||||
docs:
|
||||
name: Docs
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.cargo/registry
|
||||
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-registry-
|
||||
|
||||
- name: Cache cargo index
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.cargo/git
|
||||
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-index-
|
||||
|
||||
- name: Cache cargo target dir
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: target
|
||||
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: ${{ runner.os }}-v2-cargo-build-target-
|
||||
|
||||
- name: Generate docs
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: xtask
|
||||
args: docgen
|
||||
|
||||
- name: Check uncommitted documentation changes
|
||||
run: |
|
||||
git diff
|
||||
git diff-files --quiet \
|
||||
|| (echo "Run 'cargo xtask docgen', commit the changes and push again" \
|
||||
&& exit 1)
|
||||
|
||||
|
26
.github/workflows/cachix.yml
vendored
Normal file
26
.github/workflows/cachix.yml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
# Publish the Nix flake outputs to Cachix
|
||||
name: Cachix
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish Flake
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install nix
|
||||
uses: cachix/install-nix-action@v16
|
||||
|
||||
- name: Authenticate with Cachix
|
||||
uses: cachix/cachix-action@v10
|
||||
with:
|
||||
name: helix
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
|
||||
- name: Build nix flake
|
||||
run: nix build
|
18
.github/workflows/gh-pages.yml
vendored
18
.github/workflows/gh-pages.yml
vendored
@@ -4,12 +4,14 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup mdBook
|
||||
uses: peaceiris/actions-mdbook@v1
|
||||
@@ -18,10 +20,22 @@ jobs:
|
||||
# mdbook-version: '0.4.8'
|
||||
|
||||
- run: mdbook build book
|
||||
|
||||
- name: Set output directory
|
||||
run: |
|
||||
OUTDIR=$(basename ${{ github.ref }})
|
||||
echo "OUTDIR=$OUTDIR" >> $GITHUB_ENV
|
||||
|
||||
- name: Deploy
|
||||
uses: peaceiris/actions-gh-pages@v3
|
||||
if: github.ref == 'refs/heads/master'
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: ./book/book
|
||||
destination_dir: ./${{ env.OUTDIR }}
|
||||
|
||||
- name: Deploy stable
|
||||
uses: peaceiris/actions-gh-pages@v3
|
||||
if: startswith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: ./book/book
|
||||
|
26
.github/workflows/languages.toml
vendored
Normal file
26
.github/workflows/languages.toml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
# This languages.toml is used for testing in CI.
|
||||
|
||||
[[language]]
|
||||
name = "rust"
|
||||
scope = "source.rust"
|
||||
injection-regex = "rust"
|
||||
file-types = ["rs"]
|
||||
comment-token = "//"
|
||||
roots = ["Cargo.toml", "Cargo.lock"]
|
||||
indent = { tab-width = 4, unit = " " }
|
||||
|
||||
[[grammar]]
|
||||
name = "rust"
|
||||
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }
|
||||
|
||||
[[language]]
|
||||
name = "nix"
|
||||
scope = "source.nix"
|
||||
injection-regex = "nix"
|
||||
file-types = ["nix"]
|
||||
shebangs = []
|
||||
roots = []
|
||||
comment-token = "#"
|
||||
|
||||
# A grammar entry is not necessary for this language - it is only used for
|
||||
# testing TOML merging behavior.
|
16
.github/workflows/release.yml
vendored
16
.github/workflows/release.yml
vendored
@@ -52,9 +52,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: true
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install ${{ matrix.rust }} toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
@@ -69,7 +67,7 @@ jobs:
|
||||
with:
|
||||
use-cross: ${{ matrix.cross }}
|
||||
command: test
|
||||
args: --release --locked --target ${{ matrix.target }}
|
||||
args: --release --locked --target ${{ matrix.target }} --workspace
|
||||
|
||||
- name: Build release binary
|
||||
uses: actions-rs/cargo@v1
|
||||
@@ -102,7 +100,7 @@ jobs:
|
||||
fi
|
||||
cp -r runtime dist
|
||||
|
||||
- uses: actions/upload-artifact@v2.2.4
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bins-${{ matrix.build }}
|
||||
path: dist
|
||||
@@ -113,15 +111,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: false
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/download-artifact@v2
|
||||
# with:
|
||||
# path: dist
|
||||
# - run: ls -al ./dist
|
||||
- run: ls -al bins-*
|
||||
|
||||
- name: Calculate tag name
|
||||
run: |
|
||||
|
136
.gitmodules
vendored
136
.gitmodules
vendored
@@ -1,136 +0,0 @@
|
||||
[submodule "helix-syntax/languages/tree-sitter-cpp"]
|
||||
path = helix-syntax/languages/tree-sitter-cpp
|
||||
url = https://github.com/tree-sitter/tree-sitter-cpp
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-javascript"]
|
||||
path = helix-syntax/languages/tree-sitter-javascript
|
||||
url = https://github.com/tree-sitter/tree-sitter-javascript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-julia"]
|
||||
path = helix-syntax/languages/tree-sitter-julia
|
||||
url = https://github.com/tree-sitter/tree-sitter-julia
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-python"]
|
||||
path = helix-syntax/languages/tree-sitter-python
|
||||
url = https://github.com/tree-sitter/tree-sitter-python
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-typescript"]
|
||||
path = helix-syntax/languages/tree-sitter-typescript
|
||||
url = https://github.com/tree-sitter/tree-sitter-typescript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-agda"]
|
||||
path = helix-syntax/languages/tree-sitter-agda
|
||||
url = https://github.com/tree-sitter/tree-sitter-agda
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-go"]
|
||||
path = helix-syntax/languages/tree-sitter-go
|
||||
url = https://github.com/tree-sitter/tree-sitter-go
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ruby"]
|
||||
path = helix-syntax/languages/tree-sitter-ruby
|
||||
url = https://github.com/tree-sitter/tree-sitter-ruby
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-java"]
|
||||
path = helix-syntax/languages/tree-sitter-java
|
||||
url = https://github.com/tree-sitter/tree-sitter-java
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-php"]
|
||||
path = helix-syntax/languages/tree-sitter-php
|
||||
url = https://github.com/tree-sitter/tree-sitter-php
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-html"]
|
||||
path = helix-syntax/languages/tree-sitter-html
|
||||
url = https://github.com/tree-sitter/tree-sitter-html
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-scala"]
|
||||
path = helix-syntax/languages/tree-sitter-scala
|
||||
url = https://github.com/tree-sitter/tree-sitter-scala
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-bash"]
|
||||
path = helix-syntax/languages/tree-sitter-bash
|
||||
url = https://github.com/tree-sitter/tree-sitter-bash
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-rust"]
|
||||
path = helix-syntax/languages/tree-sitter-rust
|
||||
url = https://github.com/tree-sitter/tree-sitter-rust
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-json"]
|
||||
path = helix-syntax/languages/tree-sitter-json
|
||||
url = https://github.com/tree-sitter/tree-sitter-json
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-css"]
|
||||
path = helix-syntax/languages/tree-sitter-css
|
||||
url = https://github.com/tree-sitter/tree-sitter-css
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-c-sharp"]
|
||||
path = helix-syntax/languages/tree-sitter-c-sharp
|
||||
url = https://github.com/tree-sitter/tree-sitter-c-sharp
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-c"]
|
||||
path = helix-syntax/languages/tree-sitter-c
|
||||
url = https://github.com/tree-sitter/tree-sitter-c
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-haskell"]
|
||||
path = helix-syntax/languages/tree-sitter-haskell
|
||||
url = https://github.com/tree-sitter/tree-sitter-haskell
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-swift"]
|
||||
path = helix-syntax/languages/tree-sitter-swift
|
||||
url = https://github.com/tree-sitter/tree-sitter-swift
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-toml"]
|
||||
path = helix-syntax/languages/tree-sitter-toml
|
||||
url = https://github.com/ikatyang/tree-sitter-toml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-elixir"]
|
||||
path = helix-syntax/languages/tree-sitter-elixir
|
||||
url = https://github.com/elixir-lang/tree-sitter-elixir
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-nix"]
|
||||
path = helix-syntax/languages/tree-sitter-nix
|
||||
url = https://github.com/cstrahan/tree-sitter-nix
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-latex"]
|
||||
path = helix-syntax/languages/tree-sitter-latex
|
||||
url = https://github.com/latex-lsp/tree-sitter-latex
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ledger"]
|
||||
path = helix-syntax/languages/tree-sitter-ledger
|
||||
url = https://github.com/cbarrete/tree-sitter-ledger
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-protobuf"]
|
||||
path = helix-syntax/languages/tree-sitter-protobuf
|
||||
url = https://github.com/yusdacra/tree-sitter-protobuf.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ocaml"]
|
||||
path = helix-syntax/languages/tree-sitter-ocaml
|
||||
url = https://github.com/tree-sitter/tree-sitter-ocaml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-lua"]
|
||||
path = helix-syntax/languages/tree-sitter-lua
|
||||
url = https://github.com/nvim-treesitter/tree-sitter-lua
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-yaml"]
|
||||
path = helix-syntax/languages/tree-sitter-yaml
|
||||
url = https://github.com/ikatyang/tree-sitter-yaml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-zig"]
|
||||
path = helix-syntax/languages/tree-sitter-zig
|
||||
url = https://github.com/maxxnino/tree-sitter-zig
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-svelte"]
|
||||
path = helix-syntax/languages/tree-sitter-svelte
|
||||
url = https://github.com/Himujjal/tree-sitter-svelte
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-vue"]
|
||||
path = helix-syntax/languages/tree-sitter-vue
|
||||
url = https://github.com/ikatyang/tree-sitter-vue
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-tsq"]
|
||||
path = helix-syntax/languages/tree-sitter-tsq
|
||||
url = https://github.com/tree-sitter/tree-sitter-tsq
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-cmake"]
|
||||
path = helix-syntax/languages/tree-sitter-cmake
|
||||
url = https://github.com/uyha/tree-sitter-cmake
|
||||
shallow = true
|
188
CHANGELOG.md
188
CHANGELOG.md
@@ -1,5 +1,124 @@
|
||||
|
||||
# 0.5.0 (2021-11-28)
|
||||
# 0.6.0 (2022-01-04)
|
||||
|
||||
Happy new year and a big shout out to all the contributors! We had 55 contributors in this release.
|
||||
|
||||
Helix has popped up in DPorts and Fedora Linux via COPR ([#1270](https://github.com/helix-editor/helix/pull/1270))
|
||||
|
||||
As usual the following is a brief summary, refer to the git history for a full log:
|
||||
|
||||
Breaking changes:
|
||||
|
||||
- fix: Normalize backtab into shift-tab
|
||||
|
||||
Features:
|
||||
|
||||
- Macros ([#1234](https://github.com/helix-editor/helix/pull/1234))
|
||||
- Add reverse search functionality ([#958](https://github.com/helix-editor/helix/pull/958))
|
||||
- Allow keys to be mapped to sequences of commands ([#589](https://github.com/helix-editor/helix/pull/589))
|
||||
- Make it possible to keybind TypableCommands ([#1169](https://github.com/helix-editor/helix/pull/1169))
|
||||
- Detect workspace root using language markers ([#1370](https://github.com/helix-editor/helix/pull/1370))
|
||||
- Add WORD textobject ([#991](https://github.com/helix-editor/helix/pull/991))
|
||||
- Add LSP rename_symbol (space-r) ([#1011](https://github.com/helix-editor/helix/pull/1011))
|
||||
- Added workspace_symbol_picker ([#1041](https://github.com/helix-editor/helix/pull/1041))
|
||||
- Detect filetype from shebang line ([#1001](https://github.com/helix-editor/helix/pull/1001))
|
||||
- Allow piping from stdin into a buffer on startup ([#996](https://github.com/helix-editor/helix/pull/996))
|
||||
- Add auto pairs for same-char pairs ([#1219](https://github.com/helix-editor/helix/pull/1219))
|
||||
- Update settings at runtime ([#798](https://github.com/helix-editor/helix/pull/798))
|
||||
- Enable thin LTO (cccc194)
|
||||
|
||||
Commands:
|
||||
- :wonly -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057))
|
||||
- buffer-close (:bc, :bclose) ([#1035](https://github.com/helix-editor/helix/pull/1035))
|
||||
- Add :<line> and :goto <line> commands ([#1128](https://github.com/helix-editor/helix/pull/1128))
|
||||
- :sort command ([#1288](https://github.com/helix-editor/helix/pull/1288))
|
||||
- Add m textobject for pair under cursor ([#961](https://github.com/helix-editor/helix/pull/961))
|
||||
- Implement "Goto next buffer / Goto previous buffer" commands ([#950](https://github.com/helix-editor/helix/pull/950))
|
||||
- Implement "Goto last modification" command ([#1067](https://github.com/helix-editor/helix/pull/1067))
|
||||
- Add trim_selections command ([#1092](https://github.com/helix-editor/helix/pull/1092))
|
||||
- Add movement shortcut for history ([#1088](https://github.com/helix-editor/helix/pull/1088))
|
||||
- Add command to inc/dec number under cursor ([#1027](https://github.com/helix-editor/helix/pull/1027))
|
||||
- Add support for dates for increment/decrement
|
||||
- Align selections (&) ([#1101](https://github.com/helix-editor/helix/pull/1101))
|
||||
- Implement no-yank delete/change ([#1099](https://github.com/helix-editor/helix/pull/1099))
|
||||
- Implement black hole register ([#1165](https://github.com/helix-editor/helix/pull/1165))
|
||||
- gf as goto_file (gf) ([#1102](https://github.com/helix-editor/helix/pull/1102))
|
||||
- Add last modified file (gm) ([#1093](https://github.com/helix-editor/helix/pull/1093))
|
||||
- ensure_selections_forward ([#1393](https://github.com/helix-editor/helix/pull/1393))
|
||||
- Readline style insert mode ([#1039](https://github.com/helix-editor/helix/pull/1039))
|
||||
|
||||
Usability improvements and fixes:
|
||||
|
||||
- Detect filetype on :write ([#1141](https://github.com/helix-editor/helix/pull/1141))
|
||||
- Add single and double quotes to matching pairs ([#995](https://github.com/helix-editor/helix/pull/995))
|
||||
- Launch with defaults upon invalid config/theme (rather than panicking) ([#982](https://github.com/helix-editor/helix/pull/982))
|
||||
- If switching away from an empty scratch buffer, remove it ([#935](https://github.com/helix-editor/helix/pull/935))
|
||||
- Truncate the starts of file paths instead of the ends in picker ([#951](https://github.com/helix-editor/helix/pull/951))
|
||||
- Truncate the start of file paths in the StatusLine ([#1351](https://github.com/helix-editor/helix/pull/1351))
|
||||
- Prevent picker from previewing binaries or large file ([#939](https://github.com/helix-editor/helix/pull/939))
|
||||
- Inform when reaching undo/redo bounds ([#981](https://github.com/helix-editor/helix/pull/981))
|
||||
- search_impl will only align cursor center when it isn't in view ([#959](https://github.com/helix-editor/helix/pull/959))
|
||||
- Add <C-h>, <C-u>, <C-d>, Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034))
|
||||
- Restore screen position when aborting search ([#1047](https://github.com/helix-editor/helix/pull/1047))
|
||||
- Buffer picker: show is_modifier flag ([#1020](https://github.com/helix-editor/helix/pull/1020))
|
||||
- Add commit hash to version info, if present ([#957](https://github.com/helix-editor/helix/pull/957))
|
||||
- Implement indent-aware delete ([#1120](https://github.com/helix-editor/helix/pull/1120))
|
||||
- Jump to end char of surrounding pair from any cursor pos ([#1121](https://github.com/helix-editor/helix/pull/1121))
|
||||
- File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988))
|
||||
- Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183))
|
||||
- Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033))
|
||||
- Make kill_to_line_end behave like emacs ([#1235](https://github.com/helix-editor/helix/pull/1235))
|
||||
- Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241))
|
||||
- ui: popup: Don't allow scrolling past the end of content (3307f44c)
|
||||
- Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231))
|
||||
- Allow paste commands to take a count ([#1261](https://github.com/helix-editor/helix/pull/1261))
|
||||
- Auto pairs selection ([#1254](https://github.com/helix-editor/helix/pull/1254))
|
||||
- Use a fuzzy matcher for commands ([#1386](https://github.com/helix-editor/helix/pull/1386))
|
||||
- Add c-s to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831))
|
||||
- Fix :earlier/:later missing changeset update ([#1069](https://github.com/helix-editor/helix/pull/1069))
|
||||
- Support extend for multiple goto ([#909](https://github.com/helix-editor/helix/pull/909))
|
||||
- Add arrow-key bindings for window switching ([#933](https://github.com/helix-editor/helix/pull/933))
|
||||
- Implement key ordering for info box ([#952](https://github.com/helix-editor/helix/pull/952))
|
||||
|
||||
LSP:
|
||||
- Implement MarkedString rendering (e128a8702)
|
||||
- Don't panic if init fails (d31bef7)
|
||||
- Configurable diagnostic severity ([#1325](https://github.com/helix-editor/helix/pull/1325))
|
||||
- Resolve completion item ([#1315](https://github.com/helix-editor/helix/pull/1315))
|
||||
- Code action command support ([#1304](https://github.com/helix-editor/helix/pull/1304))
|
||||
|
||||
Grammars:
|
||||
|
||||
- Adds mint language server ([#974](https://github.com/helix-editor/helix/pull/974))
|
||||
- Perl ([#978](https://github.com/helix-editor/helix/pull/978)) ([#1280](https://github.com/helix-editor/helix/pull/1280))
|
||||
- GLSL ([#993](https://github.com/helix-editor/helix/pull/993))
|
||||
- Racket ([#1143](https://github.com/helix-editor/helix/pull/1143))
|
||||
- WGSL ([#1166](https://github.com/helix-editor/helix/pull/1166))
|
||||
- LLVM ([#1167](https://github.com/helix-editor/helix/pull/1167)) ([#1388](https://github.com/helix-editor/helix/pull/1388)) ([#1409](https://github.com/helix-editor/helix/pull/1409)) ([#1398](https://github.com/helix-editor/helix/pull/1398))
|
||||
- Markdown (49e06787)
|
||||
- Scala ([#1278](https://github.com/helix-editor/helix/pull/1278))
|
||||
- Dart ([#1250](https://github.com/helix-editor/helix/pull/1250))
|
||||
- Fish ([#1308](https://github.com/helix-editor/helix/pull/1308))
|
||||
- Dockerfile ([#1303](https://github.com/helix-editor/helix/pull/1303))
|
||||
- Git (commit, rebase, diff) ([#1338](https://github.com/helix-editor/helix/pull/1338)) ([#1402](https://github.com/helix-editor/helix/pull/1402)) ([#1373](https://github.com/helix-editor/helix/pull/1373))
|
||||
- tree-sitter-comment ([#1300](https://github.com/helix-editor/helix/pull/1300))
|
||||
- Highlight comments in c, cpp, cmake and llvm ([#1309](https://github.com/helix-editor/helix/pull/1309))
|
||||
- Improve yaml syntax highlighting highlighting ([#1294](https://github.com/helix-editor/helix/pull/1294))
|
||||
- Improve rust syntax highlighting ([#1295](https://github.com/helix-editor/helix/pull/1295))
|
||||
- Add textobjects and indents to cmake ([#1307](https://github.com/helix-editor/helix/pull/1307))
|
||||
- Add textobjects and indents to c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293))
|
||||
|
||||
New themes:
|
||||
|
||||
- Solarized dark ([#999](https://github.com/helix-editor/helix/pull/999))
|
||||
- Solarized light ([#1010](https://github.com/helix-editor/helix/pull/1010))
|
||||
- Spacebones light ([#1131](https://github.com/helix-editor/helix/pull/1131))
|
||||
- Monokai Pro ([#1206](https://github.com/helix-editor/helix/pull/1206))
|
||||
- Base16 Light and Terminal ([#1078](https://github.com/helix-editor/helix/pull/1078))
|
||||
- and a default 16 color theme, truecolor detection
|
||||
- Dracula ([#1258](https://github.com/helix-editor/helix/pull/1258))
|
||||
|
||||
# 0.5.0 (2021-10-28)
|
||||
|
||||
A big shout out to all the contributors! We had 46 contributors in this release.
|
||||
|
||||
@@ -16,63 +135,64 @@ Breaking changes:
|
||||
- `keep_primary_selection` moved from `space+space` to `,`
|
||||
- `Alt-,` is now `remove_primary_selection` which keeps all selections except the primary one
|
||||
- Opening files in a split moved from `C-h` to `C-s`
|
||||
- Some configuration options moved from a `[terminal]` section to `[editor]`. [Consult the documentation for more information.](https://docs.helix-editor.com/configuration.html).
|
||||
- Some configuration options moved from a `[terminal]` section to `[editor]`. [Consult the documentation for more information.](https://docs.helix-editor.com/configuration.html)
|
||||
|
||||
Features:
|
||||
|
||||
- LSP compatibility greatly improved for some implementations (Julia, Python, Typescript)
|
||||
- Autocompletion! Completion now triggers automatically after a set idle timeout
|
||||
- Completion documentation is now displayed next to the popup (#691)
|
||||
- Treesitter textobjects (select a function via `mf`, class via `mc`) (#728)
|
||||
- Global search across entire workspace `space+/` (#651)
|
||||
- Relative line number support (#485)
|
||||
- Completion documentation is now displayed next to the popup ([#691](https://github.com/helix-editor/helix/pull/691))
|
||||
- Treesitter textobjects (select a function via `mf`, class via `mc`) ([#728](https://github.com/helix-editor/helix/pull/728))
|
||||
- Global search across entire workspace `space+/` ([#651](https://github.com/helix-editor/helix/pull/651))
|
||||
- Relative line number support ([#485](https://github.com/helix-editor/helix/pull/485))
|
||||
- Prompts now store a history (72cf86e)
|
||||
- `:vsplit` and `:hsplit` commands (#639)
|
||||
- `C-w h/j/k/l` can now be used to navigate between splits (#860)
|
||||
- `C-j` and `C-k` are now alternative keybindings to `C-n` and `C-p` in the UI (#876)
|
||||
- Shell commands (shell-pipe, pipe-to, shell-insert-output, shell-append-output, keep-pipe) (#547)
|
||||
- Searching now defaults to smart case search (case insensitive unless uppercase is used) (#761)
|
||||
- `:vsplit` and `:hsplit` commands ([#639](https://github.com/helix-editor/helix/pull/639))
|
||||
- `C-w h/j/k/l` can now be used to navigate between splits ([#860](https://github.com/helix-editor/helix/pull/860))
|
||||
- `C-j` and `C-k` are now alternative keybindings to `C-n` and `C-p` in the UI ([#876](https://github.com/helix-editor/helix/pull/876))
|
||||
- Shell commands (shell-pipe, pipe-to, shell-insert-output, shell-append-output, keep-pipe) ([#547](https://github.com/helix-editor/helix/pull/547))
|
||||
- Searching now defaults to smart case search (case insensitive unless uppercase is used) ([#761](https://github.com/helix-editor/helix/pull/761))
|
||||
- The preview pane was improved to highlight and center line ranges
|
||||
- The user `languages.toml` is now merged into defaults, no longer need to copy the entire file (dc57f8dc)
|
||||
- Show hidden files in completions (#648)
|
||||
- Show hidden files in completions ([#648](https://github.com/helix-editor/helix/pull/648))
|
||||
- Grammar injections are now properly handled (dd0b15e)
|
||||
- `v` in select mode now switches back to normal mode (#660)
|
||||
- View mode can now be triggered as a "sticky" mode (#719)
|
||||
- `f`/`t` and object selection motions can now be repeated via `Alt-.` (#891)
|
||||
- Statusline now displays total selection count and diagnostics counts for both errors and warnings (#916)
|
||||
- `v` in select mode now switches back to normal mode ([#660](https://github.com/helix-editor/helix/pull/660))
|
||||
- View mode can now be triggered as a "sticky" mode ([#719](https://github.com/helix-editor/helix/pull/719))
|
||||
- `f`/`t` and object selection motions can now be repeated via `Alt-.` ([#891](https://github.com/helix-editor/helix/pull/891))
|
||||
- Statusline now displays total selection count and diagnostics counts for both errors and warnings ([#916](https://github.com/helix-editor/helix/pull/916))
|
||||
|
||||
New grammars:
|
||||
|
||||
- Ledger (#572)
|
||||
- Protobuf (#614)
|
||||
- Zig (#631)
|
||||
- YAML (#667)
|
||||
- Lua (#665)
|
||||
- OCaml (#666)
|
||||
- Svelte (#733)
|
||||
- Vue (#787)
|
||||
- Tree-sitter queries (#845)
|
||||
- Ledger ([#572](https://github.com/helix-editor/helix/pull/572))
|
||||
- Protobuf ([#614](https://github.com/helix-editor/helix/pull/614))
|
||||
- Zig ([#631](https://github.com/helix-editor/helix/pull/631))
|
||||
- YAML ([#667](https://github.com/helix-editor/helix/pull/667))
|
||||
- Lua ([#665](https://github.com/helix-editor/helix/pull/665))
|
||||
- OCaml ([#666](https://github.com/helix-editor/helix/pull/666))
|
||||
- Svelte ([#733](https://github.com/helix-editor/helix/pull/733))
|
||||
- Vue ([#787](https://github.com/helix-editor/helix/pull/787))
|
||||
- Tree-sitter queries ([#845](https://github.com/helix-editor/helix/pull/845))
|
||||
- CMake ([#888](https://github.com/helix-editor/helix/pull/888))
|
||||
- Elixir (we switched over to the official grammar) (6c0786e)
|
||||
- Language server definitions for Nix and Elixir (#725)
|
||||
- Language server definitions for Nix and Elixir ([#725](https://github.com/helix-editor/helix/pull/725))
|
||||
- Python now uses `pylsp` instead of `pyls`
|
||||
- Python now supports indentation
|
||||
|
||||
New themes:
|
||||
|
||||
- Monokai (#628)
|
||||
- Everforest Dark (#760)
|
||||
- Nord (#799)
|
||||
- Base16 Default Dark (#833)
|
||||
- Rose Pine (#897)
|
||||
- Monokai ([#628](https://github.com/helix-editor/helix/pull/628))
|
||||
- Everforest Dark ([#760](https://github.com/helix-editor/helix/pull/760))
|
||||
- Nord ([#799](https://github.com/helix-editor/helix/pull/799))
|
||||
- Base16 Default Dark ([#833](https://github.com/helix-editor/helix/pull/833))
|
||||
- Rose Pine ([#897](https://github.com/helix-editor/helix/pull/897))
|
||||
|
||||
Fixes:
|
||||
|
||||
- Fix crash on empty rust file (#592)
|
||||
- Exit select mode after toggle comment (#598)
|
||||
- Fix crash on empty rust file ([#592](https://github.com/helix-editor/helix/pull/592))
|
||||
- Exit select mode after toggle comment ([#598](https://github.com/helix-editor/helix/pull/598))
|
||||
- Pin popups with no positioning to the initial position (12ea3888)
|
||||
- xsel copy should not freeze the editor (6dd7dc4)
|
||||
- `*` now only sets the search register and doesn't jump to the next occurrence (3426285)
|
||||
- Goto line start/end commands extend when in select mode (#739)
|
||||
- Goto line start/end commands extend when in select mode ([#739](https://github.com/helix-editor/helix/pull/739))
|
||||
- Fix documentation popups sometimes not getting fully highlighted (066367c)
|
||||
- Refactor apply_workspace_edit to remove assert (b02d872)
|
||||
- Wrap around the top of the picker menu when scrolling (c7d6e44)
|
||||
|
855
Cargo.lock
generated
855
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
21
Cargo.toml
21
Cargo.toml
@@ -4,13 +4,26 @@ members = [
|
||||
"helix-view",
|
||||
"helix-term",
|
||||
"helix-tui",
|
||||
"helix-syntax",
|
||||
"helix-lsp",
|
||||
"helix-dap",
|
||||
"helix-loader",
|
||||
"xtask",
|
||||
]
|
||||
|
||||
# Build helix-syntax in release mode to make the code path faster in development.
|
||||
# [profile.dev.package."helix-syntax"]
|
||||
# opt-level = 3
|
||||
default-members = [
|
||||
"helix-term"
|
||||
]
|
||||
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
|
||||
[profile.release]
|
||||
lto = "thin"
|
||||
# debug = true
|
||||
|
||||
[profile.opt]
|
||||
inherits = "release"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
# strip = "debuginfo" # TODO: or strip = true
|
||||
opt-level = 3
|
||||
|
31
README.md
31
README.md
@@ -27,25 +27,27 @@ All shortcuts/keymaps can be found [in the documentation on the website](https:/
|
||||
It's a terminal-based editor first, but I'd like to explore a custom renderer
|
||||
(similar to emacs) in wgpu or skulpin.
|
||||
|
||||
# Installation
|
||||
|
||||
Note: Only certain languages have indentation definitions at the moment. Check
|
||||
`runtime/queries/<lang>/` for `indents.toml`.
|
||||
|
||||
# Installation
|
||||
|
||||
We provide packaging for various distributions, but here's a quick method to
|
||||
build from source.
|
||||
|
||||
```
|
||||
git clone --recurse-submodules --shallow-submodules -j8 https://github.com/helix-editor/helix
|
||||
git clone https://github.com/helix-editor/helix
|
||||
cd helix
|
||||
cargo install --path helix-term
|
||||
hx --grammar fetch
|
||||
hx --grammar build
|
||||
```
|
||||
|
||||
This will install the `hx` binary to `$HOME/.cargo/bin`.
|
||||
This will install the `hx` binary to `$HOME/.cargo/bin` and build tree-sitter grammars.
|
||||
|
||||
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
|
||||
config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overriden
|
||||
via the `HELIX_RUNTIME` environment variable.
|
||||
config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
|
||||
This location can be overridden via the `HELIX_RUNTIME` environment variable.
|
||||
|
||||
Packages already solve this for you by wrapping the `hx` binary with a wrapper
|
||||
that sets the variable to the install dir.
|
||||
@@ -56,6 +58,7 @@ that sets the variable to the install dir.
|
||||
[](https://repology.org/project/helix/versions)
|
||||
|
||||
## MacOS
|
||||
|
||||
Helix can be installed on MacOS through homebrew via:
|
||||
|
||||
```
|
||||
@@ -65,21 +68,7 @@ brew install helix
|
||||
|
||||
# Contributing
|
||||
|
||||
Contributors are very welcome! **No contribution is too small and all contributions are valued.**
|
||||
|
||||
Some suggestions to get started:
|
||||
|
||||
- You can look at the [good first issue](https://github.com/helix-editor/helix/labels/E-easy) label on the issue tracker.
|
||||
- Help with packaging on various distributions needed!
|
||||
- To use print debugging to the `~/.cache/helix/helix.log` file, you must:
|
||||
* Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`)
|
||||
* Pass the appropriate verbosity level option for the desired log level. (`hx -v <file>` for info, more `v`s for higher severity inclusive)
|
||||
- If your preferred language is missing, integrating a tree-sitter grammar for
|
||||
it and defining syntax highlight queries for it is straight forward and
|
||||
doesn't require much knowledge of the internals.
|
||||
|
||||
We provide an [architecture.md](./docs/architecture.md) that should give you
|
||||
a good overview of the internals.
|
||||
Contributing guidelines can be found [here](./docs/CONTRIBUTING.md).
|
||||
|
||||
# Getting help
|
||||
|
||||
|
32
TODO.md
32
TODO.md
@@ -1,32 +0,0 @@
|
||||
|
||||
- tree sitter:
|
||||
- markdown
|
||||
- regex
|
||||
- kotlin
|
||||
- clojure
|
||||
- erlang
|
||||
|
||||
- [ ] completion isIncomplete support
|
||||
|
||||
1
|
||||
- [ ] respect view fullscreen flag
|
||||
- [ ] Implement marks (superset of Selection/Range)
|
||||
|
||||
- [ ] = for auto indent line/selection
|
||||
- [ ] :x for closing buffers
|
||||
- [ ] lsp: signature help
|
||||
|
||||
2
|
||||
- [ ] macro recording
|
||||
- [ ] extend selection (treesitter select parent node) (replaces viw, vi(, va( etc )
|
||||
- [ ] selection align
|
||||
- [ ] store some state between restarts: file positions, prompt history
|
||||
- [ ] highlight matched characters in picker
|
||||
|
||||
3
|
||||
- [ ] diff mode with highlighting?
|
||||
- [ ] snippet support (tab to jump between marks)
|
||||
- [ ] gamelisp/wasm scripting
|
||||
|
||||
X
|
||||
- [ ] rendering via skulpin/skia or raw wgpu
|
51
base16_theme.toml
Normal file
51
base16_theme.toml
Normal file
@@ -0,0 +1,51 @@
|
||||
# Author: NNB <nnbnh@protonmail.com>
|
||||
|
||||
"ui.menu" = "black"
|
||||
"ui.menu.selected" = { modifiers = ["reversed"] }
|
||||
"ui.linenr" = { fg = "gray", bg = "black" }
|
||||
"ui.popup" = { modifiers = ["reversed"] }
|
||||
"ui.linenr.selected" = { fg = "white", bg = "black", modifiers = ["bold"] }
|
||||
"ui.selection" = { fg = "black", bg = "blue" }
|
||||
"ui.selection.primary" = { fg = "white", bg = "blue" }
|
||||
"comment" = { fg = "gray" }
|
||||
"ui.statusline" = { fg = "black", bg = "white" }
|
||||
"ui.statusline.inactive" = { fg = "gray", bg = "white" }
|
||||
"ui.help" = { modifiers = ["reversed"] }
|
||||
"ui.cursor" = { fg = "white", modifiers = ["reversed"] }
|
||||
"variable" = "red"
|
||||
"constant.numeric" = "yellow"
|
||||
"constant" = "yellow"
|
||||
"attributes" = "yellow"
|
||||
"type" = "yellow"
|
||||
"ui.cursor.match" = { fg = "yellow", modifiers = ["underlined"] }
|
||||
"string" = "green"
|
||||
"variable.other.member" = "green"
|
||||
"constant.character.escape" = "cyan"
|
||||
"function" = "blue"
|
||||
"constructor" = "blue"
|
||||
"special" = "blue"
|
||||
"keyword" = "magenta"
|
||||
"label" = "magenta"
|
||||
"namespace" = "magenta"
|
||||
"ui.help" = { fg = "white", bg = "black" }
|
||||
|
||||
"markup.heading" = "blue"
|
||||
"markup.list" = "red"
|
||||
"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
|
||||
"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
|
||||
"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
|
||||
"markup.link.text" = "red"
|
||||
"markup.quote" = "cyan"
|
||||
"markup.raw" = "green"
|
||||
|
||||
"diff.plus" = "green"
|
||||
"diff.delta" = "yellow"
|
||||
"diff.minus" = "red"
|
||||
|
||||
"diagnostic" = { modifiers = ["underlined"] }
|
||||
"ui.gutter" = { bg = "black" }
|
||||
"info" = "blue"
|
||||
"hint" = "gray"
|
||||
"debug" = "gray"
|
||||
"warning" = "yellow"
|
||||
"error" = "red"
|
@@ -1,12 +1,18 @@
|
||||
# Summary
|
||||
|
||||
[Helix](./title-page.md)
|
||||
|
||||
- [Installation](./install.md)
|
||||
- [Usage](./usage.md)
|
||||
- [Keymap](./keymap.md)
|
||||
- [Commands](./commands.md)
|
||||
- [Language Support](./lang-support.md)
|
||||
- [Migrating from Vim](./from-vim.md)
|
||||
- [Configuration](./configuration.md)
|
||||
- [Themes](./themes.md)
|
||||
- [Keymap](./keymap.md)
|
||||
- [Key Remapping](./remapping.md)
|
||||
- [Hooks](./hooks.md)
|
||||
- [Languages](./languages.md)
|
||||
- [Guides](./guides/README.md)
|
||||
- [Adding Languages](./guides/adding_languages.md)
|
||||
- [Adding Textobject Queries](./guides/textobject.md)
|
||||
|
5
book/src/commands.md
Normal file
5
book/src/commands.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Commands
|
||||
|
||||
Command mode can be activated by pressing `:`, similar to vim. Built-in commands:
|
||||
|
||||
{{#include ./generated/typable-cmd.md}}
|
@@ -5,9 +5,29 @@ To override global configuration parameters, create a `config.toml` file located
|
||||
* Linux and Mac: `~/.config/helix/config.toml`
|
||||
* Windows: `%AppData%\helix\config.toml`
|
||||
|
||||
> Note: You may use `hx --edit-config` to create and edit the `config.toml` file.
|
||||
|
||||
Example config:
|
||||
|
||||
```toml
|
||||
theme = "onedark"
|
||||
|
||||
[editor]
|
||||
line-number = "relative"
|
||||
mouse = false
|
||||
|
||||
[editor.cursor-shape]
|
||||
insert = "bar"
|
||||
normal = "block"
|
||||
select = "underline"
|
||||
|
||||
[editor.file-picker]
|
||||
hidden = false
|
||||
```
|
||||
|
||||
## Editor
|
||||
|
||||
`[editor]` section of the config.
|
||||
### `[editor]` Section
|
||||
|
||||
| Key | Description | Default |
|
||||
|--|--|---------|
|
||||
@@ -16,16 +36,102 @@ To override global configuration parameters, create a `config.toml` file located
|
||||
| `middle-click-paste` | Middle click paste support. | `true` |
|
||||
| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
|
||||
| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
|
||||
| `line-number` | Line number display (`absolute`, `relative`) | `absolute` |
|
||||
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
|
||||
| `auto-pairs` | Enable automatic insertion of pairs to parenthese, brackets, etc. | `true` |
|
||||
| `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
|
||||
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
|
||||
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` |
|
||||
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
|
||||
| `auto-info` | Whether to display infoboxes | `true` |
|
||||
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` |
|
||||
|
||||
### `[editor.cursor-shape]` Section
|
||||
|
||||
Defines the shape of cursor in each mode. Note that due to limitations
|
||||
of the terminal environment, only the primary cursor can change shape.
|
||||
|
||||
| Key | Description | Default |
|
||||
| --- | ----------- | ------- |
|
||||
| `normal` | Cursor shape in [normal mode][normal mode] | `block` |
|
||||
| `insert` | Cursor shape in [insert mode][insert mode] | `block` |
|
||||
| `select` | Cursor shape in [select mode][select mode] | `block` |
|
||||
|
||||
[normal mode]: ./keymap.md#normal-mode
|
||||
[insert mode]: ./keymap.md#insert-mode
|
||||
[select mode]: ./keymap.md#select--extend-mode
|
||||
|
||||
### `[editor.file-picker]` Section
|
||||
|
||||
Sets options for file picker and global search. All but the last key listed in
|
||||
the default file-picker configuration below are IgnoreOptions: whether hidden
|
||||
files and files listed within ignore files are ignored by (not visible in) the
|
||||
helix file picker and global search. There is also one other key, `max-depth`
|
||||
available, which is not defined by default.
|
||||
|
||||
| Key | Description | Default |
|
||||
|--|--|---------|
|
||||
|`hidden` | Enables ignoring hidden files. | true
|
||||
|`parents` | Enables reading ignore files from parent directories. | true
|
||||
|`ignore` | Enables reading `.ignore` files. | true
|
||||
|`git-ignore` | Enables reading `.gitignore` files. | true
|
||||
|`git-global` | Enables reading global .gitignore, whose path is specified in git's config: `core.excludefile` option. | true
|
||||
|`git-exclude` | Enables reading `.git/info/exclude` files. | true
|
||||
|`max-depth` | Set with an integer value for maximum depth to recurse. | Defaults to `None`.
|
||||
|
||||
### `[editor.auto-pairs]` Section
|
||||
|
||||
Enable automatic insertion of pairs to parentheses, brackets, etc. Can be
|
||||
a simple boolean value, or a specific mapping of pairs of single characters.
|
||||
|
||||
| Key | Description |
|
||||
| --- | ----------- |
|
||||
| `false` | Completely disable auto pairing, regardless of language-specific settings
|
||||
| `true` | Use the default pairs: <code>(){}[]''""``</code>
|
||||
| Mapping of pairs | e.g. `{ "(" = ")", "{" = "}", ... }`
|
||||
|
||||
Example
|
||||
|
||||
```toml
|
||||
[editor.auto-pairs]
|
||||
'(' = ')'
|
||||
'{' = '}'
|
||||
'[' = ']'
|
||||
'"' = '"'
|
||||
'`' = '`'
|
||||
'<' = '>'
|
||||
```
|
||||
|
||||
Additionally, this setting can be used in a language config. Unless
|
||||
the editor setting is `false`, this will override the editor config in
|
||||
documents with this language.
|
||||
|
||||
Example `languages.toml` that adds <> and removes ''
|
||||
|
||||
```toml
|
||||
[[language]]
|
||||
name = "rust"
|
||||
|
||||
[language.auto-pairs]
|
||||
'(' = ')'
|
||||
'{' = '}'
|
||||
'[' = ']'
|
||||
'"' = '"'
|
||||
'`' = '`'
|
||||
'<' = '>'
|
||||
```
|
||||
|
||||
### `[editor.search]` Section
|
||||
|
||||
Search specific options.
|
||||
|
||||
| Key | Description | Default |
|
||||
|--|--|---------|
|
||||
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
|
||||
| `wrap-around`| Whether the search should wrap after depleting the matches | `true` |
|
||||
|
||||
|
||||
## LSP
|
||||
|
||||
To display all language server messages in the status line add the following to your `config.toml`:
|
||||
|
||||
```toml
|
||||
[lsp]
|
||||
display-messages = true
|
||||
|
66
book/src/generated/lang-support.md
Normal file
66
book/src/generated/lang-support.md
Normal file
@@ -0,0 +1,66 @@
|
||||
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| bash | ✓ | | | `bash-language-server` |
|
||||
| c | ✓ | ✓ | ✓ | `clangd` |
|
||||
| c-sharp | ✓ | | | `OmniSharp` |
|
||||
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
|
||||
| comment | ✓ | | | |
|
||||
| cpp | ✓ | ✓ | ✓ | `clangd` |
|
||||
| css | ✓ | | | |
|
||||
| dart | ✓ | | ✓ | `dart` |
|
||||
| dockerfile | ✓ | | | `docker-langserver` |
|
||||
| elixir | ✓ | | | `elixir-ls` |
|
||||
| elm | ✓ | | | `elm-language-server` |
|
||||
| erlang | ✓ | | | |
|
||||
| fish | ✓ | ✓ | ✓ | |
|
||||
| git-commit | ✓ | | | |
|
||||
| git-config | ✓ | | | |
|
||||
| git-diff | ✓ | | | |
|
||||
| git-rebase | ✓ | | | |
|
||||
| glsl | ✓ | | ✓ | |
|
||||
| go | ✓ | ✓ | ✓ | `gopls` |
|
||||
| graphql | ✓ | | | |
|
||||
| haskell | ✓ | | | `haskell-language-server-wrapper` |
|
||||
| hcl | ✓ | | ✓ | `terraform-ls` |
|
||||
| html | ✓ | | | |
|
||||
| iex | ✓ | | | |
|
||||
| java | ✓ | | | |
|
||||
| javascript | ✓ | | ✓ | `typescript-language-server` |
|
||||
| json | ✓ | | ✓ | |
|
||||
| julia | ✓ | | | `julia` |
|
||||
| kotlin | ✓ | | | `kotlin-language-server` |
|
||||
| latex | ✓ | | | |
|
||||
| lean | ✓ | | | `lean` |
|
||||
| ledger | ✓ | | | |
|
||||
| llvm | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir-yaml | ✓ | | ✓ | |
|
||||
| lua | ✓ | | ✓ | |
|
||||
| make | ✓ | | | |
|
||||
| markdown | ✓ | | | |
|
||||
| mint | | | | `mint` |
|
||||
| nix | ✓ | | ✓ | `rnix-lsp` |
|
||||
| ocaml | ✓ | | ✓ | |
|
||||
| ocaml-interface | ✓ | | | |
|
||||
| perl | ✓ | ✓ | ✓ | |
|
||||
| php | ✓ | ✓ | ✓ | |
|
||||
| prolog | | | | `swipl` |
|
||||
| protobuf | ✓ | | ✓ | |
|
||||
| python | ✓ | ✓ | ✓ | `pylsp` |
|
||||
| racket | | | | `racket` |
|
||||
| regex | ✓ | | | |
|
||||
| rescript | ✓ | ✓ | | `rescript-language-server` |
|
||||
| ruby | ✓ | | ✓ | `solargraph` |
|
||||
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
|
||||
| scala | ✓ | | ✓ | `metals` |
|
||||
| svelte | ✓ | | ✓ | `svelteserver` |
|
||||
| tablegen | ✓ | ✓ | ✓ | |
|
||||
| toml | ✓ | | | |
|
||||
| tsq | ✓ | | | |
|
||||
| tsx | ✓ | | | `typescript-language-server` |
|
||||
| twig | ✓ | | | |
|
||||
| typescript | ✓ | | ✓ | `typescript-language-server` |
|
||||
| vue | ✓ | | | |
|
||||
| wgsl | ✓ | | | |
|
||||
| yaml | ✓ | | ✓ | |
|
||||
| zig | ✓ | | ✓ | `zls` |
|
57
book/src/generated/typable-cmd.md
Normal file
57
book/src/generated/typable-cmd.md
Normal file
@@ -0,0 +1,57 @@
|
||||
| Name | Description |
|
||||
| --- | --- |
|
||||
| `:quit`, `:q` | Close the current view. |
|
||||
| `:quit!`, `:q!` | Close the current view forcefully (ignoring unsaved changes). |
|
||||
| `:open`, `:o` | Open a file from disk into the current view. |
|
||||
| `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. |
|
||||
| `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully (ignoring unsaved changes). |
|
||||
| `:buffer-close-others`, `:bco`, `:bcloseother` | Close all buffers but the currently focused one. |
|
||||
| `:buffer-close-others!`, `:bco!`, `:bcloseother!` | Close all buffers but the currently focused one. |
|
||||
| `:buffer-close-all`, `:bca`, `:bcloseall` | Close all buffers, without quitting. |
|
||||
| `:buffer-close-all!`, `:bca!`, `:bcloseall!` | Close all buffers forcefully (ignoring unsaved changes), without quitting. |
|
||||
| `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) |
|
||||
| `:new`, `:n` | Create a new scratch buffer. |
|
||||
| `:format`, `:fmt` | Format the file using the LSP formatter. |
|
||||
| `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.) |
|
||||
| `:line-ending` | Set the document's default line ending. Options: crlf, lf, cr, ff, nel. |
|
||||
| `:earlier`, `:ear` | Jump back to an earlier point in edit history. Accepts a number of steps or a time span. |
|
||||
| `:later`, `:lat` | Jump to a later point in edit history. Accepts a number of steps or a time span. |
|
||||
| `:write-quit`, `:wq`, `:x` | Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt) |
|
||||
| `:write-quit!`, `:wq!`, `:x!` | Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt) |
|
||||
| `:write-all`, `:wa` | Write changes from all views to disk. |
|
||||
| `:write-quit-all`, `:wqa`, `:xa` | Write changes from all views to disk and close all views. |
|
||||
| `:write-quit-all!`, `:wqa!`, `:xa!` | Write changes from all views to disk and close all views forcefully (ignoring unsaved changes). |
|
||||
| `:quit-all`, `:qa` | Close all views. |
|
||||
| `:quit-all!`, `:qa!` | Close all views forcefully (ignoring unsaved changes). |
|
||||
| `:cquit`, `:cq` | Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2). |
|
||||
| `:cquit!`, `:cq!` | Quit with exit code (default 1) forcefully (ignoring unsaved changes). Accepts an optional integer exit code (:cq! 2). |
|
||||
| `:theme` | Change the editor theme. |
|
||||
| `:clipboard-yank` | Yank main selection into system clipboard. |
|
||||
| `:clipboard-yank-join` | Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline. |
|
||||
| `:primary-clipboard-yank` | Yank main selection into system primary clipboard. |
|
||||
| `:primary-clipboard-yank-join` | Yank joined selections into system primary clipboard. A separator can be provided as first argument. Default value is newline. |
|
||||
| `:clipboard-paste-after` | Paste system clipboard after selections. |
|
||||
| `:clipboard-paste-before` | Paste system clipboard before selections. |
|
||||
| `:clipboard-paste-replace` | Replace selections with content of system clipboard. |
|
||||
| `:primary-clipboard-paste-after` | Paste primary clipboard after selections. |
|
||||
| `:primary-clipboard-paste-before` | Paste primary clipboard before selections. |
|
||||
| `:primary-clipboard-paste-replace` | Replace selections with content of system primary clipboard. |
|
||||
| `:show-clipboard-provider` | Show clipboard provider name in status bar. |
|
||||
| `:change-current-directory`, `:cd` | Change the current working directory. |
|
||||
| `:show-directory`, `:pwd` | Show the current working directory. |
|
||||
| `:encoding` | Set encoding based on `https://encoding.spec.whatwg.org` |
|
||||
| `:reload` | Discard changes and reload from the source file. |
|
||||
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
|
||||
| `:debug-start`, `:dbg` | Start a debug session from a given template with given parameters. |
|
||||
| `:debug-remote`, `:dbg-tcp` | Connect to a debug adapter by TCP address and start a debugging session from a given template with given parameters. |
|
||||
| `:debug-eval` | Evaluate expression in current debug context. |
|
||||
| `:vsplit`, `:vs` | Open the file in a vertical split. |
|
||||
| `:vsplit-new`, `:vnew` | Open a scratch buffer in a vertical split. |
|
||||
| `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. |
|
||||
| `:hsplit-new`, `:hnew` | Open a scratch buffer in a horizontal split. |
|
||||
| `:tutor` | Open the tutorial. |
|
||||
| `:goto`, `:g` | Go to line number. |
|
||||
| `:set-option`, `:set` | Set a config option at runtime |
|
||||
| `:sort` | Sort ranges in selection. |
|
||||
| `:rsort` | Sort ranges in selection in reverse order. |
|
||||
| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |
|
90
book/src/guides/adding_languages.md
Normal file
90
book/src/guides/adding_languages.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# Adding languages
|
||||
|
||||
## Language configuration
|
||||
|
||||
To add a new language, you need to add a `language` entry to the
|
||||
[`languages.toml`][languages.toml] found in the root of the repository;
|
||||
this `languages.toml` file is included at compilation time, and is
|
||||
distinct from the `languages.toml` file in the user's [configuration
|
||||
directory](../configuration.md).
|
||||
|
||||
```toml
|
||||
[[language]]
|
||||
name = "mylang"
|
||||
scope = "scope.mylang"
|
||||
injection-regex = "^mylang$"
|
||||
file-types = ["mylang", "myl"]
|
||||
comment-token = "#"
|
||||
indent = { tab-width = 2, unit = " " }
|
||||
```
|
||||
|
||||
These are the available keys and descriptions for the file.
|
||||
|
||||
| Key | Description |
|
||||
| ---- | ----------- |
|
||||
| `name` | The name of the language |
|
||||
| `scope` | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
|
||||
| `injection-regex` | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
|
||||
| `file-types` | The filetypes of the language, for example `["yml", "yaml"]`. Extensions and full file names are supported. |
|
||||
| `shebangs` | The interpreters from the shebang line, for example `["sh", "bash"]` |
|
||||
| `roots` | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
|
||||
| `auto-format` | Whether to autoformat this language when saving |
|
||||
| `diagnostic-severity` | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
|
||||
| `comment-token` | The token to use as a comment-token |
|
||||
| `indent` | The indent to use. Has sub keys `tab-width` and `unit` |
|
||||
| `config` | Language server configuration |
|
||||
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
|
||||
|
||||
## Grammar configuration
|
||||
|
||||
If a tree-sitter grammar is available for the language, add a new `grammar`
|
||||
entry to `languages.toml`.
|
||||
|
||||
```toml
|
||||
[[grammar]]
|
||||
name = "mylang"
|
||||
source = { git = "https://github.com/example/mylang", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
|
||||
```
|
||||
|
||||
Grammar configuration takes these keys:
|
||||
|
||||
| Key | Description |
|
||||
| --- | ----------- |
|
||||
| `name` | The name of the tree-sitter grammar |
|
||||
| `source` | The method of fetching the grammar - a table with a schema defined below |
|
||||
|
||||
Where `source` is a table with either these keys when using a grammar from a
|
||||
git repository:
|
||||
|
||||
| Key | Description |
|
||||
| --- | ----------- |
|
||||
| `git` | A git remote URL from which the grammar should be cloned |
|
||||
| `rev` | The revision (commit hash or tag) which should be fetched |
|
||||
| `subpath` | A path within the grammar directory which should be built. Some grammar repositories host multiple grammars (for example `tree-sitter-typescript` and `tree-sitter-ocaml`) in subdirectories. This key is used to point `hx --grammar build` to the correct path for compilation. When omitted, the root of repository is used |
|
||||
|
||||
Or a `path` key with an absolute path to a locally available grammar directory.
|
||||
|
||||
## Queries
|
||||
|
||||
For a language to have syntax-highlighting and indentation among
|
||||
other things, you have to add queries. Add a directory for your
|
||||
language with the path `runtime/queries/<name>/`. The tree-sitter
|
||||
[website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries)
|
||||
gives more info on how to write queries.
|
||||
|
||||
> NOTE: When evaluating queries, the first matching query takes
|
||||
precedence, which is different from other editors like neovim where
|
||||
the last matching query supersedes the ones before it. See
|
||||
[this issue][neovim-query-precedence] for an example.
|
||||
|
||||
## Common Issues
|
||||
|
||||
- If you get errors when running after switching branches, you may have to update the tree-sitter grammars. Run `hx --grammar fetch` to fetch the grammars and `hx --grammar build` to build any out-of-date grammars.
|
||||
|
||||
- If a parser is segfaulting or you want to remove the parser, make sure to remove the compiled parser in `runtime/grammar/<name>.so`
|
||||
|
||||
- The indents query is `indents.toml`, *not* `indents.scm`. See [this](https://github.com/helix-editor/helix/issues/114) issue for more information.
|
||||
|
||||
[treesitter-language-injection]: https://tree-sitter.github.io/tree-sitter/syntax-highlighting#language-injection
|
||||
[languages.toml]: https://github.com/helix-editor/helix/blob/master/languages.toml
|
||||
[neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090
|
@@ -5,7 +5,7 @@ require an accompanying tree-sitter grammar and a `textobjects.scm` query file
|
||||
to work properly. Tree-sitter allows us to query the source code syntax tree
|
||||
and capture specific parts of it. The queries are written in a lisp dialect.
|
||||
More information on how to write queries can be found in the [official tree-sitter
|
||||
documentation](tree-sitter-queries).
|
||||
documentation][tree-sitter-queries].
|
||||
|
||||
Query files should be placed in `runtime/queries/{language}/textobjects.scm`
|
||||
when contributing. Note that to test the query files locally you should put
|
||||
@@ -21,10 +21,27 @@ The following [captures][tree-sitter-captures] are recognized:
|
||||
| `class.inside` |
|
||||
| `class.around` |
|
||||
| `parameter.inside` |
|
||||
| `comment.inside` |
|
||||
| `comment.around` |
|
||||
|
||||
[Example query files][textobject-examples] can be found in the helix GitHub repository.
|
||||
|
||||
## Queries for Textobject Based Navigation
|
||||
|
||||
[Tree-sitter based navigation][textobjects-nav] is done using captures in the
|
||||
following order:
|
||||
|
||||
- `object.movement`
|
||||
- `object.around`
|
||||
- `object.inside`
|
||||
|
||||
For example if a `function.around` capture has been already defined for a language
|
||||
in its `textobjects.scm` file, function navigation should also work automatically.
|
||||
`function.movement` should be defined only if the node captured by `function.around`
|
||||
doesn't make sense in a navigation context.
|
||||
|
||||
[textobjects]: ../usage.md#textobjects
|
||||
[textobjects-nav]: ../usage.md#tree-sitter-textobject-based-navigation
|
||||
[tree-sitter-queries]: https://tree-sitter.github.io/tree-sitter/using-parsers#query-syntax
|
||||
[tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers#capturing-nodes
|
||||
[textobject-examples]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l=
|
||||
|
@@ -19,20 +19,32 @@ brew install helix
|
||||
|
||||
A [flake](https://nixos.wiki/wiki/Flakes) containing the package is available in
|
||||
the project root. The flake can also be used to spin up a reproducible development
|
||||
shell for working on Helix.
|
||||
shell for working on Helix with `nix develop`.
|
||||
|
||||
Flake outputs are cached for each push to master using
|
||||
[Cachix](https://www.cachix.org/). With Cachix
|
||||
[installed](https://docs.cachix.org/installation), `cachix use helix` will
|
||||
configure Nix to use cached outputs when possible.
|
||||
|
||||
### Arch Linux
|
||||
|
||||
Releases are available in the `community` repository.
|
||||
|
||||
Packages are also available on AUR:
|
||||
- [helix-bin](https://aur.archlinux.org/packages/helix-bin/) contains the pre-built release
|
||||
- [helix-git](https://aur.archlinux.org/packages/helix-git/) builds the master branch
|
||||
A [helix-git](https://aur.archlinux.org/packages/helix-git/) package is also available on the AUR, which builds the master branch.
|
||||
|
||||
### Fedora Linux
|
||||
|
||||
You can install the COPR package for Helix via
|
||||
|
||||
```
|
||||
sudo dnf copr enable varlad/helix
|
||||
sudo dnf install helix
|
||||
```
|
||||
|
||||
## Build from source
|
||||
|
||||
```
|
||||
git clone --recurse-submodules --shallow-submodules -j8 https://github.com/helix-editor/helix
|
||||
git clone https://github.com/helix-editor/helix
|
||||
cd helix
|
||||
cargo install --path helix-term
|
||||
```
|
||||
@@ -42,3 +54,9 @@ This will install the `hx` binary to `$HOME/.cargo/bin`.
|
||||
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
|
||||
config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overridden
|
||||
via the `HELIX_RUNTIME` environment variable.
|
||||
|
||||
## Building tree-sitter grammars
|
||||
|
||||
Tree-sitter grammars must be fetched and compiled if not pre-packaged.
|
||||
Fetch grammars with `hx --grammar fetch` (requires `git`) and compile them
|
||||
with `hx --grammar build` (requires a C compiler).
|
||||
|
@@ -1,124 +1,148 @@
|
||||
# Keymap
|
||||
|
||||
- Mappings marked (**LSP**) require an active language server for the file.
|
||||
- Mappings marked (**TS**) require a tree-sitter grammar for the filetype.
|
||||
|
||||
## Normal mode
|
||||
|
||||
### Movement
|
||||
|
||||
> NOTE: Unlike vim, `f`, `F`, `t` and `T` are not confined to the current line.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `h`/`Left` | Move left | `move_char_left` |
|
||||
| `j`/`Down` | Move down | `move_line_down` |
|
||||
| `k`/`Up` | Move up | `move_line_up` |
|
||||
| `l`/`Right` | Move right | `move_char_right` |
|
||||
| `w` | Move next word start | `move_next_word_start` |
|
||||
| `b` | Move previous word start | `move_prev_word_start` |
|
||||
| `e` | Move next word end | `move_next_word_end` |
|
||||
| `W` | Move next WORD start | `move_next_long_word_start` |
|
||||
| `B` | Move previous WORD start | `move_prev_long_word_start` |
|
||||
| `E` | Move next WORD end | `move_next_long_word_end` |
|
||||
| `t` | Find 'till next char | `find_till_char` |
|
||||
| `f` | Find next char | `find_next_char` |
|
||||
| `T` | Find 'till previous char | `till_prev_char` |
|
||||
| `F` | Find previous char | `find_prev_char` |
|
||||
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
|
||||
| `Home` | Move to the start of the line | `goto_line_start` |
|
||||
| `End` | Move to the end of the line | `goto_line_end` |
|
||||
| `PageUp` | Move page up | `page_up` |
|
||||
| `PageDown` | Move page down | `page_down` |
|
||||
| `Ctrl-u` | Move half page up | `half_page_up` |
|
||||
| `Ctrl-d` | Move half page down | `half_page_down` |
|
||||
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
|
||||
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
|
||||
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
|
||||
| `g` | Enter [goto mode](#goto-mode) | N/A |
|
||||
| `m` | Enter [match mode](#match-mode) | N/A |
|
||||
| `:` | Enter command mode | `command_mode` |
|
||||
| `z` | Enter [view mode](#view-mode) | N/A |
|
||||
| `Z` | Enter sticky [view mode](#view-mode) | N/A |
|
||||
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
|
||||
| `Space` | Enter [space mode](#space-mode) | N/A |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `h`, `Left` | Move left | `move_char_left` |
|
||||
| `j`, `Down` | Move down | `move_line_down` |
|
||||
| `k`, `Up` | Move up | `move_line_up` |
|
||||
| `l`, `Right` | Move right | `move_char_right` |
|
||||
| `w` | Move next word start | `move_next_word_start` |
|
||||
| `b` | Move previous word start | `move_prev_word_start` |
|
||||
| `e` | Move next word end | `move_next_word_end` |
|
||||
| `W` | Move next WORD start | `move_next_long_word_start` |
|
||||
| `B` | Move previous WORD start | `move_prev_long_word_start` |
|
||||
| `E` | Move next WORD end | `move_next_long_word_end` |
|
||||
| `t` | Find 'till next char | `find_till_char` |
|
||||
| `f` | Find next char | `find_next_char` |
|
||||
| `T` | Find 'till previous char | `till_prev_char` |
|
||||
| `F` | Find previous char | `find_prev_char` |
|
||||
| `G` | Go to line number `<n>` | `goto_line` |
|
||||
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
|
||||
| `Home` | Move to the start of the line | `goto_line_start` |
|
||||
| `End` | Move to the end of the line | `goto_line_end` |
|
||||
| `Ctrl-b`, `PageUp` | Move page up | `page_up` |
|
||||
| `Ctrl-f`, `PageDown` | Move page down | `page_down` |
|
||||
| `Ctrl-u` | Move half page up | `half_page_up` |
|
||||
| `Ctrl-d` | Move half page down | `half_page_down` |
|
||||
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
|
||||
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
|
||||
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
|
||||
|
||||
### Changes
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `r` | Replace with a character | `replace` |
|
||||
| `R` | Replace with yanked text | `replace_with_yanked` |
|
||||
| `~` | Switch case of the selected text | `switch_case` |
|
||||
| `` ` `` | Set the selected text to lower case | `switch_to_lowercase` |
|
||||
| `` Alt-` `` | Set the selected text to upper case | `switch_to_uppercase` |
|
||||
| `i` | Insert before selection | `insert_mode` |
|
||||
| `a` | Insert after selection (append) | `append_mode` |
|
||||
| `I` | Insert at the start of the line | `prepend_to_line` |
|
||||
| `A` | Insert at the end of the line | `append_to_line` |
|
||||
| `o` | Open new line below selection | `open_below` |
|
||||
| `O` | Open new line above selection | `open_above` |
|
||||
| `.` | Repeat last change | N/A |
|
||||
| `u` | Undo change | `undo` |
|
||||
| `U` | Redo change | `redo` |
|
||||
| `y` | Yank selection | `yank` |
|
||||
| `p` | Paste after selection | `paste_after` |
|
||||
| `P` | Paste before selection | `paste_before` |
|
||||
| `"` `<reg>` | Select a register to yank to or paste from | `select_register` |
|
||||
| `>` | Indent selection | `indent` |
|
||||
| `<` | Unindent selection | `unindent` |
|
||||
| `=` | Format selection | `format_selections` |
|
||||
| `d` | Delete selection | `delete_selection` |
|
||||
| `c` | Change selection (delete and enter insert mode) | `change_selection` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `r` | Replace with a character | `replace` |
|
||||
| `R` | Replace with yanked text | `replace_with_yanked` |
|
||||
| `~` | Switch case of the selected text | `switch_case` |
|
||||
| `` ` `` | Set the selected text to lower case | `switch_to_lowercase` |
|
||||
| `` Alt-` `` | Set the selected text to upper case | `switch_to_uppercase` |
|
||||
| `i` | Insert before selection | `insert_mode` |
|
||||
| `a` | Insert after selection (append) | `append_mode` |
|
||||
| `I` | Insert at the start of the line | `prepend_to_line` |
|
||||
| `A` | Insert at the end of the line | `append_to_line` |
|
||||
| `o` | Open new line below selection | `open_below` |
|
||||
| `O` | Open new line above selection | `open_above` |
|
||||
| `.` | Repeat last change | N/A |
|
||||
| `u` | Undo change | `undo` |
|
||||
| `U` | Redo change | `redo` |
|
||||
| `Alt-u` | Move backward in history | `earlier` |
|
||||
| `Alt-U` | Move forward in history | `later` |
|
||||
| `y` | Yank selection | `yank` |
|
||||
| `p` | Paste after selection | `paste_after` |
|
||||
| `P` | Paste before selection | `paste_before` |
|
||||
| `"` `<reg>` | Select a register to yank to or paste from | `select_register` |
|
||||
| `>` | Indent selection | `indent` |
|
||||
| `<` | Unindent selection | `unindent` |
|
||||
| `=` | Format selection (currently nonfunctional/disabled) (**LSP**) | `format_selections` |
|
||||
| `d` | Delete selection | `delete_selection` |
|
||||
| `Alt-d` | Delete selection, without yanking | `delete_selection_noyank` |
|
||||
| `c` | Change selection (delete and enter insert mode) | `change_selection` |
|
||||
| `Alt-c` | Change selection (delete and enter insert mode, without yanking) | `change_selection_noyank` |
|
||||
| `Ctrl-a` | Increment object (number) under cursor | `increment` |
|
||||
| `Ctrl-x` | Decrement object (number) under cursor | `decrement` |
|
||||
| `Q` | Start/stop macro recording to the selected register (experimental) | `record_macro` |
|
||||
| `q` | Play back a recorded macro from the selected register (experimental) | `replay_macro` |
|
||||
|
||||
#### Shell
|
||||
|
||||
| Key | Description | Command |
|
||||
| ------ | ----------- | ------- |
|
||||
| <code>|</code> | Pipe each selection through shell command, replacing with output | `shell_pipe` |
|
||||
| <code>A-|</code> | Pipe each selection into shell command, ignoring output | `shell_pipe_to` |
|
||||
| `!` | Run shell command, inserting output before each selection | `shell_insert_output` |
|
||||
| `A-!` | Run shell command, appending output after each selection | `shell_append_output` |
|
||||
| Key | Description | Command |
|
||||
| ------ | ----------- | ------- |
|
||||
| <code>|</code> | Pipe each selection through shell command, replacing with output | `shell_pipe` |
|
||||
| <code>Alt-|</code> | Pipe each selection into shell command, ignoring output | `shell_pipe_to` |
|
||||
| `!` | Run shell command, inserting output before each selection | `shell_insert_output` |
|
||||
| `Alt-!` | Run shell command, appending output after each selection | `shell_append_output` |
|
||||
| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` |
|
||||
|
||||
|
||||
### Selection manipulation
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `s` | Select all regex matches inside selections | `select_regex` |
|
||||
| `S` | Split selection into subselections on regex matches | `split_selection` |
|
||||
| `Alt-s` | Split selection on newlines | `split_selection_on_newline` |
|
||||
| `;` | Collapse selection onto a single cursor | `collapse_selection` |
|
||||
| `Alt-;` | Flip selection cursor and anchor | `flip_selections` |
|
||||
| `,` | Keep only the primary selection | `keep_primary_selection` |
|
||||
| `Alt-,` | Remove the primary selection | `remove_primary_selection` |
|
||||
| `C` | Copy selection onto the next line | `copy_selection_on_next_line` |
|
||||
| `Alt-C` | Copy selection onto the previous line | `copy_selection_on_prev_line` |
|
||||
| `(` | Rotate main selection backward | `rotate_selections_backward` |
|
||||
| `)` | Rotate main selection forward | `rotate_selections_forward` |
|
||||
| `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` |
|
||||
| `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` |
|
||||
| `%` | Select entire file | `select_all` |
|
||||
| `x` | Select current line, if already selected, extend to next line | `extend_line` |
|
||||
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
|
||||
| | Expand selection to parent syntax node TODO: pick a key | `expand_selection` |
|
||||
| `J` | Join lines inside selection | `join_selections` |
|
||||
| `K` | Keep selections matching the regex | `keep_selections` |
|
||||
| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` |
|
||||
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `s` | Select all regex matches inside selections | `select_regex` |
|
||||
| `S` | Split selection into subselections on regex matches | `split_selection` |
|
||||
| `Alt-s` | Split selection on newlines | `split_selection_on_newline` |
|
||||
| `&` | Align selection in columns | `align_selections` |
|
||||
| `_` | Trim whitespace from the selection | `trim_selections` |
|
||||
| `;` | Collapse selection onto a single cursor | `collapse_selection` |
|
||||
| `Alt-;` | Flip selection cursor and anchor | `flip_selections` |
|
||||
| `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` |
|
||||
| `,` | Keep only the primary selection | `keep_primary_selection` |
|
||||
| `Alt-,` | Remove the primary selection | `remove_primary_selection` |
|
||||
| `C` | Copy selection onto the next line (Add cursor below) | `copy_selection_on_next_line` |
|
||||
| `Alt-C` | Copy selection onto the previous line (Add cursor above) | `copy_selection_on_prev_line` |
|
||||
| `(` | Rotate main selection backward | `rotate_selections_backward` |
|
||||
| `)` | Rotate main selection forward | `rotate_selections_forward` |
|
||||
| `Alt-(` | Rotate selection contents backward | `rotate_selection_contents_backward` |
|
||||
| `Alt-)` | Rotate selection contents forward | `rotate_selection_contents_forward` |
|
||||
| `%` | Select entire file | `select_all` |
|
||||
| `x` | Select current line, if already selected, extend to next line | `extend_line` |
|
||||
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
|
||||
| `J` | Join lines inside selection | `join_selections` |
|
||||
| `K` | Keep selections matching the regex | `keep_selections` |
|
||||
| `Alt-K` | Remove selections matching the regex | `remove_selections` |
|
||||
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
|
||||
| `Alt-k`, `Alt-up` | Expand selection to parent syntax node (**TS**) | `expand_selection` |
|
||||
| `Alt-j`, `Alt-down` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
|
||||
| `Alt-h`, `Alt-left` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
|
||||
| `Alt-l`, `Alt-right` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
|
||||
|
||||
### Search
|
||||
|
||||
> TODO: The search implementation isn't ideal yet -- we don't support searching in reverse.
|
||||
Search commands all operate on the `/` register by default. Use `"<char>` to operate on a different one.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `/` | Search for regex pattern | `search` |
|
||||
| `?` | Search for previous pattern | `rsearch` |
|
||||
| `n` | Select next search match | `search_next` |
|
||||
| `N` | Add next search match to selection | `extend_search_next` |
|
||||
| `N` | Select previous search match | `search_prev` |
|
||||
| `*` | Use current selection as the search pattern | `search_selection` |
|
||||
|
||||
### Minor modes
|
||||
|
||||
These sub-modes are accessible from normal mode and typically switch back to normal mode after a command.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
|
||||
| `g` | Enter [goto mode](#goto-mode) | N/A |
|
||||
| `m` | Enter [match mode](#match-mode) | N/A |
|
||||
| `:` | Enter command mode | `command_mode` |
|
||||
| `z` | Enter [view mode](#view-mode) | N/A |
|
||||
| `Z` | Enter sticky [view mode](#view-mode) | N/A |
|
||||
| `Ctrl-w` | Enter [window mode](#window-mode) | N/A |
|
||||
| `Space` | Enter [space mode](#space-mode) | N/A |
|
||||
|
||||
#### View mode
|
||||
|
||||
View mode is intended for scrolling and manipulating the view without changing
|
||||
@@ -127,40 +151,43 @@ key to return to normal mode after usage (useful when you're simply looking
|
||||
over text and not actively editing it).
|
||||
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `z` , `c` | Vertically center the line | `align_view_center` |
|
||||
| `t` | Align the line to the top of the screen | `align_view_top` |
|
||||
| `b` | Align the line to the bottom of the screen | `align_view_bottom` |
|
||||
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` |
|
||||
| `j` | Scroll the view downwards | `scroll_down` |
|
||||
| `k` | Scroll the view upwards | `scroll_up` |
|
||||
| `f` | Move page down | `page_down` |
|
||||
| `b` | Move page up | `page_up` |
|
||||
| `d` | Move half page down | `half_page_down` |
|
||||
| `u` | Move half page up | `half_page_up` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `z`, `c` | Vertically center the line | `align_view_center` |
|
||||
| `t` | Align the line to the top of the screen | `align_view_top` |
|
||||
| `b` | Align the line to the bottom of the screen | `align_view_bottom` |
|
||||
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` |
|
||||
| `j`, `down` | Scroll the view downwards | `scroll_down` |
|
||||
| `k`, `up` | Scroll the view upwards | `scroll_up` |
|
||||
| `Ctrl-f`, `PageDown` | Move page down | `page_down` |
|
||||
| `Ctrl-b`, `PageUp` | Move page up | `page_up` |
|
||||
| `Ctrl-d` | Move half page down | `half_page_down` |
|
||||
| `Ctrl-u` | Move half page up | `half_page_up` |
|
||||
|
||||
#### Goto mode
|
||||
|
||||
Jumps to various locations.
|
||||
|
||||
> NOTE: Some of these features are only available with the LSP present.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `g` | Go to the start of the file | `goto_file_start` |
|
||||
| `g` | Go to line number `<n>` else start of file | `goto_file_start` |
|
||||
| `e` | Go to the end of the file | `goto_last_line` |
|
||||
| `f` | Go to files in the selection | `goto_file` |
|
||||
| `h` | Go to the start of the line | `goto_line_start` |
|
||||
| `l` | Go to the end of the line | `goto_line_end` |
|
||||
| `s` | Go to first non-whitespace character of the line | `goto_first_nonwhitespace` |
|
||||
| `t` | Go to the top of the screen | `goto_window_top` |
|
||||
| `m` | Go to the middle of the screen | `goto_window_middle` |
|
||||
| `c` | Go to the middle of the screen | `goto_window_center` |
|
||||
| `b` | Go to the bottom of the screen | `goto_window_bottom` |
|
||||
| `d` | Go to definition | `goto_definition` |
|
||||
| `y` | Go to type definition | `goto_type_definition` |
|
||||
| `r` | Go to references | `goto_reference` |
|
||||
| `i` | Go to implementation | `goto_implementation` |
|
||||
| `d` | Go to definition (**LSP**) | `goto_definition` |
|
||||
| `y` | Go to type definition (**LSP**) | `goto_type_definition` |
|
||||
| `r` | Go to references (**LSP**) | `goto_reference` |
|
||||
| `i` | Go to implementation (**LSP**) | `goto_implementation` |
|
||||
| `a` | Go to the last accessed/alternate file | `goto_last_accessed_file` |
|
||||
| `m` | Go to the last modified/alternate file | `goto_last_modified_file` |
|
||||
| `n` | Go to next buffer | `goto_next_buffer` |
|
||||
| `p` | Go to previous buffer | `goto_previous_buffer` |
|
||||
| `.` | Go to last modification in current file | `goto_last_modification` |
|
||||
|
||||
#### Match mode
|
||||
|
||||
@@ -170,7 +197,7 @@ and [textobject](./usage.md#textobject) usage.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `m` | Goto matching bracket | `match_brackets` |
|
||||
| `m` | Goto matching bracket (**TS**) | `match_brackets` |
|
||||
| `s` `<char>` | Surround current selection with `<char>` | `surround_add` |
|
||||
| `r` `<from><to>` | Replace surround character `<from>` with `<to>` | `surround_replace` |
|
||||
| `d` `<char>` | Delete surround character `<char>` | `surround_delete` |
|
||||
@@ -183,67 +210,120 @@ TODO: Mappings for selecting syntax nodes (a superset of `[`).
|
||||
|
||||
This layer is similar to Vim keybindings as Kakoune does not support windows.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ------------- | ------- |
|
||||
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` |
|
||||
| `v`, `Ctrl-v` | Vertical right split | `vsplit` |
|
||||
| `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` |
|
||||
| `h`, `Ctrl-h` | Move to left split | `jump_view_left` |
|
||||
| `j`, `Ctrl-j` | Move to split below | `jump_view_down` |
|
||||
| `k`, `Ctrl-k` | Move to split above | `jump_view_up` |
|
||||
| `l`, `Ctrl-l` | Move to right split | `jump_view_right` |
|
||||
| `q`, `Ctrl-q` | Close current window | `wclose` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ------------- | ------- |
|
||||
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` |
|
||||
| `v`, `Ctrl-v` | Vertical right split | `vsplit` |
|
||||
| `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` |
|
||||
| `f` | Go to files in the selection in horizontal splits | `goto_file` |
|
||||
| `F` | Go to files in the selection in vertical splits | `goto_file` |
|
||||
| `h`, `Ctrl-h`, `Left` | Move to left split | `jump_view_left` |
|
||||
| `j`, `Ctrl-j`, `Down` | Move to split below | `jump_view_down` |
|
||||
| `k`, `Ctrl-k`, `Up` | Move to split above | `jump_view_up` |
|
||||
| `l`, `Ctrl-l`, `Right` | Move to right split | `jump_view_right` |
|
||||
| `q`, `Ctrl-q` | Close current window | `wclose` |
|
||||
| `o`, `Ctrl-o` | Only keep the current window, closing all the others | `wonly` |
|
||||
|
||||
#### Space mode
|
||||
|
||||
This layer is a kludge of mappings, mostly pickers.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `k` | Show documentation for the item under the cursor | `hover` |
|
||||
| `f` | Open file picker | `file_picker` |
|
||||
| `b` | Open buffer picker | `buffer_picker` |
|
||||
| `s` | Open symbol picker (current document) | `symbol_picker` |
|
||||
| `a` | Apply code action | `code_action` |
|
||||
| `'` | Open last fuzzy picker | `last_picker` |
|
||||
| `w` | Enter [window mode](#window-mode) | N/A |
|
||||
| `p` | Paste system clipboard after selections | `paste_clipboard_after` |
|
||||
| `P` | Paste system clipboard before selections | `paste_clipboard_before` |
|
||||
| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` |
|
||||
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
|
||||
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
|
||||
| `/` | Global search in workspace folder | `global_search` |
|
||||
|
||||
> NOTE: Global search displays results in a fuzzy picker, use `space + '` to bring it back up after opening a file.
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `f` | Open file picker | `file_picker` |
|
||||
| `b` | Open buffer picker | `buffer_picker` |
|
||||
| `k` | Show documentation for item under cursor in a [popup](#popup) (**LSP**) | `hover` |
|
||||
| `s` | Open document symbol picker (**LSP**) | `symbol_picker` |
|
||||
| `S` | Open workspace symbol picker (**LSP**) | `workspace_symbol_picker` |
|
||||
| `r` | Rename symbol (**LSP**) | `rename_symbol` |
|
||||
| `a` | Apply code action (**LSP**) | `code_action` |
|
||||
| `'` | Open last fuzzy picker | `last_picker` |
|
||||
| `w` | Enter [window mode](#window-mode) | N/A |
|
||||
| `p` | Paste system clipboard after selections | `paste_clipboard_after` |
|
||||
| `P` | Paste system clipboard before selections | `paste_clipboard_before` |
|
||||
| `y` | Join and yank selections to clipboard | `yank_joined_to_clipboard` |
|
||||
| `Y` | Yank main selection to clipboard | `yank_main_selection_to_clipboard` |
|
||||
| `R` | Replace selections by clipboard contents | `replace_selections_with_clipboard` |
|
||||
| `/` | Global search in workspace folder | `global_search` |
|
||||
| `?` | Open command palette | `command_palette` |
|
||||
|
||||
> TIP: Global search displays results in a fuzzy picker, use `space + '` to bring it back up after opening a file.
|
||||
|
||||
##### Popup
|
||||
|
||||
Displays documentation for item under cursor.
|
||||
|
||||
| Key | Description |
|
||||
| ---- | ----------- |
|
||||
| `Ctrl-u` | Scroll up |
|
||||
| `Ctrl-d` | Scroll down |
|
||||
|
||||
#### Unimpaired
|
||||
|
||||
Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaired).
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `[d` | Go to previous diagnostic | `goto_prev_diag` |
|
||||
| `]d` | Go to next diagnostic | `goto_next_diag` |
|
||||
| `[D` | Go to first diagnostic in document | `goto_first_diag` |
|
||||
| `]D` | Go to last diagnostic in document | `goto_last_diag` |
|
||||
| `[space` | Add newline above | `add_newline_above` |
|
||||
| `]space` | Add newline below | `add_newline_below` |
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `[d` | Go to previous diagnostic (**LSP**) | `goto_prev_diag` |
|
||||
| `]d` | Go to next diagnostic (**LSP**) | `goto_next_diag` |
|
||||
| `[D` | Go to first diagnostic in document (**LSP**) | `goto_first_diag` |
|
||||
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
|
||||
| `]f` | Go to next function (**TS**) | `goto_next_function` |
|
||||
| `[f` | Go to previous function (**TS**) | `goto_prev_function` |
|
||||
| `]c` | Go to next class (**TS**) | `goto_next_class` |
|
||||
| `[c` | Go to previous class (**TS**) | `goto_prev_class` |
|
||||
| `]a` | Go to next argument/parameter (**TS**) | `goto_next_parameter` |
|
||||
| `[a` | Go to previous argument/parameter (**TS**) | `goto_prev_parameter` |
|
||||
| `]o` | Go to next comment (**TS**) | `goto_next_comment` |
|
||||
| `[o` | Go to previous comment (**TS**) | `goto_prev_comment` |
|
||||
| `[space` | Add newline above | `add_newline_above` |
|
||||
| `]space` | Add newline below | `add_newline_below` |
|
||||
|
||||
## Insert Mode
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `Escape` | Switch to normal mode | `normal_mode` |
|
||||
| `Ctrl-x` | Autocomplete | `completion` |
|
||||
| `Ctrl-w` | Delete previous word | `delete_word_backward` |
|
||||
We support many readline/emacs style bindings in insert mode for
|
||||
convenience. These can be helpful for making simple modifications
|
||||
without escaping to normal mode, but beware that you will not have an
|
||||
undo-able "save point" until you return to normal mode.
|
||||
|
||||
| Key | Description | Command |
|
||||
| ----- | ----------- | ------- |
|
||||
| `Escape` | Switch to normal mode | `normal_mode` |
|
||||
| `Ctrl-x` | Autocomplete | `completion` |
|
||||
| `Ctrl-r` | Insert a register content | `insert_register` |
|
||||
| `Ctrl-w`, `Alt-Backspace` | Delete previous word | `delete_word_backward` |
|
||||
| `Alt-d` | Delete next word | `delete_word_forward` |
|
||||
| `Alt-b`, `Alt-Left` | Backward a word | `move_prev_word_end` |
|
||||
| `Ctrl-b`, `Left` | Backward a char | `move_char_left` |
|
||||
| `Alt-f`, `Alt-Right` | Forward a word | `move_next_word_start` |
|
||||
| `Ctrl-f`, `Right` | Forward a char | `move_char_right` |
|
||||
| `Ctrl-e`, `End` | Move to line end | `goto_line_end_newline` |
|
||||
| `Ctrl-a`, `Home` | Move to line start | `goto_line_start` |
|
||||
| `Ctrl-u` | Delete to start of line | `kill_to_line_start` |
|
||||
| `Ctrl-k` | Delete to end of line | `kill_to_line_end` |
|
||||
| `Ctrl-j`, `Enter` | Insert new line | `insert_newline` |
|
||||
| `Backspace`, `Ctrl-h` | Delete previous char | `delete_char_backward` |
|
||||
| `Delete`, `Ctrl-d` | Delete next char | `delete_char_forward` |
|
||||
| `Ctrl-p`, `Up` | Move to previous line | `move_line_up` |
|
||||
| `Ctrl-n`, `Down` | Move to next line | `move_line_down` |
|
||||
| `PageUp` | Move one page up | `page_up` |
|
||||
| `PageDown` | Move one page down | `page_down` |
|
||||
| `Alt->` | Go to end of buffer | `goto_file_end` |
|
||||
| `Alt-<` | Go to start of buffer | `goto_file_start` |
|
||||
|
||||
## Select / extend mode
|
||||
|
||||
I'm still pondering whether to keep this mode or not. It changes movement
|
||||
commands (including goto) to extend the existing selection instead of replacing it.
|
||||
This mode echoes Normal mode, but changes any movements to extend
|
||||
selections rather than replace them. Goto motions are also changed to
|
||||
extend, so that `vgl` for example extends the selection to the end of
|
||||
the line.
|
||||
|
||||
> NOTE: It's a bit confusing at the moment because extend hasn't been
|
||||
> implemented for all movement commands yet.
|
||||
Search is also affected. By default, `n` and `N` will remove the current
|
||||
selection and select the next instance of the search term. Toggling this
|
||||
mode before pressing `n` or `N` makes it possible to keep the current
|
||||
selection. Toggling it on and off during your iterative searching allows
|
||||
you to selectively add search terms to your selections.
|
||||
|
||||
# Picker
|
||||
|
||||
@@ -252,9 +332,40 @@ Keys to use within picker. Remapping currently not supported.
|
||||
| Key | Description |
|
||||
| ----- | ------------- |
|
||||
| `Up`, `Ctrl-k`, `Ctrl-p` | Previous entry |
|
||||
| `PageUp`, `Ctrl-b` | Page up |
|
||||
| `Down`, `Ctrl-j`, `Ctrl-n` | Next entry |
|
||||
| `PageDown`, `Ctrl-f` | Page down |
|
||||
| `Home` | Go to first entry |
|
||||
| `End` | Go to last entry |
|
||||
| `Ctrl-space` | Filter options |
|
||||
| `Enter` | Open selected |
|
||||
| `Ctrl-s` | Open horizontally |
|
||||
| `Ctrl-v` | Open vertically |
|
||||
| `Escape`, `Ctrl-c` | Close picker |
|
||||
|
||||
# Prompt
|
||||
|
||||
Keys to use within the prompt. Remapping is currently not supported.
|
||||
|
||||
| Key | Description |
|
||||
| ----- | ------------- |
|
||||
| `Escape`, `Ctrl-c` | Close prompt |
|
||||
| `Alt-b`, `Alt-Left` | Backward a word |
|
||||
| `Ctrl-b`, `Left` | Backward a char |
|
||||
| `Alt-f`, `Alt-Right` | Forward a word |
|
||||
| `Ctrl-f`, `Right` | Forward a char |
|
||||
| `Ctrl-e`, `End` | Move prompt end |
|
||||
| `Ctrl-a`, `Home` | Move prompt start |
|
||||
| `Ctrl-w` | Delete previous word |
|
||||
| `Alt-d` | Delete next word |
|
||||
| `Ctrl-u` | Delete to start of line |
|
||||
| `Ctrl-k` | Delete to end of line |
|
||||
| `backspace`, `Ctrl-h` | Delete previous char |
|
||||
| `delete`, `Ctrl-d` | Delete next char |
|
||||
| `Ctrl-s` | Insert a word under doc cursor, may be changed to Ctrl-r Ctrl-w later |
|
||||
| `Ctrl-p`, `Up` | Select previous history |
|
||||
| `Ctrl-n`, `Down` | Select next history |
|
||||
| `Tab` | Select next completion item |
|
||||
| `BackTab` | Select previous completion item |
|
||||
| `Enter` | Open selected |
|
||||
|
||||
|
10
book/src/lang-support.md
Normal file
10
book/src/lang-support.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# Language Support
|
||||
|
||||
For more information like arguments passed to default LSP server,
|
||||
extensions associated with a filetype, custom LSP settings, filetype
|
||||
specific indent settings, etc see the default
|
||||
[`languages.toml`][languages.toml] file.
|
||||
|
||||
{{#include ./generated/lang-support.md}}
|
||||
|
||||
[languages.toml]: https://github.com/helix-editor/helix/blob/master/languages.toml
|
40
book/src/languages.md
Normal file
40
book/src/languages.md
Normal file
@@ -0,0 +1,40 @@
|
||||
# Languages
|
||||
|
||||
Language-specific settings and settings for particular language servers can be configured in a `languages.toml` file placed in your [configuration directory](./configuration.md). Helix actually uses two `languages.toml` files, the [first one](https://github.com/helix-editor/helix/blob/master/languages.toml) is in the main helix repository; it contains the default settings for each language and is included in the helix binary at compile time. Users who want to see the available settings and options can either reference the helix repo's `languages.toml` file, or consult the table in the [adding languages](./guides/adding_languages.md) section.
|
||||
|
||||
Changes made to the `languages.toml` file in a user's [configuration directory](./configuration.md) are merged with helix's defaults on start-up, such that a user's settings will take precedence over defaults in the event of a collision. For example, the default `languages.toml` sets rust's `auto-format` to `true`. If a user wants to disable auto-format, they can change the `languages.toml` in their [configuration directory](./configuration.md) to make the rust entry read like the example below; the new key/value pair `auto-format = false` will override the default when the two sets of settings are merged on start-up:
|
||||
|
||||
```toml
|
||||
# in <config_dir>/helix/languages.toml
|
||||
|
||||
[[language]]
|
||||
name = "rust"
|
||||
auto-format = false
|
||||
```
|
||||
|
||||
## Tree-sitter grammars
|
||||
|
||||
Tree-sitter grammars can also be configured in `languages.toml`:
|
||||
|
||||
```toml
|
||||
# in <config_dir>/helix/languages.toml
|
||||
|
||||
[[grammar]]
|
||||
name = "rust"
|
||||
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a250c4582510ff34767ec3b7dcdd3c24e8c8aa68" }
|
||||
|
||||
[[grammar]]
|
||||
name = "c"
|
||||
source = { path = "/path/to/tree-sitter-c" }
|
||||
```
|
||||
|
||||
You may use a top-level `use-grammars` key to control which grammars are fetched and built.
|
||||
|
||||
```toml
|
||||
# Note: this key must come **before** the [[language]] and [[grammar]] sections
|
||||
use-grammars = { only = [ "rust", "c", "cpp" ] }
|
||||
# or
|
||||
use-grammars = { except = [ "yaml", "json" ] }
|
||||
```
|
||||
|
||||
When omitted, all grammars are fetched and built.
|
@@ -11,15 +11,19 @@ this:
|
||||
```toml
|
||||
# At most one section each of 'keys.normal', 'keys.insert' and 'keys.select'
|
||||
[keys.normal]
|
||||
C-s = ":w" # Maps the Control-s to the typable command :w which is an alias for :write (save file)
|
||||
C-o = ":open ~/.config/helix/config.toml" # Maps the Control-o to opening of the helix config file
|
||||
a = "move_char_left" # Maps the 'a' key to the move_char_left command
|
||||
w = "move_line_up" # Maps the 'w' key move_line_up
|
||||
"C-S-esc" = "extend_line" # Maps Control-Shift-Escape to extend_line
|
||||
g = { a = "code_action" } # Maps `ga` to show possible code actions
|
||||
"ret" = ["open_below", "normal_mode"] # Maps the enter key to open_below then re-enter normal mode
|
||||
|
||||
[keys.insert]
|
||||
"A-x" = "normal_mode" # Maps Alt-X to enter normal mode
|
||||
j = { k = "normal_mode" } # Maps `jk` to exit insert mode
|
||||
```
|
||||
> NOTE: Typable commands can also be remapped, remember to keep the `:` prefix to indicate it's a typable command.
|
||||
|
||||
Control, Shift and Alt modifiers are encoded respectively with the prefixes
|
||||
`C-`, `S-` and `A-`. Special keys are encoded as follows:
|
||||
@@ -38,12 +42,12 @@ Control, Shift and Alt modifiers are encoded respectively with the prefixes
|
||||
| Left | `"left"` |
|
||||
| Right | `"right"` |
|
||||
| Up | `"up"` |
|
||||
| Down | `"down"` |
|
||||
| Home | `"home"` |
|
||||
| End | `"end"` |
|
||||
| Page | `"pageup"` |
|
||||
| Page | `"pagedown"` |
|
||||
| Page Up | `"pageup"` |
|
||||
| Page Down | `"pagedown"` |
|
||||
| Tab | `"tab"` |
|
||||
| Back Tab | `"backtab"` |
|
||||
| Delete | `"del"` |
|
||||
| Insert | `"ins"` |
|
||||
| Null | `"null"` |
|
||||
@@ -51,4 +55,5 @@ Control, Shift and Alt modifiers are encoded respectively with the prefixes
|
||||
|
||||
Keys can be disabled by binding them to the `no_op` command.
|
||||
|
||||
Commands can be found in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs)
|
||||
Commands can be found at [Keymap](https://docs.helix-editor.com/keymap.html) Commands.
|
||||
> Commands can also be found in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`.
|
||||
|
@@ -1,14 +1,14 @@
|
||||
# Themes
|
||||
|
||||
First you'll need to place selected themes in your `themes` directory (i.e `~/.config/helix/themes`), the directory might have to be created beforehand.
|
||||
|
||||
To use a custom theme add `theme = <name>` to your [`config.toml`](./configuration.md) or override it during runtime using `:theme <name>`.
|
||||
|
||||
The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
|
||||
To use a theme add `theme = "<name>"` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme <name>`.
|
||||
|
||||
## Creating a theme
|
||||
|
||||
First create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`).
|
||||
Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). The directory might have to be created beforehand.
|
||||
|
||||
The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes.
|
||||
|
||||
The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
|
||||
|
||||
Each line in the theme file is specified as below:
|
||||
|
||||
@@ -103,17 +103,19 @@ We use a similar set of scopes as
|
||||
[SublimeText](https://www.sublimetext.com/docs/scope_naming.html). See also
|
||||
[TextMate](https://macromates.com/manual/en/language_grammars) scopes.
|
||||
|
||||
- `escape` (TODO: rename to (constant).character.escape)
|
||||
|
||||
- `type` - Types
|
||||
- `builtin` - Primitive types provided by the language (`int`, `usize`)
|
||||
- `constructor`
|
||||
|
||||
- `constant` (TODO: constant.other.placeholder for %v)
|
||||
- `builtin` Special constants provided by the language (`true`, `false`, `nil` etc)
|
||||
- `boolean`
|
||||
- `character`
|
||||
- `escape`
|
||||
- `numeric` (numbers)
|
||||
- `integer`
|
||||
- `float`
|
||||
|
||||
- `number` (TODO: rename to constant.number/.numeric.{integer, float, complex})
|
||||
- `string` (TODO: string.quoted.{single, double}, string.raw/.unquoted)?
|
||||
- `regexp` - Regular expressions
|
||||
- `special`
|
||||
@@ -129,7 +131,8 @@ We use a similar set of scopes as
|
||||
- `variable` - Variables
|
||||
- `builtin` - Reserved language variables (`self`, `this`, `super`, etc)
|
||||
- `parameter` - Function parameters
|
||||
- `property`
|
||||
- `other`
|
||||
- `member` - Fields of composite data types (e.g. structs, unions)
|
||||
- `function` (TODO: ?)
|
||||
|
||||
- `label`
|
||||
@@ -143,11 +146,13 @@ We use a similar set of scopes as
|
||||
- `conditional` - `if`, `else`
|
||||
- `repeat` - `for`, `while`, `loop`
|
||||
- `import` - `import`, `export`
|
||||
- (TODO: return?)
|
||||
- `return`
|
||||
- `exception`
|
||||
- `operator` - `or`, `in`
|
||||
- `directive` - Preprocessor directives (`#if` in C)
|
||||
- `function` - `fn`, `func`
|
||||
|
||||
- `operator` - `||`, `+=`, `>`, `or`
|
||||
- `operator` - `||`, `+=`, `>`
|
||||
|
||||
- `function`
|
||||
- `builtin`
|
||||
@@ -159,10 +164,46 @@ We use a similar set of scopes as
|
||||
|
||||
- `namespace`
|
||||
|
||||
- `markup`
|
||||
- `heading`
|
||||
- `marker`
|
||||
- `1`, `2`, `3`, `4`, `5`, `6` - heading text for h1 through h6
|
||||
- `list`
|
||||
- `unnumbered`
|
||||
- `numbered`
|
||||
- `bold`
|
||||
- `italic`
|
||||
- `link`
|
||||
- `url` - urls pointed to by links
|
||||
- `label` - non-url link references
|
||||
- `text` - url and image descriptions in links
|
||||
- `quote`
|
||||
- `raw`
|
||||
- `inline`
|
||||
- `block`
|
||||
|
||||
- `diff` - version control changes
|
||||
- `plus` - additions
|
||||
- `minus` - deletions
|
||||
- `delta` - modifications
|
||||
- `moved` - renamed or moved files/changes
|
||||
|
||||
#### Interface
|
||||
|
||||
These scopes are used for theming the editor interface.
|
||||
|
||||
- `markup`
|
||||
- `normal`
|
||||
- `completion` - for completion doc popup ui
|
||||
- `hover` - for hover popup ui
|
||||
- `heading`
|
||||
- `completion` - for completion doc popup ui
|
||||
- `hover` - for hover popup ui
|
||||
- `raw`
|
||||
- `inline`
|
||||
- `completion` - for completion doc popup ui
|
||||
- `hover` - for hover popup ui
|
||||
|
||||
|
||||
| Key | Notes |
|
||||
| --- | --- |
|
||||
@@ -177,12 +218,12 @@ These scopes are used for theming the editor interface.
|
||||
| `ui.statusline` | Statusline |
|
||||
| `ui.statusline.inactive` | Statusline (unfocused document) |
|
||||
| `ui.popup` | |
|
||||
| `ui.popup.info` | |
|
||||
| `ui.window` | |
|
||||
| `ui.help` | |
|
||||
| `ui.text` | |
|
||||
| `ui.text.focus` | |
|
||||
| `ui.info` | |
|
||||
| `ui.info.text` | |
|
||||
| `ui.text.info` | |
|
||||
| `ui.menu` | |
|
||||
| `ui.menu.selected` | |
|
||||
| `ui.selection` | For selections in the editing area |
|
||||
|
15
book/src/title-page.md
Normal file
15
book/src/title-page.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# Helix
|
||||
|
||||
Docs for bleeding edge master can be found at
|
||||
[https://docs.helix-editor.com/master](https://docs.helix-editor.com/master).
|
||||
|
||||
See the [usage] section for a quick overview of the editor, [keymap]
|
||||
section for all available keybindings and the [configuration] section
|
||||
for defining custom keybindings, setting themes, etc.
|
||||
|
||||
Refer to the [FAQ] for common questions.
|
||||
|
||||
[FAQ]: https://github.com/helix-editor/helix/wiki/FAQ
|
||||
[usage]: ./usage.md
|
||||
[keymap]: ./keymap.md
|
||||
[configuration]: ./configuration.md
|
@@ -23,8 +23,10 @@ If there is a selected register before invoking a change or delete command, the
|
||||
| `/` | Last search |
|
||||
| `:` | Last executed command |
|
||||
| `"` | Last yanked text |
|
||||
| `_` | Black hole |
|
||||
|
||||
> There is no special register for copying to system clipboard, instead special commands and keybindings are provided. See the [keymap](keymap.md#space-mode) for the specifics.
|
||||
> The black hole register works as a no-op register, meaning no data will be written to / read from it.
|
||||
|
||||
## Surround
|
||||
|
||||
@@ -40,7 +42,7 @@ helix. The keymappings have been inspired from [vim-sandwich](https://github.com
|
||||
`ms` acts on a selection, so select the text first and use `ms<char>`. `mr` and `md` work
|
||||
on the closest pairs found and selections are not required; use counts to act in outer pairs.
|
||||
|
||||
It can also act on multiple seletions (yay!). For example, to change every occurance of `(use)` to `[use]`:
|
||||
It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`:
|
||||
|
||||
- `%` to select the whole file
|
||||
- `s` to split the selections on a search term
|
||||
@@ -62,12 +64,33 @@ Currently supported: `word`, `surround`, `function`, `class`, `parameter`.
|
||||
| Key after `mi` or `ma` | Textobject selected |
|
||||
| --- | --- |
|
||||
| `w` | Word |
|
||||
| `W` | WORD |
|
||||
| `(`, `[`, `'`, etc | Specified surround pairs |
|
||||
| `f` | Function |
|
||||
| `c` | Class |
|
||||
| `p` | Parameter |
|
||||
| `a` | Argument/parameter |
|
||||
| `o` | Comment |
|
||||
|
||||
Note: `f`, `c`, etc need a tree-sitter grammar active for the current
|
||||
> NOTE: `f`, `c`, etc need a tree-sitter grammar active for the current
|
||||
document and a special tree-sitter query file to work properly. [Only
|
||||
some grammars](https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+filename%3Atextobjects.scm&type=Code&ref=advsearch&l=&l=)
|
||||
currently have the query file implemented. Contributions are welcome !
|
||||
some grammars][lang-support] currently have the query file implemented.
|
||||
Contributions are welcome!
|
||||
|
||||
## Tree-sitter Textobject Based Navigation
|
||||
|
||||
Navigating between functions, classes, parameters, etc is made
|
||||
possible by leveraging tree-sitter and textobjects queries. For
|
||||
example to move to the next function use `]f`, to move to previous
|
||||
class use `[c`, and so on.
|
||||
|
||||
![tree-sitter-nav-demo][tree-sitter-nav-demo]
|
||||
|
||||
See the [unimpaired][unimpaired-keybinds] section of the keybind
|
||||
documentation for the full reference.
|
||||
|
||||
> NOTE: This feature is dependent on tree-sitter based textobjects
|
||||
and therefore requires the corresponding query file to work properly.
|
||||
|
||||
[lang-support]: ./lang-support.md
|
||||
[unimpaired-keybinds]: ./keymap.md#unimpaired
|
||||
[tree-sitter-nav-demo]: https://user-images.githubusercontent.com/23398472/152332550-7dfff043-36a2-4aec-b8f2-77c13eb56d6f.gif
|
||||
|
37
docs/CONTRIBUTING.md
Normal file
37
docs/CONTRIBUTING.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# Contributing
|
||||
|
||||
Contributors are very welcome! **No contribution is too small and all contributions are valued.**
|
||||
|
||||
Some suggestions to get started:
|
||||
|
||||
- You can look at the [good first issue][good-first-issue] label on the issue tracker.
|
||||
- Help with packaging on various distributions needed!
|
||||
- To use print debugging to the [Helix log file][log-file], you must:
|
||||
* Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`)
|
||||
* Pass the appropriate verbosity level option for the desired log level. (`hx -v <file>` for info, more `v`s for higher severity inclusive)
|
||||
- If your preferred language is missing, integrating a tree-sitter grammar for
|
||||
it and defining syntax highlight queries for it is straight forward and
|
||||
doesn't require much knowledge of the internals.
|
||||
|
||||
We provide an [architecture.md][architecture.md] that should give you
|
||||
a good overview of the internals.
|
||||
|
||||
# Auto generated documentation
|
||||
|
||||
Some parts of [the book][docs] are autogenerated from the code itself,
|
||||
like the list of `:commands` and supported languages. To generate these
|
||||
files, run
|
||||
|
||||
```shell
|
||||
cargo xtask docgen
|
||||
```
|
||||
|
||||
inside the project. We use [xtask][xtask] as an ad-hoc task runner and
|
||||
thus do not require any dependencies other than `cargo` (You don't have
|
||||
to `cargo install` anything either).
|
||||
|
||||
[good-first-issue]: https://github.com/helix-editor/helix/labels/E-easy
|
||||
[log-file]: https://github.com/helix-editor/helix/wiki/FAQ#access-the-log-file
|
||||
[architecture.md]: ./architecture.md
|
||||
[docs]: https://docs.helix-editor.com/
|
||||
[xtask]: https://github.com/matklad/cargo-xtask
|
@@ -1,12 +1,13 @@
|
||||
|
||||
| Crate | Description |
|
||||
| ----------- | ----------- |
|
||||
| helix-core | Core editing primitives, functional. |
|
||||
| helix-syntax | Tree-sitter grammars |
|
||||
| helix-lsp | Language server client |
|
||||
| helix-view | UI abstractions for use in backends, imperative shell. |
|
||||
| helix-term | Terminal UI |
|
||||
| helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive |
|
||||
| Crate | Description |
|
||||
| ----------- | ----------- |
|
||||
| helix-core | Core editing primitives, functional. |
|
||||
| helix-lsp | Language server client |
|
||||
| helix-dap | Debug Adapter Protocol (DAP) client |
|
||||
| helix-loader | Functions for building, fetching, and loading external resources |
|
||||
| helix-view | UI abstractions for use in backends, imperative shell. |
|
||||
| helix-term | Terminal UI |
|
||||
| helix-tui | TUI primitives, forked from tui-rs, inspired by Cursive |
|
||||
|
||||
|
||||
This document contains a high-level overview of Helix internals.
|
||||
@@ -54,15 +55,40 @@ A `Document` ties together the `Rope`, `Selection`(s), `Syntax`, document
|
||||
file.
|
||||
|
||||
A `View` represents an open split in the UI. It holds the currently open
|
||||
document ID and other related state.
|
||||
document ID and other related state. Views encapsulate the gutter, status line,
|
||||
diagnostics, and the inner area where the code is displayed.
|
||||
|
||||
> NOTE: Multiple views are able to display the same document, so the document
|
||||
> contains selections for each view. To retrieve, `document.selection()` takes
|
||||
> a `ViewId`.
|
||||
|
||||
`Info` is the autoinfo box that shows hints when awaiting another key with bindings
|
||||
like `g` and `m`. It is attached to the viewport as a whole.
|
||||
|
||||
`Surface` is like a buffer to which widgets draw themselves to, and the
|
||||
surface is then rendered on the screen on each cycle.
|
||||
|
||||
`Rect`s are areas (simply an x and y coordinate with the origin at the
|
||||
screen top left and then a height and width) which are part of a
|
||||
`Surface`. They can be used to limit the area to which a `Component` can
|
||||
render. For example if we wrap a `Markdown` component in a `Popup`
|
||||
(think the documentation popup with space+k), Markdown's render method
|
||||
will get a Rect that is the exact size of the popup.
|
||||
|
||||
Widgets are called `Component`s internally, and you can see most of them
|
||||
in `helix-term/src/ui`. Some components like `Popup` and `Overlay` can take
|
||||
other components as children.
|
||||
|
||||
`Layer`s are how multiple components are displayed, and is simply a
|
||||
`Vec<Component>`. Layers are managed by the `Compositor`. On each top
|
||||
level render call, the compositor renders each component in the order
|
||||
they were pushed into the stack. This makes multiple components "layer"
|
||||
on top of one another. Hence we get a file picker displayed over the
|
||||
editor, etc.
|
||||
|
||||
The `Editor` holds the global state: all the open documents, a tree
|
||||
representation of all the view splits, and a registry of language servers. To
|
||||
open or close files, interact with the editor.
|
||||
representation of all the view splits, the configuration, and a registry of
|
||||
language servers. To open or close files, interact with the editor.
|
||||
|
||||
## LSP
|
||||
|
||||
|
146
flake.lock
generated
146
flake.lock
generated
@@ -1,12 +1,35 @@
|
||||
{
|
||||
"nodes": {
|
||||
"devshell": {
|
||||
"crane": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1632436039,
|
||||
"narHash": "sha256-OtITeVWcKXn1SpVEnImpTGH91FycCskGBPqmlxiykv4=",
|
||||
"lastModified": 1644785799,
|
||||
"narHash": "sha256-VpAJO1L0XeBvtCuNGK4IDKp6ENHIpTrlaZT7yfBCvwo=",
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"rev": "fc7a94f841347c88f2cb44217b2a3faa93e2a0b2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"devshell": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1646667754,
|
||||
"narHash": "sha256-LahZHvCC3UVzGQ55iWDRZkuDssXl1rYgqgScrPV9S38=",
|
||||
"owner": "numtide",
|
||||
"repo": "devshell",
|
||||
"rev": "7a7a7aa0adebe5488e5abaec688fd9ae0f8ea9c6",
|
||||
"rev": "59fbe1dfc0de8c3332957c16998a7d16dff365d8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -15,13 +38,64 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"dream2nix": {
|
||||
"inputs": {
|
||||
"alejandra": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"crane": "crane",
|
||||
"flake-utils-pre-commit": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"gomod2nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"mach-nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"nixpkgs": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"node2nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"poetry2nix": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
],
|
||||
"pre-commit-hooks": [
|
||||
"nixCargoIntegration",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1646710334,
|
||||
"narHash": "sha256-eLBcDgcbOUfeH4k6SEW5a5v0PTp2KNCn+5ZXIoWGYww=",
|
||||
"owner": "nix-community",
|
||||
"repo": "dream2nix",
|
||||
"rev": "5dcfbfd3b60ce0208b894c1bdea00e2bdf80ca6a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"ref": "main",
|
||||
"repo": "dream2nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"locked": {
|
||||
"lastModified": 1623875721,
|
||||
"narHash": "sha256-A8BU7bjS5GirpAUv4QA+QnJ4CceLHkcXdRp4xITDB0s=",
|
||||
"lastModified": 1642700792,
|
||||
"narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "f7e004a55b120c02ecb6219596820fcd32ca8772",
|
||||
"rev": "846b2ae0fc4cc943637d3d1def4454213e203cba",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -30,25 +104,25 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flakeCompat": {
|
||||
"flake": false,
|
||||
"flake-utils_2": {
|
||||
"locked": {
|
||||
"lastModified": 1627913399,
|
||||
"narHash": "sha256-hY8g6H2KFL8ownSiFeMOjwPC8P0ueXpCVEbxgda3pko=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "12c64ca55c1014cdc1b16ed5a804aa8576601ff2",
|
||||
"lastModified": 1637014545,
|
||||
"narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixCargoIntegration": {
|
||||
"inputs": {
|
||||
"devshell": "devshell",
|
||||
"dream2nix": "dream2nix",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
@@ -57,11 +131,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1634796585,
|
||||
"narHash": "sha256-CW4yx6omk5qCXUIwXHp/sztA7u0SpyLq9NEACPnkiz8=",
|
||||
"lastModified": 1646766572,
|
||||
"narHash": "sha256-DV3+zxvAIKsMHsHedJKYFsracvFyLKpFQqurUBR86oY=",
|
||||
"owner": "yusdacra",
|
||||
"repo": "nix-cargo-integration",
|
||||
"rev": "a84a2137a396f303978f1d48341e0390b0e16a8b",
|
||||
"rev": "3a3f47f43ba486b7554164a698c8dfc5a38624ce",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -72,11 +146,11 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1634782485,
|
||||
"narHash": "sha256-psfh4OQSokGXG0lpq3zKFbhOo3QfoeudRcaUnwMRkQo=",
|
||||
"lastModified": 1646497237,
|
||||
"narHash": "sha256-Ccpot1h/rV8MgcngDp5OrdmLTMaUTbStZTR5/sI7zW0=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "34ad3ffe08adfca17fcb4e4a47bb5f3b113687be",
|
||||
"rev": "062a0c5437b68f950b081bbfc8a699d57a4ee026",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -86,24 +160,8 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1628186154,
|
||||
"narHash": "sha256-r2d0wvywFnL9z4iptztdFMhaUIAaGzrSs7kSok0PgmE=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "06552b72346632b6943c8032e57e702ea12413bf",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flakeCompat": "flakeCompat",
|
||||
"nixCargoIntegration": "nixCargoIntegration",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"rust-overlay": "rust-overlay"
|
||||
@@ -111,15 +169,17 @@
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs_2"
|
||||
"flake-utils": "flake-utils_2",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1634869268,
|
||||
"narHash": "sha256-RVAcEFlFU3877Mm4q/nbXGEYTDg/wQNhzmXGMTV6wBs=",
|
||||
"lastModified": 1646792695,
|
||||
"narHash": "sha256-2drCXIKIQnJMlTZbcCfuHZAh+iPcdlRkCqtZnA6MHLY=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "c02c2d86354327317546501af001886fbb53d374",
|
||||
"rev": "7f599870402c8d2a5806086c8ee0f2d92b175c54",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
82
flake.nix
82
flake.nix
@@ -3,71 +3,61 @@
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||
rust-overlay.url = "github:oxalica/rust-overlay";
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
nixCargoIntegration = {
|
||||
url = "github:yusdacra/nix-cargo-integration";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
inputs.rustOverlay.follows = "rust-overlay";
|
||||
};
|
||||
flakeCompat = {
|
||||
url = "github:edolstra/flake-compat";
|
||||
flake = false;
|
||||
};
|
||||
};
|
||||
|
||||
outputs = inputs@{ self, nixCargoIntegration, ... }:
|
||||
outputs = inputs@{ nixCargoIntegration, ... }:
|
||||
nixCargoIntegration.lib.makeOutputs {
|
||||
root = ./.;
|
||||
buildPlatform = "crate2nix";
|
||||
renameOutputs = { "helix-term" = "helix"; };
|
||||
# Set default app to hx (binary is from helix-term release build)
|
||||
# Set default package to helix-term release build
|
||||
defaultOutputs = { app = "hx"; package = "helix"; };
|
||||
defaultOutputs = {
|
||||
app = "hx";
|
||||
package = "helix";
|
||||
};
|
||||
overrides = {
|
||||
crateOverrides = common: _: {
|
||||
helix-term = prev: {
|
||||
# link languages and theme toml files since helix-term expects them (for tests)
|
||||
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} ..";
|
||||
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
|
||||
};
|
||||
# link languages and theme toml files since helix-view expects them
|
||||
helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} .."; };
|
||||
helix-syntax = _prev: {
|
||||
preConfigure = "mkdir -p ../runtime/grammars";
|
||||
postInstall = "cp -r ../runtime $out/runtime";
|
||||
};
|
||||
};
|
||||
mainBuild = common: prev:
|
||||
let
|
||||
inherit (common) pkgs lib;
|
||||
helixSyntax = lib.buildCrate {
|
||||
root = self;
|
||||
memberName = "helix-syntax";
|
||||
defaultCrateOverrides = {
|
||||
helix-syntax = common.crateOverrides.helix-syntax;
|
||||
};
|
||||
release = false;
|
||||
helix-term = prev:
|
||||
let
|
||||
inherit (common) pkgs;
|
||||
grammars = pkgs.callPackage ./grammars.nix { };
|
||||
runtimeDir = pkgs.runCommand "helix-runtime" { } ''
|
||||
mkdir -p $out
|
||||
ln -s ${common.root}/runtime/* $out
|
||||
rm -r $out/grammars
|
||||
ln -s ${grammars} $out/grammars
|
||||
'';
|
||||
in
|
||||
{
|
||||
# disable fetching and building of tree-sitter grammars in the helix-term build.rs
|
||||
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
|
||||
# link languages and theme toml files since helix-term expects them (for tests)
|
||||
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} ..";
|
||||
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
|
||||
nativeBuildInputs = [ pkgs.makeWrapper ];
|
||||
|
||||
postFixup = ''
|
||||
if [ -f "$out/bin/hx" ]; then
|
||||
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
|
||||
fi
|
||||
'';
|
||||
};
|
||||
runtimeDir = pkgs.runCommand "helix-runtime" { } ''
|
||||
mkdir -p $out
|
||||
ln -s ${common.root}/runtime/* $out
|
||||
ln -sf ${helixSyntax}/runtime/grammars $out
|
||||
'';
|
||||
in
|
||||
lib.optionalAttrs (common.memberName == "helix-term") {
|
||||
nativeBuildInputs = [ pkgs.makeWrapper ];
|
||||
postFixup = ''
|
||||
if [ -f "$out/bin/hx" ]; then
|
||||
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
|
||||
fi
|
||||
'';
|
||||
};
|
||||
};
|
||||
shell = common: prev: {
|
||||
packages = prev.packages ++ (with common.pkgs; [ lld_12 lldb cargo-tarpaulin ]);
|
||||
packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin cargo-flamegraph ]);
|
||||
env = prev.env ++ [
|
||||
{ name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; }
|
||||
{ name = "RUST_BACKTRACE"; value = "1"; }
|
||||
{ name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native"; }
|
||||
{ name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"; }
|
||||
];
|
||||
};
|
||||
};
|
||||
|
89
grammars.nix
Normal file
89
grammars.nix
Normal file
@@ -0,0 +1,89 @@
|
||||
{ stdenv, lib, runCommand, yj }:
|
||||
let
|
||||
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
|
||||
# before parsing
|
||||
languages-json = runCommand "languages-toml-to-json" { } ''
|
||||
${yj}/bin/yj -t < ${./languages.toml} > $out
|
||||
'';
|
||||
languagesConfig =
|
||||
builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
|
||||
isGitGrammar = (grammar:
|
||||
builtins.hasAttr "source" grammar && builtins.hasAttr "git" grammar.source
|
||||
&& builtins.hasAttr "rev" grammar.source);
|
||||
gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar;
|
||||
buildGrammar = grammar:
|
||||
let
|
||||
source = builtins.fetchTree {
|
||||
type = "git";
|
||||
url = grammar.source.git;
|
||||
rev = grammar.source.rev;
|
||||
ref = grammar.source.ref or "HEAD";
|
||||
shallow = true;
|
||||
};
|
||||
in stdenv.mkDerivation rec {
|
||||
# see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix
|
||||
|
||||
pname = "helix-tree-sitter-${grammar.name}";
|
||||
version = grammar.source.rev;
|
||||
|
||||
src = if builtins.hasAttr "subpath" grammar.source then
|
||||
"${source}/${grammar.source.subpath}"
|
||||
else
|
||||
source;
|
||||
|
||||
dontUnpack = true;
|
||||
dontConfigure = true;
|
||||
|
||||
FLAGS = [
|
||||
"-I${src}/src"
|
||||
"-g"
|
||||
"-O3"
|
||||
"-fPIC"
|
||||
"-fno-exceptions"
|
||||
"-Wl,-z,relro,-z,now"
|
||||
];
|
||||
|
||||
NAME = grammar.name;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
if [[ -e "$src/src/scanner.cc" ]]; then
|
||||
$CXX -c "$src/src/scanner.cc" -o scanner.o $FLAGS
|
||||
elif [[ -e "$src/src/scanner.c" ]]; then
|
||||
$CC -c "$src/src/scanner.c" -o scanner.o $FLAGS
|
||||
fi
|
||||
|
||||
$CC -c "$src/src/parser.c" -o parser.o $FLAGS
|
||||
$CXX -shared -o $NAME.so *.o
|
||||
|
||||
ls -al
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
mkdir $out
|
||||
mv $NAME.so $out/
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
# Strip failed on darwin: strip: error: symbols referenced by indirect symbol table entries that can't be stripped
|
||||
fixupPhase = lib.optionalString stdenv.isLinux ''
|
||||
runHook preFixup
|
||||
$STRIP $out/$NAME.so
|
||||
runHook postFixup
|
||||
'';
|
||||
};
|
||||
builtGrammars = builtins.map (grammar: {
|
||||
inherit (grammar) name;
|
||||
artifact = buildGrammar grammar;
|
||||
}) gitGrammars;
|
||||
grammarLinks = builtins.map (grammar:
|
||||
"ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so")
|
||||
builtGrammars;
|
||||
in runCommand "consolidated-helix-grammars" { } ''
|
||||
mkdir -p $out
|
||||
${builtins.concatStringsSep "\n" grammarLinks}
|
||||
''
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "helix-core"
|
||||
version = "0.5.0"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
@@ -13,17 +13,18 @@ include = ["src/**/*", "README.md"]
|
||||
[features]
|
||||
|
||||
[dependencies]
|
||||
helix-syntax = { version = "0.5", path = "../helix-syntax" }
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
|
||||
ropey = "1.3"
|
||||
smallvec = "1.7"
|
||||
tendril = "0.4.2"
|
||||
unicode-segmentation = "1.8"
|
||||
smallvec = "1.8"
|
||||
smartstring = "1.0.0"
|
||||
unicode-segmentation = "1.9"
|
||||
unicode-width = "0.1"
|
||||
unicode-general-category = "0.4"
|
||||
unicode-general-category = "0.5"
|
||||
# slab = "0.4.2"
|
||||
slotmap = "1.0"
|
||||
tree-sitter = "0.20"
|
||||
once_cell = "1.8"
|
||||
once_cell = "1.10"
|
||||
arc-swap = "1"
|
||||
regex = "1"
|
||||
|
||||
@@ -34,6 +35,10 @@ toml = "0.5"
|
||||
|
||||
similar = "2.1"
|
||||
|
||||
encoding_rs = "0.8"
|
||||
|
||||
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
|
||||
|
||||
etcetera = "0.3"
|
||||
|
||||
[dev-dependencies]
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -91,12 +91,11 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_categorize() {
|
||||
const EOL_TEST_CASE: &'static str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
|
||||
const WORD_TEST_CASE: &'static str =
|
||||
"_hello_world_あいうえおー12345678901234567890";
|
||||
const PUNCTUATION_TEST_CASE: &'static str =
|
||||
const EOL_TEST_CASE: &str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
|
||||
const WORD_TEST_CASE: &str = "_hello_world_あいうえおー12345678901234567890";
|
||||
const PUNCTUATION_TEST_CASE: &str =
|
||||
"!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~";
|
||||
const WHITESPACE_TEST_CASE: &'static str = " ";
|
||||
const WHITESPACE_TEST_CASE: &str = " ";
|
||||
|
||||
for ch in EOL_TEST_CASE.chars() {
|
||||
assert_eq!(CharCategory::Eol, categorize_char(ch));
|
||||
|
@@ -63,7 +63,7 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
|
||||
let token = token.unwrap_or("//");
|
||||
let comment = Tendril::from(format!("{} ", token));
|
||||
|
||||
let mut lines: Vec<usize> = Vec::new();
|
||||
let mut lines: Vec<usize> = Vec::with_capacity(selection.len());
|
||||
|
||||
let mut min_next_line = 0;
|
||||
for selection in selection {
|
||||
|
10
helix-core/src/config.rs
Normal file
10
helix-core/src/config.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
/// Syntax configuration loader based on built-in languages.toml.
|
||||
pub fn default_syntax_loader() -> crate::syntax::Configuration {
|
||||
helix_loader::default_lang_config()
|
||||
.try_into()
|
||||
.expect("Could not serialize built-in languages.toml")
|
||||
}
|
||||
/// Syntax configuration loader based on user configured languages.toml.
|
||||
pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> {
|
||||
helix_loader::user_lang_config()?.try_into()
|
||||
}
|
@@ -1,23 +1,30 @@
|
||||
//! LSP diagnostic utility types.
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Describes the severity level of a [`Diagnostic`].
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Deserialize, Serialize)]
|
||||
pub enum Severity {
|
||||
Error,
|
||||
Warning,
|
||||
Info,
|
||||
Hint,
|
||||
Info,
|
||||
Warning,
|
||||
Error,
|
||||
}
|
||||
|
||||
impl Default for Severity {
|
||||
fn default() -> Self {
|
||||
Self::Hint
|
||||
}
|
||||
}
|
||||
|
||||
/// A range of `char`s within the text.
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
|
||||
pub struct Range {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
}
|
||||
|
||||
/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.91.0/lsp_types/struct.Diagnostic.html)
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Diagnostic {
|
||||
pub range: Range,
|
||||
pub line: usize,
|
||||
|
@@ -11,10 +11,6 @@ pub fn compare_ropes(old: &Rope, new: &Rope) -> Transaction {
|
||||
// A timeout is set so after 1 seconds, the algorithm will start
|
||||
// approximating. This is especially important for big `Rope`s or
|
||||
// `Rope`s that are extremely dissimilar to each other.
|
||||
//
|
||||
// Note: Ignore the clippy warning, as the trait bounds of
|
||||
// `Transaction::change()` require an iterator implementing
|
||||
// `ExactIterator`.
|
||||
let mut config = similar::TextDiff::configure();
|
||||
config.timeout(std::time::Duration::from_secs(1));
|
||||
|
||||
@@ -62,7 +58,7 @@ mod tests {
|
||||
let mut old = Rope::from(a);
|
||||
let new = Rope::from(b);
|
||||
compare_ropes(&old, &new).apply(&mut old);
|
||||
old.to_string() == new.to_string()
|
||||
old == new
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
|
||||
//! Utility functions to traverse the unicode graphemes of a `Rope`'s text contents.
|
||||
//!
|
||||
//! Based on https://github.com/cessen/led/blob/c4fa72405f510b7fd16052f90a598c429b3104a6/src/graphemes.rs
|
||||
//! Based on <https://github.com/cessen/led/blob/c4fa72405f510b7fd16052f90a598c429b3104a6/src/graphemes.rs>
|
||||
use ropey::{iter::Chunks, str_utils::byte_to_char_idx, RopeSlice};
|
||||
use unicode_segmentation::{GraphemeCursor, GraphemeIncomplete};
|
||||
use unicode_width::UnicodeWidthStr;
|
||||
@@ -120,6 +120,43 @@ pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -
|
||||
chunk_char_idx + tmp
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn nth_next_grapheme_boundary_byte(slice: RopeSlice, mut byte_idx: usize, n: usize) -> usize {
|
||||
// Bounds check
|
||||
debug_assert!(byte_idx <= slice.len_bytes());
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (mut chunk, mut chunk_byte_idx, mut _chunk_char_idx, _) = slice.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
|
||||
|
||||
// Find the nth next grapheme cluster boundary.
|
||||
for _ in 0..n {
|
||||
loop {
|
||||
match gc.next_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(None) => return slice.len_bytes(),
|
||||
Ok(Some(n)) => {
|
||||
byte_idx = n;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::NextChunk) => {
|
||||
chunk_byte_idx += chunk.len();
|
||||
let (a, _, _c, _) = slice.chunk_at_byte(chunk_byte_idx);
|
||||
chunk = a;
|
||||
// chunk_char_idx = c;
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let ctx_chunk = slice.chunk_at_byte(n - 1).0;
|
||||
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
byte_idx
|
||||
}
|
||||
|
||||
/// Finds the next grapheme boundary after the given char position.
|
||||
#[must_use]
|
||||
#[inline(always)]
|
||||
@@ -127,6 +164,13 @@ pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize {
|
||||
nth_next_grapheme_boundary(slice, char_idx, 1)
|
||||
}
|
||||
|
||||
/// Finds the next grapheme boundary after the given byte position.
|
||||
#[must_use]
|
||||
#[inline(always)]
|
||||
pub fn next_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> usize {
|
||||
nth_next_grapheme_boundary_byte(slice, byte_idx, 1)
|
||||
}
|
||||
|
||||
/// Returns the passed char index if it's already a grapheme boundary,
|
||||
/// or the next grapheme boundary char index if not.
|
||||
#[must_use]
|
||||
@@ -151,6 +195,23 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the passed byte index if it's already a grapheme boundary,
|
||||
/// or the next grapheme boundary byte index if not.
|
||||
#[must_use]
|
||||
#[inline]
|
||||
pub fn ensure_grapheme_boundary_next_byte(slice: RopeSlice, byte_idx: usize) -> usize {
|
||||
if byte_idx == 0 {
|
||||
byte_idx
|
||||
} else {
|
||||
// TODO: optimize so we're not constructing grapheme cursor twice
|
||||
if is_grapheme_boundary_byte(slice, byte_idx) {
|
||||
byte_idx
|
||||
} else {
|
||||
next_grapheme_boundary_byte(slice, byte_idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the given char position is a grapheme boundary.
|
||||
#[must_use]
|
||||
pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
|
||||
@@ -179,6 +240,31 @@ pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the given byte position is a grapheme boundary.
|
||||
#[must_use]
|
||||
pub fn is_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> bool {
|
||||
// Bounds check
|
||||
debug_assert!(byte_idx <= slice.len_bytes());
|
||||
|
||||
// Get the chunk with our byte index in it.
|
||||
let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
|
||||
|
||||
// Set up the grapheme cursor.
|
||||
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
|
||||
|
||||
// Determine if the given position is a grapheme cluster boundary.
|
||||
loop {
|
||||
match gc.is_boundary(chunk, chunk_byte_idx) {
|
||||
Ok(n) => return n,
|
||||
Err(GraphemeIncomplete::PreContext(n)) => {
|
||||
let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
|
||||
gc.provide_context(ctx_chunk, ctx_byte_start);
|
||||
}
|
||||
Err(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator over the graphemes of a `RopeSlice`.
|
||||
#[derive(Clone)]
|
||||
pub struct RopeGraphemes<'a> {
|
||||
|
@@ -1,4 +1,4 @@
|
||||
use crate::{ChangeSet, Rope, State, Transaction};
|
||||
use crate::{Assoc, ChangeSet, Range, Rope, State, Transaction};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use std::num::NonZeroUsize;
|
||||
@@ -40,7 +40,7 @@ use std::time::{Duration, Instant};
|
||||
/// * Because delete transactions currently don't store the text that they
|
||||
/// delete, we also store an inversion of the transaction.
|
||||
///
|
||||
/// Using time to navigate the history: https://github.com/helix-editor/helix/pull/194
|
||||
/// Using time to navigate the history: <https://github.com/helix-editor/helix/pull/194>
|
||||
#[derive(Debug)]
|
||||
pub struct History {
|
||||
revisions: Vec<Revision>,
|
||||
@@ -133,6 +133,32 @@ impl History {
|
||||
Some(&self.revisions[last_child.get()].transaction)
|
||||
}
|
||||
|
||||
// Get the position of last change
|
||||
pub fn last_edit_pos(&self) -> Option<usize> {
|
||||
if self.current == 0 {
|
||||
return None;
|
||||
}
|
||||
let current_revision = &self.revisions[self.current];
|
||||
let primary_selection = current_revision
|
||||
.inversion
|
||||
.selection()
|
||||
.expect("inversion always contains a selection")
|
||||
.primary();
|
||||
let (_from, to, _fragment) = current_revision
|
||||
.transaction
|
||||
.changes_iter()
|
||||
// find a change that matches the primary selection
|
||||
.find(|(from, to, _fragment)| Range::new(*from, *to).overlaps(&primary_selection))
|
||||
// or use the first change
|
||||
.or_else(|| current_revision.transaction.changes_iter().next())
|
||||
.unwrap();
|
||||
let pos = current_revision
|
||||
.transaction
|
||||
.changes()
|
||||
.map_pos(to, Assoc::After);
|
||||
Some(pos)
|
||||
}
|
||||
|
||||
fn lowest_common_ancestor(&self, mut a: usize, mut b: usize) -> usize {
|
||||
use std::collections::HashSet;
|
||||
let mut a_path_set = HashSet::new();
|
||||
@@ -256,7 +282,7 @@ impl History {
|
||||
}
|
||||
|
||||
/// Whether to undo by a number of edits or a duration of time.
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
pub enum UndoKind {
|
||||
Steps(usize),
|
||||
TimePeriod(std::time::Duration),
|
||||
@@ -422,8 +448,8 @@ mod test {
|
||||
change: crate::transaction::Change,
|
||||
instant: Instant,
|
||||
) {
|
||||
let txn = Transaction::change(&state.doc, vec![change.clone()].into_iter());
|
||||
history.commit_revision_at_timestamp(&txn, &state, instant);
|
||||
let txn = Transaction::change(&state.doc, vec![change].into_iter());
|
||||
history.commit_revision_at_timestamp(&txn, state, instant);
|
||||
txn.apply(&mut state.doc);
|
||||
}
|
||||
|
||||
|
490
helix-core/src/increment/date_time.rs
Normal file
490
helix-core/src/increment/date_time.rs
Normal file
@@ -0,0 +1,490 @@
|
||||
use chrono::{Datelike, Duration, NaiveDate, NaiveDateTime, NaiveTime, Timelike};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use ropey::RopeSlice;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::cmp;
|
||||
|
||||
use super::Increment;
|
||||
use crate::{Range, Tendril};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct DateTimeIncrementor {
|
||||
date_time: NaiveDateTime,
|
||||
range: Range,
|
||||
fmt: &'static str,
|
||||
field: DateField,
|
||||
}
|
||||
|
||||
impl DateTimeIncrementor {
|
||||
pub fn from_range(text: RopeSlice, range: Range) -> Option<DateTimeIncrementor> {
|
||||
let range = if range.is_empty() {
|
||||
if range.anchor < text.len_chars() {
|
||||
// Treat empty range as a cursor range.
|
||||
range.put_cursor(text, range.anchor + 1, true)
|
||||
} else {
|
||||
// The range is empty and at the end of the text.
|
||||
return None;
|
||||
}
|
||||
} else {
|
||||
range
|
||||
};
|
||||
|
||||
FORMATS.iter().find_map(|format| {
|
||||
let from = range.from().saturating_sub(format.max_len);
|
||||
let to = (range.from() + format.max_len).min(text.len_chars());
|
||||
|
||||
let (from_in_text, to_in_text) = (range.from() - from, range.to() - from);
|
||||
let text: Cow<str> = text.slice(from..to).into();
|
||||
|
||||
let captures = format.regex.captures(&text)?;
|
||||
if captures.len() - 1 != format.fields.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let date_time = captures.get(0)?;
|
||||
let offset = range.from() - from_in_text;
|
||||
let range = Range::new(date_time.start() + offset, date_time.end() + offset);
|
||||
|
||||
let field = captures
|
||||
.iter()
|
||||
.skip(1)
|
||||
.enumerate()
|
||||
.find_map(|(i, capture)| {
|
||||
let capture = capture?;
|
||||
let capture_range = capture.range();
|
||||
|
||||
if capture_range.contains(&from_in_text)
|
||||
&& capture_range.contains(&(to_in_text - 1))
|
||||
{
|
||||
Some(format.fields[i])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
|
||||
let has_date = format.fields.iter().any(|f| f.unit.is_date());
|
||||
let has_time = format.fields.iter().any(|f| f.unit.is_time());
|
||||
|
||||
let date_time = &text[date_time.start()..date_time.end()];
|
||||
let date_time = match (has_date, has_time) {
|
||||
(true, true) => NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?,
|
||||
(true, false) => {
|
||||
let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?;
|
||||
|
||||
date.and_hms(0, 0, 0)
|
||||
}
|
||||
(false, true) => {
|
||||
let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?;
|
||||
|
||||
NaiveDate::from_ymd(0, 1, 1).and_time(time)
|
||||
}
|
||||
(false, false) => return None,
|
||||
};
|
||||
|
||||
Some(DateTimeIncrementor {
|
||||
date_time,
|
||||
range,
|
||||
fmt: format.fmt,
|
||||
field,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Increment for DateTimeIncrementor {
|
||||
fn increment(&self, amount: i64) -> (Range, Tendril) {
|
||||
let date_time = match self.field.unit {
|
||||
DateUnit::Years => add_years(self.date_time, amount),
|
||||
DateUnit::Months => add_months(self.date_time, amount),
|
||||
DateUnit::Days => add_duration(self.date_time, Duration::days(amount)),
|
||||
DateUnit::Hours => add_duration(self.date_time, Duration::hours(amount)),
|
||||
DateUnit::Minutes => add_duration(self.date_time, Duration::minutes(amount)),
|
||||
DateUnit::Seconds => add_duration(self.date_time, Duration::seconds(amount)),
|
||||
DateUnit::AmPm => toggle_am_pm(self.date_time),
|
||||
}
|
||||
.unwrap_or(self.date_time);
|
||||
|
||||
(self.range, date_time.format(self.fmt).to_string().into())
|
||||
}
|
||||
}
|
||||
|
||||
static FORMATS: Lazy<Vec<Format>> = Lazy::new(|| {
|
||||
vec![
|
||||
Format::new("%Y-%m-%d %H:%M:%S"), // 2021-11-24 07:12:23
|
||||
Format::new("%Y/%m/%d %H:%M:%S"), // 2021/11/24 07:12:23
|
||||
Format::new("%Y-%m-%d %H:%M"), // 2021-11-24 07:12
|
||||
Format::new("%Y/%m/%d %H:%M"), // 2021/11/24 07:12
|
||||
Format::new("%Y-%m-%d"), // 2021-11-24
|
||||
Format::new("%Y/%m/%d"), // 2021/11/24
|
||||
Format::new("%a %b %d %Y"), // Wed Nov 24 2021
|
||||
Format::new("%d-%b-%Y"), // 24-Nov-2021
|
||||
Format::new("%Y %b %d"), // 2021 Nov 24
|
||||
Format::new("%b %d, %Y"), // Nov 24, 2021
|
||||
Format::new("%-I:%M:%S %P"), // 7:21:53 am
|
||||
Format::new("%-I:%M %P"), // 7:21 am
|
||||
Format::new("%-I:%M:%S %p"), // 7:21:53 AM
|
||||
Format::new("%-I:%M %p"), // 7:21 AM
|
||||
Format::new("%H:%M:%S"), // 23:24:23
|
||||
Format::new("%H:%M"), // 23:24
|
||||
]
|
||||
});
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Format {
|
||||
fmt: &'static str,
|
||||
fields: Vec<DateField>,
|
||||
regex: Regex,
|
||||
max_len: usize,
|
||||
}
|
||||
|
||||
impl Format {
|
||||
fn new(fmt: &'static str) -> Self {
|
||||
let mut remaining = fmt;
|
||||
let mut fields = Vec::new();
|
||||
let mut regex = String::new();
|
||||
let mut max_len = 0;
|
||||
|
||||
while let Some(i) = remaining.find('%') {
|
||||
let after = &remaining[i + 1..];
|
||||
let mut chars = after.chars();
|
||||
let c = chars.next().unwrap();
|
||||
|
||||
let spec_len = if c == '-' {
|
||||
1 + chars.next().unwrap().len_utf8()
|
||||
} else {
|
||||
c.len_utf8()
|
||||
};
|
||||
|
||||
let specifier = &after[..spec_len];
|
||||
let field = DateField::from_specifier(specifier).unwrap();
|
||||
fields.push(field);
|
||||
max_len += field.max_len + remaining[..i].len();
|
||||
regex += &remaining[..i];
|
||||
regex += &format!("({})", field.regex);
|
||||
remaining = &after[spec_len..];
|
||||
}
|
||||
|
||||
let regex = Regex::new(®ex).unwrap();
|
||||
|
||||
Self {
|
||||
fmt,
|
||||
fields,
|
||||
regex,
|
||||
max_len,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Format {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.fmt == other.fmt && self.fields == other.fields && self.max_len == other.max_len
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Format {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
struct DateField {
|
||||
regex: &'static str,
|
||||
unit: DateUnit,
|
||||
max_len: usize,
|
||||
}
|
||||
|
||||
impl DateField {
|
||||
fn from_specifier(specifier: &str) -> Option<Self> {
|
||||
match specifier {
|
||||
"Y" => Some(Self {
|
||||
regex: r"\d{4}",
|
||||
unit: DateUnit::Years,
|
||||
max_len: 5,
|
||||
}),
|
||||
"y" => Some(Self {
|
||||
regex: r"\d\d",
|
||||
unit: DateUnit::Years,
|
||||
max_len: 2,
|
||||
}),
|
||||
"m" => Some(Self {
|
||||
regex: r"[0-1]\d",
|
||||
unit: DateUnit::Months,
|
||||
max_len: 2,
|
||||
}),
|
||||
"d" => Some(Self {
|
||||
regex: r"[0-3]\d",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 2,
|
||||
}),
|
||||
"-d" => Some(Self {
|
||||
regex: r"[1-3]?\d",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 2,
|
||||
}),
|
||||
"a" => Some(Self {
|
||||
regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 3,
|
||||
}),
|
||||
"A" => Some(Self {
|
||||
regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday",
|
||||
unit: DateUnit::Days,
|
||||
max_len: 9,
|
||||
}),
|
||||
"b" | "h" => Some(Self {
|
||||
regex: r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec",
|
||||
unit: DateUnit::Months,
|
||||
max_len: 3,
|
||||
}),
|
||||
"B" => Some(Self {
|
||||
regex: r"January|February|March|April|May|June|July|August|September|October|November|December",
|
||||
unit: DateUnit::Months,
|
||||
max_len: 9,
|
||||
}),
|
||||
"H" => Some(Self {
|
||||
regex: r"[0-2]\d",
|
||||
unit: DateUnit::Hours,
|
||||
max_len: 2,
|
||||
}),
|
||||
"M" => Some(Self {
|
||||
regex: r"[0-5]\d",
|
||||
unit: DateUnit::Minutes,
|
||||
max_len: 2,
|
||||
}),
|
||||
"S" => Some(Self {
|
||||
regex: r"[0-5]\d",
|
||||
unit: DateUnit::Seconds,
|
||||
max_len: 2,
|
||||
}),
|
||||
"I" => Some(Self {
|
||||
regex: r"[0-1]\d",
|
||||
unit: DateUnit::Hours,
|
||||
max_len: 2,
|
||||
}),
|
||||
"-I" => Some(Self {
|
||||
regex: r"1?\d",
|
||||
unit: DateUnit::Hours,
|
||||
max_len: 2,
|
||||
}),
|
||||
"P" => Some(Self {
|
||||
regex: r"am|pm",
|
||||
unit: DateUnit::AmPm,
|
||||
max_len: 2,
|
||||
}),
|
||||
"p" => Some(Self {
|
||||
regex: r"AM|PM",
|
||||
unit: DateUnit::AmPm,
|
||||
max_len: 2,
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
enum DateUnit {
|
||||
Years,
|
||||
Months,
|
||||
Days,
|
||||
Hours,
|
||||
Minutes,
|
||||
Seconds,
|
||||
AmPm,
|
||||
}
|
||||
|
||||
impl DateUnit {
|
||||
fn is_date(self) -> bool {
|
||||
matches!(self, DateUnit::Years | DateUnit::Months | DateUnit::Days)
|
||||
}
|
||||
|
||||
fn is_time(self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
DateUnit::Hours | DateUnit::Minutes | DateUnit::Seconds
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn ndays_in_month(year: i32, month: u32) -> u32 {
|
||||
// The first day of the next month...
|
||||
let (y, m) = if month == 12 {
|
||||
(year + 1, 1)
|
||||
} else {
|
||||
(year, month + 1)
|
||||
};
|
||||
let d = NaiveDate::from_ymd(y, m, 1);
|
||||
|
||||
// ...is preceded by the last day of the original month.
|
||||
d.pred().day()
|
||||
}
|
||||
|
||||
fn add_months(date_time: NaiveDateTime, amount: i64) -> Option<NaiveDateTime> {
|
||||
let month = (date_time.month0() as i64).checked_add(amount)?;
|
||||
let year = date_time.year() + i32::try_from(month / 12).ok()?;
|
||||
let year = if month.is_negative() { year - 1 } else { year };
|
||||
|
||||
// Normalize month
|
||||
let month = month % 12;
|
||||
let month = if month.is_negative() {
|
||||
month + 12
|
||||
} else {
|
||||
month
|
||||
} as u32
|
||||
+ 1;
|
||||
|
||||
let day = cmp::min(date_time.day(), ndays_in_month(year, month));
|
||||
|
||||
Some(NaiveDate::from_ymd(year, month, day).and_time(date_time.time()))
|
||||
}
|
||||
|
||||
fn add_years(date_time: NaiveDateTime, amount: i64) -> Option<NaiveDateTime> {
|
||||
let year = i32::try_from((date_time.year() as i64).checked_add(amount)?).ok()?;
|
||||
let ndays = ndays_in_month(year, date_time.month());
|
||||
|
||||
if date_time.day() > ndays {
|
||||
let d = NaiveDate::from_ymd(year, date_time.month(), ndays);
|
||||
Some(d.succ().and_time(date_time.time()))
|
||||
} else {
|
||||
date_time.with_year(year)
|
||||
}
|
||||
}
|
||||
|
||||
fn add_duration(date_time: NaiveDateTime, duration: Duration) -> Option<NaiveDateTime> {
|
||||
date_time.checked_add_signed(duration)
|
||||
}
|
||||
|
||||
fn toggle_am_pm(date_time: NaiveDateTime) -> Option<NaiveDateTime> {
|
||||
if date_time.hour() < 12 {
|
||||
add_duration(date_time, Duration::hours(12))
|
||||
} else {
|
||||
add_duration(date_time, Duration::hours(-12))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::Rope;
|
||||
|
||||
#[test]
|
||||
fn test_increment_date_times() {
|
||||
let tests = [
|
||||
// (original, cursor, amount, expected)
|
||||
("2020-02-28", 0, 1, "2021-02-28"),
|
||||
("2020-02-29", 0, 1, "2021-03-01"),
|
||||
("2020-01-31", 5, 1, "2020-02-29"),
|
||||
("2020-01-20", 5, 1, "2020-02-20"),
|
||||
("2021-01-01", 5, -1, "2020-12-01"),
|
||||
("2021-01-31", 5, -2, "2020-11-30"),
|
||||
("2020-02-28", 8, 1, "2020-02-29"),
|
||||
("2021-02-28", 8, 1, "2021-03-01"),
|
||||
("2021-02-28", 0, -1, "2020-02-28"),
|
||||
("2021-03-01", 0, -1, "2020-03-01"),
|
||||
("2020-02-29", 5, -1, "2020-01-29"),
|
||||
("2020-02-20", 5, -1, "2020-01-20"),
|
||||
("2020-02-29", 8, -1, "2020-02-28"),
|
||||
("2021-03-01", 8, -1, "2021-02-28"),
|
||||
("1980/12/21", 8, 100, "1981/03/31"),
|
||||
("1980/12/21", 8, -100, "1980/09/12"),
|
||||
("1980/12/21", 8, 1000, "1983/09/17"),
|
||||
("1980/12/21", 8, -1000, "1978/03/27"),
|
||||
("2021-11-24 07:12:23", 0, 1, "2022-11-24 07:12:23"),
|
||||
("2021-11-24 07:12:23", 5, 1, "2021-12-24 07:12:23"),
|
||||
("2021-11-24 07:12:23", 8, 1, "2021-11-25 07:12:23"),
|
||||
("2021-11-24 07:12:23", 11, 1, "2021-11-24 08:12:23"),
|
||||
("2021-11-24 07:12:23", 14, 1, "2021-11-24 07:13:23"),
|
||||
("2021-11-24 07:12:23", 17, 1, "2021-11-24 07:12:24"),
|
||||
("2021/11/24 07:12:23", 0, 1, "2022/11/24 07:12:23"),
|
||||
("2021/11/24 07:12:23", 5, 1, "2021/12/24 07:12:23"),
|
||||
("2021/11/24 07:12:23", 8, 1, "2021/11/25 07:12:23"),
|
||||
("2021/11/24 07:12:23", 11, 1, "2021/11/24 08:12:23"),
|
||||
("2021/11/24 07:12:23", 14, 1, "2021/11/24 07:13:23"),
|
||||
("2021/11/24 07:12:23", 17, 1, "2021/11/24 07:12:24"),
|
||||
("2021-11-24 07:12", 0, 1, "2022-11-24 07:12"),
|
||||
("2021-11-24 07:12", 5, 1, "2021-12-24 07:12"),
|
||||
("2021-11-24 07:12", 8, 1, "2021-11-25 07:12"),
|
||||
("2021-11-24 07:12", 11, 1, "2021-11-24 08:12"),
|
||||
("2021-11-24 07:12", 14, 1, "2021-11-24 07:13"),
|
||||
("2021/11/24 07:12", 0, 1, "2022/11/24 07:12"),
|
||||
("2021/11/24 07:12", 5, 1, "2021/12/24 07:12"),
|
||||
("2021/11/24 07:12", 8, 1, "2021/11/25 07:12"),
|
||||
("2021/11/24 07:12", 11, 1, "2021/11/24 08:12"),
|
||||
("2021/11/24 07:12", 14, 1, "2021/11/24 07:13"),
|
||||
("Wed Nov 24 2021", 0, 1, "Thu Nov 25 2021"),
|
||||
("Wed Nov 24 2021", 4, 1, "Fri Dec 24 2021"),
|
||||
("Wed Nov 24 2021", 8, 1, "Thu Nov 25 2021"),
|
||||
("Wed Nov 24 2021", 11, 1, "Thu Nov 24 2022"),
|
||||
("24-Nov-2021", 0, 1, "25-Nov-2021"),
|
||||
("24-Nov-2021", 3, 1, "24-Dec-2021"),
|
||||
("24-Nov-2021", 7, 1, "24-Nov-2022"),
|
||||
("2021 Nov 24", 0, 1, "2022 Nov 24"),
|
||||
("2021 Nov 24", 5, 1, "2021 Dec 24"),
|
||||
("2021 Nov 24", 9, 1, "2021 Nov 25"),
|
||||
("Nov 24, 2021", 0, 1, "Dec 24, 2021"),
|
||||
("Nov 24, 2021", 4, 1, "Nov 25, 2021"),
|
||||
("Nov 24, 2021", 8, 1, "Nov 24, 2022"),
|
||||
("7:21:53 am", 0, 1, "8:21:53 am"),
|
||||
("7:21:53 am", 3, 1, "7:22:53 am"),
|
||||
("7:21:53 am", 5, 1, "7:21:54 am"),
|
||||
("7:21:53 am", 8, 1, "7:21:53 pm"),
|
||||
("7:21:53 AM", 0, 1, "8:21:53 AM"),
|
||||
("7:21:53 AM", 3, 1, "7:22:53 AM"),
|
||||
("7:21:53 AM", 5, 1, "7:21:54 AM"),
|
||||
("7:21:53 AM", 8, 1, "7:21:53 PM"),
|
||||
("7:21 am", 0, 1, "8:21 am"),
|
||||
("7:21 am", 3, 1, "7:22 am"),
|
||||
("7:21 am", 5, 1, "7:21 pm"),
|
||||
("7:21 AM", 0, 1, "8:21 AM"),
|
||||
("7:21 AM", 3, 1, "7:22 AM"),
|
||||
("7:21 AM", 5, 1, "7:21 PM"),
|
||||
("23:24:23", 1, 1, "00:24:23"),
|
||||
("23:24:23", 3, 1, "23:25:23"),
|
||||
("23:24:23", 6, 1, "23:24:24"),
|
||||
("23:24", 1, 1, "00:24"),
|
||||
("23:24", 3, 1, "23:25"),
|
||||
];
|
||||
|
||||
for (original, cursor, amount, expected) in tests {
|
||||
let rope = Rope::from_str(original);
|
||||
let range = Range::new(cursor, cursor + 1);
|
||||
assert_eq!(
|
||||
DateTimeIncrementor::from_range(rope.slice(..), range)
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_date_times() {
|
||||
let tests = [
|
||||
"0000-00-00",
|
||||
"1980-2-21",
|
||||
"1980-12-1",
|
||||
"12345",
|
||||
"2020-02-30",
|
||||
"1999-12-32",
|
||||
"19-12-32",
|
||||
"1-2-3",
|
||||
"0000/00/00",
|
||||
"1980/2/21",
|
||||
"1980/12/1",
|
||||
"12345",
|
||||
"2020/02/30",
|
||||
"1999/12/32",
|
||||
"19/12/32",
|
||||
"1/2/3",
|
||||
"123:456:789",
|
||||
"11:61",
|
||||
"2021-55-12 08:12:54",
|
||||
];
|
||||
|
||||
for invalid in tests {
|
||||
let rope = Rope::from_str(invalid);
|
||||
let range = Range::new(0, 1);
|
||||
|
||||
assert_eq!(DateTimeIncrementor::from_range(rope.slice(..), range), None)
|
||||
}
|
||||
}
|
||||
}
|
8
helix-core/src/increment/mod.rs
Normal file
8
helix-core/src/increment/mod.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
pub mod date_time;
|
||||
pub mod number;
|
||||
|
||||
use crate::{Range, Tendril};
|
||||
|
||||
pub trait Increment {
|
||||
fn increment(&self, amount: i64) -> (Range, Tendril);
|
||||
}
|
507
helix-core/src/increment/number.rs
Normal file
507
helix-core/src/increment/number.rs
Normal file
@@ -0,0 +1,507 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use ropey::RopeSlice;
|
||||
|
||||
use super::Increment;
|
||||
|
||||
use crate::{
|
||||
textobject::{textobject_word, TextObject},
|
||||
Range, Tendril,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct NumberIncrementor<'a> {
|
||||
value: i64,
|
||||
radix: u32,
|
||||
range: Range,
|
||||
|
||||
text: RopeSlice<'a>,
|
||||
}
|
||||
|
||||
impl<'a> NumberIncrementor<'a> {
|
||||
/// Return information about number under rang if there is one.
|
||||
pub fn from_range(text: RopeSlice, range: Range) -> Option<NumberIncrementor> {
|
||||
// If the cursor is on the minus sign of a number we want to get the word textobject to the
|
||||
// right of it.
|
||||
let range = if range.to() < text.len_chars()
|
||||
&& range.to() - range.from() <= 1
|
||||
&& text.char(range.from()) == '-'
|
||||
{
|
||||
Range::new(range.from() + 1, range.to() + 1)
|
||||
} else {
|
||||
range
|
||||
};
|
||||
|
||||
let range = textobject_word(text, range, TextObject::Inside, 1, false);
|
||||
|
||||
// If there is a minus sign to the left of the word object, we want to include it in the range.
|
||||
let range = if range.from() > 0 && text.char(range.from() - 1) == '-' {
|
||||
range.extend(range.from() - 1, range.from())
|
||||
} else {
|
||||
range
|
||||
};
|
||||
|
||||
let word: String = text
|
||||
.slice(range.from()..range.to())
|
||||
.chars()
|
||||
.filter(|&c| c != '_')
|
||||
.collect();
|
||||
let (radix, prefixed) = if word.starts_with("0x") {
|
||||
(16, true)
|
||||
} else if word.starts_with("0o") {
|
||||
(8, true)
|
||||
} else if word.starts_with("0b") {
|
||||
(2, true)
|
||||
} else {
|
||||
(10, false)
|
||||
};
|
||||
|
||||
let number = if prefixed { &word[2..] } else { &word };
|
||||
|
||||
let value = i128::from_str_radix(number, radix).ok()?;
|
||||
if (value.is_positive() && value.leading_zeros() < 64)
|
||||
|| (value.is_negative() && value.leading_ones() < 64)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let value = value as i64;
|
||||
Some(NumberIncrementor {
|
||||
range,
|
||||
value,
|
||||
radix,
|
||||
text,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Increment for NumberIncrementor<'a> {
|
||||
fn increment(&self, amount: i64) -> (Range, Tendril) {
|
||||
let old_text: Cow<str> = self.text.slice(self.range.from()..self.range.to()).into();
|
||||
let old_length = old_text.len();
|
||||
let new_value = self.value.wrapping_add(amount);
|
||||
|
||||
// Get separator indexes from right to left.
|
||||
let separator_rtl_indexes: Vec<usize> = old_text
|
||||
.chars()
|
||||
.rev()
|
||||
.enumerate()
|
||||
.filter_map(|(i, c)| if c == '_' { Some(i) } else { None })
|
||||
.collect();
|
||||
|
||||
let format_length = if self.radix == 10 {
|
||||
match (self.value.is_negative(), new_value.is_negative()) {
|
||||
(true, false) => old_length - 1,
|
||||
(false, true) => old_length + 1,
|
||||
_ => old_text.len(),
|
||||
}
|
||||
} else {
|
||||
old_text.len() - 2
|
||||
} - separator_rtl_indexes.len();
|
||||
|
||||
let mut new_text = match self.radix {
|
||||
2 => format!("0b{:01$b}", new_value, format_length),
|
||||
8 => format!("0o{:01$o}", new_value, format_length),
|
||||
10 if old_text.starts_with('0') || old_text.starts_with("-0") => {
|
||||
format!("{:01$}", new_value, format_length)
|
||||
}
|
||||
10 => format!("{}", new_value),
|
||||
16 => {
|
||||
let (lower_count, upper_count): (usize, usize) =
|
||||
old_text.chars().skip(2).fold((0, 0), |(lower, upper), c| {
|
||||
(
|
||||
lower + c.is_ascii_lowercase().then(|| 1).unwrap_or(0),
|
||||
upper + c.is_ascii_uppercase().then(|| 1).unwrap_or(0),
|
||||
)
|
||||
});
|
||||
if upper_count > lower_count {
|
||||
format!("0x{:01$X}", new_value, format_length)
|
||||
} else {
|
||||
format!("0x{:01$x}", new_value, format_length)
|
||||
}
|
||||
}
|
||||
_ => unimplemented!("radix not supported: {}", self.radix),
|
||||
};
|
||||
|
||||
// Add separators from original number.
|
||||
for &rtl_index in &separator_rtl_indexes {
|
||||
if rtl_index < new_text.len() {
|
||||
let new_index = new_text.len() - rtl_index;
|
||||
new_text.insert(new_index, '_');
|
||||
}
|
||||
}
|
||||
|
||||
// Add in additional separators if necessary.
|
||||
if new_text.len() > old_length && !separator_rtl_indexes.is_empty() {
|
||||
let spacing = match separator_rtl_indexes.as_slice() {
|
||||
[.., b, a] => a - b - 1,
|
||||
_ => separator_rtl_indexes[0],
|
||||
};
|
||||
|
||||
let prefix_length = if self.radix == 10 { 0 } else { 2 };
|
||||
if let Some(mut index) = new_text.find('_') {
|
||||
while index - prefix_length > spacing {
|
||||
index -= spacing;
|
||||
new_text.insert(index, '_');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(self.range, new_text.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::Rope;
|
||||
|
||||
#[test]
|
||||
fn test_decimal_at_point() {
|
||||
let rope = Rope::from_str("Test text 12345 more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 15),
|
||||
value: 12345,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_uppercase_hexadecimal_at_point() {
|
||||
let rope = Rope::from_str("Test text 0x123ABCDEF more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 21),
|
||||
value: 0x123ABCDEF,
|
||||
radix: 16,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lowercase_hexadecimal_at_point() {
|
||||
let rope = Rope::from_str("Test text 0xfa3b4e more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 18),
|
||||
value: 0xfa3b4e,
|
||||
radix: 16,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_octal_at_point() {
|
||||
let rope = Rope::from_str("Test text 0o1074312 more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 19),
|
||||
value: 0o1074312,
|
||||
radix: 8,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_binary_at_point() {
|
||||
let rope = Rope::from_str("Test text 0b10111010010101 more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 26),
|
||||
value: 0b10111010010101,
|
||||
radix: 2,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_decimal_at_point() {
|
||||
let rope = Rope::from_str("Test text -54321 more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 16),
|
||||
value: -54321,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_decimal_with_leading_zeroes_at_point() {
|
||||
let rope = Rope::from_str("Test text 000045326 more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 19),
|
||||
value: 45326,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_decimal_cursor_on_minus_sign() {
|
||||
let rope = Rope::from_str("Test text -54321 more text.");
|
||||
let range = Range::point(10);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(10, 16),
|
||||
value: -54321,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_number_under_range_start_of_rope() {
|
||||
let rope = Rope::from_str("100");
|
||||
let range = Range::point(0);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(0, 3),
|
||||
value: 100,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_number_under_range_end_of_rope() {
|
||||
let rope = Rope::from_str("100");
|
||||
let range = Range::point(2);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(0, 3),
|
||||
value: 100,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_number_surrounded_by_punctuation() {
|
||||
let rope = Rope::from_str(",100;");
|
||||
let range = Range::point(1);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range),
|
||||
Some(NumberIncrementor {
|
||||
range: Range::new(1, 4),
|
||||
value: 100,
|
||||
radix: 10,
|
||||
text: rope.slice(..),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_not_a_number_point() {
|
||||
let rope = Rope::from_str("Test text 45326 more text.");
|
||||
let range = Range::point(6);
|
||||
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_number_too_large_at_point() {
|
||||
let rope = Rope::from_str("Test text 0xFFFFFFFFFFFFFFFFF more text.");
|
||||
let range = Range::point(12);
|
||||
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_number_cursor_one_right_of_number() {
|
||||
let rope = Rope::from_str("100 ");
|
||||
let range = Range::point(3);
|
||||
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_number_cursor_one_left_of_number() {
|
||||
let rope = Rope::from_str(" 100");
|
||||
let range = Range::point(0);
|
||||
assert_eq!(NumberIncrementor::from_range(rope.slice(..), range), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_basic_decimal_numbers() {
|
||||
let tests = [
|
||||
("100", 1, "101"),
|
||||
("100", -1, "99"),
|
||||
("99", 1, "100"),
|
||||
("100", 1000, "1100"),
|
||||
("100", -1000, "-900"),
|
||||
("-1", 1, "0"),
|
||||
("-1", 2, "1"),
|
||||
("1", -1, "0"),
|
||||
("1", -2, "-1"),
|
||||
];
|
||||
|
||||
for (original, amount, expected) in tests {
|
||||
let rope = Rope::from_str(original);
|
||||
let range = Range::point(0);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range)
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_basic_hexadedimal_numbers() {
|
||||
let tests = [
|
||||
("0x0100", 1, "0x0101"),
|
||||
("0x0100", -1, "0x00ff"),
|
||||
("0x0001", -1, "0x0000"),
|
||||
("0x0000", -1, "0xffffffffffffffff"),
|
||||
("0xffffffffffffffff", 1, "0x0000000000000000"),
|
||||
("0xffffffffffffffff", 2, "0x0000000000000001"),
|
||||
("0xffffffffffffffff", -1, "0xfffffffffffffffe"),
|
||||
("0xABCDEF1234567890", 1, "0xABCDEF1234567891"),
|
||||
("0xabcdef1234567890", 1, "0xabcdef1234567891"),
|
||||
];
|
||||
|
||||
for (original, amount, expected) in tests {
|
||||
let rope = Rope::from_str(original);
|
||||
let range = Range::point(0);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range)
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_basic_octal_numbers() {
|
||||
let tests = [
|
||||
("0o0107", 1, "0o0110"),
|
||||
("0o0110", -1, "0o0107"),
|
||||
("0o0001", -1, "0o0000"),
|
||||
("0o7777", 1, "0o10000"),
|
||||
("0o1000", -1, "0o0777"),
|
||||
("0o0107", 10, "0o0121"),
|
||||
("0o0000", -1, "0o1777777777777777777777"),
|
||||
("0o1777777777777777777777", 1, "0o0000000000000000000000"),
|
||||
("0o1777777777777777777777", 2, "0o0000000000000000000001"),
|
||||
("0o1777777777777777777777", -1, "0o1777777777777777777776"),
|
||||
];
|
||||
|
||||
for (original, amount, expected) in tests {
|
||||
let rope = Rope::from_str(original);
|
||||
let range = Range::point(0);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range)
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_basic_binary_numbers() {
|
||||
let tests = [
|
||||
("0b00000100", 1, "0b00000101"),
|
||||
("0b00000100", -1, "0b00000011"),
|
||||
("0b00000100", 2, "0b00000110"),
|
||||
("0b00000100", -2, "0b00000010"),
|
||||
("0b00000001", -1, "0b00000000"),
|
||||
("0b00111111", 10, "0b01001001"),
|
||||
("0b11111111", 1, "0b100000000"),
|
||||
("0b10000000", -1, "0b01111111"),
|
||||
(
|
||||
"0b0000",
|
||||
-1,
|
||||
"0b1111111111111111111111111111111111111111111111111111111111111111",
|
||||
),
|
||||
(
|
||||
"0b1111111111111111111111111111111111111111111111111111111111111111",
|
||||
1,
|
||||
"0b0000000000000000000000000000000000000000000000000000000000000000",
|
||||
),
|
||||
(
|
||||
"0b1111111111111111111111111111111111111111111111111111111111111111",
|
||||
2,
|
||||
"0b0000000000000000000000000000000000000000000000000000000000000001",
|
||||
),
|
||||
(
|
||||
"0b1111111111111111111111111111111111111111111111111111111111111111",
|
||||
-1,
|
||||
"0b1111111111111111111111111111111111111111111111111111111111111110",
|
||||
),
|
||||
];
|
||||
|
||||
for (original, amount, expected) in tests {
|
||||
let rope = Rope::from_str(original);
|
||||
let range = Range::point(0);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range)
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_increment_with_separators() {
|
||||
let tests = [
|
||||
("999_999", 1, "1_000_000"),
|
||||
("1_000_000", -1, "999_999"),
|
||||
("-999_999", -1, "-1_000_000"),
|
||||
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
|
||||
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
|
||||
("0x0000_0000_0001", 0x1_ffff_0000, "0x0001_ffff_0001"),
|
||||
("0x0000_0000", -1, "0xffff_ffff_ffff_ffff"),
|
||||
("0x0000_0000_0000", -1, "0xffff_ffff_ffff_ffff"),
|
||||
("0b01111111_11111111", 1, "0b10000000_00000000"),
|
||||
("0b11111111_11111111", 1, "0b1_00000000_00000000"),
|
||||
];
|
||||
|
||||
for (original, amount, expected) in tests {
|
||||
let rope = Rope::from_str(original);
|
||||
let range = Range::point(0);
|
||||
assert_eq!(
|
||||
NumberIncrementor::from_range(rope.slice(..), range)
|
||||
.unwrap()
|
||||
.increment(amount)
|
||||
.1,
|
||||
Tendril::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,6 +1,5 @@
|
||||
use crate::{
|
||||
chars::{char_is_line_ending, char_is_whitespace},
|
||||
find_first_non_whitespace_char,
|
||||
syntax::{IndentQuery, LanguageConfiguration, Syntax},
|
||||
tree_sitter::Node,
|
||||
Rope, RopeSlice,
|
||||
@@ -174,8 +173,7 @@ pub fn auto_detect_indent_style(document_text: &Rope) -> Option<IndentStyle> {
|
||||
|
||||
/// To determine indentation of a newly inserted line, figure out the indentation at the last col
|
||||
/// of the previous line.
|
||||
#[allow(dead_code)]
|
||||
fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize {
|
||||
pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize {
|
||||
let mut len = 0;
|
||||
for ch in line.chars() {
|
||||
match ch {
|
||||
@@ -194,10 +192,7 @@ fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option<Nod
|
||||
let tree = syntax.tree();
|
||||
|
||||
// named_descendant
|
||||
let mut node = match tree.root_node().descendant_for_byte_range(pos, pos) {
|
||||
Some(node) => node,
|
||||
None => return None,
|
||||
};
|
||||
let mut node = tree.root_node().descendant_for_byte_range(pos, pos)?;
|
||||
|
||||
while let Some(parent) = node.parent() {
|
||||
if parent.start_byte() == node.start_byte() {
|
||||
@@ -210,10 +205,15 @@ fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option<Nod
|
||||
Some(node)
|
||||
}
|
||||
|
||||
fn calculate_indentation(query: &IndentQuery, node: Option<Node>, newline: bool) -> usize {
|
||||
// NOTE: can't use contains() on query because of comparing Vec<String> and &str
|
||||
// https://doc.rust-lang.org/std/vec/struct.Vec.html#method.contains
|
||||
|
||||
/// Calculate the indentation at a given treesitter node.
|
||||
/// If newline is false, then any "indent" nodes on the line are ignored ("outdent" still applies).
|
||||
/// This is because the indentation is only increased starting at the second line of the node.
|
||||
fn calculate_indentation(
|
||||
query: &IndentQuery,
|
||||
node: Option<Node>,
|
||||
line: usize,
|
||||
newline: bool,
|
||||
) -> usize {
|
||||
let mut increment: isize = 0;
|
||||
|
||||
let mut node = match node {
|
||||
@@ -221,70 +221,45 @@ fn calculate_indentation(query: &IndentQuery, node: Option<Node>, newline: bool)
|
||||
None => return 0,
|
||||
};
|
||||
|
||||
let mut prev_start = node.start_position().row;
|
||||
let mut current_line = line;
|
||||
let mut consider_indent = newline;
|
||||
let mut increment_from_line: isize = 0;
|
||||
|
||||
// if we're calculating indentation for a brand new line then the current node will become the
|
||||
// parent node. We need to take it's indentation level into account too.
|
||||
let node_kind = node.kind();
|
||||
if newline && query.indent.contains(node_kind) {
|
||||
increment += 1;
|
||||
}
|
||||
|
||||
while let Some(parent) = node.parent() {
|
||||
let parent_kind = parent.kind();
|
||||
let start = parent.start_position().row;
|
||||
|
||||
// detect deeply nested indents in the same line
|
||||
// .map(|a| { <-- ({ is two scopes
|
||||
// let len = 1; <-- indents one level
|
||||
// }) <-- }) is two scopes
|
||||
let starts_same_line = start == prev_start;
|
||||
|
||||
if query.outdent.contains(node.kind()) && !starts_same_line {
|
||||
// we outdent by skipping the rules for the current level and jumping up
|
||||
// node = parent;
|
||||
increment -= 1;
|
||||
// continue;
|
||||
loop {
|
||||
let node_kind = node.kind();
|
||||
let start = node.start_position().row;
|
||||
if current_line != start {
|
||||
// Indent/dedent by at most one per line:
|
||||
// .map(|a| { <-- ({ is two scopes
|
||||
// let len = 1; <-- indents one level
|
||||
// }) <-- }) is two scopes
|
||||
if consider_indent || increment_from_line < 0 {
|
||||
increment += increment_from_line.signum();
|
||||
}
|
||||
increment_from_line = 0;
|
||||
current_line = start;
|
||||
consider_indent = true;
|
||||
}
|
||||
|
||||
if query.indent.contains(parent_kind) // && not_first_or_last_sibling
|
||||
&& !starts_same_line
|
||||
{
|
||||
// println!("is_scope {}", parent_kind);
|
||||
prev_start = start;
|
||||
increment += 1
|
||||
if query.outdent.contains(node_kind) {
|
||||
increment_from_line -= 1;
|
||||
}
|
||||
if query.indent.contains(node_kind) {
|
||||
increment_from_line += 1;
|
||||
}
|
||||
|
||||
// if last_scope && increment > 0 && ...{ ignore }
|
||||
|
||||
node = parent;
|
||||
if let Some(parent) = node.parent() {
|
||||
node = parent;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if consider_indent || increment_from_line < 0 {
|
||||
increment += increment_from_line.signum();
|
||||
}
|
||||
|
||||
increment.max(0) as usize
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn suggested_indent_for_line(
|
||||
language_config: &LanguageConfiguration,
|
||||
syntax: Option<&Syntax>,
|
||||
text: RopeSlice,
|
||||
line_num: usize,
|
||||
_tab_width: usize,
|
||||
) -> usize {
|
||||
if let Some(start) = find_first_non_whitespace_char(text.line(line_num)) {
|
||||
return suggested_indent_for_pos(
|
||||
Some(language_config),
|
||||
syntax,
|
||||
text,
|
||||
start + text.line_to_char(line_num),
|
||||
false,
|
||||
);
|
||||
};
|
||||
|
||||
// if the line is blank, indent should be zero
|
||||
0
|
||||
}
|
||||
|
||||
// TODO: two usecases: if we are triggering this for a new, blank line:
|
||||
// - it should return 0 when mass indenting stuff
|
||||
// - it should look up the wrapper node and count it too when we press o/O
|
||||
@@ -293,23 +268,20 @@ pub fn suggested_indent_for_pos(
|
||||
syntax: Option<&Syntax>,
|
||||
text: RopeSlice,
|
||||
pos: usize,
|
||||
line: usize,
|
||||
new_line: bool,
|
||||
) -> usize {
|
||||
) -> Option<usize> {
|
||||
if let (Some(query), Some(syntax)) = (
|
||||
language_config.and_then(|config| config.indent_query()),
|
||||
syntax,
|
||||
) {
|
||||
let byte_start = text.char_to_byte(pos);
|
||||
let node = get_highest_syntax_node_at_bytepos(syntax, byte_start);
|
||||
|
||||
// let config = load indentation query config from Syntax(should contain language_config)
|
||||
|
||||
// TODO: special case for comments
|
||||
// TODO: if preserve_leading_whitespace
|
||||
calculate_indentation(query, node, new_line)
|
||||
Some(calculate_indentation(query, node, line, new_line))
|
||||
} else {
|
||||
// TODO: heuristics for non-tree sitter grammars
|
||||
0
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
@@ -441,7 +413,8 @@ where
|
||||
",
|
||||
);
|
||||
|
||||
let doc = Rope::from(doc);
|
||||
let doc = doc;
|
||||
use crate::diagnostic::Severity;
|
||||
use crate::syntax::{
|
||||
Configuration, IndentationConfiguration, LanguageConfiguration, Loader,
|
||||
};
|
||||
@@ -450,6 +423,7 @@ where
|
||||
language: vec![LanguageConfiguration {
|
||||
scope: "source.rust".to_string(),
|
||||
file_types: vec!["rs".to_string()],
|
||||
shebangs: vec![],
|
||||
language_id: "Rust".to_string(),
|
||||
highlight_config: OnceCell::new(),
|
||||
config: None,
|
||||
@@ -458,6 +432,8 @@ where
|
||||
roots: vec![],
|
||||
comment_token: None,
|
||||
auto_format: false,
|
||||
diagnostic_severity: Severity::Warning,
|
||||
grammar: None,
|
||||
language_server: None,
|
||||
indent: Some(IndentationConfiguration {
|
||||
tab_width: 4,
|
||||
@@ -465,6 +441,8 @@ where
|
||||
}),
|
||||
indent_query: OnceCell::new(),
|
||||
textobject_query: OnceCell::new(),
|
||||
debugger: None,
|
||||
auto_pairs: None,
|
||||
}],
|
||||
});
|
||||
|
||||
@@ -475,20 +453,29 @@ where
|
||||
|
||||
let language_config = loader.language_config_for_scope("source.rust").unwrap();
|
||||
let highlight_config = language_config.highlight_config(&[]).unwrap();
|
||||
let syntax = Syntax::new(&doc, highlight_config.clone());
|
||||
let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader));
|
||||
let text = doc.slice(..);
|
||||
let tab_width = 4;
|
||||
|
||||
for i in 0..doc.len_lines() {
|
||||
let line = text.line(i);
|
||||
let indent = indent_level_for_line(line, tab_width);
|
||||
assert_eq!(
|
||||
suggested_indent_for_line(&language_config, Some(&syntax), text, i, tab_width),
|
||||
indent,
|
||||
"line {}: {}",
|
||||
i,
|
||||
line
|
||||
);
|
||||
if let Some(pos) = crate::find_first_non_whitespace_char(line) {
|
||||
let indent = indent_level_for_line(line, tab_width);
|
||||
assert_eq!(
|
||||
suggested_indent_for_pos(
|
||||
Some(&language_config),
|
||||
Some(&syntax),
|
||||
text,
|
||||
text.line_to_char(i) + pos,
|
||||
i,
|
||||
false
|
||||
),
|
||||
Some(indent),
|
||||
"line {}: \"{}\"",
|
||||
i,
|
||||
line
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,10 +1,14 @@
|
||||
pub use encoding_rs as encoding;
|
||||
|
||||
pub mod auto_pairs;
|
||||
pub mod chars;
|
||||
pub mod comment;
|
||||
pub mod config;
|
||||
pub mod diagnostic;
|
||||
pub mod diff;
|
||||
pub mod graphemes;
|
||||
pub mod history;
|
||||
pub mod increment;
|
||||
pub mod indent;
|
||||
pub mod line_ending;
|
||||
pub mod macros;
|
||||
@@ -16,6 +20,7 @@ mod position;
|
||||
pub mod register;
|
||||
pub mod search;
|
||||
pub mod selection;
|
||||
pub mod shellwords;
|
||||
mod state;
|
||||
pub mod surround;
|
||||
pub mod syntax;
|
||||
@@ -28,14 +33,18 @@ pub mod unicode {
|
||||
pub use unicode_width as width;
|
||||
}
|
||||
|
||||
static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
|
||||
once_cell::sync::Lazy::new(runtime_dir);
|
||||
|
||||
pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
|
||||
line.chars().position(|ch| !ch.is_whitespace())
|
||||
}
|
||||
|
||||
pub fn find_root(root: Option<&str>) -> Option<std::path::PathBuf> {
|
||||
/// Find project root.
|
||||
///
|
||||
/// Order of detection:
|
||||
/// * Top-most folder containing a root marker in current git repository
|
||||
/// * Git repostory root if no marker detected
|
||||
/// * Top-most folder containing a root marker if not git repository detected
|
||||
/// * Current working directory as fallback
|
||||
pub fn find_root(root: Option<&str>, root_markers: &[String]) -> Option<std::path::PathBuf> {
|
||||
let current_dir = std::env::current_dir().expect("unable to determine current directory");
|
||||
|
||||
let root = match root {
|
||||
@@ -47,155 +56,46 @@ pub fn find_root(root: Option<&str>) -> Option<std::path::PathBuf> {
|
||||
current_dir.join(root)
|
||||
}
|
||||
}
|
||||
None => current_dir,
|
||||
None => current_dir.clone(),
|
||||
};
|
||||
|
||||
let mut top_marker = None;
|
||||
for ancestor in root.ancestors() {
|
||||
// TODO: also use defined roots if git isn't found
|
||||
for marker in root_markers {
|
||||
if ancestor.join(marker).exists() {
|
||||
top_marker = Some(ancestor);
|
||||
break;
|
||||
}
|
||||
}
|
||||
// don't go higher than repo
|
||||
if ancestor.join(".git").is_dir() {
|
||||
return Some(ancestor.to_path_buf());
|
||||
// Use workspace if detected from marker
|
||||
return Some(top_marker.unwrap_or(ancestor).to_path_buf());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn runtime_dir() -> std::path::PathBuf {
|
||||
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
|
||||
return dir.into();
|
||||
}
|
||||
|
||||
const RT_DIR: &str = "runtime";
|
||||
let conf_dir = config_dir().join(RT_DIR);
|
||||
if conf_dir.exists() {
|
||||
return conf_dir;
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
|
||||
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
|
||||
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
|
||||
}
|
||||
|
||||
// fallback to location of the executable being run
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub fn config_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.config_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn cache_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.cache_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
// right overrides left
|
||||
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
|
||||
use toml::Value;
|
||||
|
||||
fn get_name(v: &Value) -> Option<&str> {
|
||||
v.get("name").and_then(Value::as_str)
|
||||
}
|
||||
|
||||
match (left, right) {
|
||||
(Value::Array(mut left_items), Value::Array(right_items)) => {
|
||||
left_items.reserve(right_items.len());
|
||||
for rvalue in right_items {
|
||||
let lvalue = get_name(&rvalue)
|
||||
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
|
||||
.map(|lpos| left_items.remove(lpos));
|
||||
let mvalue = match lvalue {
|
||||
Some(lvalue) => merge_toml_values(lvalue, rvalue),
|
||||
None => rvalue,
|
||||
};
|
||||
left_items.push(mvalue);
|
||||
}
|
||||
Value::Array(left_items)
|
||||
}
|
||||
(Value::Table(mut left_map), Value::Table(right_map)) => {
|
||||
for (rname, rvalue) in right_map {
|
||||
match left_map.remove(&rname) {
|
||||
Some(lvalue) => {
|
||||
let merged_value = merge_toml_values(lvalue, rvalue);
|
||||
left_map.insert(rname, merged_value);
|
||||
}
|
||||
None => {
|
||||
left_map.insert(rname, rvalue);
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Table(left_map)
|
||||
}
|
||||
// Catch everything else we didn't handle, and use the right value
|
||||
(_, value) => value,
|
||||
// In absence of git repo, use workspace if detected
|
||||
if top_marker.is_some() {
|
||||
top_marker.map(|a| a.to_path_buf())
|
||||
} else {
|
||||
Some(current_dir)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod merge_toml_tests {
|
||||
use super::merge_toml_values;
|
||||
|
||||
#[test]
|
||||
fn language_tomls() {
|
||||
use toml::Value;
|
||||
|
||||
const USER: &str = "
|
||||
[[language]]
|
||||
name = \"nix\"
|
||||
test = \"bbb\"
|
||||
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
|
||||
";
|
||||
|
||||
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Couldn't parse built-in langauges config");
|
||||
let user: Value = toml::from_str(USER).unwrap();
|
||||
|
||||
let merged = merge_toml_values(base, user);
|
||||
let languages = merged.get("language").unwrap().as_array().unwrap();
|
||||
let nix = languages
|
||||
.iter()
|
||||
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
|
||||
.unwrap();
|
||||
let nix_indent = nix.get("indent").unwrap();
|
||||
|
||||
// We changed tab-width and unit in indent so check them if they are the new values
|
||||
assert_eq!(
|
||||
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
|
||||
4
|
||||
);
|
||||
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
|
||||
// We added a new keys, so check them
|
||||
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
|
||||
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
|
||||
// We didn't change comment-token so it should be same
|
||||
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
|
||||
}
|
||||
}
|
||||
|
||||
pub use etcetera::home_dir;
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
|
||||
pub use ropey::{Rope, RopeBuilder, RopeSlice};
|
||||
|
||||
pub use tendril::StrTendril as Tendril;
|
||||
// pub use tendril::StrTendril as Tendril;
|
||||
pub use smartstring::SmartString;
|
||||
|
||||
pub type Tendril = SmartString<smartstring::LazyCompact>;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use {regex, tree_sitter};
|
||||
|
||||
pub use graphemes::RopeGraphemes;
|
||||
pub use position::{coords_at_pos, pos_at_coords, Position};
|
||||
pub use position::{coords_at_pos, pos_at_coords, visual_coords_at_pos, Position};
|
||||
pub use selection::{Range, Selection};
|
||||
pub use smallvec::SmallVec;
|
||||
pub use smallvec::{smallvec, SmallVec};
|
||||
pub use syntax::Syntax;
|
||||
|
||||
pub use diagnostic::Diagnostic;
|
||||
|
@@ -250,7 +250,7 @@ mod line_ending_tests {
|
||||
assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR));
|
||||
assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF));
|
||||
assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf));
|
||||
assert_eq!(get_line_ending_of_str(&text[..]), None);
|
||||
assert_eq!(get_line_ending_of_str(text), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@@ -1,48 +1,92 @@
|
||||
use tree_sitter::Node;
|
||||
|
||||
use crate::{Rope, Syntax};
|
||||
|
||||
const PAIRS: &[(char, char)] = &[('(', ')'), ('{', '}'), ('[', ']'), ('<', '>')];
|
||||
// limit matching pairs to only ( ) { } [ ] < >
|
||||
const PAIRS: &[(char, char)] = &[
|
||||
('(', ')'),
|
||||
('{', '}'),
|
||||
('[', ']'),
|
||||
('<', '>'),
|
||||
('\'', '\''),
|
||||
('\"', '\"'),
|
||||
];
|
||||
|
||||
// limit matching pairs to only ( ) { } [ ] < > ' ' " "
|
||||
|
||||
// Returns the position of the matching bracket under cursor.
|
||||
//
|
||||
// If the cursor is one the opening bracket, the position of
|
||||
// the closing bracket is returned. If the cursor in the closing
|
||||
// bracket, the position of the opening bracket is returned.
|
||||
//
|
||||
// If the cursor is not on a bracket, `None` is returned.
|
||||
#[must_use]
|
||||
pub fn find(syntax: &Syntax, doc: &Rope, pos: usize) -> Option<usize> {
|
||||
let tree = syntax.tree();
|
||||
|
||||
let byte_pos = doc.char_to_byte(pos);
|
||||
|
||||
// most naive implementation: find the innermost syntax node, if we're at the edge of a node,
|
||||
// return the other edge.
|
||||
|
||||
let node = match tree
|
||||
.root_node()
|
||||
.named_descendant_for_byte_range(byte_pos, byte_pos)
|
||||
{
|
||||
Some(node) => node,
|
||||
None => return None,
|
||||
};
|
||||
|
||||
if node.is_error() {
|
||||
pub fn find_matching_bracket(syntax: &Syntax, doc: &Rope, pos: usize) -> Option<usize> {
|
||||
if pos >= doc.len_chars() || !is_valid_bracket(doc.char(pos)) {
|
||||
return None;
|
||||
}
|
||||
find_pair(syntax, doc, pos, false)
|
||||
}
|
||||
|
||||
// Returns the position of the bracket that is closing the current scope.
|
||||
//
|
||||
// If the cursor is on an opening or closing bracket, the function
|
||||
// behaves equivalent to [`find_matching_bracket`].
|
||||
//
|
||||
// If the cursor position is within a scope, the function searches
|
||||
// for the surrounding scope that is surrounded by brackets and
|
||||
// returns the position of the closing bracket for that scope.
|
||||
//
|
||||
// If no surrounding scope is found, the function returns `None`.
|
||||
#[must_use]
|
||||
pub fn find_matching_bracket_fuzzy(syntax: &Syntax, doc: &Rope, pos: usize) -> Option<usize> {
|
||||
find_pair(syntax, doc, pos, true)
|
||||
}
|
||||
|
||||
fn find_pair(syntax: &Syntax, doc: &Rope, pos: usize, traverse_parents: bool) -> Option<usize> {
|
||||
let tree = syntax.tree();
|
||||
let pos = doc.char_to_byte(pos);
|
||||
|
||||
let mut node = tree.root_node().named_descendant_for_byte_range(pos, pos)?;
|
||||
|
||||
loop {
|
||||
let (start_byte, end_byte) = surrounding_bytes(doc, &node)?;
|
||||
let (start_char, end_char) = (doc.byte_to_char(start_byte), doc.byte_to_char(end_byte));
|
||||
|
||||
if is_valid_pair(doc, start_char, end_char) {
|
||||
if end_byte == pos {
|
||||
return Some(start_char);
|
||||
}
|
||||
// We return the end char if the cursor is either on the start char
|
||||
// or at some arbitrary position between start and end char.
|
||||
return Some(end_char);
|
||||
}
|
||||
|
||||
if traverse_parents {
|
||||
node = node.parent()?;
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn is_valid_bracket(c: char) -> bool {
|
||||
PAIRS.iter().any(|(l, r)| *l == c || *r == c)
|
||||
}
|
||||
|
||||
fn is_valid_pair(doc: &Rope, start_char: usize, end_char: usize) -> bool {
|
||||
PAIRS.contains(&(doc.char(start_char), doc.char(end_char)))
|
||||
}
|
||||
|
||||
fn surrounding_bytes(doc: &Rope, node: &Node) -> Option<(usize, usize)> {
|
||||
let len = doc.len_bytes();
|
||||
|
||||
let start_byte = node.start_byte();
|
||||
let end_byte = node.end_byte().saturating_sub(1); // it's end exclusive
|
||||
let end_byte = node.end_byte().saturating_sub(1);
|
||||
|
||||
if start_byte >= len || end_byte >= len {
|
||||
return None;
|
||||
}
|
||||
|
||||
let start_char = doc.byte_to_char(start_byte);
|
||||
let end_char = doc.byte_to_char(end_byte);
|
||||
|
||||
if PAIRS.contains(&(doc.char(start_char), doc.char(end_char))) {
|
||||
if start_byte == byte_pos {
|
||||
return Some(end_char);
|
||||
}
|
||||
|
||||
if end_byte == byte_pos {
|
||||
return Some(start_char);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
Some((start_byte, end_byte))
|
||||
}
|
||||
|
@@ -1,6 +1,7 @@
|
||||
use std::iter;
|
||||
|
||||
use ropey::iter::Chars;
|
||||
use tree_sitter::{Node, QueryCursor};
|
||||
|
||||
use crate::{
|
||||
chars::{categorize_char, char_is_line_ending, CharCategory},
|
||||
@@ -9,7 +10,10 @@ use crate::{
|
||||
next_grapheme_boundary, nth_next_grapheme_boundary, nth_prev_grapheme_boundary,
|
||||
prev_grapheme_boundary,
|
||||
},
|
||||
pos_at_coords, Position, Range, RopeSlice,
|
||||
pos_at_coords,
|
||||
syntax::LanguageConfiguration,
|
||||
textobject::TextObject,
|
||||
Position, Range, RopeSlice,
|
||||
};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
@@ -53,6 +57,10 @@ pub fn move_vertically(
|
||||
let pos = range.cursor(slice);
|
||||
|
||||
// Compute the current position's 2d coordinates.
|
||||
// TODO: switch this to use `visual_coords_at_pos` rather than
|
||||
// `coords_at_pos` as this will cause a jerky movement when the visual
|
||||
// position does not match, like moving from a line with tabs/CJK to
|
||||
// a line without
|
||||
let Position { row, col } = coords_at_pos(slice, pos);
|
||||
let horiz = range.horiz.unwrap_or(col as u32);
|
||||
|
||||
@@ -164,7 +172,7 @@ pub fn backwards_skip_while<F>(slice: RopeSlice, pos: usize, fun: F) -> Option<u
|
||||
where
|
||||
F: Fn(char) -> bool,
|
||||
{
|
||||
let mut chars_starting_from_next = slice.chars_at(pos + 1);
|
||||
let mut chars_starting_from_next = slice.chars_at(pos);
|
||||
let mut backwards = iter::from_fn(|| chars_starting_from_next.prev()).enumerate();
|
||||
backwards.find_map(|(i, c)| {
|
||||
if !fun(c) {
|
||||
@@ -301,10 +309,58 @@ fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> boo
|
||||
}
|
||||
}
|
||||
|
||||
pub fn goto_treesitter_object(
|
||||
slice: RopeSlice,
|
||||
range: Range,
|
||||
object_name: &str,
|
||||
dir: Direction,
|
||||
slice_tree: Node,
|
||||
lang_config: &LanguageConfiguration,
|
||||
_count: usize,
|
||||
) -> Range {
|
||||
let get_range = move || -> Option<Range> {
|
||||
let byte_pos = slice.char_to_byte(range.cursor(slice));
|
||||
|
||||
let cap_name = |t: TextObject| format!("{}.{}", object_name, t);
|
||||
let mut cursor = QueryCursor::new();
|
||||
let nodes = lang_config.textobject_query()?.capture_nodes_any(
|
||||
&[
|
||||
&cap_name(TextObject::Movement),
|
||||
&cap_name(TextObject::Around),
|
||||
&cap_name(TextObject::Inside),
|
||||
],
|
||||
slice_tree,
|
||||
slice,
|
||||
&mut cursor,
|
||||
)?;
|
||||
|
||||
let node = match dir {
|
||||
Direction::Forward => nodes
|
||||
.filter(|n| n.start_byte() > byte_pos)
|
||||
.min_by_key(|n| n.start_byte())?,
|
||||
Direction::Backward => nodes
|
||||
.filter(|n| n.start_byte() < byte_pos)
|
||||
.max_by_key(|n| n.start_byte())?,
|
||||
};
|
||||
|
||||
let len = slice.len_bytes();
|
||||
let start_byte = node.start_byte();
|
||||
let end_byte = node.end_byte();
|
||||
if start_byte >= len || end_byte >= len {
|
||||
return None;
|
||||
}
|
||||
|
||||
let start_char = slice.byte_to_char(start_byte);
|
||||
let end_char = slice.byte_to_char(end_byte);
|
||||
|
||||
// head of range should be at beginning
|
||||
Some(Range::new(end_char, start_char))
|
||||
};
|
||||
get_range().unwrap_or(range)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use std::array::{self, IntoIter};
|
||||
|
||||
use ropey::Rope;
|
||||
|
||||
use super::*;
|
||||
@@ -356,7 +412,7 @@ mod test {
|
||||
((Direction::Backward, 999usize), (0, 0)), // |This is a simple alphabetic line
|
||||
];
|
||||
|
||||
for ((direction, amount), coordinates) in IntoIter::new(moves_and_expected_coordinates) {
|
||||
for ((direction, amount), coordinates) in moves_and_expected_coordinates {
|
||||
range = move_horizontally(slice, range, direction, amount, Movement::Move);
|
||||
assert_eq!(coords_at_pos(slice, range.head), coordinates.into())
|
||||
}
|
||||
@@ -370,7 +426,7 @@ mod test {
|
||||
|
||||
let mut range = Range::point(position);
|
||||
|
||||
let moves_and_expected_coordinates = IntoIter::new([
|
||||
let moves_and_expected_coordinates = [
|
||||
((Direction::Forward, 11usize), (1, 1)), // Multiline\nt|ext sample\n...
|
||||
((Direction::Backward, 1usize), (1, 0)), // Multiline\n|text sample\n...
|
||||
((Direction::Backward, 5usize), (0, 5)), // Multi|line\ntext sample\n...
|
||||
@@ -380,7 +436,7 @@ mod test {
|
||||
((Direction::Backward, 0usize), (0, 3)), // Mul|tiline\ntext sample\n...
|
||||
((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n|
|
||||
((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n|
|
||||
]);
|
||||
];
|
||||
|
||||
for ((direction, amount), coordinates) in moves_and_expected_coordinates {
|
||||
range = move_horizontally(slice, range, direction, amount, Movement::Move);
|
||||
@@ -398,11 +454,11 @@ mod test {
|
||||
let mut range = Range::point(position);
|
||||
let original_anchor = range.anchor;
|
||||
|
||||
let moves = IntoIter::new([
|
||||
let moves = [
|
||||
(Direction::Forward, 1usize),
|
||||
(Direction::Forward, 5usize),
|
||||
(Direction::Backward, 3usize),
|
||||
]);
|
||||
];
|
||||
|
||||
for (direction, amount) in moves {
|
||||
range = move_horizontally(slice, range, direction, amount, Movement::Extend);
|
||||
@@ -416,7 +472,7 @@ mod test {
|
||||
let slice = text.slice(..);
|
||||
let position = pos_at_coords(slice, (0, 0).into(), true);
|
||||
let mut range = Range::point(position);
|
||||
let moves_and_expected_coordinates = IntoIter::new([
|
||||
let moves_and_expected_coordinates = [
|
||||
((Direction::Forward, 1usize), (1, 0)),
|
||||
((Direction::Forward, 2usize), (3, 0)),
|
||||
((Direction::Forward, 1usize), (4, 0)),
|
||||
@@ -426,7 +482,7 @@ mod test {
|
||||
((Direction::Backward, 0usize), (4, 0)),
|
||||
((Direction::Forward, 5), (5, 0)),
|
||||
((Direction::Forward, 999usize), (5, 0)),
|
||||
]);
|
||||
];
|
||||
|
||||
for ((direction, amount), coordinates) in moves_and_expected_coordinates {
|
||||
range = move_vertically(slice, range, direction, amount, Movement::Move);
|
||||
@@ -446,7 +502,7 @@ mod test {
|
||||
H,
|
||||
V,
|
||||
}
|
||||
let moves_and_expected_coordinates = IntoIter::new([
|
||||
let moves_and_expected_coordinates = [
|
||||
// Places cursor at the end of line
|
||||
((Axis::H, Direction::Forward, 8usize), (0, 8)),
|
||||
// First descent preserves column as the target line is wider
|
||||
@@ -459,7 +515,7 @@ mod test {
|
||||
((Axis::V, Direction::Backward, 999usize), (0, 8)),
|
||||
((Axis::V, Direction::Forward, 4usize), (4, 8)),
|
||||
((Axis::V, Direction::Forward, 999usize), (5, 0)),
|
||||
]);
|
||||
];
|
||||
|
||||
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
|
||||
range = match axis {
|
||||
@@ -485,7 +541,7 @@ mod test {
|
||||
H,
|
||||
V,
|
||||
}
|
||||
let moves_and_expected_coordinates = IntoIter::new([
|
||||
let moves_and_expected_coordinates = [
|
||||
// Places cursor at the fourth kana.
|
||||
((Axis::H, Direction::Forward, 4), (0, 4)),
|
||||
// Descent places cursor at the 4th character.
|
||||
@@ -494,7 +550,7 @@ mod test {
|
||||
((Axis::H, Direction::Backward, 1usize), (1, 3)),
|
||||
// Jumping back up 1 line.
|
||||
((Axis::V, Direction::Backward, 1usize), (0, 3)),
|
||||
]);
|
||||
];
|
||||
|
||||
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
|
||||
range = match axis {
|
||||
@@ -526,7 +582,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_start_of_next_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
("Basic forward motion stops at the first space",
|
||||
vec![(1, Range::new(0, 0), Range::new(0, 6))]),
|
||||
(" Starting from a boundary advances the anchor",
|
||||
@@ -600,7 +656,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 0), Range::new(0, 6)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
@@ -612,7 +668,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_start_of_next_long_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
("Basic forward motion stops at the first space",
|
||||
vec![(1, Range::new(0, 0), Range::new(0, 6))]),
|
||||
(" Starting from a boundary advances the anchor",
|
||||
@@ -684,7 +740,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 0), Range::new(0, 8)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
@@ -696,7 +752,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_start_of_previous_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
("Basic backward motion from the middle of a word",
|
||||
vec![(1, Range::new(3, 3), Range::new(4, 0))]),
|
||||
|
||||
@@ -769,7 +825,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 6), Range::new(6, 0)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
@@ -781,7 +837,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_start_of_previous_long_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
(
|
||||
"Basic backward motion from the middle of a word",
|
||||
vec![(1, Range::new(3, 3), Range::new(4, 0))],
|
||||
@@ -866,7 +922,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 8), Range::new(8, 0)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
@@ -878,7 +934,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_end_of_next_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
("Basic forward motion from the start of a word to the end of it",
|
||||
vec![(1, Range::new(0, 0), Range::new(0, 5))]),
|
||||
("Basic forward motion from the end of a word to the end of the next",
|
||||
@@ -950,7 +1006,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 0), Range::new(0, 5)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
@@ -962,7 +1018,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_end_of_previous_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
("Basic backward motion from the middle of a word",
|
||||
vec![(1, Range::new(9, 9), Range::new(10, 5))]),
|
||||
("Starting from after boundary retreats the anchor",
|
||||
@@ -1032,7 +1088,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 10), Range::new(10, 4)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
@@ -1044,7 +1100,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_behaviour_when_moving_to_end_of_next_long_words() {
|
||||
let tests = array::IntoIter::new([
|
||||
let tests = [
|
||||
("Basic forward motion from the start of a word to the end of it",
|
||||
vec![(1, Range::new(0, 0), Range::new(0, 5))]),
|
||||
("Basic forward motion from the end of a word to the end of the next",
|
||||
@@ -1114,7 +1170,7 @@ mod test {
|
||||
vec![
|
||||
(1, Range::new(0, 0), Range::new(0, 7)),
|
||||
]),
|
||||
]);
|
||||
];
|
||||
|
||||
for (sample, scenario) in tests {
|
||||
for (count, begin, expected_end) in scenario.into_iter() {
|
||||
|
@@ -1,26 +1,72 @@
|
||||
use crate::{Range, RopeSlice, Selection, Syntax};
|
||||
use tree_sitter::Node;
|
||||
|
||||
// TODO: to contract_selection we'd need to store the previous ranges before expand.
|
||||
// Maybe just contract to the first child node?
|
||||
pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: &Selection) -> Selection {
|
||||
pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
|
||||
select_node_impl(syntax, text, selection, |descendant, from, to| {
|
||||
if descendant.start_byte() == from && descendant.end_byte() == to {
|
||||
descendant.parent()
|
||||
} else {
|
||||
Some(descendant)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
|
||||
select_node_impl(syntax, text, selection, |descendant, _from, _to| {
|
||||
descendant.child(0).or(Some(descendant))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn select_sibling<F>(
|
||||
syntax: &Syntax,
|
||||
text: RopeSlice,
|
||||
selection: Selection,
|
||||
sibling_fn: &F,
|
||||
) -> Selection
|
||||
where
|
||||
F: Fn(Node) -> Option<Node>,
|
||||
{
|
||||
select_node_impl(syntax, text, selection, |descendant, _from, _to| {
|
||||
find_sibling_recursive(descendant, sibling_fn)
|
||||
})
|
||||
}
|
||||
|
||||
fn find_sibling_recursive<F>(node: Node, sibling_fn: F) -> Option<Node>
|
||||
where
|
||||
F: Fn(Node) -> Option<Node>,
|
||||
{
|
||||
sibling_fn(node).or_else(|| {
|
||||
node.parent()
|
||||
.and_then(|node| find_sibling_recursive(node, sibling_fn))
|
||||
})
|
||||
}
|
||||
|
||||
fn select_node_impl<F>(
|
||||
syntax: &Syntax,
|
||||
text: RopeSlice,
|
||||
selection: Selection,
|
||||
select_fn: F,
|
||||
) -> Selection
|
||||
where
|
||||
F: Fn(Node, usize, usize) -> Option<Node>,
|
||||
{
|
||||
let tree = syntax.tree();
|
||||
|
||||
selection.clone().transform(|range| {
|
||||
selection.transform(|range| {
|
||||
let from = text.char_to_byte(range.from());
|
||||
let to = text.char_to_byte(range.to());
|
||||
|
||||
// find parent of a descendant that matches the range
|
||||
let parent = match tree
|
||||
let node = match tree
|
||||
.root_node()
|
||||
.descendant_for_byte_range(from, to)
|
||||
.and_then(|node| node.parent())
|
||||
.and_then(|node| select_fn(node, from, to))
|
||||
{
|
||||
Some(parent) => parent,
|
||||
Some(node) => node,
|
||||
None => return range,
|
||||
};
|
||||
|
||||
let from = text.byte_to_char(parent.start_byte());
|
||||
let to = text.byte_to_char(parent.end_byte());
|
||||
let from = text.byte_to_char(node.start_byte());
|
||||
let to = text.byte_to_char(node.end_byte());
|
||||
|
||||
if range.head < range.anchor {
|
||||
Range::new(to, from)
|
||||
|
@@ -1,9 +1,10 @@
|
||||
use etcetera::home_dir;
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
|
||||
/// Replaces users home directory from `path` with tilde `~` if the directory
|
||||
/// is available, otherwise returns the path unchanged.
|
||||
pub fn fold_home_dir(path: &Path) -> PathBuf {
|
||||
if let Ok(home) = super::home_dir() {
|
||||
if let Ok(home) = home_dir() {
|
||||
if path.starts_with(&home) {
|
||||
// it's ok to unwrap, the path starts with home dir
|
||||
return PathBuf::from("~").join(path.strip_prefix(&home).unwrap());
|
||||
@@ -20,7 +21,7 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
|
||||
let mut components = path.components().peekable();
|
||||
if let Some(Component::Normal(c)) = components.peek() {
|
||||
if c == &"~" {
|
||||
if let Ok(home) = super::home_dir() {
|
||||
if let Ok(home) = home_dir() {
|
||||
// it's ok to unwrap, the path starts with `~`
|
||||
return home.join(path.strip_prefix("~").unwrap());
|
||||
}
|
||||
@@ -40,7 +41,6 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
|
||||
/// needs to improve on.
|
||||
/// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81>
|
||||
pub fn get_normalized_path(path: &Path) -> PathBuf {
|
||||
let path = expand_tilde(path);
|
||||
let mut components = path.components().peekable();
|
||||
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
|
||||
components.next();
|
||||
@@ -72,10 +72,11 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
|
||||
/// This function is used instead of `std::fs::canonicalize` because we don't want to verify
|
||||
/// here if the path exists, just normalize it's components.
|
||||
pub fn get_canonicalized_path(path: &Path) -> std::io::Result<PathBuf> {
|
||||
let path = expand_tilde(path);
|
||||
let path = if path.is_relative() {
|
||||
std::env::current_dir().map(|current_dir| current_dir.join(path))?
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
path
|
||||
};
|
||||
|
||||
Ok(get_normalized_path(path.as_path()))
|
||||
|
@@ -1,6 +1,8 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::{
|
||||
chars::char_is_line_ending,
|
||||
graphemes::{ensure_grapheme_boundary_prev, RopeGraphemes},
|
||||
graphemes::{ensure_grapheme_boundary_prev, grapheme_width, RopeGraphemes},
|
||||
line_ending::line_end_char_index,
|
||||
RopeSlice,
|
||||
};
|
||||
@@ -54,11 +56,8 @@ impl From<Position> for tree_sitter::Point {
|
||||
}
|
||||
/// Convert a character index to (line, column) coordinates.
|
||||
///
|
||||
/// TODO: this should be split into two methods: one for visual
|
||||
/// row/column, and one for "objective" row/column (possibly with
|
||||
/// the column specified in `char`s). The former would be used
|
||||
/// for cursor movement, and the latter would be used for e.g. the
|
||||
/// row:column display in the status line.
|
||||
/// column in `char` count which can be used for row:column display in
|
||||
/// status line. See [`visual_coords_at_pos`] for a visual one.
|
||||
pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position {
|
||||
let line = text.char_to_line(pos);
|
||||
|
||||
@@ -69,6 +68,31 @@ pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position {
|
||||
Position::new(line, col)
|
||||
}
|
||||
|
||||
/// Convert a character index to (line, column) coordinates visually.
|
||||
///
|
||||
/// Takes \t, double-width characters (CJK) into account as well as text
|
||||
/// not in the document in the future.
|
||||
/// See [`coords_at_pos`] for an "objective" one.
|
||||
pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Position {
|
||||
let line = text.char_to_line(pos);
|
||||
|
||||
let line_start = text.line_to_char(line);
|
||||
let pos = ensure_grapheme_boundary_prev(text, pos);
|
||||
|
||||
let mut col = 0;
|
||||
|
||||
for grapheme in RopeGraphemes::new(text.slice(line_start..pos)) {
|
||||
if grapheme == "\t" {
|
||||
col += tab_width - (col % tab_width);
|
||||
} else {
|
||||
let grapheme = Cow::from(grapheme);
|
||||
col += grapheme_width(&grapheme);
|
||||
}
|
||||
}
|
||||
|
||||
Position::new(line, col)
|
||||
}
|
||||
|
||||
/// Convert (line, column) coordinates to a character index.
|
||||
///
|
||||
/// If the `line` coordinate is beyond the end of the file, the EOF
|
||||
@@ -89,7 +113,10 @@ pub fn coords_at_pos(text: RopeSlice, pos: usize) -> Position {
|
||||
/// TODO: this should be changed to work in terms of visual row/column, not
|
||||
/// graphemes.
|
||||
pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize {
|
||||
let Position { row, col } = coords;
|
||||
let Position { mut row, col } = coords;
|
||||
if limit_before_line_ending {
|
||||
row = row.min(text.len_lines() - 1);
|
||||
};
|
||||
let line_start = text.line_to_char(row);
|
||||
let line_end = if limit_before_line_ending {
|
||||
line_end_char_index(&text, row)
|
||||
@@ -130,7 +157,6 @@ mod test {
|
||||
assert_eq!(coords_at_pos(slice, 10), (1, 4).into()); // position on d
|
||||
|
||||
// Test with wide characters.
|
||||
// TODO: account for character width.
|
||||
let text = Rope::from("今日はいい\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(coords_at_pos(slice, 0), (0, 0).into());
|
||||
@@ -151,7 +177,6 @@ mod test {
|
||||
assert_eq!(coords_at_pos(slice, 9), (1, 0).into());
|
||||
|
||||
// Test with wide-character grapheme clusters.
|
||||
// TODO: account for character width.
|
||||
let text = Rope::from("किमपि\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(coords_at_pos(slice, 0), (0, 0).into());
|
||||
@@ -161,7 +186,6 @@ mod test {
|
||||
assert_eq!(coords_at_pos(slice, 6), (1, 0).into());
|
||||
|
||||
// Test with tabs.
|
||||
// Todo: account for tab stops.
|
||||
let text = Rope::from("\tHello\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(coords_at_pos(slice, 0), (0, 0).into());
|
||||
@@ -169,6 +193,54 @@ mod test {
|
||||
assert_eq!(coords_at_pos(slice, 2), (0, 2).into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_visual_coords_at_pos() {
|
||||
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 5, 8), (0, 5).into()); // position on \n
|
||||
assert_eq!(visual_coords_at_pos(slice, 6, 8), (1, 0).into()); // position on w
|
||||
assert_eq!(visual_coords_at_pos(slice, 7, 8), (1, 1).into()); // position on o
|
||||
assert_eq!(visual_coords_at_pos(slice, 10, 8), (1, 4).into()); // position on d
|
||||
|
||||
// Test with wide characters.
|
||||
let text = Rope::from("今日はいい\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 1, 8), (0, 2).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 4).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 3, 8), (0, 6).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 4, 8), (0, 8).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 5, 8), (0, 10).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 6, 8), (1, 0).into());
|
||||
|
||||
// Test with grapheme clusters.
|
||||
let text = Rope::from("a̐éö̲\r\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 1).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 4, 8), (0, 2).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 7, 8), (0, 3).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 9, 8), (1, 0).into());
|
||||
|
||||
// Test with wide-character grapheme clusters.
|
||||
// TODO: account for cluster.
|
||||
let text = Rope::from("किमपि\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 2).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 3, 8), (0, 3).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 5, 8), (0, 5).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 6, 8), (1, 0).into());
|
||||
|
||||
// Test with tabs.
|
||||
let text = Rope::from("\tHello\n");
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(visual_coords_at_pos(slice, 0, 8), (0, 0).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 1, 8), (0, 8).into());
|
||||
assert_eq!(visual_coords_at_pos(slice, 2, 8), (0, 9).into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pos_at_coords() {
|
||||
let text = Rope::from("ḧëḷḷö\nẅöṛḷḋ");
|
||||
@@ -225,5 +297,12 @@ mod test {
|
||||
assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0);
|
||||
assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 1);
|
||||
assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 2);
|
||||
|
||||
// Test out of bounds.
|
||||
let text = Rope::new();
|
||||
let slice = text.slice(..);
|
||||
assert_eq!(pos_at_coords(slice, (10, 0).into(), true), 0);
|
||||
assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0);
|
||||
assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0);
|
||||
}
|
||||
}
|
||||
|
@@ -15,7 +15,11 @@ impl Register {
|
||||
}
|
||||
|
||||
pub fn new_with_values(name: char, values: Vec<String>) -> Self {
|
||||
Self { name, values }
|
||||
if name == '_' {
|
||||
Self::new(name)
|
||||
} else {
|
||||
Self { name, values }
|
||||
}
|
||||
}
|
||||
|
||||
pub const fn name(&self) -> char {
|
||||
@@ -27,11 +31,15 @@ impl Register {
|
||||
}
|
||||
|
||||
pub fn write(&mut self, values: Vec<String>) {
|
||||
self.values = values;
|
||||
if self.name != '_' {
|
||||
self.values = values;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push(&mut self, value: String) {
|
||||
self.values.push(value);
|
||||
if self.name != '_' {
|
||||
self.values.push(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,4 +68,8 @@ impl Registers {
|
||||
pub fn read(&self, name: char) -> Option<&[String]> {
|
||||
self.get(name).map(|reg| reg.read())
|
||||
}
|
||||
|
||||
pub fn inner(&self) -> &HashMap<char, Register> {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
@@ -7,6 +7,7 @@ use crate::{
|
||||
ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary,
|
||||
prev_grapheme_boundary,
|
||||
},
|
||||
movement::Direction,
|
||||
Assoc, ChangeSet, RopeSlice,
|
||||
};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
@@ -82,6 +83,13 @@ impl Range {
|
||||
std::cmp::max(self.anchor, self.head)
|
||||
}
|
||||
|
||||
/// Total length of the range.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn len(&self) -> usize {
|
||||
self.to() - self.from()
|
||||
}
|
||||
|
||||
/// The (inclusive) range of lines that the range overlaps.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
@@ -102,6 +110,27 @@ impl Range {
|
||||
self.anchor == self.head
|
||||
}
|
||||
|
||||
/// `Direction::Backward` when head < anchor.
|
||||
/// `Direction::Backward` otherwise.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn direction(&self) -> Direction {
|
||||
if self.head < self.anchor {
|
||||
Direction::Backward
|
||||
} else {
|
||||
Direction::Forward
|
||||
}
|
||||
}
|
||||
|
||||
// flips the direction of the selection
|
||||
pub fn flip(&self) -> Self {
|
||||
Self {
|
||||
anchor: self.head,
|
||||
head: self.anchor,
|
||||
horiz: self.horiz,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check two ranges for overlap.
|
||||
#[must_use]
|
||||
pub fn overlaps(&self, other: &Self) -> bool {
|
||||
@@ -111,6 +140,11 @@ impl Range {
|
||||
self.from() == other.from() || (self.to() > other.from() && other.to() > self.from())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn contains_range(&self, other: &Self) -> bool {
|
||||
self.from() <= other.from() && self.to() >= other.to()
|
||||
}
|
||||
|
||||
pub fn contains(&self, pos: usize) -> bool {
|
||||
self.from() <= pos && pos < self.to()
|
||||
}
|
||||
@@ -308,10 +342,10 @@ impl Range {
|
||||
}
|
||||
|
||||
impl From<(usize, usize)> for Range {
|
||||
fn from(tuple: (usize, usize)) -> Self {
|
||||
fn from((anchor, head): (usize, usize)) -> Self {
|
||||
Self {
|
||||
anchor: tuple.0,
|
||||
head: tuple.1,
|
||||
anchor,
|
||||
head,
|
||||
horiz: None,
|
||||
}
|
||||
}
|
||||
@@ -360,8 +394,13 @@ impl Selection {
|
||||
self.normalize()
|
||||
}
|
||||
|
||||
/// Adds a new range to the selection and makes it the primary range.
|
||||
/// Removes a range from the selection.
|
||||
pub fn remove(mut self, index: usize) -> Self {
|
||||
assert!(
|
||||
self.ranges.len() > 1,
|
||||
"can't remove the last range from a selection!"
|
||||
);
|
||||
|
||||
self.ranges.remove(index);
|
||||
if index < self.primary_index || self.primary_index == self.ranges.len() {
|
||||
self.primary_index -= 1;
|
||||
@@ -369,6 +408,12 @@ impl Selection {
|
||||
self
|
||||
}
|
||||
|
||||
/// Replace a range in the selection with a new range.
|
||||
pub fn replace(mut self, index: usize, range: Range) -> Self {
|
||||
self.ranges[index] = range;
|
||||
self.normalize()
|
||||
}
|
||||
|
||||
/// Map selections over a set of changes. Useful for adjusting the selection position after
|
||||
/// applying changes to a document.
|
||||
pub fn map(self, changes: &ChangeSet) -> Self {
|
||||
@@ -504,6 +549,39 @@ impl Selection {
|
||||
pub fn len(&self) -> usize {
|
||||
self.ranges.len()
|
||||
}
|
||||
|
||||
// returns true if self ⊇ other
|
||||
pub fn contains(&self, other: &Selection) -> bool {
|
||||
// can't contain other if it is larger
|
||||
if other.len() > self.len() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let (mut iter_self, mut iter_other) = (self.iter(), other.iter());
|
||||
let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next());
|
||||
|
||||
loop {
|
||||
match (ele_self, ele_other) {
|
||||
(Some(ra), Some(rb)) => {
|
||||
if !ra.contains_range(rb) {
|
||||
// `self` doesn't contain next element from `other`, advance `self`, we need to match all from `other`
|
||||
ele_self = iter_self.next();
|
||||
} else {
|
||||
// matched element from `other`, advance `other`
|
||||
ele_other = iter_other.next();
|
||||
};
|
||||
}
|
||||
(None, Some(_)) => {
|
||||
// exhausted `self`, we can't match the reminder of `other`
|
||||
return false;
|
||||
}
|
||||
(_, None) => {
|
||||
// no elements from `other` left to match, `self` contains `other`
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Selection {
|
||||
@@ -517,14 +595,15 @@ impl<'a> IntoIterator for &'a Selection {
|
||||
|
||||
// TODO: checkSelection -> check if valid for doc length && sorted
|
||||
|
||||
pub fn keep_matches(
|
||||
pub fn keep_or_remove_matches(
|
||||
text: RopeSlice,
|
||||
selection: &Selection,
|
||||
regex: &crate::regex::Regex,
|
||||
remove: bool,
|
||||
) -> Option<Selection> {
|
||||
let result: SmallVec<_> = selection
|
||||
.iter()
|
||||
.filter(|range| regex.is_match(&range.fragment(text)))
|
||||
.filter(|range| regex.is_match(&range.fragment(text)) ^ remove)
|
||||
.copied()
|
||||
.collect();
|
||||
|
||||
@@ -687,16 +766,16 @@ mod test {
|
||||
fn test_contains() {
|
||||
let range = Range::new(10, 12);
|
||||
|
||||
assert_eq!(range.contains(9), false);
|
||||
assert_eq!(range.contains(10), true);
|
||||
assert_eq!(range.contains(11), true);
|
||||
assert_eq!(range.contains(12), false);
|
||||
assert_eq!(range.contains(13), false);
|
||||
assert!(!range.contains(9));
|
||||
assert!(range.contains(10));
|
||||
assert!(range.contains(11));
|
||||
assert!(!range.contains(12));
|
||||
assert!(!range.contains(13));
|
||||
|
||||
let range = Range::new(9, 6);
|
||||
assert_eq!(range.contains(9), false);
|
||||
assert_eq!(range.contains(7), true);
|
||||
assert_eq!(range.contains(6), true);
|
||||
assert!(!range.contains(9));
|
||||
assert!(range.contains(7));
|
||||
assert!(range.contains(6));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -941,4 +1020,30 @@ mod test {
|
||||
&["", "abcd", "efg", "rs", "xyz"]
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn test_selection_contains() {
|
||||
fn contains(a: Vec<(usize, usize)>, b: Vec<(usize, usize)>) -> bool {
|
||||
let sela = Selection::new(a.iter().map(|a| Range::new(a.0, a.1)).collect(), 0);
|
||||
let selb = Selection::new(b.iter().map(|b| Range::new(b.0, b.1)).collect(), 0);
|
||||
sela.contains(&selb)
|
||||
}
|
||||
|
||||
// exact match
|
||||
assert!(contains(vec!((1, 1)), vec!((1, 1))));
|
||||
|
||||
// larger set contains smaller
|
||||
assert!(contains(vec!((1, 1), (2, 2), (3, 3)), vec!((2, 2))));
|
||||
|
||||
// multiple matches
|
||||
assert!(contains(vec!((1, 1), (2, 2)), vec!((1, 1), (2, 2))));
|
||||
|
||||
// smaller set can't contain bigger
|
||||
assert!(!contains(vec!((1, 1)), vec!((1, 1), (2, 2))));
|
||||
|
||||
assert!(contains(
|
||||
vec!((1, 1), (2, 4), (5, 6), (7, 9), (10, 13)),
|
||||
vec!((3, 4), (7, 9))
|
||||
));
|
||||
assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6))));
|
||||
}
|
||||
}
|
||||
|
164
helix-core/src/shellwords.rs
Normal file
164
helix-core/src/shellwords.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
/// Get the vec of escaped / quoted / doublequoted filenames from the input str
|
||||
pub fn shellwords(input: &str) -> Vec<Cow<'_, str>> {
|
||||
enum State {
|
||||
Normal,
|
||||
NormalEscaped,
|
||||
Quoted,
|
||||
QuoteEscaped,
|
||||
Dquoted,
|
||||
DquoteEscaped,
|
||||
}
|
||||
|
||||
use State::*;
|
||||
|
||||
let mut state = Normal;
|
||||
let mut args: Vec<Cow<str>> = Vec::new();
|
||||
let mut escaped = String::with_capacity(input.len());
|
||||
|
||||
let mut start = 0;
|
||||
let mut end = 0;
|
||||
|
||||
for (i, c) in input.char_indices() {
|
||||
state = match state {
|
||||
Normal => match c {
|
||||
'\\' => {
|
||||
escaped.push_str(&input[start..i]);
|
||||
start = i + 1;
|
||||
NormalEscaped
|
||||
}
|
||||
'"' => {
|
||||
end = i;
|
||||
Dquoted
|
||||
}
|
||||
'\'' => {
|
||||
end = i;
|
||||
Quoted
|
||||
}
|
||||
c if c.is_ascii_whitespace() => {
|
||||
end = i;
|
||||
Normal
|
||||
}
|
||||
_ => Normal,
|
||||
},
|
||||
NormalEscaped => Normal,
|
||||
Quoted => match c {
|
||||
'\\' => {
|
||||
escaped.push_str(&input[start..i]);
|
||||
start = i + 1;
|
||||
QuoteEscaped
|
||||
}
|
||||
'\'' => {
|
||||
end = i;
|
||||
Normal
|
||||
}
|
||||
_ => Quoted,
|
||||
},
|
||||
QuoteEscaped => Quoted,
|
||||
Dquoted => match c {
|
||||
'\\' => {
|
||||
escaped.push_str(&input[start..i]);
|
||||
start = i + 1;
|
||||
DquoteEscaped
|
||||
}
|
||||
'"' => {
|
||||
end = i;
|
||||
Normal
|
||||
}
|
||||
_ => Dquoted,
|
||||
},
|
||||
DquoteEscaped => Dquoted,
|
||||
};
|
||||
|
||||
if i >= input.len() - 1 && end == 0 {
|
||||
end = i + 1;
|
||||
}
|
||||
|
||||
if end > 0 {
|
||||
let esc_trim = escaped.trim();
|
||||
let inp = &input[start..end];
|
||||
|
||||
if !(esc_trim.is_empty() && inp.trim().is_empty()) {
|
||||
if esc_trim.is_empty() {
|
||||
args.push(inp.into());
|
||||
} else {
|
||||
args.push([escaped, inp.into()].concat().into());
|
||||
escaped = "".to_string();
|
||||
}
|
||||
}
|
||||
start = i + 1;
|
||||
end = 0;
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_normal() {
|
||||
let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#;
|
||||
let result = shellwords(input);
|
||||
let expected = vec![
|
||||
Cow::from(":o"),
|
||||
Cow::from("single_word"),
|
||||
Cow::from("twó"),
|
||||
Cow::from("wörds"),
|
||||
Cow::from(r#"three "with escaping\"#),
|
||||
];
|
||||
// TODO test is_owned and is_borrowed, once they get stabilized.
|
||||
assert_eq!(expected, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_quoted() {
|
||||
let quoted =
|
||||
r#":o 'single_word' 'twó wörds' '' ' ''\three\' \"with\ escaping\\' 'quote incomplete"#;
|
||||
let result = shellwords(quoted);
|
||||
let expected = vec![
|
||||
Cow::from(":o"),
|
||||
Cow::from("single_word"),
|
||||
Cow::from("twó wörds"),
|
||||
Cow::from(r#"three' "with escaping\"#),
|
||||
Cow::from("quote incomplete"),
|
||||
];
|
||||
assert_eq!(expected, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_dquoted() {
|
||||
let dquoted = r#":o "single_word" "twó wörds" "" " ""\three\' \"with\ escaping\\" "dquote incomplete"#;
|
||||
let result = shellwords(dquoted);
|
||||
let expected = vec![
|
||||
Cow::from(":o"),
|
||||
Cow::from("single_word"),
|
||||
Cow::from("twó wörds"),
|
||||
Cow::from(r#"three' "with escaping\"#),
|
||||
Cow::from("dquote incomplete"),
|
||||
];
|
||||
assert_eq!(expected, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mixed() {
|
||||
let dquoted = r#":o single_word 'twó wörds' "\three\' \"with\ escaping\\""no space before"'and after' $#%^@ "%^&(%^" ')(*&^%''a\\\\\b' '"#;
|
||||
let result = shellwords(dquoted);
|
||||
let expected = vec![
|
||||
Cow::from(":o"),
|
||||
Cow::from("single_word"),
|
||||
Cow::from("twó wörds"),
|
||||
Cow::from("three' \"with escaping\\"),
|
||||
Cow::from("no space before"),
|
||||
Cow::from("and after"),
|
||||
Cow::from("$#%^@"),
|
||||
Cow::from("%^&(%^"),
|
||||
Cow::from(")(*&^%"),
|
||||
Cow::from(r#"a\\b"#),
|
||||
//last ' just changes to quoted but since we dont have anything after it, it should be ignored
|
||||
];
|
||||
assert_eq!(expected, result);
|
||||
}
|
||||
}
|
@@ -1,4 +1,6 @@
|
||||
use crate::{search, Selection};
|
||||
use std::fmt::Display;
|
||||
|
||||
use crate::{search, Range, Selection};
|
||||
use ropey::RopeSlice;
|
||||
|
||||
pub const PAIRS: &[(char, char)] = &[
|
||||
@@ -11,6 +13,27 @@ pub const PAIRS: &[(char, char)] = &[
|
||||
('(', ')'),
|
||||
];
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum Error {
|
||||
PairNotFound,
|
||||
CursorOverlap,
|
||||
RangeExceedsText,
|
||||
CursorOnAmbiguousPair,
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(match *self {
|
||||
Error::PairNotFound => "Surround pair not found around all cursors",
|
||||
Error::CursorOverlap => "Cursors overlap for a single surround pair range",
|
||||
Error::RangeExceedsText => "Cursor range exceeds text length",
|
||||
Error::CursorOnAmbiguousPair => "Cursor on ambiguous surround pair",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
/// Given any char in [PAIRS], return the open and closing chars. If not found in
|
||||
/// [PAIRS] return (ch, ch).
|
||||
///
|
||||
@@ -35,39 +58,38 @@ pub fn get_pair(ch: char) -> (char, char) {
|
||||
pub fn find_nth_pairs_pos(
|
||||
text: RopeSlice,
|
||||
ch: char,
|
||||
pos: usize,
|
||||
range: Range,
|
||||
n: usize,
|
||||
) -> Option<(usize, usize)> {
|
||||
) -> Result<(usize, usize)> {
|
||||
if text.len_chars() < 2 {
|
||||
return Err(Error::PairNotFound);
|
||||
}
|
||||
if range.to() >= text.len_chars() {
|
||||
return Err(Error::RangeExceedsText);
|
||||
}
|
||||
|
||||
let (open, close) = get_pair(ch);
|
||||
let pos = range.cursor(text);
|
||||
|
||||
if text.len_chars() < 2 || pos >= text.len_chars() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if open == close {
|
||||
let (open, close) = if open == close {
|
||||
if Some(open) == text.get_char(pos) {
|
||||
// Special case: cursor is directly on a matching char.
|
||||
match pos {
|
||||
0 => Some((pos, search::find_nth_next(text, close, pos + 1, n)?)),
|
||||
_ if (pos + 1) == text.len_chars() => {
|
||||
Some((search::find_nth_prev(text, open, pos, n)?, pos))
|
||||
}
|
||||
// We return no match because there's no way to know which
|
||||
// side of the char we should be searching on.
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
Some((
|
||||
search::find_nth_prev(text, open, pos, n)?,
|
||||
search::find_nth_next(text, close, pos, n)?,
|
||||
))
|
||||
// Cursor is directly on match char. We return no match
|
||||
// because there's no way to know which side of the char
|
||||
// we should be searching on.
|
||||
return Err(Error::CursorOnAmbiguousPair);
|
||||
}
|
||||
(
|
||||
search::find_nth_prev(text, open, pos, n),
|
||||
search::find_nth_next(text, close, pos, n),
|
||||
)
|
||||
} else {
|
||||
Some((
|
||||
find_nth_open_pair(text, open, close, pos, n)?,
|
||||
find_nth_close_pair(text, open, close, pos, n)?,
|
||||
))
|
||||
}
|
||||
(
|
||||
find_nth_open_pair(text, open, close, pos, n),
|
||||
find_nth_close_pair(text, open, close, pos, n),
|
||||
)
|
||||
};
|
||||
|
||||
Option::zip(open, close).ok_or(Error::PairNotFound)
|
||||
}
|
||||
|
||||
fn find_nth_open_pair(
|
||||
@@ -157,17 +179,17 @@ pub fn get_surround_pos(
|
||||
selection: &Selection,
|
||||
ch: char,
|
||||
skip: usize,
|
||||
) -> Option<Vec<usize>> {
|
||||
) -> Result<Vec<usize>> {
|
||||
let mut change_pos = Vec::new();
|
||||
|
||||
for range in selection {
|
||||
let (open_pos, close_pos) = find_nth_pairs_pos(text, ch, range.head, skip)?;
|
||||
for &range in selection {
|
||||
let (open_pos, close_pos) = find_nth_pairs_pos(text, ch, range, skip)?;
|
||||
if change_pos.contains(&open_pos) || change_pos.contains(&close_pos) {
|
||||
return None;
|
||||
return Err(Error::CursorOverlap);
|
||||
}
|
||||
change_pos.extend_from_slice(&[open_pos, close_pos]);
|
||||
}
|
||||
Some(change_pos)
|
||||
Ok(change_pos)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -178,67 +200,92 @@ mod test {
|
||||
use ropey::Rope;
|
||||
use smallvec::SmallVec;
|
||||
|
||||
#[test]
|
||||
fn test_find_nth_pairs_pos() {
|
||||
let doc = Rope::from("some (text) here");
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn check_find_nth_pair_pos(
|
||||
text: &str,
|
||||
cases: Vec<(usize, char, usize, Result<(usize, usize)>)>,
|
||||
) {
|
||||
let doc = Rope::from(text);
|
||||
let slice = doc.slice(..);
|
||||
|
||||
// cursor on [t]ext
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 6, 1), Some((5, 10)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, ')', 6, 1), Some((5, 10)));
|
||||
// cursor on so[m]e
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 2, 1), None);
|
||||
// cursor on bracket itself
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 5, 1), Some((5, 10)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 10, 1), Some((5, 10)));
|
||||
for (cursor_pos, ch, n, expected_range) in cases {
|
||||
let range = find_nth_pairs_pos(slice, ch, (cursor_pos, cursor_pos + 1).into(), n);
|
||||
assert_eq!(
|
||||
range, expected_range,
|
||||
"Expected {:?}, got {:?}",
|
||||
expected_range, range
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_nth_pairs_pos() {
|
||||
check_find_nth_pair_pos(
|
||||
"some (text) here",
|
||||
vec![
|
||||
// cursor on [t]ext
|
||||
(6, '(', 1, Ok((5, 10))),
|
||||
(6, ')', 1, Ok((5, 10))),
|
||||
// cursor on so[m]e
|
||||
(2, '(', 1, Err(Error::PairNotFound)),
|
||||
// cursor on bracket itself
|
||||
(5, '(', 1, Ok((5, 10))),
|
||||
(10, '(', 1, Ok((5, 10))),
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_nth_pairs_pos_skip() {
|
||||
let doc = Rope::from("(so (many (good) text) here)");
|
||||
let slice = doc.slice(..);
|
||||
|
||||
// cursor on go[o]d
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 13, 1), Some((10, 15)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 13, 2), Some((4, 21)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 13, 3), Some((0, 27)));
|
||||
check_find_nth_pair_pos(
|
||||
"(so (many (good) text) here)",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(13, '(', 1, Ok((10, 15))),
|
||||
(13, '(', 2, Ok((4, 21))),
|
||||
(13, '(', 3, Ok((0, 27))),
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_nth_pairs_pos_same() {
|
||||
let doc = Rope::from("'so 'many 'good' text' here'");
|
||||
let slice = doc.slice(..);
|
||||
|
||||
// cursor on go[o]d
|
||||
assert_eq!(find_nth_pairs_pos(slice, '\'', 13, 1), Some((10, 15)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '\'', 13, 2), Some((4, 21)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '\'', 13, 3), Some((0, 27)));
|
||||
// cursor on the quotes
|
||||
assert_eq!(find_nth_pairs_pos(slice, '\'', 10, 1), None);
|
||||
// this is the best we can do since opening and closing pairs are same
|
||||
assert_eq!(find_nth_pairs_pos(slice, '\'', 0, 1), Some((0, 4)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '\'', 27, 1), Some((21, 27)));
|
||||
check_find_nth_pair_pos(
|
||||
"'so 'many 'good' text' here'",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(13, '\'', 1, Ok((10, 15))),
|
||||
(13, '\'', 2, Ok((4, 21))),
|
||||
(13, '\'', 3, Ok((0, 27))),
|
||||
// cursor on the quotes
|
||||
(10, '\'', 1, Err(Error::CursorOnAmbiguousPair)),
|
||||
],
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_nth_pairs_pos_step() {
|
||||
let doc = Rope::from("((so)((many) good (text))(here))");
|
||||
let slice = doc.slice(..);
|
||||
|
||||
// cursor on go[o]d
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 15, 1), Some((5, 24)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 15, 2), Some((0, 31)));
|
||||
check_find_nth_pair_pos(
|
||||
"((so)((many) good (text))(here))",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(15, '(', 1, Ok((5, 24))),
|
||||
(15, '(', 2, Ok((0, 31))),
|
||||
],
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_find_nth_pairs_pos_mixed() {
|
||||
let doc = Rope::from("(so [many {good} text] here)");
|
||||
let slice = doc.slice(..);
|
||||
|
||||
// cursor on go[o]d
|
||||
assert_eq!(find_nth_pairs_pos(slice, '{', 13, 1), Some((10, 15)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '[', 13, 1), Some((4, 21)));
|
||||
assert_eq!(find_nth_pairs_pos(slice, '(', 13, 1), Some((0, 27)));
|
||||
check_find_nth_pair_pos(
|
||||
"(so [many {good} text] here)",
|
||||
vec![
|
||||
// cursor on go[o]d
|
||||
(13, '{', 1, Ok((10, 15))),
|
||||
(13, '[', 1, Ok((4, 21))),
|
||||
(13, '(', 1, Ok((0, 27))),
|
||||
],
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -266,11 +313,10 @@ mod test {
|
||||
|
||||
let selection =
|
||||
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(9)]), 0);
|
||||
|
||||
// cursor on s[o]me, c[h]ars
|
||||
assert_eq!(
|
||||
get_surround_pos(slice, &selection, '(', 1),
|
||||
None // different surround chars
|
||||
Err(Error::PairNotFound) // different surround chars
|
||||
);
|
||||
|
||||
let selection = Selection::new(
|
||||
@@ -280,7 +326,15 @@ mod test {
|
||||
// cursor on [x]x, newli[n]e
|
||||
assert_eq!(
|
||||
get_surround_pos(slice, &selection, '(', 1),
|
||||
None // overlapping surround chars
|
||||
Err(Error::PairNotFound) // overlapping surround chars
|
||||
);
|
||||
|
||||
let selection =
|
||||
Selection::new(SmallVec::from_slice(&[Range::point(2), Range::point(3)]), 0);
|
||||
// cursor on s[o][m]e
|
||||
assert_eq!(
|
||||
get_surround_pos(slice, &selection, '[', 1),
|
||||
Err(Error::CursorOverlap)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -10,7 +10,7 @@ use crate::surround;
|
||||
use crate::syntax::LanguageConfiguration;
|
||||
use crate::Range;
|
||||
|
||||
fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction) -> usize {
|
||||
fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction, long: bool) -> usize {
|
||||
use CharCategory::{Eol, Whitespace};
|
||||
|
||||
let iter = match direction {
|
||||
@@ -33,7 +33,7 @@ fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction) ->
|
||||
match categorize_char(ch) {
|
||||
Eol | Whitespace => return pos,
|
||||
category => {
|
||||
if category != prev_category && pos != 0 && pos != slice.len_chars() {
|
||||
if !long && category != prev_category && pos != 0 && pos != slice.len_chars() {
|
||||
return pos;
|
||||
} else {
|
||||
match direction {
|
||||
@@ -53,6 +53,8 @@ fn find_word_boundary(slice: RopeSlice, mut pos: usize, direction: Direction) ->
|
||||
pub enum TextObject {
|
||||
Around,
|
||||
Inside,
|
||||
/// Used for moving between objects.
|
||||
Movement,
|
||||
}
|
||||
|
||||
impl Display for TextObject {
|
||||
@@ -60,6 +62,7 @@ impl Display for TextObject {
|
||||
f.write_str(match self {
|
||||
Self::Around => "around",
|
||||
Self::Inside => "inside",
|
||||
Self::Movement => "movement",
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -70,13 +73,14 @@ pub fn textobject_word(
|
||||
range: Range,
|
||||
textobject: TextObject,
|
||||
_count: usize,
|
||||
long: bool,
|
||||
) -> Range {
|
||||
let pos = range.cursor(slice);
|
||||
|
||||
let word_start = find_word_boundary(slice, pos, Direction::Backward);
|
||||
let word_start = find_word_boundary(slice, pos, Direction::Backward, long);
|
||||
let word_end = match slice.get_char(pos).map(categorize_char) {
|
||||
None | Some(CharCategory::Whitespace | CharCategory::Eol) => pos,
|
||||
_ => find_word_boundary(slice, pos + 1, Direction::Forward),
|
||||
_ => find_word_boundary(slice, pos + 1, Direction::Forward, long),
|
||||
};
|
||||
|
||||
// Special case.
|
||||
@@ -103,6 +107,7 @@ pub fn textobject_word(
|
||||
Range::new(word_start - whitespace_count_left, word_end)
|
||||
}
|
||||
}
|
||||
TextObject::Movement => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -113,10 +118,11 @@ pub fn textobject_surround(
|
||||
ch: char,
|
||||
count: usize,
|
||||
) -> Range {
|
||||
surround::find_nth_pairs_pos(slice, ch, range.head, count)
|
||||
surround::find_nth_pairs_pos(slice, ch, range, count)
|
||||
.map(|(anchor, head)| match textobject {
|
||||
TextObject::Inside => Range::new(next_grapheme_boundary(slice, anchor), head),
|
||||
TextObject::Around => Range::new(anchor, next_grapheme_boundary(slice, head)),
|
||||
TextObject::Movement => unreachable!(),
|
||||
})
|
||||
.unwrap_or(range)
|
||||
}
|
||||
@@ -169,7 +175,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_textobject_word() {
|
||||
// (text, [(cursor position, textobject, final range), ...])
|
||||
// (text, [(char position, textobject, final range), ...])
|
||||
let tests = &[
|
||||
(
|
||||
"cursor at beginning of doc",
|
||||
@@ -268,7 +274,9 @@ mod test {
|
||||
let slice = doc.slice(..);
|
||||
for &case in scenario {
|
||||
let (pos, objtype, expected_range) = case;
|
||||
let result = textobject_word(slice, Range::point(pos), objtype, 1);
|
||||
// cursor is a single width selection
|
||||
let range = Range::new(pos, pos + 1);
|
||||
let result = textobject_word(slice, range, objtype, 1, false);
|
||||
assert_eq!(
|
||||
result,
|
||||
expected_range.into(),
|
||||
@@ -282,7 +290,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_textobject_surround() {
|
||||
// (text, [(cursor position, textobject, final range, count), ...])
|
||||
// (text, [(cursor position, textobject, final range, surround char, count), ...])
|
||||
let tests = &[
|
||||
(
|
||||
"simple (single) surround pairs",
|
||||
|
@@ -21,8 +21,7 @@ pub enum Assoc {
|
||||
After,
|
||||
}
|
||||
|
||||
// ChangeSpec = Change | ChangeSet | Vec<Change>
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
pub struct ChangeSet {
|
||||
pub(crate) changes: Vec<Operation>,
|
||||
/// The required document length. Will refuse to apply changes unless it matches.
|
||||
@@ -30,16 +29,6 @@ pub struct ChangeSet {
|
||||
len_after: usize,
|
||||
}
|
||||
|
||||
impl Default for ChangeSet {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
changes: Vec::new(),
|
||||
len: 0,
|
||||
len_after: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ChangeSet {
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
Self {
|
||||
@@ -60,7 +49,6 @@ impl ChangeSet {
|
||||
}
|
||||
|
||||
// TODO: from iter
|
||||
//
|
||||
|
||||
#[doc(hidden)] // used by lsp to convert to LSP changes
|
||||
pub fn changes(&self) -> &[Operation] {
|
||||
@@ -95,7 +83,7 @@ impl ChangeSet {
|
||||
|
||||
let new_last = match self.changes.as_mut_slice() {
|
||||
[.., Insert(prev)] | [.., Insert(prev), Delete(_)] => {
|
||||
prev.push_tendril(&fragment);
|
||||
prev.push_str(&fragment);
|
||||
return;
|
||||
}
|
||||
[.., last @ Delete(_)] => std::mem::replace(last, Insert(fragment)),
|
||||
@@ -199,7 +187,7 @@ impl ChangeSet {
|
||||
// TODO: cover this with a test
|
||||
// figure out the byte index of the truncated string end
|
||||
let (pos, _) = s.char_indices().nth(j).unwrap();
|
||||
s.pop_front(pos as u32);
|
||||
s.replace_range(0..pos, "");
|
||||
head_a = Some(Insert(s));
|
||||
head_b = changes_b.next();
|
||||
}
|
||||
@@ -221,9 +209,11 @@ impl ChangeSet {
|
||||
Ordering::Greater => {
|
||||
// figure out the byte index of the truncated string end
|
||||
let (pos, _) = s.char_indices().nth(j).unwrap();
|
||||
let pos = pos as u32;
|
||||
changes.insert(s.subtendril(0, pos));
|
||||
head_a = Some(Insert(s.subtendril(pos, s.len() as u32 - pos)));
|
||||
let mut before = s;
|
||||
let after = before.split_off(pos);
|
||||
|
||||
changes.insert(before);
|
||||
head_a = Some(Insert(after));
|
||||
head_b = changes_b.next();
|
||||
}
|
||||
}
|
||||
@@ -287,7 +277,7 @@ impl ChangeSet {
|
||||
}
|
||||
Delete(n) => {
|
||||
let text = Cow::from(original_doc.slice(pos..pos + *n));
|
||||
changes.insert(Tendril::from_slice(&text));
|
||||
changes.insert(Tendril::from(text.as_ref()));
|
||||
pos += n;
|
||||
}
|
||||
Insert(s) => {
|
||||
@@ -330,7 +320,7 @@ impl ChangeSet {
|
||||
/// `true` when the set is empty.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.changes.is_empty()
|
||||
self.changes.is_empty() || self.changes == [Operation::Retain(self.len)]
|
||||
}
|
||||
|
||||
/// Map a position through the changes.
|
||||
@@ -419,12 +409,10 @@ impl ChangeSet {
|
||||
|
||||
/// Transaction represents a single undoable unit of changes. Several changes can be grouped into
|
||||
/// a single transaction.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
pub struct Transaction {
|
||||
changes: ChangeSet,
|
||||
selection: Option<Selection>,
|
||||
// effects, annotations
|
||||
// scroll_into_view
|
||||
}
|
||||
|
||||
impl Transaction {
|
||||
@@ -448,14 +436,12 @@ impl Transaction {
|
||||
|
||||
/// Returns true if applied successfully.
|
||||
pub fn apply(&self, doc: &mut Rope) -> bool {
|
||||
if !self.changes.is_empty() {
|
||||
// apply changes to the document
|
||||
if !self.changes.apply(doc) {
|
||||
return false;
|
||||
}
|
||||
if self.changes.is_empty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
true
|
||||
// apply changes to the document
|
||||
self.changes.apply(doc)
|
||||
}
|
||||
|
||||
/// Generate a transaction that reverts this one.
|
||||
@@ -483,7 +469,7 @@ impl Transaction {
|
||||
/// Generate a transaction from a set of changes.
|
||||
pub fn change<I>(doc: &Rope, changes: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = Change> + Iterator,
|
||||
I: Iterator<Item = Change>,
|
||||
{
|
||||
let len = doc.len_chars();
|
||||
|
||||
@@ -491,12 +477,11 @@ impl Transaction {
|
||||
let size = upper.unwrap_or(lower);
|
||||
let mut changeset = ChangeSet::with_capacity(2 * size + 1); // rough estimate
|
||||
|
||||
// TODO: verify ranges are ordered and not overlapping or change will panic.
|
||||
|
||||
// TODO: test for (pos, pos, None) to factor out as nothing
|
||||
|
||||
let mut last = 0;
|
||||
for (from, to, tendril) in changes {
|
||||
// Verify ranges are ordered and not overlapping
|
||||
debug_assert!(last <= from);
|
||||
|
||||
// Retain from last "to" to current "from"
|
||||
changeset.retain(from - last);
|
||||
let span = to - from;
|
||||
@@ -702,7 +687,7 @@ mod test {
|
||||
let mut doc = Rope::from("hello world!\ntest 123");
|
||||
let transaction = Transaction::change(
|
||||
&doc,
|
||||
// (1, 1, None) is a useless 0-width delete
|
||||
// (1, 1, None) is a useless 0-width delete that gets factored out
|
||||
vec![(1, 1, None), (6, 11, Some("void".into())), (12, 17, None)].into_iter(),
|
||||
);
|
||||
transaction.apply(&mut doc);
|
||||
@@ -720,19 +705,19 @@ mod test {
|
||||
#[test]
|
||||
fn optimized_composition() {
|
||||
let mut state = State::new("".into());
|
||||
let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('h'));
|
||||
let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from("h"));
|
||||
t1.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t1.changes());
|
||||
let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('e'));
|
||||
let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from("e"));
|
||||
t2.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t2.changes());
|
||||
let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l'));
|
||||
let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l"));
|
||||
t3.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t3.changes());
|
||||
let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l'));
|
||||
let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l"));
|
||||
t4.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t4.changes());
|
||||
let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('o'));
|
||||
let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from("o"));
|
||||
t5.apply(&mut state.doc);
|
||||
state.selection = state.selection.clone().map(t5.changes());
|
||||
|
||||
@@ -771,7 +756,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn combine_with_utf8() {
|
||||
const TEST_CASE: &'static str = "Hello, これはヘリックスエディターです!";
|
||||
const TEST_CASE: &str = "Hello, これはヘリックスエディターです!";
|
||||
|
||||
let empty = Rope::from("");
|
||||
let a = ChangeSet::new(&empty);
|
||||
|
25
helix-dap/Cargo.toml
Normal file
25
helix-dap/Cargo.toml
Normal file
@@ -0,0 +1,25 @@
|
||||
[package]
|
||||
name = "helix-dap"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2018"
|
||||
license = "MPL-2.0"
|
||||
description = "DAP client implementation for Helix project"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
anyhow = "1.0"
|
||||
log = "0.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
|
||||
which = "4.2"
|
||||
|
||||
[dev-dependencies]
|
||||
fern = "0.6"
|
480
helix-dap/src/client.rs
Normal file
480
helix-dap/src/client.rs
Normal file
@@ -0,0 +1,480 @@
|
||||
use crate::{
|
||||
transport::{Payload, Request, Response, Transport},
|
||||
types::*,
|
||||
Error, Result, ThreadId,
|
||||
};
|
||||
use helix_core::syntax::DebuggerQuirks;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
use anyhow::anyhow;
|
||||
pub use log::{error, info};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
future::Future,
|
||||
net::{IpAddr, Ipv4Addr, SocketAddr},
|
||||
path::PathBuf,
|
||||
process::Stdio,
|
||||
sync::atomic::{AtomicU64, Ordering},
|
||||
};
|
||||
use tokio::{
|
||||
io::{AsyncBufRead, AsyncWrite, BufReader, BufWriter},
|
||||
net::TcpStream,
|
||||
process::{Child, Command},
|
||||
sync::mpsc::{channel, unbounded_channel, UnboundedReceiver, UnboundedSender},
|
||||
time,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Client {
|
||||
id: usize,
|
||||
_process: Option<Child>,
|
||||
server_tx: UnboundedSender<Payload>,
|
||||
request_counter: AtomicU64,
|
||||
pub caps: Option<DebuggerCapabilities>,
|
||||
// thread_id -> frames
|
||||
pub stack_frames: HashMap<ThreadId, Vec<StackFrame>>,
|
||||
pub thread_states: HashMap<ThreadId, String>,
|
||||
pub thread_id: Option<ThreadId>,
|
||||
/// Currently active frame for the current thread.
|
||||
pub active_frame: Option<usize>,
|
||||
pub quirks: DebuggerQuirks,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
// Spawn a process and communicate with it by either TCP or stdio
|
||||
pub async fn process(
|
||||
transport: &str,
|
||||
command: &str,
|
||||
args: Vec<&str>,
|
||||
port_arg: Option<&str>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
if command.is_empty() {
|
||||
return Result::Err(Error::Other(anyhow!("Command not provided")));
|
||||
}
|
||||
if transport == "tcp" && port_arg.is_some() {
|
||||
Self::tcp_process(command, args, port_arg.unwrap(), id).await
|
||||
} else if transport == "stdio" {
|
||||
Self::stdio(command, args, id)
|
||||
} else {
|
||||
Result::Err(Error::Other(anyhow!("Incorrect transport {}", transport)))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn streams(
|
||||
rx: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
tx: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
err: Option<Box<dyn AsyncBufRead + Unpin + Send>>,
|
||||
id: usize,
|
||||
process: Option<Child>,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
let (server_rx, server_tx) = Transport::start(rx, tx, err, id);
|
||||
let (client_rx, client_tx) = unbounded_channel();
|
||||
|
||||
let client = Self {
|
||||
id,
|
||||
_process: process,
|
||||
server_tx,
|
||||
request_counter: AtomicU64::new(0),
|
||||
caps: None,
|
||||
//
|
||||
stack_frames: HashMap::new(),
|
||||
thread_states: HashMap::new(),
|
||||
thread_id: None,
|
||||
active_frame: None,
|
||||
quirks: DebuggerQuirks::default(),
|
||||
};
|
||||
|
||||
tokio::spawn(Self::recv(server_rx, client_rx));
|
||||
|
||||
Ok((client, client_tx))
|
||||
}
|
||||
|
||||
pub async fn tcp(
|
||||
addr: std::net::SocketAddr,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
let stream = TcpStream::connect(addr).await?;
|
||||
let (rx, tx) = stream.into_split();
|
||||
Self::streams(Box::new(BufReader::new(rx)), Box::new(tx), None, id, None)
|
||||
}
|
||||
|
||||
pub fn stdio(
|
||||
cmd: &str,
|
||||
args: Vec<&str>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
// Resolve path to the binary
|
||||
let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.args(args)
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
// make sure the process is reaped on drop
|
||||
.kill_on_drop(true)
|
||||
.spawn();
|
||||
|
||||
let mut process = process?;
|
||||
|
||||
// TODO: do we need bufreader/writer here? or do we use async wrappers on unblock?
|
||||
let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin"));
|
||||
let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout"));
|
||||
let errors = process.stderr.take().map(BufReader::new);
|
||||
|
||||
Self::streams(
|
||||
Box::new(BufReader::new(reader)),
|
||||
Box::new(writer),
|
||||
// errors.map(|errors| Box::new(BufReader::new(errors))),
|
||||
match errors {
|
||||
Some(errors) => Some(Box::new(BufReader::new(errors))),
|
||||
None => None,
|
||||
},
|
||||
id,
|
||||
Some(process),
|
||||
)
|
||||
}
|
||||
|
||||
async fn get_port() -> Option<u16> {
|
||||
Some(
|
||||
tokio::net::TcpListener::bind(SocketAddr::new(
|
||||
IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
|
||||
0,
|
||||
))
|
||||
.await
|
||||
.ok()?
|
||||
.local_addr()
|
||||
.ok()?
|
||||
.port(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn tcp_process(
|
||||
cmd: &str,
|
||||
args: Vec<&str>,
|
||||
port_format: &str,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<Payload>)> {
|
||||
let port = Self::get_port().await.unwrap();
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.args(args)
|
||||
.args(port_format.replace("{}", &port.to_string()).split(' '))
|
||||
// silence messages
|
||||
.stdin(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
// Do not kill debug adapter when leaving, it should exit automatically
|
||||
.spawn()?;
|
||||
|
||||
// Wait for adapter to become ready for connection
|
||||
time::sleep(time::Duration::from_millis(500)).await;
|
||||
|
||||
let stream = TcpStream::connect(SocketAddr::new(
|
||||
IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
|
||||
port,
|
||||
))
|
||||
.await?;
|
||||
|
||||
let (rx, tx) = stream.into_split();
|
||||
Self::streams(
|
||||
Box::new(BufReader::new(rx)),
|
||||
Box::new(tx),
|
||||
None,
|
||||
id,
|
||||
Some(process),
|
||||
)
|
||||
}
|
||||
|
||||
async fn recv(mut server_rx: UnboundedReceiver<Payload>, client_tx: UnboundedSender<Payload>) {
|
||||
while let Some(msg) = server_rx.recv().await {
|
||||
match msg {
|
||||
Payload::Event(ev) => {
|
||||
client_tx.send(Payload::Event(ev)).expect("Failed to send");
|
||||
}
|
||||
Payload::Response(_) => unreachable!(),
|
||||
Payload::Request(req) => {
|
||||
client_tx
|
||||
.send(Payload::Request(req))
|
||||
.expect("Failed to send");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> usize {
|
||||
self.id
|
||||
}
|
||||
|
||||
fn next_request_id(&self) -> u64 {
|
||||
self.request_counter.fetch_add(1, Ordering::Relaxed)
|
||||
}
|
||||
|
||||
// Internal, called by specific DAP commands when resuming
|
||||
pub fn resume_application(&mut self) {
|
||||
if let Some(thread_id) = self.thread_id {
|
||||
self.thread_states.insert(thread_id, "running".to_string());
|
||||
self.stack_frames.remove(&thread_id);
|
||||
}
|
||||
self.active_frame = None;
|
||||
self.thread_id = None;
|
||||
}
|
||||
|
||||
/// Execute a RPC request on the debugger.
|
||||
pub fn call<R: crate::types::Request>(
|
||||
&self,
|
||||
arguments: R::Arguments,
|
||||
) -> impl Future<Output = Result<Value>>
|
||||
where
|
||||
R::Arguments: serde::Serialize,
|
||||
{
|
||||
let server_tx = self.server_tx.clone();
|
||||
let id = self.next_request_id();
|
||||
|
||||
async move {
|
||||
use std::time::Duration;
|
||||
use tokio::time::timeout;
|
||||
|
||||
let arguments = Some(serde_json::to_value(arguments)?);
|
||||
|
||||
let (callback_tx, mut callback_rx) = channel(1);
|
||||
|
||||
let req = Request {
|
||||
back_ch: Some(callback_tx),
|
||||
seq: id,
|
||||
command: R::COMMAND.to_string(),
|
||||
arguments,
|
||||
};
|
||||
|
||||
server_tx
|
||||
.send(Payload::Request(req))
|
||||
.map_err(|e| Error::Other(e.into()))?;
|
||||
|
||||
// TODO: specifiable timeout, delay other calls until initialize success
|
||||
timeout(Duration::from_secs(20), callback_rx.recv())
|
||||
.await
|
||||
.map_err(|_| Error::Timeout)? // return Timeout
|
||||
.ok_or(Error::StreamClosed)?
|
||||
.map(|response| response.body.unwrap_or_default())
|
||||
// TODO: check response.success
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn request<R: crate::types::Request>(&self, params: R::Arguments) -> Result<R::Result>
|
||||
where
|
||||
R::Arguments: serde::Serialize,
|
||||
R::Result: core::fmt::Debug, // TODO: temporary
|
||||
{
|
||||
// a future that resolves into the response
|
||||
let json = self.call::<R>(params).await?;
|
||||
let response = serde_json::from_value(json)?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub fn reply(
|
||||
&self,
|
||||
request_seq: u64,
|
||||
command: &str,
|
||||
result: core::result::Result<Value, Error>,
|
||||
) -> impl Future<Output = Result<()>> {
|
||||
let server_tx = self.server_tx.clone();
|
||||
let command = command.to_string();
|
||||
|
||||
async move {
|
||||
let response = match result {
|
||||
Ok(result) => Response {
|
||||
request_seq,
|
||||
command,
|
||||
success: true,
|
||||
message: None,
|
||||
body: Some(result),
|
||||
},
|
||||
Err(error) => Response {
|
||||
request_seq,
|
||||
command,
|
||||
success: false,
|
||||
message: Some(error.to_string()),
|
||||
body: None,
|
||||
},
|
||||
};
|
||||
|
||||
server_tx
|
||||
.send(Payload::Response(response))
|
||||
.map_err(|e| Error::Other(e.into()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn capabilities(&self) -> &DebuggerCapabilities {
|
||||
self.caps.as_ref().expect("debugger not yet initialized!")
|
||||
}
|
||||
|
||||
pub async fn initialize(&mut self, adapter_id: String) -> Result<()> {
|
||||
let args = requests::InitializeArguments {
|
||||
client_id: Some("hx".to_owned()),
|
||||
client_name: Some("helix".to_owned()),
|
||||
adapter_id,
|
||||
locale: Some("en-us".to_owned()),
|
||||
lines_start_at_one: Some(true),
|
||||
columns_start_at_one: Some(true),
|
||||
path_format: Some("path".to_owned()),
|
||||
supports_variable_type: Some(true),
|
||||
supports_variable_paging: Some(false),
|
||||
supports_run_in_terminal_request: Some(true),
|
||||
supports_memory_references: Some(false),
|
||||
supports_progress_reporting: Some(false),
|
||||
supports_invalidated_event: Some(false),
|
||||
};
|
||||
|
||||
let response = self.request::<requests::Initialize>(args).await?;
|
||||
self.caps = Some(response);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn disconnect(&self) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Disconnect>(())
|
||||
}
|
||||
|
||||
pub fn launch(&self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Launch>(args)
|
||||
}
|
||||
|
||||
pub fn attach(&self, args: serde_json::Value) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Attach>(args)
|
||||
}
|
||||
|
||||
pub async fn set_breakpoints(
|
||||
&self,
|
||||
file: PathBuf,
|
||||
breakpoints: Vec<SourceBreakpoint>,
|
||||
) -> Result<Option<Vec<Breakpoint>>> {
|
||||
let args = requests::SetBreakpointsArguments {
|
||||
source: Source {
|
||||
path: Some(file),
|
||||
name: None,
|
||||
source_reference: None,
|
||||
presentation_hint: None,
|
||||
origin: None,
|
||||
sources: None,
|
||||
adapter_data: None,
|
||||
checksums: None,
|
||||
},
|
||||
breakpoints: Some(breakpoints),
|
||||
source_modified: Some(false),
|
||||
};
|
||||
|
||||
let response = self.request::<requests::SetBreakpoints>(args).await?;
|
||||
|
||||
Ok(response.breakpoints)
|
||||
}
|
||||
|
||||
pub async fn configuration_done(&self) -> Result<()> {
|
||||
self.request::<requests::ConfigurationDone>(()).await
|
||||
}
|
||||
|
||||
pub fn continue_thread(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::ContinueArguments { thread_id };
|
||||
|
||||
self.call::<requests::Continue>(args)
|
||||
}
|
||||
|
||||
pub async fn stack_trace(
|
||||
&self,
|
||||
thread_id: ThreadId,
|
||||
) -> Result<(Vec<StackFrame>, Option<usize>)> {
|
||||
let args = requests::StackTraceArguments {
|
||||
thread_id,
|
||||
start_frame: None,
|
||||
levels: None,
|
||||
format: None,
|
||||
};
|
||||
|
||||
let response = self.request::<requests::StackTrace>(args).await?;
|
||||
Ok((response.stack_frames, response.total_frames))
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> impl Future<Output = Result<Value>> {
|
||||
self.call::<requests::Threads>(())
|
||||
}
|
||||
|
||||
pub async fn scopes(&self, frame_id: usize) -> Result<Vec<Scope>> {
|
||||
let args = requests::ScopesArguments { frame_id };
|
||||
|
||||
let response = self.request::<requests::Scopes>(args).await?;
|
||||
Ok(response.scopes)
|
||||
}
|
||||
|
||||
pub async fn variables(&self, variables_reference: usize) -> Result<Vec<Variable>> {
|
||||
let args = requests::VariablesArguments {
|
||||
variables_reference,
|
||||
filter: None,
|
||||
start: None,
|
||||
count: None,
|
||||
format: None,
|
||||
};
|
||||
|
||||
let response = self.request::<requests::Variables>(args).await?;
|
||||
Ok(response.variables)
|
||||
}
|
||||
|
||||
pub fn step_in(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::StepInArguments {
|
||||
thread_id,
|
||||
target_id: None,
|
||||
granularity: None,
|
||||
};
|
||||
|
||||
self.call::<requests::StepIn>(args)
|
||||
}
|
||||
|
||||
pub fn step_out(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::StepOutArguments {
|
||||
thread_id,
|
||||
granularity: None,
|
||||
};
|
||||
|
||||
self.call::<requests::StepOut>(args)
|
||||
}
|
||||
|
||||
pub fn next(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::NextArguments {
|
||||
thread_id,
|
||||
granularity: None,
|
||||
};
|
||||
|
||||
self.call::<requests::Next>(args)
|
||||
}
|
||||
|
||||
pub fn pause(&self, thread_id: ThreadId) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::PauseArguments { thread_id };
|
||||
|
||||
self.call::<requests::Pause>(args)
|
||||
}
|
||||
|
||||
pub async fn eval(
|
||||
&self,
|
||||
expression: String,
|
||||
frame_id: Option<usize>,
|
||||
) -> Result<requests::EvaluateResponse> {
|
||||
let args = requests::EvaluateArguments {
|
||||
expression,
|
||||
frame_id,
|
||||
context: None,
|
||||
format: None,
|
||||
};
|
||||
|
||||
self.request::<requests::Evaluate>(args).await
|
||||
}
|
||||
|
||||
pub fn set_exception_breakpoints(
|
||||
&self,
|
||||
filters: Vec<String>,
|
||||
) -> impl Future<Output = Result<Value>> {
|
||||
let args = requests::SetExceptionBreakpointsArguments { filters };
|
||||
|
||||
self.call::<requests::SetExceptionBreakpoints>(args)
|
||||
}
|
||||
}
|
24
helix-dap/src/lib.rs
Normal file
24
helix-dap/src/lib.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
mod client;
|
||||
mod transport;
|
||||
mod types;
|
||||
|
||||
pub use client::Client;
|
||||
pub use events::Event;
|
||||
pub use transport::{Payload, Response, Transport};
|
||||
pub use types::*;
|
||||
|
||||
use thiserror::Error;
|
||||
#[derive(Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error("failed to parse: {0}")]
|
||||
Parse(#[from] serde_json::Error),
|
||||
#[error("IO Error: {0}")]
|
||||
IO(#[from] std::io::Error),
|
||||
#[error("request timed out")]
|
||||
Timeout,
|
||||
#[error("server closed the stream")]
|
||||
StreamClosed,
|
||||
#[error(transparent)]
|
||||
Other(#[from] anyhow::Error),
|
||||
}
|
||||
pub type Result<T> = core::result::Result<T, Error>;
|
280
helix-dap/src/transport.rs
Normal file
280
helix-dap/src/transport.rs
Normal file
@@ -0,0 +1,280 @@
|
||||
use crate::{Error, Event, Result};
|
||||
use anyhow::Context;
|
||||
use log::{error, info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::{
|
||||
io::{AsyncBufRead, AsyncBufReadExt, AsyncReadExt, AsyncWrite, AsyncWriteExt},
|
||||
sync::{
|
||||
mpsc::{unbounded_channel, Sender, UnboundedReceiver, UnboundedSender},
|
||||
Mutex,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct Request {
|
||||
#[serde(skip)]
|
||||
pub back_ch: Option<Sender<Result<Response>>>,
|
||||
pub seq: u64,
|
||||
pub command: String,
|
||||
pub arguments: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
pub struct Response {
|
||||
// seq is omitted as unused and is not sent by some implementations
|
||||
pub request_seq: u64,
|
||||
pub success: bool,
|
||||
pub command: String,
|
||||
pub message: Option<String>,
|
||||
pub body: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[serde(tag = "type", rename_all = "camelCase")]
|
||||
pub enum Payload {
|
||||
// type = "event"
|
||||
Event(Box<Event>),
|
||||
// type = "response"
|
||||
Response(Response),
|
||||
// type = "request"
|
||||
Request(Request),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Transport {
|
||||
#[allow(unused)]
|
||||
id: usize,
|
||||
pending_requests: Mutex<HashMap<u64, Sender<Result<Response>>>>,
|
||||
}
|
||||
|
||||
impl Transport {
|
||||
pub fn start(
|
||||
server_stdout: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
server_stdin: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
server_stderr: Option<Box<dyn AsyncBufRead + Unpin + Send>>,
|
||||
id: usize,
|
||||
) -> (UnboundedReceiver<Payload>, UnboundedSender<Payload>) {
|
||||
let (client_tx, rx) = unbounded_channel();
|
||||
let (tx, client_rx) = unbounded_channel();
|
||||
|
||||
let transport = Self {
|
||||
id,
|
||||
pending_requests: Mutex::new(HashMap::default()),
|
||||
};
|
||||
|
||||
let transport = Arc::new(transport);
|
||||
|
||||
tokio::spawn(Self::recv(transport.clone(), server_stdout, client_tx));
|
||||
tokio::spawn(Self::send(transport, server_stdin, client_rx));
|
||||
if let Some(stderr) = server_stderr {
|
||||
tokio::spawn(Self::err(stderr));
|
||||
}
|
||||
|
||||
(rx, tx)
|
||||
}
|
||||
|
||||
async fn recv_server_message(
|
||||
reader: &mut Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
buffer: &mut String,
|
||||
) -> Result<Payload> {
|
||||
let mut content_length = None;
|
||||
loop {
|
||||
buffer.truncate(0);
|
||||
if reader.read_line(buffer).await? == 0 {
|
||||
return Err(Error::StreamClosed);
|
||||
};
|
||||
|
||||
if buffer == "\r\n" {
|
||||
// look for an empty CRLF line
|
||||
break;
|
||||
}
|
||||
|
||||
let header = buffer.trim();
|
||||
let parts = header.split_once(": ");
|
||||
|
||||
match parts {
|
||||
Some(("Content-Length", value)) => {
|
||||
content_length = Some(value.parse().context("invalid content length")?);
|
||||
}
|
||||
Some((_, _)) => {}
|
||||
None => {
|
||||
// Workaround: Some non-conformant language servers will output logging and other garbage
|
||||
// into the same stream as JSON-RPC messages. This can also happen from shell scripts that spawn
|
||||
// the server. Skip such lines and log a warning.
|
||||
|
||||
// warn!("Failed to parse header: {:?}", header);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let content_length = content_length.context("missing content length")?;
|
||||
|
||||
//TODO: reuse vector
|
||||
let mut content = vec![0; content_length];
|
||||
reader.read_exact(&mut content).await?;
|
||||
let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?;
|
||||
|
||||
info!("<- DAP {}", msg);
|
||||
|
||||
// try parsing as output (server response) or call (server request)
|
||||
let output: serde_json::Result<Payload> = serde_json::from_str(msg);
|
||||
|
||||
Ok(output?)
|
||||
}
|
||||
|
||||
async fn recv_server_error(
|
||||
err: &mut (impl AsyncBufRead + Unpin + Send),
|
||||
buffer: &mut String,
|
||||
) -> Result<()> {
|
||||
buffer.truncate(0);
|
||||
if err.read_line(buffer).await? == 0 {
|
||||
return Err(Error::StreamClosed);
|
||||
};
|
||||
error!("err <- {}", buffer);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_payload_to_server(
|
||||
&self,
|
||||
server_stdin: &mut Box<dyn AsyncWrite + Unpin + Send>,
|
||||
mut payload: Payload,
|
||||
) -> Result<()> {
|
||||
if let Payload::Request(request) = &mut payload {
|
||||
if let Some(back) = request.back_ch.take() {
|
||||
self.pending_requests.lock().await.insert(request.seq, back);
|
||||
}
|
||||
}
|
||||
let json = serde_json::to_string(&payload)?;
|
||||
self.send_string_to_server(server_stdin, json).await
|
||||
}
|
||||
|
||||
async fn send_string_to_server(
|
||||
&self,
|
||||
server_stdin: &mut Box<dyn AsyncWrite + Unpin + Send>,
|
||||
request: String,
|
||||
) -> Result<()> {
|
||||
info!("-> DAP {}", request);
|
||||
|
||||
// send the headers
|
||||
server_stdin
|
||||
.write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes())
|
||||
.await?;
|
||||
|
||||
// send the body
|
||||
server_stdin.write_all(request.as_bytes()).await?;
|
||||
|
||||
server_stdin.flush().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_response(res: Response) -> Result<Response> {
|
||||
if res.success {
|
||||
info!("<- DAP success in response to {}", res.request_seq);
|
||||
|
||||
Ok(res)
|
||||
} else {
|
||||
error!(
|
||||
"<- DAP error {:?} ({:?}) for command #{} {}",
|
||||
res.message, res.body, res.request_seq, res.command
|
||||
);
|
||||
|
||||
Err(Error::Other(anyhow::format_err!("{:?}", res.body)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn process_server_message(
|
||||
&self,
|
||||
client_tx: &UnboundedSender<Payload>,
|
||||
msg: Payload,
|
||||
) -> Result<()> {
|
||||
match msg {
|
||||
Payload::Response(res) => {
|
||||
let request_seq = res.request_seq;
|
||||
let tx = self.pending_requests.lock().await.remove(&request_seq);
|
||||
|
||||
match tx {
|
||||
Some(tx) => match tx.send(Self::process_response(res)).await {
|
||||
Ok(_) => (),
|
||||
Err(_) => error!(
|
||||
"Tried sending response into a closed channel (id={:?}), original request likely timed out",
|
||||
request_seq
|
||||
),
|
||||
}
|
||||
None => {
|
||||
warn!("Response to nonexistent request #{}", res.request_seq);
|
||||
client_tx.send(Payload::Response(res)).expect("Failed to send");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Payload::Request(Request {
|
||||
ref command,
|
||||
ref seq,
|
||||
..
|
||||
}) => {
|
||||
info!("<- DAP request {} #{}", command, seq);
|
||||
client_tx.send(msg).expect("Failed to send");
|
||||
Ok(())
|
||||
}
|
||||
Payload::Event(ref event) => {
|
||||
info!("<- DAP event {:?}", event);
|
||||
client_tx.send(msg).expect("Failed to send");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn recv(
|
||||
transport: Arc<Self>,
|
||||
mut server_stdout: Box<dyn AsyncBufRead + Unpin + Send>,
|
||||
client_tx: UnboundedSender<Payload>,
|
||||
) {
|
||||
let mut recv_buffer = String::new();
|
||||
loop {
|
||||
match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await {
|
||||
Ok(msg) => {
|
||||
transport
|
||||
.process_server_message(&client_tx, msg)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
Err(err) => {
|
||||
error!("err: <- {:?}", err);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn send(
|
||||
transport: Arc<Self>,
|
||||
mut server_stdin: Box<dyn AsyncWrite + Unpin + Send>,
|
||||
mut client_rx: UnboundedReceiver<Payload>,
|
||||
) {
|
||||
while let Some(payload) = client_rx.recv().await {
|
||||
transport
|
||||
.send_payload_to_server(&mut server_stdin, payload)
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
async fn err(mut server_stderr: Box<dyn AsyncBufRead + Unpin + Send>) {
|
||||
let mut recv_buffer = String::new();
|
||||
loop {
|
||||
match Self::recv_server_error(&mut server_stderr, &mut recv_buffer).await {
|
||||
Ok(_) => {}
|
||||
Err(err) => {
|
||||
error!("err: <- {:?}", err);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
707
helix-dap/src/types.rs
Normal file
707
helix-dap/src/types.rs
Normal file
@@ -0,0 +1,707 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(
|
||||
Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize,
|
||||
)]
|
||||
pub struct ThreadId(isize);
|
||||
|
||||
impl std::fmt::Display for ThreadId {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Request {
|
||||
type Arguments: serde::de::DeserializeOwned + serde::Serialize;
|
||||
type Result: serde::de::DeserializeOwned + serde::Serialize;
|
||||
const COMMAND: &'static str;
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ColumnDescriptor {
|
||||
pub attribute_name: String,
|
||||
pub label: String,
|
||||
pub format: Option<String>,
|
||||
#[serde(rename = "type")]
|
||||
pub ty: Option<String>,
|
||||
pub width: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExceptionBreakpointsFilter {
|
||||
pub filter: String,
|
||||
pub label: String,
|
||||
pub description: Option<String>,
|
||||
pub default: Option<bool>,
|
||||
pub supports_condition: Option<bool>,
|
||||
pub condition_description: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DebuggerCapabilities {
|
||||
pub supports_configuration_done_request: Option<bool>,
|
||||
pub supports_function_breakpoints: Option<bool>,
|
||||
pub supports_conditional_breakpoints: Option<bool>,
|
||||
pub supports_hit_conditional_breakpoints: Option<bool>,
|
||||
pub supports_evaluate_for_hovers: Option<bool>,
|
||||
pub supports_step_back: Option<bool>,
|
||||
pub supports_set_variable: Option<bool>,
|
||||
pub supports_restart_frame: Option<bool>,
|
||||
pub supports_goto_targets_request: Option<bool>,
|
||||
pub supports_step_in_targets_request: Option<bool>,
|
||||
pub supports_completions_request: Option<bool>,
|
||||
pub supports_modules_request: Option<bool>,
|
||||
pub supports_restart_request: Option<bool>,
|
||||
pub supports_exception_options: Option<bool>,
|
||||
pub supports_value_formatting_options: Option<bool>,
|
||||
pub supports_exception_info_request: Option<bool>,
|
||||
pub support_terminate_debuggee: Option<bool>,
|
||||
pub support_suspend_debuggee: Option<bool>,
|
||||
pub supports_delayed_stack_trace_loading: Option<bool>,
|
||||
pub supports_loaded_sources_request: Option<bool>,
|
||||
pub supports_log_points: Option<bool>,
|
||||
pub supports_terminate_threads_request: Option<bool>,
|
||||
pub supports_set_expression: Option<bool>,
|
||||
pub supports_terminate_request: Option<bool>,
|
||||
pub supports_data_breakpoints: Option<bool>,
|
||||
pub supports_read_memory_request: Option<bool>,
|
||||
pub supports_write_memory_request: Option<bool>,
|
||||
pub supports_disassemble_request: Option<bool>,
|
||||
pub supports_cancel_request: Option<bool>,
|
||||
pub supports_breakpoint_locations_request: Option<bool>,
|
||||
pub supports_clipboard_context: Option<bool>,
|
||||
pub supports_stepping_granularity: Option<bool>,
|
||||
pub supports_instruction_breakpoints: Option<bool>,
|
||||
pub supports_exception_filter_options: Option<bool>,
|
||||
pub exception_breakpoint_filters: Option<Vec<ExceptionBreakpointsFilter>>,
|
||||
pub completion_trigger_characters: Option<Vec<String>>,
|
||||
pub additional_module_columns: Option<Vec<ColumnDescriptor>>,
|
||||
pub supported_checksum_algorithms: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Checksum {
|
||||
pub algorithm: String,
|
||||
pub checksum: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Source {
|
||||
pub name: Option<String>,
|
||||
pub path: Option<PathBuf>,
|
||||
pub source_reference: Option<usize>,
|
||||
pub presentation_hint: Option<String>,
|
||||
pub origin: Option<String>,
|
||||
pub sources: Option<Vec<Source>>,
|
||||
pub adapter_data: Option<Value>,
|
||||
pub checksums: Option<Vec<Checksum>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SourceBreakpoint {
|
||||
pub line: usize,
|
||||
pub column: Option<usize>,
|
||||
pub condition: Option<String>,
|
||||
pub hit_condition: Option<String>,
|
||||
pub log_message: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Breakpoint {
|
||||
pub id: Option<usize>,
|
||||
pub verified: bool,
|
||||
pub message: Option<String>,
|
||||
pub source: Option<Source>,
|
||||
pub line: Option<usize>,
|
||||
pub column: Option<usize>,
|
||||
pub end_line: Option<usize>,
|
||||
pub end_column: Option<usize>,
|
||||
pub instruction_reference: Option<String>,
|
||||
pub offset: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StackFrameFormat {
|
||||
pub parameters: Option<bool>,
|
||||
pub parameter_types: Option<bool>,
|
||||
pub parameter_names: Option<bool>,
|
||||
pub parameter_values: Option<bool>,
|
||||
pub line: Option<bool>,
|
||||
pub module: Option<bool>,
|
||||
pub include_all: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StackFrame {
|
||||
pub id: usize,
|
||||
pub name: String,
|
||||
pub source: Option<Source>,
|
||||
pub line: usize,
|
||||
pub column: usize,
|
||||
pub end_line: Option<usize>,
|
||||
pub end_column: Option<usize>,
|
||||
pub can_restart: Option<bool>,
|
||||
pub instruction_pointer_reference: Option<String>,
|
||||
pub module_id: Option<Value>,
|
||||
pub presentation_hint: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Thread {
|
||||
pub id: ThreadId,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Scope {
|
||||
pub name: String,
|
||||
pub presentation_hint: Option<String>,
|
||||
pub variables_reference: usize,
|
||||
pub named_variables: Option<usize>,
|
||||
pub indexed_variables: Option<usize>,
|
||||
pub expensive: bool,
|
||||
pub source: Option<Source>,
|
||||
pub line: Option<usize>,
|
||||
pub column: Option<usize>,
|
||||
pub end_line: Option<usize>,
|
||||
pub end_column: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ValueFormat {
|
||||
pub hex: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct VariablePresentationHint {
|
||||
pub kind: Option<String>,
|
||||
pub attributes: Option<Vec<String>>,
|
||||
pub visibility: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Variable {
|
||||
pub name: String,
|
||||
pub value: String,
|
||||
#[serde(rename = "type")]
|
||||
pub ty: Option<String>,
|
||||
pub presentation_hint: Option<VariablePresentationHint>,
|
||||
pub evaluate_name: Option<String>,
|
||||
pub variables_reference: usize,
|
||||
pub named_variables: Option<usize>,
|
||||
pub indexed_variables: Option<usize>,
|
||||
pub memory_reference: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Module {
|
||||
pub id: String, // TODO: || number
|
||||
pub name: String,
|
||||
pub path: Option<PathBuf>,
|
||||
pub is_optimized: Option<bool>,
|
||||
pub is_user_code: Option<bool>,
|
||||
pub version: Option<String>,
|
||||
pub symbol_status: Option<String>,
|
||||
pub symbol_file_path: Option<String>,
|
||||
pub date_time_stamp: Option<String>,
|
||||
pub address_range: Option<String>,
|
||||
}
|
||||
|
||||
pub mod requests {
|
||||
use super::*;
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeArguments {
|
||||
#[serde(rename = "clientID")]
|
||||
pub client_id: Option<String>,
|
||||
pub client_name: Option<String>,
|
||||
#[serde(rename = "adapterID")]
|
||||
pub adapter_id: String,
|
||||
pub locale: Option<String>,
|
||||
#[serde(rename = "linesStartAt1")]
|
||||
pub lines_start_at_one: Option<bool>,
|
||||
#[serde(rename = "columnsStartAt1")]
|
||||
pub columns_start_at_one: Option<bool>,
|
||||
pub path_format: Option<String>,
|
||||
pub supports_variable_type: Option<bool>,
|
||||
pub supports_variable_paging: Option<bool>,
|
||||
pub supports_run_in_terminal_request: Option<bool>,
|
||||
pub supports_memory_references: Option<bool>,
|
||||
pub supports_progress_reporting: Option<bool>,
|
||||
pub supports_invalidated_event: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Initialize {}
|
||||
|
||||
impl Request for Initialize {
|
||||
type Arguments = InitializeArguments;
|
||||
type Result = DebuggerCapabilities;
|
||||
const COMMAND: &'static str = "initialize";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Launch {}
|
||||
|
||||
impl Request for Launch {
|
||||
type Arguments = Value;
|
||||
type Result = Value;
|
||||
const COMMAND: &'static str = "launch";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Attach {}
|
||||
|
||||
impl Request for Attach {
|
||||
type Arguments = Value;
|
||||
type Result = Value;
|
||||
const COMMAND: &'static str = "attach";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Disconnect {}
|
||||
|
||||
impl Request for Disconnect {
|
||||
type Arguments = ();
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "disconnect";
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ConfigurationDone {}
|
||||
|
||||
impl Request for ConfigurationDone {
|
||||
type Arguments = ();
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "configurationDone";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetBreakpointsArguments {
|
||||
pub source: Source,
|
||||
pub breakpoints: Option<Vec<SourceBreakpoint>>,
|
||||
// lines is deprecated
|
||||
pub source_modified: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetBreakpointsResponse {
|
||||
pub breakpoints: Option<Vec<Breakpoint>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetBreakpoints {}
|
||||
|
||||
impl Request for SetBreakpoints {
|
||||
type Arguments = SetBreakpointsArguments;
|
||||
type Result = SetBreakpointsResponse;
|
||||
const COMMAND: &'static str = "setBreakpoints";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ContinueArguments {
|
||||
pub thread_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ContinueResponse {
|
||||
pub all_threads_continued: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Continue {}
|
||||
|
||||
impl Request for Continue {
|
||||
type Arguments = ContinueArguments;
|
||||
type Result = ContinueResponse;
|
||||
const COMMAND: &'static str = "continue";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StackTraceArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub start_frame: Option<usize>,
|
||||
pub levels: Option<usize>,
|
||||
pub format: Option<StackFrameFormat>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StackTraceResponse {
|
||||
pub total_frames: Option<usize>,
|
||||
pub stack_frames: Vec<StackFrame>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StackTrace {}
|
||||
|
||||
impl Request for StackTrace {
|
||||
type Arguments = StackTraceArguments;
|
||||
type Result = StackTraceResponse;
|
||||
const COMMAND: &'static str = "stackTrace";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ThreadsResponse {
|
||||
pub threads: Vec<Thread>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Threads {}
|
||||
|
||||
impl Request for Threads {
|
||||
type Arguments = ();
|
||||
type Result = ThreadsResponse;
|
||||
const COMMAND: &'static str = "threads";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ScopesArguments {
|
||||
pub frame_id: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ScopesResponse {
|
||||
pub scopes: Vec<Scope>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Scopes {}
|
||||
|
||||
impl Request for Scopes {
|
||||
type Arguments = ScopesArguments;
|
||||
type Result = ScopesResponse;
|
||||
const COMMAND: &'static str = "scopes";
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct VariablesArguments {
|
||||
pub variables_reference: usize,
|
||||
pub filter: Option<String>,
|
||||
pub start: Option<usize>,
|
||||
pub count: Option<usize>,
|
||||
pub format: Option<ValueFormat>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct VariablesResponse {
|
||||
pub variables: Vec<Variable>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Variables {}
|
||||
|
||||
impl Request for Variables {
|
||||
type Arguments = VariablesArguments;
|
||||
type Result = VariablesResponse;
|
||||
const COMMAND: &'static str = "variables";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StepInArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub target_id: Option<usize>,
|
||||
pub granularity: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StepIn {}
|
||||
|
||||
impl Request for StepIn {
|
||||
type Arguments = StepInArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "stepIn";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StepOutArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub granularity: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum StepOut {}
|
||||
|
||||
impl Request for StepOut {
|
||||
type Arguments = StepOutArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "stepOut";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct NextArguments {
|
||||
pub thread_id: ThreadId,
|
||||
pub granularity: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Next {}
|
||||
|
||||
impl Request for Next {
|
||||
type Arguments = NextArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "next";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PauseArguments {
|
||||
pub thread_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Pause {}
|
||||
|
||||
impl Request for Pause {
|
||||
type Arguments = PauseArguments;
|
||||
type Result = ();
|
||||
const COMMAND: &'static str = "pause";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct EvaluateArguments {
|
||||
pub expression: String,
|
||||
pub frame_id: Option<usize>,
|
||||
pub context: Option<String>,
|
||||
pub format: Option<ValueFormat>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct EvaluateResponse {
|
||||
pub result: String,
|
||||
#[serde(rename = "type")]
|
||||
pub ty: Option<String>,
|
||||
pub presentation_hint: Option<VariablePresentationHint>,
|
||||
pub variables_reference: usize,
|
||||
pub named_variables: Option<usize>,
|
||||
pub indexed_variables: Option<usize>,
|
||||
pub memory_reference: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Evaluate {}
|
||||
|
||||
impl Request for Evaluate {
|
||||
type Arguments = EvaluateArguments;
|
||||
type Result = EvaluateResponse;
|
||||
const COMMAND: &'static str = "evaluate";
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetExceptionBreakpointsArguments {
|
||||
pub filters: Vec<String>,
|
||||
// pub filterOptions: Option<Vec<ExceptionFilterOptions>>, // needs capability
|
||||
// pub exceptionOptions: Option<Vec<ExceptionOptions>>, // needs capability
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SetExceptionBreakpointsResponse {
|
||||
pub breakpoints: Option<Vec<Breakpoint>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetExceptionBreakpoints {}
|
||||
|
||||
impl Request for SetExceptionBreakpoints {
|
||||
type Arguments = SetExceptionBreakpointsArguments;
|
||||
type Result = SetExceptionBreakpointsResponse;
|
||||
const COMMAND: &'static str = "setExceptionBreakpoints";
|
||||
}
|
||||
|
||||
// Reverse Requests
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RunInTerminalResponse {
|
||||
pub process_id: Option<u32>,
|
||||
pub shell_process_id: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RunInTerminalArguments {
|
||||
pub kind: Option<String>,
|
||||
pub title: Option<String>,
|
||||
pub cwd: Option<String>,
|
||||
pub args: Vec<String>,
|
||||
pub env: Option<HashMap<String, Option<String>>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum RunInTerminal {}
|
||||
|
||||
impl Request for RunInTerminal {
|
||||
type Arguments = RunInTerminalArguments;
|
||||
type Result = RunInTerminalResponse;
|
||||
const COMMAND: &'static str = "runInTerminal";
|
||||
}
|
||||
}
|
||||
|
||||
// Events
|
||||
|
||||
pub mod events {
|
||||
use super::*;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[serde(tag = "event", content = "body")]
|
||||
// seq is omitted as unused and is not sent by some implementations
|
||||
pub enum Event {
|
||||
Initialized,
|
||||
Stopped(Stopped),
|
||||
Continued(Continued),
|
||||
Exited(Exited),
|
||||
Terminated(Option<Terminated>),
|
||||
Thread(Thread),
|
||||
Output(Output),
|
||||
Breakpoint(Breakpoint),
|
||||
Module(Module),
|
||||
LoadedSource(LoadedSource),
|
||||
Process(Process),
|
||||
Capabilities(Capabilities),
|
||||
// ProgressStart(),
|
||||
// ProgressUpdate(),
|
||||
// ProgressEnd(),
|
||||
// Invalidated(),
|
||||
Memory(Memory),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Stopped {
|
||||
pub reason: String,
|
||||
pub description: Option<String>,
|
||||
pub thread_id: Option<ThreadId>,
|
||||
pub preserve_focus_hint: Option<bool>,
|
||||
pub text: Option<String>,
|
||||
pub all_threads_stopped: Option<bool>,
|
||||
pub hit_breakpoint_ids: Option<Vec<usize>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Continued {
|
||||
pub thread_id: ThreadId,
|
||||
pub all_threads_continued: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Exited {
|
||||
pub exit_code: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Terminated {
|
||||
pub restart: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Thread {
|
||||
pub reason: String,
|
||||
pub thread_id: ThreadId,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Output {
|
||||
pub output: String,
|
||||
pub category: Option<String>,
|
||||
pub group: Option<String>,
|
||||
pub line: Option<usize>,
|
||||
pub column: Option<usize>,
|
||||
pub variables_reference: Option<usize>,
|
||||
pub source: Option<Source>,
|
||||
pub data: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Breakpoint {
|
||||
pub reason: String,
|
||||
pub breakpoint: super::Breakpoint,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Module {
|
||||
pub reason: String,
|
||||
pub module: super::Module,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoadedSource {
|
||||
pub reason: String,
|
||||
pub source: super::Source,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Process {
|
||||
pub name: String,
|
||||
pub system_process_id: Option<usize>,
|
||||
pub is_local_process: Option<bool>,
|
||||
pub start_method: Option<String>, // TODO: use enum
|
||||
pub pointer_size: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Capabilities {
|
||||
pub capabilities: super::DebuggerCapabilities,
|
||||
}
|
||||
|
||||
// #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
// #[serde(rename_all = "camelCase")]
|
||||
// pub struct Invalidated {
|
||||
// pub areas: Vec<InvalidatedArea>,
|
||||
// pub thread_id: Option<ThreadId>,
|
||||
// pub stack_frame_id: Option<usize>,
|
||||
// }
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Memory {
|
||||
pub memory_reference: String,
|
||||
pub offset: usize,
|
||||
pub count: usize,
|
||||
}
|
||||
}
|
@@ -1,21 +1,23 @@
|
||||
[package]
|
||||
name = "helix-syntax"
|
||||
version = "0.5.0"
|
||||
name = "helix-loader"
|
||||
version = "0.6.0"
|
||||
description = "A post-modern text editor."
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
description = "Tree-sitter grammars support"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
include = ["src/**/*", "languages/**/*", "build.rs", "!**/docs/**/*", "!**/test/**/*", "!**/examples/**/*", "!**/build/**/*"]
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
toml = "0.5"
|
||||
etcetera = "0.3"
|
||||
tree-sitter = "0.20"
|
||||
libloading = "0.7"
|
||||
anyhow = "1"
|
||||
once_cell = "1.9"
|
||||
|
||||
[build-dependencies]
|
||||
# cloning/compiling tree-sitter grammars
|
||||
cc = { version = "1" }
|
||||
threadpool = { version = "1.0" }
|
||||
anyhow = "1"
|
6
helix-loader/build.rs
Normal file
6
helix-loader/build.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
fn main() {
|
||||
println!(
|
||||
"cargo:rustc-env=BUILD_TARGET={}",
|
||||
std::env::var("TARGET").unwrap()
|
||||
);
|
||||
}
|
387
helix-loader/src/grammar.rs
Normal file
387
helix-loader/src/grammar.rs
Normal file
@@ -0,0 +1,387 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use libloading::{Library, Symbol};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::mpsc::channel,
|
||||
};
|
||||
use tree_sitter::Language;
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
|
||||
#[cfg(windows)]
|
||||
const DYLIB_EXTENSION: &str = "dll";
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Configuration {
|
||||
#[serde(rename = "use-grammars")]
|
||||
pub grammar_selection: Option<GrammarSelection>,
|
||||
pub grammar: Vec<GrammarConfiguration>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase", untagged)]
|
||||
pub enum GrammarSelection {
|
||||
Only(HashSet<String>),
|
||||
Except(HashSet<String>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct GrammarConfiguration {
|
||||
#[serde(rename = "name")]
|
||||
pub grammar_id: String,
|
||||
pub source: GrammarSource,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase", untagged)]
|
||||
pub enum GrammarSource {
|
||||
Local {
|
||||
path: String,
|
||||
},
|
||||
Git {
|
||||
#[serde(rename = "git")]
|
||||
remote: String,
|
||||
#[serde(rename = "rev")]
|
||||
revision: String,
|
||||
subpath: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
const BUILD_TARGET: &str = env!("BUILD_TARGET");
|
||||
const REMOTE_NAME: &str = "origin";
|
||||
|
||||
pub fn get_language(name: &str) -> Result<Language> {
|
||||
let name = name.to_ascii_lowercase();
|
||||
let mut library_path = crate::runtime_dir().join("grammars").join(&name);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let library = unsafe { Library::new(&library_path) }
|
||||
.with_context(|| format!("Error opening dynamic library {library_path:?}"))?;
|
||||
let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_"));
|
||||
let language = unsafe {
|
||||
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
|
||||
.get(language_fn_name.as_bytes())
|
||||
.with_context(|| format!("Failed to load symbol {language_fn_name}"))?;
|
||||
language_fn()
|
||||
};
|
||||
std::mem::forget(library);
|
||||
Ok(language)
|
||||
}
|
||||
|
||||
pub fn fetch_grammars() -> Result<()> {
|
||||
// We do not need to fetch local grammars.
|
||||
let mut grammars = get_grammar_configs()?;
|
||||
grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. }));
|
||||
|
||||
run_parallel(grammars, fetch_grammar, "fetch")
|
||||
}
|
||||
|
||||
pub fn build_grammars() -> Result<()> {
|
||||
run_parallel(get_grammar_configs()?, build_grammar, "build")
|
||||
}
|
||||
|
||||
// Returns the set of grammar configurations the user requests.
|
||||
// Grammars are configured in the default and user `languages.toml` and are
|
||||
// merged. The `grammar_selection` key of the config is then used to filter
|
||||
// down all grammars into a subset of the user's choosing.
|
||||
fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
|
||||
let config: Configuration = crate::user_lang_config()
|
||||
.context("Could not parse languages.toml")?
|
||||
.try_into()?;
|
||||
|
||||
let grammars = match config.grammar_selection {
|
||||
Some(GrammarSelection::Only(selections)) => config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.filter(|grammar| selections.contains(&grammar.grammar_id))
|
||||
.collect(),
|
||||
Some(GrammarSelection::Except(rejections)) => config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.filter(|grammar| !rejections.contains(&grammar.grammar_id))
|
||||
.collect(),
|
||||
None => config.grammar,
|
||||
};
|
||||
|
||||
Ok(grammars)
|
||||
}
|
||||
|
||||
fn run_parallel<F>(grammars: Vec<GrammarConfiguration>, job: F, action: &'static str) -> Result<()>
|
||||
where
|
||||
F: Fn(GrammarConfiguration) -> Result<()> + std::marker::Send + 'static + Copy,
|
||||
{
|
||||
let pool = threadpool::Builder::new().build();
|
||||
let (tx, rx) = channel();
|
||||
|
||||
for grammar in grammars {
|
||||
let tx = tx.clone();
|
||||
|
||||
pool.execute(move || {
|
||||
tx.send(job(grammar)).unwrap();
|
||||
});
|
||||
}
|
||||
|
||||
drop(tx);
|
||||
|
||||
// TODO: print all failures instead of the first one found.
|
||||
rx.iter()
|
||||
.find(|result| result.is_err())
|
||||
.map(|err| err.with_context(|| format!("Failed to {action} some grammar(s)")))
|
||||
.unwrap_or(Ok(()))
|
||||
}
|
||||
|
||||
fn fetch_grammar(grammar: GrammarConfiguration) -> Result<()> {
|
||||
if let GrammarSource::Git {
|
||||
remote, revision, ..
|
||||
} = grammar.source
|
||||
{
|
||||
let grammar_dir = crate::runtime_dir()
|
||||
.join("grammars/sources")
|
||||
.join(&grammar.grammar_id);
|
||||
|
||||
fs::create_dir_all(&grammar_dir).context(format!(
|
||||
"Could not create grammar directory {:?}",
|
||||
grammar_dir
|
||||
))?;
|
||||
|
||||
// create the grammar dir contains a git directory
|
||||
if !grammar_dir.join(".git").is_dir() {
|
||||
git(&grammar_dir, ["init"])?;
|
||||
}
|
||||
|
||||
// ensure the remote matches the configured remote
|
||||
if get_remote_url(&grammar_dir).map_or(true, |s| s != remote) {
|
||||
set_remote(&grammar_dir, &remote)?;
|
||||
}
|
||||
|
||||
// ensure the revision matches the configured revision
|
||||
if get_revision(&grammar_dir).map_or(true, |s| s != revision) {
|
||||
// Fetch the exact revision from the remote.
|
||||
// Supported by server-side git since v2.5.0 (July 2015),
|
||||
// enabled by default on major git hosts.
|
||||
git(
|
||||
&grammar_dir,
|
||||
["fetch", "--depth", "1", REMOTE_NAME, &revision],
|
||||
)?;
|
||||
git(&grammar_dir, ["checkout", &revision])?;
|
||||
|
||||
println!(
|
||||
"Grammar '{}' checked out at '{}'.",
|
||||
grammar.grammar_id, revision
|
||||
);
|
||||
} else {
|
||||
println!("Grammar '{}' is already up to date.", grammar.grammar_id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Sets the remote for a repository to the given URL, creating the remote if
|
||||
// it does not yet exist.
|
||||
fn set_remote(repository_dir: &Path, remote_url: &str) -> Result<String> {
|
||||
git(
|
||||
repository_dir,
|
||||
["remote", "set-url", REMOTE_NAME, remote_url],
|
||||
)
|
||||
.or_else(|_| git(repository_dir, ["remote", "add", REMOTE_NAME, remote_url]))
|
||||
}
|
||||
|
||||
fn get_remote_url(repository_dir: &Path) -> Option<String> {
|
||||
git(repository_dir, ["remote", "get-url", REMOTE_NAME]).ok()
|
||||
}
|
||||
|
||||
fn get_revision(repository_dir: &Path) -> Option<String> {
|
||||
git(repository_dir, ["rev-parse", "HEAD"]).ok()
|
||||
}
|
||||
|
||||
// A wrapper around 'git' commands which returns stdout in success and a
|
||||
// helpful error message showing the command, stdout, and stderr in error.
|
||||
fn git<I, S>(repository_dir: &Path, args: I) -> Result<String>
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<std::ffi::OsStr>,
|
||||
{
|
||||
let output = Command::new("git")
|
||||
.args(args)
|
||||
.current_dir(repository_dir)
|
||||
.output()?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout)
|
||||
.trim_end()
|
||||
.to_owned())
|
||||
} else {
|
||||
// TODO: figure out how to display the git command using `args`
|
||||
Err(anyhow!(
|
||||
"Git command failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn build_grammar(grammar: GrammarConfiguration) -> Result<()> {
|
||||
let grammar_dir = if let GrammarSource::Local { path } = &grammar.source {
|
||||
PathBuf::from(&path)
|
||||
} else {
|
||||
crate::runtime_dir()
|
||||
.join("grammars/sources")
|
||||
.join(&grammar.grammar_id)
|
||||
};
|
||||
|
||||
let grammar_dir_entries = grammar_dir.read_dir().with_context(|| {
|
||||
format!("Failed to read directory {grammar_dir:?}. Did you use 'hx --grammar fetch'?")
|
||||
})?;
|
||||
|
||||
if grammar_dir_entries.count() == 0 {
|
||||
return Err(anyhow!(
|
||||
"Directory {grammar_dir:?} is empty. Did you use 'hx --grammar fetch'?"
|
||||
));
|
||||
};
|
||||
|
||||
let path = match &grammar.source {
|
||||
GrammarSource::Git {
|
||||
subpath: Some(subpath),
|
||||
..
|
||||
} => grammar_dir.join(subpath),
|
||||
_ => grammar_dir,
|
||||
}
|
||||
.join("src");
|
||||
|
||||
build_tree_sitter_library(&path, grammar)
|
||||
}
|
||||
|
||||
fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) -> Result<()> {
|
||||
let header_path = src_path;
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let mut scanner_path = src_path.join("scanner.c");
|
||||
|
||||
let scanner_path = if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
scanner_path.set_extension("cc");
|
||||
if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
let parser_lib_path = crate::runtime_dir().join("grammars");
|
||||
let mut library_path = parser_lib_path.join(&grammar.grammar_id);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
|
||||
.context("Failed to compare source and binary timestamps")?;
|
||||
|
||||
if !recompile {
|
||||
println!("Grammar '{}' is already built.", grammar.grammar_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("Building grammar '{}'", grammar.grammar_id);
|
||||
|
||||
let mut config = cc::Build::new();
|
||||
config
|
||||
.cpp(true)
|
||||
.opt_level(3)
|
||||
.cargo_metadata(false)
|
||||
.host(BUILD_TARGET)
|
||||
.target(BUILD_TARGET);
|
||||
let compiler = config.get_compiler();
|
||||
let mut command = Command::new(compiler.path());
|
||||
command.current_dir(src_path);
|
||||
for (key, value) in compiler.env() {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
command
|
||||
.args(&["/nologo", "/LD", "/I"])
|
||||
.arg(header_path)
|
||||
.arg("/Od")
|
||||
.arg("/utf-8");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
|
||||
command
|
||||
.arg(parser_path)
|
||||
.arg("/link")
|
||||
.arg(format!("/out:{}", library_path.to_str().unwrap()));
|
||||
} else {
|
||||
command
|
||||
.arg("-shared")
|
||||
.arg("-fPIC")
|
||||
.arg("-fno-exceptions")
|
||||
.arg("-g")
|
||||
.arg("-I")
|
||||
.arg(header_path)
|
||||
.arg("-o")
|
||||
.arg(&library_path)
|
||||
.arg("-O3");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
if scanner_path.extension() == Some("c".as_ref()) {
|
||||
command.arg("-xc").arg("-std=c99").arg(scanner_path);
|
||||
} else {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
}
|
||||
command.arg("-xc").arg(parser_path);
|
||||
if cfg!(all(unix, not(target_os = "macos"))) {
|
||||
command.arg("-Wl,-z,relro,-z,now");
|
||||
}
|
||||
}
|
||||
|
||||
let output = command.output().context("Failed to execute C compiler")?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn needs_recompile(
|
||||
lib_path: &Path,
|
||||
parser_c_path: &Path,
|
||||
scanner_path: &Option<PathBuf>,
|
||||
) -> Result<bool> {
|
||||
if !lib_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
let lib_mtime = mtime(lib_path)?;
|
||||
if mtime(parser_c_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(scanner_path) = scanner_path {
|
||||
if mtime(scanner_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn mtime(path: &Path) -> Result<SystemTime> {
|
||||
Ok(fs::metadata(path)?.modified()?)
|
||||
}
|
||||
|
||||
/// Gives the contents of a file from a language's `runtime/queries/<lang>`
|
||||
/// directory
|
||||
pub fn load_runtime_file(language: &str, filename: &str) -> Result<String, std::io::Error> {
|
||||
let path = crate::RUNTIME_DIR
|
||||
.join("queries")
|
||||
.join(language)
|
||||
.join(filename);
|
||||
std::fs::read_to_string(&path)
|
||||
}
|
161
helix-loader/src/lib.rs
Normal file
161
helix-loader/src/lib.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
pub mod grammar;
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
|
||||
pub static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
|
||||
once_cell::sync::Lazy::new(runtime_dir);
|
||||
|
||||
pub fn runtime_dir() -> std::path::PathBuf {
|
||||
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
|
||||
return dir.into();
|
||||
}
|
||||
|
||||
const RT_DIR: &str = "runtime";
|
||||
let conf_dir = config_dir().join(RT_DIR);
|
||||
if conf_dir.exists() {
|
||||
return conf_dir;
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
|
||||
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
|
||||
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
|
||||
}
|
||||
|
||||
// fallback to location of the executable being run
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub fn config_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.config_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn cache_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.cache_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn config_file() -> std::path::PathBuf {
|
||||
config_dir().join("config.toml")
|
||||
}
|
||||
|
||||
pub fn lang_config_file() -> std::path::PathBuf {
|
||||
config_dir().join("languages.toml")
|
||||
}
|
||||
|
||||
pub fn log_file() -> std::path::PathBuf {
|
||||
cache_dir().join("helix.log")
|
||||
}
|
||||
|
||||
/// Default bultin-in languages.toml.
|
||||
pub fn default_lang_config() -> toml::Value {
|
||||
toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Could not parse bultin-in languages.toml to valid toml")
|
||||
}
|
||||
|
||||
/// User configured languages.toml file, merged with the default config.
|
||||
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
|
||||
let def_lang_conf = default_lang_config();
|
||||
let data = std::fs::read(crate::config_dir().join("languages.toml"));
|
||||
let user_lang_conf = match data {
|
||||
Ok(raw) => {
|
||||
let value = toml::from_slice(&raw)?;
|
||||
merge_toml_values(def_lang_conf, value)
|
||||
}
|
||||
Err(_) => def_lang_conf,
|
||||
};
|
||||
|
||||
Ok(user_lang_conf)
|
||||
}
|
||||
|
||||
// right overrides left
|
||||
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
|
||||
use toml::Value;
|
||||
|
||||
fn get_name(v: &Value) -> Option<&str> {
|
||||
v.get("name").and_then(Value::as_str)
|
||||
}
|
||||
|
||||
match (left, right) {
|
||||
(Value::Array(mut left_items), Value::Array(right_items)) => {
|
||||
left_items.reserve(right_items.len());
|
||||
for rvalue in right_items {
|
||||
let lvalue = get_name(&rvalue)
|
||||
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
|
||||
.map(|lpos| left_items.remove(lpos));
|
||||
let mvalue = match lvalue {
|
||||
Some(lvalue) => merge_toml_values(lvalue, rvalue),
|
||||
None => rvalue,
|
||||
};
|
||||
left_items.push(mvalue);
|
||||
}
|
||||
Value::Array(left_items)
|
||||
}
|
||||
(Value::Table(mut left_map), Value::Table(right_map)) => {
|
||||
for (rname, rvalue) in right_map {
|
||||
match left_map.remove(&rname) {
|
||||
Some(lvalue) => {
|
||||
let merged_value = merge_toml_values(lvalue, rvalue);
|
||||
left_map.insert(rname, merged_value);
|
||||
}
|
||||
None => {
|
||||
left_map.insert(rname, rvalue);
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Table(left_map)
|
||||
}
|
||||
// Catch everything else we didn't handle, and use the right value
|
||||
(_, value) => value,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod merge_toml_tests {
|
||||
use super::merge_toml_values;
|
||||
|
||||
#[test]
|
||||
fn language_tomls() {
|
||||
use toml::Value;
|
||||
|
||||
const USER: &str = "
|
||||
[[language]]
|
||||
name = \"nix\"
|
||||
test = \"bbb\"
|
||||
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
|
||||
";
|
||||
|
||||
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Couldn't parse built-in languages config");
|
||||
let user: Value = toml::from_str(USER).unwrap();
|
||||
|
||||
let merged = merge_toml_values(base, user);
|
||||
let languages = merged.get("language").unwrap().as_array().unwrap();
|
||||
let nix = languages
|
||||
.iter()
|
||||
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
|
||||
.unwrap();
|
||||
let nix_indent = nix.get("indent").unwrap();
|
||||
|
||||
// We changed tab-width and unit in indent so check them if they are the new values
|
||||
assert_eq!(
|
||||
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
|
||||
4
|
||||
);
|
||||
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
|
||||
// We added a new keys, so check them
|
||||
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
|
||||
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
|
||||
// We didn't change comment-token so it should be same
|
||||
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
|
||||
}
|
||||
}
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "helix-lsp"
|
||||
version = "0.5.0"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
@@ -12,16 +12,17 @@ homepage = "https://helix-editor.com"
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.5", path = "../helix-core" }
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
|
||||
anyhow = "1.0"
|
||||
futures-executor = "0.3"
|
||||
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
|
||||
jsonrpc-core = { version = "18.0", default-features = false } # don't pull in all of futures
|
||||
log = "0.4"
|
||||
lsp-types = { version = "0.90", features = ["proposed"] }
|
||||
lsp-types = { version = "0.92", features = ["proposed"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1.12", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
|
||||
tokio-stream = "0.1.7"
|
||||
tokio = { version = "1.17", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio-stream = "0.1.8"
|
||||
which = "4.2"
|
||||
|
@@ -31,6 +31,7 @@ pub struct Client {
|
||||
pub(crate) capabilities: OnceCell<lsp::ServerCapabilities>,
|
||||
offset_encoding: OffsetEncoding,
|
||||
config: Option<Value>,
|
||||
root_markers: Vec<String>,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
@@ -39,8 +40,12 @@ impl Client {
|
||||
cmd: &str,
|
||||
args: &[String],
|
||||
config: Option<Value>,
|
||||
root_markers: Vec<String>,
|
||||
id: usize,
|
||||
) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> {
|
||||
// Resolve path to the binary
|
||||
let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.args(args)
|
||||
.stdin(Stdio::piped())
|
||||
@@ -68,6 +73,7 @@ impl Client {
|
||||
capabilities: OnceCell::new(),
|
||||
offset_encoding: OffsetEncoding::Utf8,
|
||||
config,
|
||||
root_markers,
|
||||
};
|
||||
|
||||
Ok((client, server_rx, initialize_notify))
|
||||
@@ -107,6 +113,10 @@ impl Client {
|
||||
self.offset_encoding
|
||||
}
|
||||
|
||||
pub fn config(&self) -> Option<&Value> {
|
||||
self.config.as_ref()
|
||||
}
|
||||
|
||||
/// Execute a RPC request on the language server.
|
||||
async fn request<R: lsp::request::Request>(&self, params: R::Params) -> Result<R::Result>
|
||||
where
|
||||
@@ -202,7 +212,7 @@ impl Client {
|
||||
Ok(result) => Output::Success(Success {
|
||||
jsonrpc: Some(Version::V2),
|
||||
id,
|
||||
result,
|
||||
result: serde_json::to_value(result)?,
|
||||
}),
|
||||
Err(error) => Output::Failure(Failure {
|
||||
jsonrpc: Some(Version::V2),
|
||||
@@ -225,7 +235,8 @@ impl Client {
|
||||
|
||||
pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> {
|
||||
// TODO: delay any requests that are triggered prior to initialize
|
||||
let root = find_root(None).and_then(|root| lsp::Url::from_file_path(root).ok());
|
||||
let root = find_root(None, &self.root_markers)
|
||||
.and_then(|root| lsp::Url::from_file_path(root).ok());
|
||||
|
||||
if self.config.is_some() {
|
||||
log::info!("Using custom LSP config: {}", self.config.as_ref().unwrap());
|
||||
@@ -239,6 +250,13 @@ impl Client {
|
||||
root_uri: root,
|
||||
initialization_options: self.config.clone(),
|
||||
capabilities: lsp::ClientCapabilities {
|
||||
workspace: Some(lsp::WorkspaceClientCapabilities {
|
||||
configuration: Some(true),
|
||||
did_change_configuration: Some(lsp::DynamicRegistrationClientCapabilities {
|
||||
dynamic_registration: Some(false),
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
text_document: Some(lsp::TextDocumentClientCapabilities {
|
||||
completion: Some(lsp::CompletionClientCapabilities {
|
||||
completion_item: Some(lsp::CompletionItemCapability {
|
||||
@@ -257,6 +275,12 @@ impl Client {
|
||||
content_format: Some(vec![lsp::MarkupKind::Markdown]),
|
||||
..Default::default()
|
||||
}),
|
||||
rename: Some(lsp::RenameClientCapabilities {
|
||||
dynamic_registration: Some(false),
|
||||
prepare_support: Some(false),
|
||||
prepare_support_default_behavior: None,
|
||||
honors_change_annotations: Some(false),
|
||||
}),
|
||||
code_action: Some(lsp::CodeActionClientCapabilities {
|
||||
code_action_literal_support: Some(lsp::CodeActionLiteralSupport {
|
||||
code_action_kind: lsp::CodeActionKindLiteralSupport {
|
||||
@@ -317,6 +341,16 @@ impl Client {
|
||||
self.exit().await
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// Workspace
|
||||
// -------------------------------------------------------------------------------------------
|
||||
|
||||
pub fn did_change_configuration(&self, settings: Value) -> impl Future<Output = Result<()>> {
|
||||
self.notify::<lsp::notification::DidChangeConfiguration>(
|
||||
lsp::DidChangeConfigurationParams { settings },
|
||||
)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// Text document
|
||||
// -------------------------------------------------------------------------------------------
|
||||
@@ -428,7 +462,7 @@ impl Client {
|
||||
|
||||
changes.push(lsp::TextDocumentContentChangeEvent {
|
||||
range: Some(lsp::Range::new(start, end)),
|
||||
text: s.into(),
|
||||
text: s.to_string(),
|
||||
range_length: None,
|
||||
});
|
||||
}
|
||||
@@ -461,7 +495,7 @@ impl Client {
|
||||
};
|
||||
|
||||
let changes = match sync_capabilities {
|
||||
lsp::TextDocumentSyncKind::Full => {
|
||||
lsp::TextDocumentSyncKind::FULL => {
|
||||
vec![lsp::TextDocumentContentChangeEvent {
|
||||
// range = None -> whole document
|
||||
range: None, //Some(Range)
|
||||
@@ -469,10 +503,11 @@ impl Client {
|
||||
text: new_text.to_string(),
|
||||
}]
|
||||
}
|
||||
lsp::TextDocumentSyncKind::Incremental => {
|
||||
lsp::TextDocumentSyncKind::INCREMENTAL => {
|
||||
Self::changeset_to_changes(old_text, new_text, changes, self.offset_encoding)
|
||||
}
|
||||
lsp::TextDocumentSyncKind::None => return None,
|
||||
lsp::TextDocumentSyncKind::NONE => return None,
|
||||
kind => unimplemented!("{:?}", kind),
|
||||
};
|
||||
|
||||
Some(self.notify::<lsp::notification::DidChangeTextDocument>(
|
||||
@@ -549,6 +584,14 @@ impl Client {
|
||||
self.call::<lsp::request::Completion>(params)
|
||||
}
|
||||
|
||||
pub async fn resolve_completion_item(
|
||||
&self,
|
||||
completion_item: lsp::CompletionItem,
|
||||
) -> Result<lsp::CompletionItem> {
|
||||
self.request::<lsp::request::ResolveCompletionItem>(completion_item)
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn text_document_signature_help(
|
||||
&self,
|
||||
text_document: lsp::TextDocumentIdentifier,
|
||||
@@ -772,4 +815,37 @@ impl Client {
|
||||
|
||||
self.call::<lsp::request::CodeActionRequest>(params)
|
||||
}
|
||||
|
||||
pub async fn rename_symbol(
|
||||
&self,
|
||||
text_document: lsp::TextDocumentIdentifier,
|
||||
position: lsp::Position,
|
||||
new_name: String,
|
||||
) -> anyhow::Result<lsp::WorkspaceEdit> {
|
||||
let params = lsp::RenameParams {
|
||||
text_document_position: lsp::TextDocumentPositionParams {
|
||||
text_document,
|
||||
position,
|
||||
},
|
||||
new_name,
|
||||
work_done_progress_params: lsp::WorkDoneProgressParams {
|
||||
work_done_token: None,
|
||||
},
|
||||
};
|
||||
|
||||
let response = self.request::<lsp::request::Rename>(params).await?;
|
||||
Ok(response.unwrap_or_default())
|
||||
}
|
||||
|
||||
pub fn command(&self, command: lsp::Command) -> impl Future<Output = Result<Value>> {
|
||||
let params = lsp::ExecuteCommandParams {
|
||||
command: command.command,
|
||||
arguments: command.arguments.unwrap_or_default(),
|
||||
work_done_progress_params: lsp::WorkDoneProgressParams {
|
||||
work_done_token: None,
|
||||
},
|
||||
};
|
||||
|
||||
self.call::<lsp::request::ExecuteCommand>(params)
|
||||
}
|
||||
}
|
||||
|
@@ -66,39 +66,26 @@ pub mod util {
|
||||
pos: lsp::Position,
|
||||
offset_encoding: OffsetEncoding,
|
||||
) -> Option<usize> {
|
||||
let max_line = doc.lines().count().saturating_sub(1);
|
||||
let pos_line = pos.line as usize;
|
||||
let pos_line = if pos_line > max_line {
|
||||
if pos_line > doc.len_lines() - 1 {
|
||||
return None;
|
||||
} else {
|
||||
pos_line
|
||||
};
|
||||
}
|
||||
|
||||
match offset_encoding {
|
||||
OffsetEncoding::Utf8 => {
|
||||
let max_char = doc
|
||||
.line_to_char(max_line)
|
||||
.checked_add(doc.line(max_line).len_chars())?;
|
||||
let line = doc.line_to_char(pos_line);
|
||||
let pos = line.checked_add(pos.character as usize)?;
|
||||
if pos <= max_char {
|
||||
if pos <= doc.len_chars() {
|
||||
Some(pos)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
OffsetEncoding::Utf16 => {
|
||||
let max_char = doc
|
||||
.line_to_char(max_line)
|
||||
.checked_add(doc.line(max_line).len_chars())?;
|
||||
let max_cu = doc.char_to_utf16_cu(max_char);
|
||||
let line = doc.line_to_char(pos_line);
|
||||
let line_start = doc.char_to_utf16_cu(line);
|
||||
let pos = line_start.checked_add(pos.character as usize)?;
|
||||
if pos <= max_cu {
|
||||
Some(doc.utf16_cu_to_char(pos))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
doc.try_utf16_cu_to_char(pos).ok()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -203,6 +190,8 @@ pub mod util {
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum MethodCall {
|
||||
WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams),
|
||||
ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
|
||||
WorkspaceConfiguration(lsp::ConfigurationParams),
|
||||
}
|
||||
|
||||
impl MethodCall {
|
||||
@@ -215,6 +204,18 @@ impl MethodCall {
|
||||
.expect("Failed to parse WorkDoneCreate params");
|
||||
Self::WorkDoneProgressCreate(params)
|
||||
}
|
||||
lsp::request::ApplyWorkspaceEdit::METHOD => {
|
||||
let params: lsp::ApplyWorkspaceEditParams = params
|
||||
.parse()
|
||||
.expect("Failed to parse ApplyWorkspaceEdit params");
|
||||
Self::ApplyWorkspaceEdit(params)
|
||||
}
|
||||
lsp::request::WorkspaceConfiguration::METHOD => {
|
||||
let params: lsp::ConfigurationParams = params
|
||||
.parse()
|
||||
.expect("Failed to parse WorkspaceConfiguration params");
|
||||
Self::WorkspaceConfiguration(params)
|
||||
}
|
||||
_ => {
|
||||
log::warn!("unhandled lsp request: {}", method);
|
||||
return None;
|
||||
@@ -319,6 +320,7 @@ impl Registry {
|
||||
&config.command,
|
||||
&config.args,
|
||||
language_config.config.clone(),
|
||||
language_config.roots.clone(),
|
||||
id,
|
||||
)?;
|
||||
self.incoming.push(UnboundedReceiverStream::new(incoming));
|
||||
@@ -337,7 +339,10 @@ impl Registry {
|
||||
})
|
||||
.await;
|
||||
|
||||
value.expect("failed to initialize capabilities");
|
||||
if let Err(e) = value {
|
||||
log::error!("failed to initialize language server: {}", e);
|
||||
return;
|
||||
}
|
||||
|
||||
// next up, notify<initialized>
|
||||
_client
|
||||
|
@@ -1,207 +0,0 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use std::fs;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
fn collect_tree_sitter_dirs(ignore: &[String]) -> Result<Vec<String>> {
|
||||
let mut dirs = Vec::new();
|
||||
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("languages");
|
||||
|
||||
for entry in fs::read_dir(path)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if !entry.file_type()?.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let dir = path.file_name().unwrap().to_str().unwrap().to_string();
|
||||
|
||||
// filter ignores
|
||||
if ignore.contains(&dir) {
|
||||
continue;
|
||||
}
|
||||
dirs.push(dir)
|
||||
}
|
||||
|
||||
Ok(dirs)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
|
||||
#[cfg(windows)]
|
||||
const DYLIB_EXTENSION: &str = "dll";
|
||||
|
||||
fn build_library(src_path: &Path, language: &str) -> Result<()> {
|
||||
let header_path = src_path;
|
||||
// let grammar_path = src_path.join("grammar.json");
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let mut scanner_path = src_path.join("scanner.c");
|
||||
|
||||
let scanner_path = if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
scanner_path.set_extension("cc");
|
||||
if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
let parser_lib_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../runtime/grammars");
|
||||
let mut library_path = parser_lib_path.join(language);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
|
||||
.with_context(|| "Failed to compare source and binary timestamps")?;
|
||||
|
||||
if !recompile {
|
||||
return Ok(());
|
||||
}
|
||||
let mut config = cc::Build::new();
|
||||
config.cpp(true).opt_level(2).cargo_metadata(false);
|
||||
let compiler = config.get_compiler();
|
||||
let mut command = Command::new(compiler.path());
|
||||
command.current_dir(src_path);
|
||||
for (key, value) in compiler.env() {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
command
|
||||
.args(&["/nologo", "/LD", "/I"])
|
||||
.arg(header_path)
|
||||
.arg("/Od")
|
||||
.arg("/utf-8");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
|
||||
command
|
||||
.arg(parser_path)
|
||||
.arg("/link")
|
||||
.arg(format!("/out:{}", library_path.to_str().unwrap()));
|
||||
} else {
|
||||
command
|
||||
.arg("-shared")
|
||||
.arg("-fPIC")
|
||||
.arg("-fno-exceptions")
|
||||
.arg("-g")
|
||||
.arg("-I")
|
||||
.arg(header_path)
|
||||
.arg("-o")
|
||||
.arg(&library_path)
|
||||
.arg("-O2");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
if scanner_path.extension() == Some("c".as_ref()) {
|
||||
command.arg("-xc").arg("-std=c99").arg(scanner_path);
|
||||
} else {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
}
|
||||
command.arg("-xc").arg(parser_path);
|
||||
if cfg!(all(unix, not(target_os = "macos"))) {
|
||||
command.arg("-Wl,-z,relro,-z,now");
|
||||
}
|
||||
}
|
||||
|
||||
let output = command
|
||||
.output()
|
||||
.with_context(|| "Failed to execute C compiler")?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
fn needs_recompile(
|
||||
lib_path: &Path,
|
||||
parser_c_path: &Path,
|
||||
scanner_path: &Option<PathBuf>,
|
||||
) -> Result<bool> {
|
||||
if !lib_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
let lib_mtime = mtime(lib_path)?;
|
||||
if mtime(parser_c_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(scanner_path) = scanner_path {
|
||||
if mtime(scanner_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn mtime(path: &Path) -> Result<SystemTime> {
|
||||
Ok(fs::metadata(path)?.modified()?)
|
||||
}
|
||||
|
||||
fn build_dir(dir: &str, language: &str) {
|
||||
println!("Build language {}", language);
|
||||
if PathBuf::from("languages")
|
||||
.join(dir)
|
||||
.read_dir()
|
||||
.unwrap()
|
||||
.next()
|
||||
.is_none()
|
||||
{
|
||||
eprintln!(
|
||||
"The directory {} is empty, you probably need to use 'git submodule update --init --recursive'?",
|
||||
dir
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("languages")
|
||||
.join(dir)
|
||||
.join("src");
|
||||
|
||||
build_library(&path, language).unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let ignore = vec![
|
||||
"tree-sitter-typescript".to_string(),
|
||||
"tree-sitter-haskell".to_string(), // aarch64 failures: https://github.com/tree-sitter/tree-sitter-haskell/issues/34
|
||||
"tree-sitter-ocaml".to_string(),
|
||||
];
|
||||
let dirs = collect_tree_sitter_dirs(&ignore).unwrap();
|
||||
|
||||
let mut n_jobs = 0;
|
||||
let pool = threadpool::Builder::new().build(); // by going through the builder, it'll use num_cpus
|
||||
let (tx, rx) = channel();
|
||||
|
||||
for dir in dirs {
|
||||
let tx = tx.clone();
|
||||
n_jobs += 1;
|
||||
|
||||
pool.execute(move || {
|
||||
let language = &dir.strip_prefix("tree-sitter-").unwrap();
|
||||
build_dir(&dir, language);
|
||||
|
||||
// report progress
|
||||
tx.send(1).unwrap();
|
||||
});
|
||||
}
|
||||
pool.join();
|
||||
// drop(tx);
|
||||
assert_eq!(rx.try_iter().sum::<usize>(), n_jobs);
|
||||
|
||||
build_dir("tree-sitter-typescript/tsx", "tsx");
|
||||
build_dir("tree-sitter-typescript/typescript", "typescript");
|
||||
build_dir("tree-sitter-ocaml/ocaml", "ocaml");
|
||||
build_dir("tree-sitter-ocaml/interface", "ocaml-interface")
|
||||
}
|
Submodule helix-syntax/languages/tree-sitter-agda deleted from ca69cdf485
Submodule helix-syntax/languages/tree-sitter-bash deleted from a8eb5cb57c
Submodule helix-syntax/languages/tree-sitter-c deleted from f05e279aed
Submodule helix-syntax/languages/tree-sitter-c-sharp deleted from 53a65a9081
Submodule helix-syntax/languages/tree-sitter-cmake deleted from f6616f1e41
Submodule helix-syntax/languages/tree-sitter-cpp deleted from c61212414a
Submodule helix-syntax/languages/tree-sitter-css deleted from 94e1023093
Submodule helix-syntax/languages/tree-sitter-elixir deleted from f5d7bda543
Submodule helix-syntax/languages/tree-sitter-go deleted from 2a83dfdd75
Submodule helix-syntax/languages/tree-sitter-haskell deleted from 237f4eb441
Submodule helix-syntax/languages/tree-sitter-html deleted from d93af487cc
Submodule helix-syntax/languages/tree-sitter-java deleted from bd6186c24d
Submodule helix-syntax/languages/tree-sitter-javascript deleted from 4a95461c47
Submodule helix-syntax/languages/tree-sitter-json deleted from 65bceef69c
Submodule helix-syntax/languages/tree-sitter-julia deleted from 12ea597262
Submodule helix-syntax/languages/tree-sitter-latex deleted from 7f720661de
Submodule helix-syntax/languages/tree-sitter-ledger deleted from 0cdeb0e514
Submodule helix-syntax/languages/tree-sitter-lua deleted from 6f5d40190e
Submodule helix-syntax/languages/tree-sitter-nix deleted from 50f38ceab6
Submodule helix-syntax/languages/tree-sitter-ocaml deleted from 23d419ba45
Submodule helix-syntax/languages/tree-sitter-php deleted from 0d63eaf94e
Submodule helix-syntax/languages/tree-sitter-protobuf deleted from 19c211a014
Submodule helix-syntax/languages/tree-sitter-python deleted from d6210ceab1
Submodule helix-syntax/languages/tree-sitter-ruby deleted from dfff673b41
Submodule helix-syntax/languages/tree-sitter-rust deleted from a360da0a29
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user