Mirror of https://github.com/helix-editor/helix.git (synced 2025-10-06 08:23:27 +02:00)

Compare commits: get-lang-c...allowlist (44 commits)

5ed223f947, 83f09ecbff, cb25d13028, 6d724a8f33, 6bfe1ddc53, b606c05246, 299bcce481,
2058b3732c, 7caae13465, cbd8602018, 4168864572, ae8042bb83, e85507ccac, 960cda60ab,
8e592a151f, 13ed4f6c47, 7d7ace551c, 9ed3dc52e0, 52a43bcdfc, f5f08becef, 9c56afeff3,
1bc7aac780, 1f916e65cf, af8e524a7d, c60ba4ba04, 6754acd83f, dcdecaab22, f41727cc9c,
6339a8c95a, 44cb8e5475, eef46b1aed, bd9eef1f90, 0e7f5d604e, fa1d8dfabc, 2d8d16ff5e,
8a00620a71, 6ce57b7924, eca3ccff76, 3011df4f35, 445f7a273a, 2fb7e50b54, 3f88a3f4e6,
a0b02106c3, 054ce3961a
@@ -1,3 +1,17 @@
+# we use tokio_unstable to enable runtime::Handle::id so we can separate
+# globals from multiple parallel tests. If that function ever does get removed
+# its possible to replace (with some additional overhead and effort)
+# Annoyingly build.rustflags doesn't work here because it gets overwritten
+# if people have their own global target.<..> config (for example to enable mold)
+# specifying flags this way is more robust as they get merged
+# This still gets overwritten by RUST_FLAGS though, luckily it shouldn't be necessary
+# to set those most of the time. If downstream does overwrite this its not a huge
+# deal since it will only break tests anyway
+[target."cfg(all())"]
+rustflags = ["--cfg", "tokio_unstable", "-C", "target-feature=-crt-static"]
+
+
[alias]
xtask = "run --package xtask --"
integration-test = "test --features integration --profile integration --workspace --test integration"
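As a side note on the comment above, here is a minimal sketch of what `runtime::Handle::id` enables: keying otherwise-global test state by the current tokio runtime so parallel integration tests (each driving their own runtime) don't collide. This assumes the `--cfg tokio_unstable` flag configured above; the `TEST_GLOBALS` map and counter are illustrative, not Helix code.

```rust
use std::collections::HashMap;
use std::sync::Mutex;
use tokio::runtime::Handle;

// One slot per runtime; each integration test spins up its own runtime.
static TEST_GLOBALS: Mutex<Option<HashMap<String, usize>>> = Mutex::new(None);

fn per_runtime_counter() -> usize {
    // `Handle::id()` is the tokio_unstable API the comment refers to.
    let id = Handle::current().id().to_string();
    let mut guard = TEST_GLOBALS.lock().unwrap();
    let map = guard.get_or_insert_with(HashMap::new);
    let counter = map.entry(id).or_insert(0);
    *counter += 1;
    *counter
}
```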
.github/workflows/build.yml (vendored, 2 lines changed)
@@ -46,7 +46,7 @@ jobs:
shared-key: "build"

- name: Cache test tree-sitter grammar
-  uses: actions/cache@v3
+  uses: actions/cache@v4
with:
path: runtime/grammars
key: ${{ runner.os }}-stable-v${{ env.CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
.github/workflows/cachix.yml (vendored, 4 lines changed)
@@ -14,10 +14,10 @@ jobs:
uses: actions/checkout@v4

- name: Install nix
-  uses: cachix/install-nix-action@v24
+  uses: cachix/install-nix-action@v25

- name: Authenticate with Cachix
-  uses: cachix/cachix-action@v13
+  uses: cachix/cachix-action@v14
with:
name: helix
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
.ignore (2 lines removed)
@@ -1,2 +0,0 @@
-# Things that we don't want ripgrep to search that we do want in git
-# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md#automatic-filtering
234
Cargo.lock
generated
234
Cargo.lock
generated
@@ -62,9 +62,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.78"
|
||||
version = "1.0.79"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca87830a3e3fb156dc96cfbd31cb620265dd053be734723f22b760d6cc3c3051"
|
||||
checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca"
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
@@ -101,9 +101,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.4.1"
|
||||
version = "2.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
|
||||
checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf"
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
@@ -171,14 +171,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.31"
|
||||
version = "0.4.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38"
|
||||
checksum = "41daef31d7a747c5c847246f36de49ced6f7403b4cdabc807a97b5cc184cda7a"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
"num-traits",
|
||||
"windows-targets 0.48.0",
|
||||
"windows-targets 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -285,7 +285,7 @@ version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df"
|
||||
dependencies = [
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"crossterm_winapi",
|
||||
"filedescriptor",
|
||||
"futures-core",
|
||||
@@ -358,9 +358,9 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.8.1"
|
||||
version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
|
||||
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
|
||||
|
||||
[[package]]
|
||||
name = "encoding_rs"
|
||||
@@ -522,9 +522,9 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
|
||||
|
||||
[[package]]
|
||||
name = "gix"
|
||||
version = "0.57.1"
|
||||
version = "0.58.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6dd025382892c7b500a9ce1582cd803f9c2ebfe44aff52e9c7f86feee7ced75e"
|
||||
checksum = "31887c304d9a935f3e5494fb5d6a0106c34e965168ec0db9b457424eedd0c741"
|
||||
dependencies = [
|
||||
"gix-actor",
|
||||
"gix-commitgraph",
|
||||
@@ -558,14 +558,13 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
"unicode-normalization",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gix-actor"
|
||||
version = "0.29.1"
|
||||
version = "0.30.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da27b5ab4ab5c75ff891dccd48409f8cc53c28a79480f1efdd33184b2dc1d958"
|
||||
checksum = "0a7bb9fad6125c81372987c06469601d37e1a2d421511adb69971b9083517a8a"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"btoi",
|
||||
@@ -586,9 +585,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-commitgraph"
|
||||
version = "0.23.1"
|
||||
version = "0.24.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a39c675fd737cb43a2120eddf1aa652c19d76b28d79783a198ac1b398ed9ce6"
|
||||
checksum = "82dbd7fb959862e3df2583331f0ad032ac93533e8a52f1b0694bc517f5d292bc"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-chunk",
|
||||
@@ -600,9 +599,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-config"
|
||||
version = "0.33.1"
|
||||
version = "0.34.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "367304855b369cadcac4ee5fb5a3a20da9378dd7905106141070b79f85241079"
|
||||
checksum = "e62bf2073b6ce3921ffa6d8326f645f30eec5fc4a8e8a4bc0fcb721a2f3f69dc"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-config-value",
|
||||
@@ -621,11 +620,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-config-value"
|
||||
version = "0.14.3"
|
||||
version = "0.14.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "52e0be46f4cf1f8f9e88d0e3eb7b29718aff23889563249f379119bd1ab6910e"
|
||||
checksum = "5b8a1e7bfb37a46ed0b8468db37a6d8a0a61d56bdbe4603ae492cb322e5f3958"
|
||||
dependencies = [
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"bstr",
|
||||
"gix-path",
|
||||
"libc",
|
||||
@@ -646,9 +645,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-diff"
|
||||
version = "0.39.1"
|
||||
version = "0.40.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fd6a0454f8c42d686f17e7f084057c717c082b7dbb8209729e4e8f26749eb93a"
|
||||
checksum = "cbdcb5e49c4b9729dd1c361040ae5c3cd7c497b2260b18c954f62db3a63e98cf"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-hash",
|
||||
@@ -658,12 +657,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-discover"
|
||||
version = "0.28.1"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8d7b2896edc3d899d28a646ccc6df729827a6600e546570b2783466404a42d6"
|
||||
checksum = "b4669218f3ec0cbbf8f16857b32200890f8ca585f36f5817242e4115fe4551af"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"dunce",
|
||||
"gix-fs",
|
||||
"gix-hash",
|
||||
"gix-path",
|
||||
"gix-ref",
|
||||
@@ -673,14 +673,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-features"
|
||||
version = "0.37.1"
|
||||
version = "0.38.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77a80f0fe688d654c2a741751578b11131071026d1934d03c1820d6d767525ce"
|
||||
checksum = "184f7f7d4e45db0e2a362aeaf12c06c5e84817d0ef91d08e8e90170dad9f0b07"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"flate2",
|
||||
"gix-hash",
|
||||
"gix-trace",
|
||||
"gix-utils",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"prodash",
|
||||
@@ -691,20 +692,21 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-fs"
|
||||
version = "0.9.1"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7555c23a005537434bbfcb8939694e18cad42602961d0de617f8477cc2adecdd"
|
||||
checksum = "4436e883d5769f9fb18677b8712b49228357815f9e4104174a6fc2d8461a437b"
|
||||
dependencies = [
|
||||
"gix-features",
|
||||
"gix-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gix-glob"
|
||||
version = "0.15.1"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae6232f18b262770e343dcdd461c0011c9b9ae27f0c805e115012aa2b902c1b8"
|
||||
checksum = "4965a1d06d0ab84a29d4a67697a97352ab14ae1da821084e5afb1fd6d8191ca0"
|
||||
dependencies = [
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"bstr",
|
||||
"gix-features",
|
||||
"gix-path",
|
||||
@@ -733,9 +735,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-lock"
|
||||
version = "12.0.0"
|
||||
version = "13.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6cf112ddee94223c119a8534dad027740dc3aba3365ac5edeef8a7f6660c74db"
|
||||
checksum = "651e46174dc5e7d18b7b809d31937b6de3681b1debd78618c99162cc30fcf3e1"
|
||||
dependencies = [
|
||||
"gix-tempfile",
|
||||
"gix-utils",
|
||||
@@ -755,9 +757,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-object"
|
||||
version = "0.40.1"
|
||||
version = "0.41.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0c89402e8faa41b49fde348665a8f38589e461036475af43b6b70615a6a313a2"
|
||||
checksum = "693ce9d30741506cb082ef2d8b797415b48e032cce0ab23eff894c19a7e4777b"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"btoi",
|
||||
@@ -774,13 +776,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-odb"
|
||||
version = "0.56.1"
|
||||
version = "0.57.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46ae6da873de41c6c2b73570e82c571b69df5154dcd8f46dfafc6687767c33b1"
|
||||
checksum = "8ba2fa9e81f2461b78b4d81a807867667326c84cdab48e0aed7b73a593aa1be4"
|
||||
dependencies = [
|
||||
"arc-swap",
|
||||
"gix-date",
|
||||
"gix-features",
|
||||
"gix-fs",
|
||||
"gix-hash",
|
||||
"gix-object",
|
||||
"gix-pack",
|
||||
@@ -793,9 +796,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-pack"
|
||||
version = "0.46.1"
|
||||
version = "0.47.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "782b4d42790a14072d5c400deda9851f5765f50fe72bca6dece0da1cd6f05a9a"
|
||||
checksum = "8da5f3e78c96b76c4e6fe5e8e06b76221e4a0ee9a255aa935ed1fdf68988dfd8"
|
||||
dependencies = [
|
||||
"clru",
|
||||
"gix-chunk",
|
||||
@@ -813,9 +816,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-path"
|
||||
version = "0.10.3"
|
||||
version = "0.10.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8dd0998ab245f33d40ca2267e58d542fe54185ebd1dc41923346cf28d179fb6"
|
||||
checksum = "14a6282621aed1becc3f83d64099a564b3b9063f22783d9a87ea502a3e9f2e40"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-trace",
|
||||
@@ -837,9 +840,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-ref"
|
||||
version = "0.40.1"
|
||||
version = "0.41.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64d9bd1984638d8f3511a2fcbe84fcedb8a5b5d64df677353620572383f42649"
|
||||
checksum = "5818958994ad7879fa566f5441ebcc48f0926aa027b28948e6fbf6578894dc31"
|
||||
dependencies = [
|
||||
"gix-actor",
|
||||
"gix-date",
|
||||
@@ -850,6 +853,7 @@ dependencies = [
|
||||
"gix-object",
|
||||
"gix-path",
|
||||
"gix-tempfile",
|
||||
"gix-utils",
|
||||
"gix-validate",
|
||||
"memmap2",
|
||||
"thiserror",
|
||||
@@ -858,9 +862,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-refspec"
|
||||
version = "0.21.1"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "be219df5092c1735abb2a53eccdf775e945eea6986ee1b6e7a5896dccc0be704"
|
||||
checksum = "613aa4d93034c5791d13bdc635e530f4ddab1412ddfb4a8215f76213177b61c7"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-hash",
|
||||
@@ -872,9 +876,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-revision"
|
||||
version = "0.25.1"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa78e1df3633bc937d4db15f8dca2abdb1300ca971c0fabcf9fa97e38cf4cd9f"
|
||||
checksum = "288f6549d7666db74dc3f169a9a333694fc28ecd2f5aa7b2c979c89eb556751a"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-date",
|
||||
@@ -888,9 +892,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-revwalk"
|
||||
version = "0.11.1"
|
||||
version = "0.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "702de5fe5c2bbdde80219f3a8b9723eb927466e7ecd187cfd1b45d986408e45f"
|
||||
checksum = "5b9b4d91dfc5c14fee61a28c65113ded720403b65a0f46169c0460f731a5d03c"
|
||||
dependencies = [
|
||||
"gix-commitgraph",
|
||||
"gix-date",
|
||||
@@ -903,21 +907,21 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-sec"
|
||||
version = "0.10.3"
|
||||
version = "0.10.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78f6dce0c6683e2219e8169aac4b1c29e89540a8262fef7056b31d80d969408c"
|
||||
checksum = "f8d9bf462feaf05f2121cba7399dbc6c34d88a9cad58fc1e95027791d6a3c6d2"
|
||||
dependencies = [
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"gix-path",
|
||||
"libc",
|
||||
"windows 0.52.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gix-tempfile"
|
||||
version = "12.0.0"
|
||||
version = "13.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e76a494bd530e1a1309188ff971825a24f159c76c2db0bf71fa5dfb469a2c915"
|
||||
checksum = "2d337955b7af00fb87120d053d87cdfb422a80b9ff7a3aa4057a99c79422dc30"
|
||||
dependencies = [
|
||||
"gix-fs",
|
||||
"libc",
|
||||
@@ -928,15 +932,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-trace"
|
||||
version = "0.1.6"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8e1127ede0475b58f4fe9c0aaa0d9bb0bad2af90bbd93ccd307c8632b863d89"
|
||||
checksum = "02b202d766a7fefc596e2cc6a89cda8ad8ad733aed82da635ac120691112a9b1"
|
||||
|
||||
[[package]]
|
||||
name = "gix-traverse"
|
||||
version = "0.36.1"
|
||||
version = "0.37.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb64213e52e1b726cb04581690c1e98b5910f983b977d5e9f2eb09f1a7fea6d2"
|
||||
checksum = "bfc30c5b5e4e838683b59e1b0574ce6bc1c35916df9709aaab32bb7751daf08b"
|
||||
dependencies = [
|
||||
"gix-commitgraph",
|
||||
"gix-date",
|
||||
@@ -950,9 +954,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-url"
|
||||
version = "0.26.1"
|
||||
version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f0f17cceb7552a231d1fec690bc2740c346554e3be6f5d2c41dfa809594dc44"
|
||||
checksum = "26f1981ecc700f4fd73ae62b9ca2da7c8816c8fd267f0185e3f8c21e967984ac"
|
||||
dependencies = [
|
||||
"bstr",
|
||||
"gix-features",
|
||||
@@ -964,11 +968,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gix-utils"
|
||||
version = "0.1.8"
|
||||
version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de6225e2de30b6e9bca2d9f1cc4731640fcef0fb3cabddceee366e7e85d3e94f"
|
||||
checksum = "56e839f3d0798b296411263da6bee780a176ef8008a5dfc31287f7eda9266ab8"
|
||||
dependencies = [
|
||||
"fastrand",
|
||||
"unicode-normalization",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1053,13 +1058,14 @@ version = "23.10.0"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"arc-swap",
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"chrono",
|
||||
"dunce",
|
||||
"encoding_rs",
|
||||
"etcetera",
|
||||
"hashbrown 0.14.3",
|
||||
"helix-loader",
|
||||
"helix-stdx",
|
||||
"imara-diff",
|
||||
"indoc",
|
||||
"log",
|
||||
@@ -1089,18 +1095,24 @@ dependencies = [
|
||||
"anyhow",
|
||||
"fern",
|
||||
"helix-core",
|
||||
"helix-stdx",
|
||||
"log",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-event"
|
||||
version = "23.10.0"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"anyhow",
|
||||
"futures-executor",
|
||||
"hashbrown 0.14.3",
|
||||
"log",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"tokio",
|
||||
]
|
||||
@@ -1113,6 +1125,7 @@ dependencies = [
|
||||
"cc",
|
||||
"dunce",
|
||||
"etcetera",
|
||||
"helix-stdx",
|
||||
"libloading",
|
||||
"log",
|
||||
"once_cell",
|
||||
@@ -1121,7 +1134,6 @@ dependencies = [
|
||||
"threadpool",
|
||||
"toml",
|
||||
"tree-sitter",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1135,6 +1147,7 @@ dependencies = [
|
||||
"helix-core",
|
||||
"helix-loader",
|
||||
"helix-parsec",
|
||||
"helix-stdx",
|
||||
"log",
|
||||
"lsp-types",
|
||||
"parking_lot",
|
||||
@@ -1143,13 +1156,23 @@ dependencies = [
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-parsec"
|
||||
version = "23.10.0"
|
||||
|
||||
[[package]]
|
||||
name = "helix-stdx"
|
||||
version = "23.10.0"
|
||||
dependencies = [
|
||||
"dunce",
|
||||
"etcetera",
|
||||
"ropey",
|
||||
"tempfile",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-term"
|
||||
version = "23.10.0"
|
||||
@@ -1168,6 +1191,7 @@ dependencies = [
|
||||
"helix-event",
|
||||
"helix-loader",
|
||||
"helix-lsp",
|
||||
"helix-stdx",
|
||||
"helix-tui",
|
||||
"helix-vcs",
|
||||
"helix-view",
|
||||
@@ -1189,14 +1213,13 @@ dependencies = [
|
||||
"tokio-stream",
|
||||
"toml",
|
||||
"url",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "helix-tui"
|
||||
version = "23.10.0"
|
||||
dependencies = [
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"cassowary",
|
||||
"crossterm",
|
||||
"helix-core",
|
||||
@@ -1230,7 +1253,7 @@ version = "23.10.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"arc-swap",
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"chardetng",
|
||||
"clipboard-win",
|
||||
"crossterm",
|
||||
@@ -1240,6 +1263,7 @@ dependencies = [
|
||||
"helix-event",
|
||||
"helix-loader",
|
||||
"helix-lsp",
|
||||
"helix-stdx",
|
||||
"helix-tui",
|
||||
"helix-vcs",
|
||||
"libc",
|
||||
@@ -1254,7 +1278,6 @@ dependencies = [
|
||||
"tokio-stream",
|
||||
"toml",
|
||||
"url",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1268,11 +1291,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "home"
|
||||
version = "0.5.5"
|
||||
version = "0.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
|
||||
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1286,7 +1309,7 @@ dependencies = [
|
||||
"iana-time-zone-haiku",
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
"windows 0.48.0",
|
||||
"windows",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1412,9 +1435,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.11"
|
||||
version = "0.4.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
|
||||
checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
@@ -1717,9 +1740,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.10.2"
|
||||
version = "1.10.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
|
||||
checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
@@ -1729,9 +1752,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.3"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
|
||||
checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
@@ -1762,11 +1785,11 @@ checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.28"
|
||||
version = "0.38.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316"
|
||||
checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca"
|
||||
dependencies = [
|
||||
"bitflags 2.4.1",
|
||||
"bitflags 2.4.2",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
@@ -1919,9 +1942,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.11.2"
|
||||
version = "1.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
|
||||
checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7"
|
||||
|
||||
[[package]]
|
||||
name = "smartstring"
|
||||
@@ -2028,18 +2051,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.52"
|
||||
version = "1.0.56"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "83a48fd946b02c0a526b2e9481c8e2a17755e47039164a86c4070446e3a4614d"
|
||||
checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.52"
|
||||
version = "1.0.56"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e7fbe9b594d6568a6a1443250a7e67d80b74e1e96f6d1715e1e21cc1888291d3"
|
||||
checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2177,7 +2200,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.20.10"
|
||||
source = "git+https://github.com/tree-sitter/tree-sitter?rev=ab09ae20d640711174b8da8a654f6b3dec93da1a#ab09ae20d640711174b8da8a654f6b3dec93da1a"
|
||||
source = "git+https://github.com/helix-editor/tree-sitter?rev=660481dbf71413eba5a928b0b0ab8da50c1109e0#660481dbf71413eba5a928b0b0ab8da50c1109e0"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"regex",
|
||||
@@ -2333,15 +2356,15 @@ checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "5.0.0"
|
||||
version = "6.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9bf3ea8596f3a0dd5980b46430f2058dfe2c36a27ccfbb1845d6fbfcd9ba6e14"
|
||||
checksum = "7fa5e0c10bf77f44aac573e498d1a82d5fbd5e91f6fc0a99e7be4b38e85e101c"
|
||||
dependencies = [
|
||||
"either",
|
||||
"home",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2384,25 +2407,6 @@ dependencies = [
|
||||
"windows-targets 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be"
|
||||
dependencies = [
|
||||
"windows-core",
|
||||
"windows-targets 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.45.0"
|
||||
|
@@ -11,6 +11,7 @@ members = [
|
||||
"helix-loader",
|
||||
"helix-vcs",
|
||||
"helix-parsec",
|
||||
"helix-stdx",
|
||||
"xtask",
|
||||
]
|
||||
|
||||
@@ -36,7 +37,7 @@ package.helix-tui.opt-level = 2
|
||||
package.helix-term.opt-level = 2
|
||||
|
||||
[workspace.dependencies]
|
||||
tree-sitter = { version = "0.20", git = "https://github.com/tree-sitter/tree-sitter", rev = "ab09ae20d640711174b8da8a654f6b3dec93da1a" }
|
||||
tree-sitter = { version = "0.20", git = "https://github.com/helix-editor/tree-sitter", rev = "660481dbf71413eba5a928b0b0ab8da50c1109e0" }
|
||||
nucleo = "0.2.0"
|
||||
|
||||
[workspace.package]
|
||||
|
@@ -51,7 +51,8 @@ Its settings will be merged with the configuration directory `config.toml` and t
| `auto-completion` | Enable automatic pop up of auto-completion | `true` |
| `auto-format` | Enable automatic formatting on save | `true` |
| `auto-save` | Enable automatic saving on the focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` |
-| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `250` |
+| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. | `250` |
+| `completion-timeout` | Time in milliseconds after typing a word character before completions are shown, set to 5 for instant. | `250` |
| `preview-completion-insert` | Whether to apply completion item instantly when selected | `true` |
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
| `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` |
@@ -21,10 +21,10 @@
| cpon | ✓ | | ✓ | |
| cpp | ✓ | ✓ | ✓ | `clangd` |
| crystal | ✓ | ✓ | | `crystalline` |
-| css | ✓ | | | `vscode-css-language-server` |
+| css | ✓ | | ✓ | `vscode-css-language-server` |
| cue | ✓ | | | `cuelsp` |
| d | ✓ | ✓ | ✓ | `serve-d` |
-| dart | ✓ | | ✓ | `dart` |
+| dart | ✓ | ✓ | ✓ | `dart` |
| dbml | ✓ | | | |
| devicetree | ✓ | | | |
| dhall | ✓ | ✓ | | `dhall-lsp-server` |

@@ -97,7 +97,7 @@
| log | ✓ | | | |
| lpf | ✓ | | | |
| lua | ✓ | ✓ | ✓ | `lua-language-server` |
-| make | ✓ | | | |
+| make | ✓ | | ✓ | |
| markdoc | ✓ | | | `markdoc-ls` |
| markdown | ✓ | | | `marksman` |
| markdown.inline | ✓ | | | |
@@ -315,6 +315,10 @@ The first argument (a capture) must/must not be equal to the second argument
The first argument (a capture) must/must not match the regex given in the
second argument (a string).

+- `#any-of?`/`#not-any-of?`:
+  The first argument (a capture) must/must not be one of the other arguments
+  (strings).

Additionally, we support some custom predicates for indent queries:

- `#not-kind-eq?`:

@@ -366,4 +370,4 @@ Everything up to and including the closing brace gets an indent level of 1.
Then, on the closing brace, we encounter an outdent with a scope of "all", which
means the first line is included, and the indent level is cancelled out on this
line. (Note these scopes are the defaults for `@indent` and `@outdent`—they are
-written explicitly for demonstration.)
+written explicitly for demonstration.)
@@ -54,4 +54,7 @@ The first argument (a capture) must be equal to the second argument
The first argument (a capture) must match the regex given in the
second argument (a string).

+- `#any-of?` (standard):
+  The first argument (a capture) must be one of the other arguments (strings).

[upstream-docs]: http://tree-sitter.github.io/tree-sitter/syntax-highlighting#language-injection
@@ -216,12 +216,12 @@ RUSTFLAGS="-C target-feature=-crt-static"

#### Linux and macOS

-The **runtime** directory is one below the Helix source, so either set a
+The **runtime** directory is one below the Helix source, so either export a
`HELIX_RUNTIME` environment variable to point to that directory and add it to
your `~/.bashrc` or equivalent:

```sh
-HELIX_RUNTIME=~/src/helix/runtime
+export HELIX_RUNTIME=~/src/helix/runtime
```

Or, create a symbolic link:
grammars.nix (12 lines changed)
@@ -28,7 +28,17 @@
owner = builtins.elemAt match 0;
repo = builtins.elemAt match 1;
};
-gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar;
+# If `use-grammars.only` is set, use only those grammars.
+# If `use-grammars.except` is set, use all other grammars.
+# Otherwise use all grammars.
+useGrammar = grammar:
+  if languagesConfig?use-grammars.only then
+    builtins.elem grammar.name languagesConfig.use-grammars.only
+  else if languagesConfig?use-grammars.except then
+    !(builtins.elem grammar.name languagesConfig.use-grammars.except)
+  else true;
+grammarsToUse = builtins.filter useGrammar languagesConfig.grammar;
+gitGrammars = builtins.filter isGitGrammar grammarsToUse;
buildGrammar = grammar: let
gh = toGitHubFetcher grammar.source.git;
sourceGit = builtins.fetchTree {
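For readers who don't speak Nix, the selection rule spelled out in the comments above can be restated as a small Rust predicate; this is purely illustrative (Helix itself implements it in Nix as shown).

```rust
fn use_grammar(name: &str, only: Option<&[String]>, except: Option<&[String]>) -> bool {
    match (only, except) {
        // `use-grammars.only` set: build exactly the listed grammars
        (Some(only), _) => only.iter().any(|g| g.as_str() == name),
        // `use-grammars.except` set: build everything except the listed grammars
        (None, Some(except)) => !except.iter().any(|g| g.as_str() == name),
        // neither set: build all grammars
        (None, None) => true,
    }
}
```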
@@ -16,10 +16,11 @@ unicode-lines = ["ropey/unicode_lines"]
integration = []

[dependencies]
+helix-stdx = { path = "../helix-stdx" }
helix-loader = { path = "../helix-loader" }

ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
-smallvec = "1.11"
+smallvec = "1.13"
smartstring = "1.0.1"
unicode-segmentation = "1.10"
unicode-width = "0.1"
@@ -551,7 +551,7 @@ fn query_indents<'a>(
// The row/column position of the optional anchor in this query
let mut anchor: Option<tree_sitter::Node> = None;
for capture in m.captures {
-    let capture_name = query.capture_names()[capture.index as usize].as_str();
+    let capture_name = query.capture_names()[capture.index as usize];
let capture_type = match capture_name {
"indent" => IndentCaptureType::Indent,
"indent.always" => IndentCaptureType::IndentAlways,
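The dropped `.as_str()` works because the tree-sitter revision pinned in this range hands out string slices for capture names directly (the `names()` change in syntax.rs further down reflects the same shift). A minimal hedged illustration:

```rust
fn capture_name(query: &tree_sitter::Query, index: u32) -> &str {
    // previously: query.capture_names()[index as usize].as_str()
    query.capture_names()[index as usize]
}
```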
@@ -17,7 +17,6 @@ pub mod macros;
pub mod match_brackets;
pub mod movement;
pub mod object;
-pub mod path;
mod position;
pub mod search;
pub mod selection;
@@ -60,7 +60,7 @@ fn find_pair(
let tree = syntax.tree();
let pos = doc.char_to_byte(pos_);

-let mut node = tree.root_node().descendant_for_byte_range(pos, pos)?;
+let mut node = tree.root_node().descendant_for_byte_range(pos, pos + 1)?;

loop {
if node.is_named() {

@@ -118,7 +118,9 @@ fn find_pair(
};
node = parent;
}
-let node = tree.root_node().named_descendant_for_byte_range(pos, pos)?;
+let node = tree
+    .root_node()
+    .named_descendant_for_byte_range(pos, pos + 1)?;
if node.child_count() != 0 {
return None;
}

@@ -141,7 +143,7 @@ fn find_pair(
#[must_use]
pub fn find_matching_bracket_plaintext(doc: RopeSlice, cursor_pos: usize) -> Option<usize> {
// Don't do anything when the cursor is not on top of a bracket.
-let bracket = doc.char(cursor_pos);
+let bracket = doc.get_char(cursor_pos)?;
if !is_valid_bracket(bracket) {
return None;
}

@@ -265,6 +267,12 @@ fn as_char(doc: RopeSlice, node: &Node) -> Option<(usize, char)> {
mod tests {
use super::*;

+#[test]
+fn find_matching_bracket_empty_file() {
+    let actual = find_matching_bracket_plaintext("".into(), 0);
+    assert_eq!(actual, None);
+}
+
#[test]
fn test_find_matching_bracket_current_line_plaintext() {
let assert = |input: &str, pos, expected| {
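The switch to `doc.get_char(cursor_pos)?` is what makes the new `find_matching_bracket_empty_file` test pass. A hedged sketch of the difference, using ropey directly rather than Helix code:

```rust
use ropey::RopeSlice;

fn bracket_under_cursor(doc: RopeSlice, cursor_pos: usize) -> Option<char> {
    // `RopeSlice::char` panics when the index is out of bounds (for example an
    // empty file with the cursor at 0); `get_char` returns `None` instead,
    // which lets the caller bail out early with `?`.
    doc.get_char(cursor_pos)
}
```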
@@ -263,7 +263,7 @@ impl Display for LanguageServerFeature {
GotoDeclaration => "goto-declaration",
GotoDefinition => "goto-definition",
GotoTypeDefinition => "goto-type-definition",
-GotoReference => "goto-type-definition",
+GotoReference => "goto-reference",
GotoImplementation => "goto-implementation",
SignatureHelp => "signature-help",
Hover => "hover",

@@ -1338,6 +1338,23 @@ impl Syntax {
result
}

+pub fn descendant_for_byte_range(&self, start: usize, end: usize) -> Option<Node<'_>> {
+    let mut container_id = self.root;
+
+    for (layer_id, layer) in self.layers.iter() {
+        if layer.depth > self.layers[container_id].depth
+            && layer.contains_byte_range(start, end)
+        {
+            container_id = layer_id;
+        }
+    }
+
+    self.layers[container_id]
+        .tree()
+        .root_node()
+        .descendant_for_byte_range(start, end)
+}
+
// Commenting
// comment_strings_for_pos
// is_commented

@@ -1434,6 +1451,32 @@ impl LanguageLayer {
self.tree = Some(tree);
Ok(())
}

+/// Whether the layer contains the given byte range.
+///
+/// If the layer has multiple ranges (i.e. combined injections), the
+/// given range is considered contained if it is within the start and
+/// end bytes of the first and last ranges **and** if the given range
+/// starts or ends within any of the layer's ranges.
+fn contains_byte_range(&self, start: usize, end: usize) -> bool {
+    let layer_start = self
+        .ranges
+        .first()
+        .expect("ranges should not be empty")
+        .start_byte;
+    let layer_end = self
+        .ranges
+        .last()
+        .expect("ranges should not be empty")
+        .end_byte;
+
+    layer_start <= start
+        && layer_end >= end
+        && self.ranges.iter().any(|range| {
+            let byte_range = range.start_byte..range.end_byte;
+            byte_range.contains(&start) || byte_range.contains(&end)
+        })
+}
}

pub(crate) fn generate_edits(

@@ -1727,7 +1770,7 @@ impl HighlightConfiguration {
let mut local_scope_capture_index = None;
for (i, name) in query.capture_names().iter().enumerate() {
let i = Some(i as u32);
-match name.as_str() {
+match *name {
"local.definition" => local_def_capture_index = i,
"local.definition-value" => local_def_value_capture_index = i,
"local.reference" => local_ref_capture_index = i,

@@ -1738,7 +1781,7 @@ impl HighlightConfiguration {

for (i, name) in injections_query.capture_names().iter().enumerate() {
let i = Some(i as u32);
-match name.as_str() {
+match *name {
"injection.content" => injection_content_capture_index = i,
"injection.language" => injection_language_capture_index = i,
"injection.filename" => injection_filename_capture_index = i,

@@ -1768,7 +1811,7 @@ impl HighlightConfiguration {
}

/// Get a slice containing all of the highlight names used in the configuration.
-pub fn names(&self) -> &[String] {
+pub fn names(&self) -> &[&str] {
self.query.capture_names()
}
@@ -13,6 +13,7 @@ homepage.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
+helix-stdx = { path = "../helix-stdx" }
helix-core = { path = "../helix-core" }

anyhow = "1.0"

@@ -21,7 +22,6 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
-which = "5.0.0"

[dev-dependencies]
fern = "0.6"
@@ -113,7 +113,7 @@ impl Client {
id: usize,
) -> Result<(Self, UnboundedReceiver<Payload>)> {
// Resolve path to the binary
-let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
+let cmd = helix_stdx::env::which(cmd)?;

let process = Command::new(cmd)
.args(args)
@@ -19,6 +19,8 @@ pub enum Error {
#[error("server closed the stream")]
StreamClosed,
+#[error(transparent)]
+ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError),
#[error(transparent)]
Other(#[from] anyhow::Error),
}
pub type Result<T> = core::result::Result<T, Error>;
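A hedged sketch of how downstream code can now distinguish a missing debug-adapter binary from other failures; only the variants visible in the hunk above are named, and the message wording is illustrative rather than Helix code.

```rust
fn user_message(err: &Error) -> String {
    match err {
        // thiserror's `#[from]` conversion means `?` on helix_stdx::env::which
        // lands in this variant automatically (see client.rs above).
        Error::ExecutableNotFound(inner) => format!("debug adapter binary not found: {inner}"),
        Error::StreamClosed => "server closed the stream".to_string(),
        // any variants outside the hunk fall back to their Display impl
        other => other.to_string(),
    }
}
```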
@@ -12,5 +12,18 @@ homepage.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
-tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot"] }
-parking_lot = { version = "0.12", features = ["send_guard"] }
+ahash = "0.8.3"
+hashbrown = "0.14.0"
+tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
+# the event registry is essentially read only but must be an rwlock so we can
+# setup new events on initialization, hardware-lock-elision hugely benefits this case
+# as it essentially makes the lock entirely free as long as there is no writes
+parking_lot = { version = "0.12", features = ["hardware-lock-elision"] }
+once_cell = "1.18"
+
+anyhow = "1"
+log = "0.4"
+futures-executor = "0.3.28"
+
+[features]
+integration_test = []
helix-event/src/cancel.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
use std::future::Future;

pub use oneshot::channel as cancelation;
use tokio::sync::oneshot;

pub type CancelTx = oneshot::Sender<()>;
pub type CancelRx = oneshot::Receiver<()>;

pub async fn cancelable_future<T>(future: impl Future<Output = T>, cancel: CancelRx) -> Option<T> {
    tokio::select! {
        biased;
        _ = cancel => {
            None
        }
        res = future => {
            Some(res)
        }
    }
}
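A hedged usage sketch of `cancelable_future`/`cancelation` (both re-exported from the crate root); the request body and the place the sender is stored are illustrative, not Helix code.

```rust
use helix_event::{cancelable_future, cancelation, CancelTx};

async fn completion_request(doc_text: String) -> Option<usize> {
    let (cancel_tx, cancel_rx): (CancelTx, _) = cancelation();
    // Stash the sender somewhere a later keypress can reach it; sending on it
    // (or dropping it) resolves the `biased` select arm above and yields `None`.
    stash_cancel(cancel_tx);
    let request = async move {
        // stand-in for an actual language-server round trip
        doc_text.len()
    };
    cancelable_future(request, cancel_rx).await
}

fn stash_cancel(_cancel: CancelTx) {
    // e.g. kept in editor state so the in-flight request can be aborted
}
```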
helix-event/src/debounce.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
//! Utilities for declaring an async (usually debounced) hook

use std::time::Duration;

use futures_executor::block_on;
use tokio::sync::mpsc::{self, error::TrySendError, Sender};
use tokio::time::Instant;

/// Async hooks provide a convenient framework for implementing (debounced)
/// async event handlers. Most synchronous event hooks will likely need to
/// debounce their events, coordinate multiple different hooks and potentially
/// track some state. `AsyncHooks` facilitate these use cases by running as
/// a background tokio task that waits for events (usually an enum) to be
/// sent through a channel.
pub trait AsyncHook: Sync + Send + 'static + Sized {
    type Event: Sync + Send + 'static;
    /// Called immediately whenever an event is received, this function can
    /// consume the event immediately or debounce it. In case of debouncing,
    /// it can either define a new debounce timeout or continue the current one
    fn handle_event(&mut self, event: Self::Event, timeout: Option<Instant>) -> Option<Instant>;

    /// Called whenever the debounce timeline is reached
    fn finish_debounce(&mut self);

    fn spawn(self) -> mpsc::Sender<Self::Event> {
        // the capacity doesn't matter too much here, unless the cpu is totally overwhelmed
        // the cap will never be reached since we always immediately drain the channel
        // so it should only be reached in case of total CPU overload.
        // However, a bounded channel is much more efficient so it's nice to use here
        let (tx, rx) = mpsc::channel(128);
        tokio::spawn(run(self, rx));
        tx
    }
}

async fn run<Hook: AsyncHook>(mut hook: Hook, mut rx: mpsc::Receiver<Hook::Event>) {
    let mut deadline = None;
    loop {
        let event = match deadline {
            Some(deadline_) => {
                let res = tokio::time::timeout_at(deadline_, rx.recv()).await;
                match res {
                    Ok(event) => event,
                    Err(_) => {
                        hook.finish_debounce();
                        deadline = None;
                        continue;
                    }
                }
            }
            None => rx.recv().await,
        };
        let Some(event) = event else {
            break;
        };
        deadline = hook.handle_event(event, deadline);
    }
}

pub fn send_blocking<T>(tx: &Sender<T>, data: T) {
    // block_on has some overhead and in practice the channel should basically
    // never be full anyway so first try sending without blocking
    if let Err(TrySendError::Full(data)) = tx.try_send(data) {
        // set a timeout so that we just drop a message instead of freezing the editor in the worst case
        let _ = block_on(tx.send_timeout(data, Duration::from_millis(10)));
    }
}
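A hedged sketch of implementing the `AsyncHook` trait above, here as a debounced auto-save handler; the type, event enum, and timings are illustrative, not Helix code.

```rust
use std::time::Duration;

use helix_event::AsyncHook;
use tokio::time::Instant;

struct AutoSaveHook {
    edits_since_save: usize,
}

enum AutoSaveEvent {
    DocumentChanged,
}

impl AsyncHook for AutoSaveHook {
    type Event = AutoSaveEvent;

    fn handle_event(&mut self, _event: Self::Event, _timeout: Option<Instant>) -> Option<Instant> {
        self.edits_since_save += 1;
        // restart the debounce window on every edit
        Some(Instant::now() + Duration::from_millis(300))
    }

    fn finish_debounce(&mut self) {
        // stand-in for actually writing the document to disk
        log::info!("auto-saving after {} edits", self.edits_since_save);
        self.edits_since_save = 0;
    }
}

// From a synchronous hook, events are forwarded without blocking the editor:
// let tx = AutoSaveHook { edits_since_save: 0 }.spawn();
// helix_event::send_blocking(&tx, AutoSaveEvent::DocumentChanged);
```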
91
helix-event/src/hook.rs
Normal file
91
helix-event/src/hook.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
//! rust dynamic dispatch is extremely limited so we have to build our
|
||||
//! own vtable implementation. Otherwise implementing the event system would not be possible.
|
||||
//! A nice bonus of this approach is that we can optimize the vtable a bit more. Normally
|
||||
//! a dyn Trait fat pointer contains two pointers: A pointer to the data itself and a
|
||||
//! pointer to a global (static) vtable entry which itself contains multiple other pointers
|
||||
//! (the various functions of the trait, drop, size and align). That makes dynamic
|
||||
//! dispatch pretty slow (double pointer indirections). However, we only have a single function
|
||||
//! in the hook trait and don't need a drop implementation (event system is global anyway
|
||||
//! and never dropped) so we can just store the entire vtable inline.
|
||||
|
||||
use anyhow::Result;
|
||||
use std::ptr::{self, NonNull};
|
||||
|
||||
use crate::Event;
|
||||
|
||||
/// Opaque handle type that represents an erased type parameter.
|
||||
///
|
||||
/// If extern types were stable, this could be implemented as `extern { pub type Opaque; }` but
|
||||
/// until then we can use this.
|
||||
///
|
||||
/// Care should be taken that we don't use a concrete instance of this. It should only be used
|
||||
/// through a reference, so we can maintain something else's lifetime.
|
||||
struct Opaque(());
|
||||
|
||||
pub(crate) struct ErasedHook {
|
||||
data: NonNull<Opaque>,
|
||||
call: unsafe fn(NonNull<Opaque>, NonNull<Opaque>, NonNull<Opaque>),
|
||||
}
|
||||
|
||||
impl ErasedHook {
|
||||
pub(crate) fn new_dynamic<H: Fn() -> Result<()> + 'static + Send + Sync>(
|
||||
hook: H,
|
||||
) -> ErasedHook {
|
||||
unsafe fn call<F: Fn() -> Result<()> + 'static + Send + Sync>(
|
||||
hook: NonNull<Opaque>,
|
||||
_event: NonNull<Opaque>,
|
||||
result: NonNull<Opaque>,
|
||||
) {
|
||||
let hook: NonNull<F> = hook.cast();
|
||||
let result: NonNull<Result<()>> = result.cast();
|
||||
let hook: &F = hook.as_ref();
|
||||
let res = hook();
|
||||
ptr::write(result.as_ptr(), res)
|
||||
}
|
||||
|
||||
unsafe {
|
||||
ErasedHook {
|
||||
data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque),
|
||||
call: call::<H>,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new<E: Event, F: Fn(&mut E) -> Result<()>>(hook: F) -> ErasedHook {
|
||||
unsafe fn call<E: Event, F: Fn(&mut E) -> Result<()>>(
|
||||
hook: NonNull<Opaque>,
|
||||
event: NonNull<Opaque>,
|
||||
result: NonNull<Opaque>,
|
||||
) {
|
||||
let hook: NonNull<F> = hook.cast();
|
||||
let mut event: NonNull<E> = event.cast();
|
||||
let result: NonNull<Result<()>> = result.cast();
|
||||
let hook: &F = hook.as_ref();
|
||||
let res = hook(event.as_mut());
|
||||
ptr::write(result.as_ptr(), res)
|
||||
}
|
||||
|
||||
unsafe {
|
||||
ErasedHook {
|
||||
data: NonNull::new_unchecked(Box::into_raw(Box::new(hook)) as *mut Opaque),
|
||||
call: call::<E, F>,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) unsafe fn call<E: Event>(&self, event: &mut E) -> Result<()> {
|
||||
let mut res = Ok(());
|
||||
|
||||
unsafe {
|
||||
(self.call)(
|
||||
self.data,
|
||||
NonNull::from(event).cast(),
|
||||
NonNull::from(&mut res).cast(),
|
||||
);
|
||||
}
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl Sync for ErasedHook {}
|
||||
unsafe impl Send for ErasedHook {}
|
@@ -1,8 +1,203 @@
|
||||
//! `helix-event` contains systems that allow (often async) communication between
|
||||
//! different editor components without strongly coupling them. Currently this
|
||||
//! crate only contains some smaller facilities but the intend is to add more
|
||||
//! functionality in the future ( like a generic hook system)
|
||||
//! different editor components without strongly coupling them. Specifically
|
||||
//! it allows defining synchronous hooks that run when certain editor events
|
||||
//! occur.
|
||||
//!
|
||||
//! The core of the event system are hook callbacks and the [`Event`] trait. A
|
||||
//! hook is essentially just a closure `Fn(event: &mut impl Event) -> Result<()>`
|
||||
//! that gets called every time an appropriate event is dispatched. The implementation
|
||||
//! details of the [`Event`] trait are considered private. The [`events`] macro is
|
||||
//! provided which automatically declares event types. Similarly the `register_hook`
|
||||
//! macro should be used to (safely) declare event hooks.
|
||||
//!
|
||||
//! Hooks run synchronously which can be advantageous since they can modify the
|
||||
//! current editor state right away (for example to immediately hide the completion
|
||||
//! popup). However, they can not contain their own state without locking since
|
||||
//! they only receive immutable references. For handler that want to track state, do
|
||||
//! expensive background computations or debouncing an [`AsyncHook`] is preferable.
|
||||
//! Async hooks are based around a channels that receive events specific to
|
||||
//! that `AsyncHook` (usually an enum). These events can be sent by synchronous
|
||||
//! hooks. Due to some limitations around tokio channels the [`send_blocking`]
|
||||
//! function exported in this crate should be used instead of the builtin
|
||||
//! `blocking_send`.
|
||||
//!
|
||||
//! In addition to the core event system, this crate contains some message queues
|
||||
//! that allow transfer of data back to the main event loop from async hooks and
|
||||
//! hooks that may not have access to all application data (for example in helix-view).
|
||||
//! This include the ability to control rendering ([`lock_frame`], [`request_redraw`]) and
|
||||
//! display status messages ([`status`]).
|
||||
//!
|
||||
//! Hooks declared in helix-term can furthermore dispatch synchronous jobs to be run on the
|
||||
//! main loop (including access to the compositor). Ideally that queue will be moved
|
||||
//! to helix-view in the future if we manage to detach the compositor from its rendering backend.
|
||||
|
||||
use anyhow::Result;
|
||||
pub use cancel::{cancelable_future, cancelation, CancelRx, CancelTx};
|
||||
pub use debounce::{send_blocking, AsyncHook};
|
||||
pub use redraw::{lock_frame, redraw_requested, request_redraw, start_frame, RenderLockGuard};
|
||||
pub use registry::Event;
|
||||
|
||||
mod cancel;
|
||||
mod debounce;
|
||||
mod hook;
|
||||
mod redraw;
|
||||
mod registry;
|
||||
#[doc(hidden)]
|
||||
pub mod runtime;
|
||||
pub mod status;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
pub fn register_event<E: Event + 'static>() {
|
||||
registry::with_mut(|registry| registry.register_event::<E>())
|
||||
}
|
||||
|
||||
/// Registers a hook that will be called when an event of type `E` is dispatched.
|
||||
/// This function should usually not be used directly, use the [`register_hook`]
|
||||
/// macro instead.
|
||||
///
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `hook` must be totally generic over all lifetime parameters of `E`. For
|
||||
/// example if `E` was a known type `Foo<'a, 'b>`, then the correct trait bound
|
||||
/// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)`, but there is no way to
|
||||
/// express that kind of constraint for a generic type with the Rust type system
|
||||
/// as of this writing.
|
||||
pub unsafe fn register_hook_raw<E: Event>(
|
||||
hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
|
||||
) {
|
||||
registry::with_mut(|registry| registry.register_hook(hook))
|
||||
}
|
||||
|
||||
/// Register a hook solely by event name
|
||||
pub fn register_dynamic_hook(
|
||||
hook: impl Fn() -> Result<()> + 'static + Send + Sync,
|
||||
id: &str,
|
||||
) -> Result<()> {
|
||||
registry::with_mut(|reg| reg.register_dynamic_hook(hook, id))
|
||||
}
|
||||
|
||||
pub fn dispatch(e: impl Event) {
|
||||
registry::with(|registry| registry.dispatch(e));
|
||||
}
|
||||
|
||||
/// Macro to declare events
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ``` no-compile
|
||||
/// events! {
|
||||
/// FileWrite(&Path)
|
||||
/// ViewScrolled{ view: View, new_pos: ViewOffset }
|
||||
/// DocumentChanged<'a> { old_doc: &'a Rope, doc: &'a mut Document, changes: &'a ChangeSet }
|
||||
/// }
|
||||
///
|
||||
/// fn init() {
|
||||
/// register_event::<FileWrite>();
|
||||
/// register_event::<ViewScrolled>();
|
||||
/// register_event::<DocumentChanged>();
|
||||
/// }
|
||||
///
|
||||
/// fn save(path: &Path, content: &str){
|
||||
/// std::fs::write(path, content);
|
||||
/// dispatch(FileWrite(path));
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! events {
|
||||
($name: ident<$($lt: lifetime),*> { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => {
|
||||
pub struct $name<$($lt),*> { $(pub $data: $data_ty),* }
|
||||
unsafe impl<$($lt),*> $crate::Event for $name<$($lt),*> {
|
||||
const ID: &'static str = stringify!($name);
|
||||
const LIFETIMES: usize = $crate::events!(@sum $(1, $lt),*);
|
||||
type Static = $crate::events!(@replace_lt $name, $('static, $lt),*);
|
||||
}
|
||||
$crate::events!{ $($rem)* }
|
||||
};
|
||||
($name: ident { $($data:ident : $data_ty:ty),* } $($rem:tt)*) => {
|
||||
pub struct $name { $(pub $data: $data_ty),* }
|
||||
unsafe impl $crate::Event for $name {
|
||||
const ID: &'static str = stringify!($name);
|
||||
const LIFETIMES: usize = 0;
|
||||
type Static = Self;
|
||||
}
|
||||
$crate::events!{ $($rem)* }
|
||||
};
|
||||
() => {};
|
||||
(@replace_lt $name: ident, $($lt1: lifetime, $lt2: lifetime),* ) => {$name<$($lt1),*>};
|
||||
(@sum $($val: expr, $lt1: lifetime),* ) => {0 $(+ $val)*};
|
||||
}
|
||||
|
||||
/// Safely register statically typed event hooks
|
||||
#[macro_export]
|
||||
macro_rules! register_hook {
|
||||
// Safety: this is safe because we fully control the type of the event here and
|
||||
// ensure all lifetime arguments are fully generic and the correct number of lifetime arguments
|
||||
// is present
|
||||
(move |$event:ident: &mut $event_ty: ident<$($lt: lifetime),*>| $body: expr) => {
|
||||
let val = move |$event: &mut $event_ty<$($lt),*>| $body;
|
||||
unsafe {
|
||||
// Lifetimes are a bit of a pain. We want to allow events being
|
||||
// non-static. Lifetimes don't actually exist at runtime so its
|
||||
// fine to essentially transmute the lifetimes as long as we can
|
||||
// prove soundness. The hook must therefore accept any combination
|
||||
// of lifetimes. In other words fn(&'_ mut Event<'_, '_>) is ok
|
||||
// but examples like fn(&'_ mut Event<'_, 'static>) or fn<'a>(&'a
|
||||
// mut Event<'a, 'a>) are not. To make this safe we use a macro to
|
||||
// forbid the user from specifying lifetimes manually (all lifetimes
|
||||
// specified are always function generics and passed to the event so
|
||||
// lifetimes can't be used multiple times and using 'static causes a
|
||||
// syntax error).
|
||||
//
|
||||
// There is one soundness hole tough: Type Aliases allow
|
||||
// "accidentally" creating these problems. For example:
|
||||
//
|
||||
// type Event2 = Event<'static>.
|
||||
// type Event2<'a> = Event<'a, a>.
|
||||
//
|
||||
// These cases can be caught by counting the number of lifetimes
|
||||
// parameters at the parameter declaration site and then at the hook
|
||||
// declaration site. By asserting the number of lifetime parameters
|
||||
// are equal we can catch all bad type aliases under one assumption:
|
||||
// There are no unused lifetime parameters. Introducing a static
|
||||
// would reduce the number of arguments of the alias by one in the
|
||||
// above example Event2 has zero lifetime arguments while the original
|
||||
// event has one lifetime argument. Similar logic applies to using
|
||||
// a lifetime argument multiple times. The ASSERT below performs a
|
||||
// a compile time assertion to ensure exactly this property.
|
||||
//
|
||||
// With unused lifetime arguments it is still one way to cause unsound code:
|
||||
//
|
||||
// type Event2<'a, 'b> = Event<'a, 'a>;
|
||||
//
|
||||
// However, this case will always emit a compiler warning/cause CI
|
||||
// failures so a user would have to introduce #[allow(unused)] which
|
||||
// is easily caught in review (and a very theoretical case anyway).
|
||||
// If we want to be pedantic we can simply compile helix with
|
||||
// forbid(unused). All of this is just a safety net to prevent
|
||||
// very theoretical misuse. This won't come up in real code (and is
|
||||
// easily caught in review).
|
||||
#[allow(unused)]
|
||||
const ASSERT: () = {
|
||||
if <$event_ty as $crate::Event>::LIFETIMES != 0 + $crate::events!(@sum $(1, $lt),*){
|
||||
panic!("invalid type alias");
|
||||
}
|
||||
};
|
||||
$crate::register_hook_raw::<$crate::events!(@replace_lt $event_ty, $('static, $lt),*)>(val);
|
||||
}
|
||||
};
|
||||
(move |$event:ident: &mut $event_ty: ident| $body: expr) => {
|
||||
let val = move |$event: &mut $event_ty| $body;
|
||||
unsafe {
|
||||
#[allow(unused)]
|
||||
const ASSERT: () = {
|
||||
if <$event_ty as $crate::Event>::LIFETIMES != 0{
|
||||
panic!("invalid type alias");
|
||||
}
|
||||
};
|
||||
$crate::register_hook_raw::<$event_ty>(val);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
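For orientation, a hedged usage sketch of how these macros fit together (the `DocumentChanged` event and the hook body are made up for illustration; `events!`, `register_event`, `register_hook!` and `dispatch` are the items defined in this crate):

use helix_event::{dispatch, events, register_event, register_hook};

events! {
    // one lifetime parameter, so `<DocumentChanged as Event>::LIFETIMES == 1`
    DocumentChanged<'a> { path: &'a str }
}

fn setup() {
    register_event::<DocumentChanged>();
    // lifetimes must stay fully generic (`'_`); spelling out `'static` is rejected,
    // and a type alias hiding a lifetime trips the `ASSERT` on `LIFETIMES` above
    register_hook!(move |event: &mut DocumentChanged<'_>| {
        eprintln!("changed: {}", event.path);
        Ok(())
    });
}

fn notify_change(path: &str) {
    dispatch(DocumentChanged { path });
}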
@@ -5,16 +5,20 @@ use std::future::Future;
|
||||
use parking_lot::{RwLock, RwLockReadGuard};
|
||||
use tokio::sync::Notify;
|
||||
|
||||
/// A `Notify` instance that can be used to (asynchronously) request
|
||||
/// the editor the render a new frame.
|
||||
static REDRAW_NOTIFY: Notify = Notify::const_new();
|
||||
use crate::runtime_local;
|
||||
|
||||
/// A `RwLock` that prevents the next frame from being
|
||||
/// drawn until an exclusive (write) lock can be acquired.
|
||||
/// This allows asynchsonous tasks to acquire `non-exclusive`
|
||||
/// locks (read) to prevent the next frame from being drawn
|
||||
/// until a certain computation has finished.
|
||||
static RENDER_LOCK: RwLock<()> = RwLock::new(());
|
||||
runtime_local! {
|
||||
/// A `Notify` instance that can be used to (asynchronously) request
|
||||
/// the editor to render a new frame.
|
||||
static REDRAW_NOTIFY: Notify = Notify::const_new();
|
||||
|
||||
/// A `RwLock` that prevents the next frame from being
|
||||
/// drawn until an exclusive (write) lock can be acquired.
|
||||
/// This allows asynchronous tasks to acquire `non-exclusive`
|
||||
/// locks (read) to prevent the next frame from being drawn
|
||||
/// until a certain computation has finished.
|
||||
static RENDER_LOCK: RwLock<()> = RwLock::new(());
|
||||
}
|
||||
|
||||
pub type RenderLockGuard = RwLockReadGuard<'static, ()>;
|
||||
|
||||
|
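For context, a hedged sketch of how these two primitives are meant to be used by an asynchronous task (a `lock_frame()` helper returning the `RenderLockGuard` above is assumed to exist in this crate; `request_redraw()` is used elsewhere in this diff, see the application.rs hunks further down):

async fn refresh_decorations() {
    // read lock: the next frame is not drawn until this guard is dropped
    let guard = helix_event::lock_frame();
    recompute().await; // ... work the next frame depends on ...
    drop(guard);
    // wake the redraw Notify so a new frame gets scheduled
    helix_event::request_redraw();
}

async fn recompute() {}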
131
helix-event/src/registry.rs
Normal file
131
helix-event/src/registry.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
//! A global registry where events are registered and can be
|
||||
//! subscribed to by registering hooks. The registry identifies event
|
||||
//! types using their type name, so multiple events with the same type name
|
||||
//! may not be registered (will cause a panic to ensure soundness)
|
||||
|
||||
use std::any::TypeId;
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use hashbrown::hash_map::Entry;
|
||||
use hashbrown::HashMap;
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use crate::hook::ErasedHook;
|
||||
use crate::runtime_local;
|
||||
|
||||
pub struct Registry {
|
||||
events: HashMap<&'static str, TypeId, ahash::RandomState>,
|
||||
handlers: HashMap<&'static str, Vec<ErasedHook>, ahash::RandomState>,
|
||||
}
|
||||
|
||||
impl Registry {
|
||||
pub fn register_event<E: Event + 'static>(&mut self) {
|
||||
let ty = TypeId::of::<E>();
|
||||
assert_eq!(ty, TypeId::of::<E::Static>());
|
||||
match self.events.entry(E::ID) {
|
||||
Entry::Occupied(entry) => {
|
||||
if entry.get() == &ty {
|
||||
// don't warn during tests to avoid log spam
|
||||
#[cfg(not(feature = "integration_test"))]
|
||||
panic!("Event {} was registered multiple times", E::ID);
|
||||
} else {
|
||||
panic!("Multiple events with ID {} were registered", E::ID);
|
||||
}
|
||||
}
|
||||
Entry::Vacant(ent) => {
|
||||
ent.insert(ty);
|
||||
self.handlers.insert(E::ID, Vec::new());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
///
|
||||
/// `hook` must be totally generic over all lifetime parameters of `E`. For
|
||||
/// example if `E` was a known type `Foo<'a, 'b>` then the correct trait bound
|
||||
/// would be `F: for<'a, 'b, 'c> Fn(&'a mut Foo<'b, 'c>)` but there is no way to
|
||||
/// express that kind of constraint for a generic type with the rust type system
|
||||
/// right now.
|
||||
pub unsafe fn register_hook<E: Event>(
|
||||
&mut self,
|
||||
hook: impl Fn(&mut E) -> Result<()> + 'static + Send + Sync,
|
||||
) {
|
||||
// ensure event type ids match so we can rely on them always matching
|
||||
let id = E::ID;
|
||||
let Some(&event_id) = self.events.get(id) else {
|
||||
panic!("Tried to register handler for unknown event {id}");
|
||||
};
|
||||
assert!(
|
||||
TypeId::of::<E::Static>() == event_id,
|
||||
"Tried to register invalid hook for event {id}"
|
||||
);
|
||||
let hook = ErasedHook::new(hook);
|
||||
self.handlers.get_mut(id).unwrap().push(hook);
|
||||
}
|
||||
|
||||
pub fn register_dynamic_hook(
|
||||
&mut self,
|
||||
hook: impl Fn() -> Result<()> + 'static + Send + Sync,
|
||||
id: &str,
|
||||
) -> Result<()> {
|
||||
// ensure event type ids match so we can rely on them always matching
|
||||
if self.events.get(id).is_none() {
|
||||
bail!("Tried to register handler for unknown event {id}");
|
||||
};
|
||||
let hook = ErasedHook::new_dynamic(hook);
|
||||
self.handlers.get_mut(id).unwrap().push(hook);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn dispatch<E: Event>(&self, mut event: E) {
|
||||
let Some(hooks) = self.handlers.get(E::ID) else {
|
||||
log::error!("Dispatched unknown event {}", E::ID);
|
||||
return;
|
||||
};
|
||||
let event_id = self.events[E::ID];
|
||||
|
||||
assert_eq!(
|
||||
TypeId::of::<E::Static>(),
|
||||
event_id,
|
||||
"Tried to dispatch invalid event {}",
|
||||
E::ID
|
||||
);
|
||||
|
||||
for hook in hooks {
|
||||
// safety: event type is the same
|
||||
if let Err(err) = unsafe { hook.call(&mut event) } {
|
||||
log::error!("{} hook failed: {err:#?}", E::ID);
|
||||
crate::status::report_blocking(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
runtime_local! {
|
||||
static REGISTRY: RwLock<Registry> = RwLock::new(Registry {
|
||||
// a hardcoded random seed is good enough here (we don't care about DoS resistance)
|
||||
// and it avoids the additional complexity of `Option<Registry>`
|
||||
events: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 9978, 38322, 3280080)),
|
||||
handlers: HashMap::with_hasher(ahash::RandomState::with_seeds(423, 99078, 382322, 3282938)),
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn with<T>(f: impl FnOnce(&Registry) -> T) -> T {
|
||||
f(®ISTRY.read())
|
||||
}
|
||||
|
||||
pub(crate) fn with_mut<T>(f: impl FnOnce(&mut Registry) -> T) -> T {
|
||||
f(&mut REGISTRY.write())
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// The number of specified lifetimes and the static type *must* be correct.
|
||||
/// This is ensured automatically by the [`events`](crate::events)
|
||||
/// macro.
|
||||
pub unsafe trait Event: Sized {
|
||||
/// Globally unique (case sensitive) string that identifies this type.
|
||||
/// A good candidate is the event's type name
|
||||
const ID: &'static str;
|
||||
const LIFETIMES: usize;
|
||||
type Static: Event + 'static;
|
||||
}
|
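Roughly, the `events!` macro shown earlier produces implementations of this trait; for a type with one lifetime parameter the expansion looks along these lines (a sketch with a made-up event, not the literal macro output):

pub struct FileOpened<'a> {
    pub path: &'a std::path::Path,
}

// Safety contract from the trait above: the ID is globally unique, LIFETIMES
// matches the declared lifetime count, and Static is the same type with every
// lifetime replaced by 'static.
unsafe impl<'a> helix_event::Event for FileOpened<'a> {
    const ID: &'static str = "FileOpened";
    const LIFETIMES: usize = 1;
    type Static = FileOpened<'static>;
}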
88
helix-event/src/runtime.rs
Normal file
88
helix-event/src/runtime.rs
Normal file
@@ -0,0 +1,88 @@
|
||||
//! The event system makes use of globals to decouple different systems.
|
||||
//! However, this can cause problems for the integration test system because
|
||||
//! it runs multiple helix applications in parallel. Making the globals
|
||||
//! thread-local does not work because an application can (and does) have multiple
|
||||
//! runtime threads. Instead this crate implements a similar notion to a thread
|
||||
//! local but instead of being local to a single thread, the statics are local to
|
||||
//! a single tokio-runtime. The implementation requires locking so it's not exactly efficient.
|
||||
//!
|
||||
//! Therefore this function is only enabled during integration tests and behaves like
|
||||
//! a normal static otherwise. I would prefer this module to be fully private and to only
|
||||
//! export the macro, but the macro still needs to construct these internals, so it's marked
|
||||
//! `doc(hidden)` instead
|
||||
|
||||
use std::ops::Deref;
|
||||
|
||||
#[cfg(not(feature = "integration_test"))]
|
||||
pub struct RuntimeLocal<T: 'static> {
|
||||
/// inner API used in the macro, not part of public API
|
||||
#[doc(hidden)]
|
||||
pub __data: T,
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "integration_test"))]
|
||||
impl<T> Deref for RuntimeLocal<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.__data
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "integration_test"))]
|
||||
#[macro_export]
|
||||
macro_rules! runtime_local {
|
||||
($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => {
|
||||
$($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal {
|
||||
__data: $init
|
||||
};)*
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "integration_test")]
|
||||
pub struct RuntimeLocal<T: 'static> {
|
||||
data:
|
||||
parking_lot::RwLock<hashbrown::HashMap<tokio::runtime::Id, &'static T, ahash::RandomState>>,
|
||||
init: fn() -> T,
|
||||
}
|
||||
|
||||
#[cfg(feature = "integration_test")]
|
||||
impl<T> RuntimeLocal<T> {
|
||||
/// inner API used in the macro, not part of public API
|
||||
#[doc(hidden)]
|
||||
pub const fn __new(init: fn() -> T) -> Self {
|
||||
Self {
|
||||
data: parking_lot::RwLock::new(hashbrown::HashMap::with_hasher(
|
||||
ahash::RandomState::with_seeds(423, 9978, 38322, 3280080),
|
||||
)),
|
||||
init,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "integration_test")]
|
||||
impl<T> Deref for RuntimeLocal<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &T {
|
||||
let id = tokio::runtime::Handle::current().id();
|
||||
let guard = self.data.read();
|
||||
match guard.get(&id) {
|
||||
Some(res) => res,
|
||||
None => {
|
||||
drop(guard);
|
||||
let data = Box::leak(Box::new((self.init)()));
|
||||
let mut guard = self.data.write();
|
||||
guard.insert(id, data);
|
||||
data
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "integration_test")]
|
||||
#[macro_export]
|
||||
macro_rules! runtime_local {
|
||||
($($(#[$attr:meta])* $vis: vis static $name:ident: $ty: ty = $init: expr;)*) => {
|
||||
$($(#[$attr])* $vis static $name: $crate::runtime::RuntimeLocal<$ty> = $crate::runtime::RuntimeLocal::__new(|| $init);)*
|
||||
};
|
||||
}
|
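For illustration, a hedged sketch of declaring such a runtime-local static from another crate (the counter is made up): with the `integration_test` feature each tokio runtime lazily gets its own copy on first access, otherwise this expands to a plain static.

use std::sync::atomic::{AtomicUsize, Ordering};

helix_event::runtime_local! {
    static FRAME_COUNT: AtomicUsize = AtomicUsize::new(0);
}

fn bump() -> usize {
    // with `integration_test` enabled this must run inside a tokio runtime;
    // parallel test runtimes then each see an independent counter
    FRAME_COUNT.fetch_add(1, Ordering::Relaxed)
}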
68
helix-event/src/status.rs
Normal file
68
helix-event/src/status.rs
Normal file
@@ -0,0 +1,68 @@
|
||||
//! A queue of async messages/errors that will be shown in the editor
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::{runtime_local, send_blocking};
|
||||
use once_cell::sync::OnceCell;
|
||||
use tokio::sync::mpsc::{Receiver, Sender};
|
||||
|
||||
/// Describes the severity level of a [`StatusMessage`].
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord)]
|
||||
pub enum Severity {
|
||||
Hint,
|
||||
Info,
|
||||
Warning,
|
||||
Error,
|
||||
}
|
||||
|
||||
pub struct StatusMessage {
|
||||
pub severity: Severity,
|
||||
pub message: Cow<'static, str>,
|
||||
}
|
||||
|
||||
impl From<anyhow::Error> for StatusMessage {
|
||||
fn from(err: anyhow::Error) -> Self {
|
||||
StatusMessage {
|
||||
severity: Severity::Error,
|
||||
message: err.to_string().into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'static str> for StatusMessage {
|
||||
fn from(msg: &'static str) -> Self {
|
||||
StatusMessage {
|
||||
severity: Severity::Info,
|
||||
message: msg.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
runtime_local! {
|
||||
static MESSAGES: OnceCell<Sender<StatusMessage>> = OnceCell::new();
|
||||
}
|
||||
|
||||
pub async fn report(msg: impl Into<StatusMessage>) {
|
||||
// if the error channel overflows just ignore it
|
||||
let _ = MESSAGES
|
||||
.wait()
|
||||
.send_timeout(msg.into(), Duration::from_millis(10))
|
||||
.await;
|
||||
}
|
||||
|
||||
pub fn report_blocking(msg: impl Into<StatusMessage>) {
|
||||
let messages = MESSAGES.wait();
|
||||
send_blocking(messages, msg.into())
|
||||
}
|
||||
|
||||
/// Must be called exactly once during editor startup
|
||||
/// before any of the messages in this module can be used
|
||||
///
|
||||
/// # Panics
|
||||
/// If called multiple times
|
||||
pub fn setup() -> Receiver<StatusMessage> {
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(128);
|
||||
let _ = MESSAGES.set(tx);
|
||||
rx
|
||||
}
|
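A hedged sketch of the intended wiring (the background job is made up; `setup`, `report` and the `From` impls are the ones defined above, and helix itself drains the receiver into the status line, as the application.rs hunk further down shows):

async fn wire_status() {
    // exactly once, at startup, before any report()/report_blocking() call
    let mut status_rx = helix_event::status::setup();

    // any async task on the same runtime can now report
    tokio::spawn(async {
        if let Err(err) = tokio::fs::read_to_string("languages.toml").await {
            // io::Error -> anyhow::Error -> StatusMessage via the From impl above
            helix_event::status::report(anyhow::anyhow!(err)).await;
        }
    });

    while let Some(msg) = status_rx.recv().await {
        eprintln!("[{:?}] {}", msg.severity, msg.message);
    }
}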
90
helix-event/src/test.rs
Normal file
90
helix-event/src/test.rs
Normal file
@@ -0,0 +1,90 @@
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use crate::{dispatch, events, register_dynamic_hook, register_event, register_hook};
|
||||
#[test]
|
||||
fn smoke_test() {
|
||||
events! {
|
||||
Event1 { content: String }
|
||||
Event2 { content: usize }
|
||||
}
|
||||
register_event::<Event1>();
|
||||
register_event::<Event2>();
|
||||
|
||||
// setup hooks
|
||||
let res1: Arc<Mutex<String>> = Arc::default();
|
||||
let acc = Arc::clone(&res1);
|
||||
register_hook!(move |event: &mut Event1| {
|
||||
acc.lock().push_str(&event.content);
|
||||
Ok(())
|
||||
});
|
||||
let res2: Arc<AtomicUsize> = Arc::default();
|
||||
let acc = Arc::clone(&res2);
|
||||
register_hook!(move |event: &mut Event2| {
|
||||
acc.fetch_add(event.content, Ordering::Relaxed);
|
||||
Ok(())
|
||||
});
|
||||
|
||||
// triggers events
|
||||
let thread = std::thread::spawn(|| {
|
||||
for i in 0..1000 {
|
||||
dispatch(Event2 { content: i });
|
||||
}
|
||||
});
|
||||
std::thread::sleep(Duration::from_millis(1));
|
||||
dispatch(Event1 {
|
||||
content: "foo".to_owned(),
|
||||
});
|
||||
dispatch(Event2 { content: 42 });
|
||||
dispatch(Event1 {
|
||||
content: "bar".to_owned(),
|
||||
});
|
||||
dispatch(Event1 {
|
||||
content: "hello world".to_owned(),
|
||||
});
|
||||
thread.join().unwrap();
|
||||
|
||||
// check output
|
||||
assert_eq!(&**res1.lock(), "foobarhello world");
|
||||
assert_eq!(
|
||||
res2.load(Ordering::Relaxed),
|
||||
42 + (0..1000usize).sum::<usize>()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dynamic() {
|
||||
events! {
|
||||
Event3 {}
|
||||
Event4 { count: usize }
|
||||
};
|
||||
register_event::<Event3>();
|
||||
register_event::<Event4>();
|
||||
|
||||
let count = Arc::new(AtomicUsize::new(0));
|
||||
let count1 = count.clone();
|
||||
let count2 = count.clone();
|
||||
register_dynamic_hook(
|
||||
move || {
|
||||
count1.fetch_add(2, Ordering::Relaxed);
|
||||
Ok(())
|
||||
},
|
||||
"Event3",
|
||||
)
|
||||
.unwrap();
|
||||
register_dynamic_hook(
|
||||
move || {
|
||||
count2.fetch_add(3, Ordering::Relaxed);
|
||||
Ok(())
|
||||
},
|
||||
"Event4",
|
||||
)
|
||||
.unwrap();
|
||||
dispatch(Event3 {});
|
||||
dispatch(Event4 { count: 0 });
|
||||
dispatch(Event3 {});
|
||||
assert_eq!(count.load(Ordering::Relaxed), 7)
|
||||
}
|
@@ -15,6 +15,8 @@ name = "hx-loader"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
|
||||
anyhow = "1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
toml = "0.7"
|
||||
@@ -22,7 +24,6 @@ etcetera = "0.8"
|
||||
tree-sitter.workspace = true
|
||||
once_cell = "1.19"
|
||||
log = "0.4"
|
||||
which = "5.0.0"
|
||||
|
||||
# TODO: these two should be on !wasm32 only
|
||||
|
||||
|
@@ -86,10 +86,8 @@ pub fn get_language(name: &str) -> Result<Language> {
|
||||
}
|
||||
|
||||
fn ensure_git_is_available() -> Result<()> {
|
||||
match which::which("git") {
|
||||
Ok(_cmd) => Ok(()),
|
||||
Err(err) => Err(anyhow::anyhow!("'git' could not be found ({err})")),
|
||||
}
|
||||
helix_stdx::env::which("git")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn fetch_grammars() -> Result<()> {
|
||||
|
@@ -1,14 +1,13 @@
|
||||
pub mod config;
|
||||
pub mod grammar;
|
||||
|
||||
use helix_stdx::{env::current_working_dir, path};
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::RwLock;
|
||||
|
||||
pub const VERSION_AND_GIT_HASH: &str = env!("VERSION_AND_GIT_HASH");
|
||||
|
||||
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
|
||||
|
||||
static RUNTIME_DIRS: once_cell::sync::Lazy<Vec<PathBuf>> =
|
||||
once_cell::sync::Lazy::new(prioritize_runtime_dirs);
|
||||
|
||||
@@ -16,31 +15,6 @@ static CONFIG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCe
|
||||
|
||||
static LOG_FILE: once_cell::sync::OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
|
||||
|
||||
// Get the current working directory.
|
||||
// This information is managed internally as the call to std::env::current_dir
|
||||
// might fail if the cwd has been deleted.
|
||||
pub fn current_working_dir() -> PathBuf {
|
||||
if let Some(path) = &*CWD.read().unwrap() {
|
||||
return path.clone();
|
||||
}
|
||||
|
||||
let path = std::env::current_dir()
|
||||
.and_then(dunce::canonicalize)
|
||||
.expect("Couldn't determine current working directory");
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path.clone());
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
let path = dunce::canonicalize(path)?;
|
||||
std::env::set_current_dir(&path)?;
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn initialize_config_file(specified_file: Option<PathBuf>) {
|
||||
let config_file = specified_file.unwrap_or_else(default_config_file);
|
||||
ensure_parent_dir(&config_file);
|
||||
@@ -79,7 +53,8 @@ fn prioritize_runtime_dirs() -> Vec<PathBuf> {
|
||||
rt_dirs.push(conf_rt_dir);
|
||||
|
||||
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
|
||||
rt_dirs.push(dir.into());
|
||||
let dir = path::expand_tilde(dir);
|
||||
rt_dirs.push(path::normalize(dir));
|
||||
}
|
||||
|
||||
// If this variable is set during build time, it will always be included
|
||||
@@ -280,21 +255,9 @@ fn ensure_parent_dir(path: &Path) {
|
||||
mod merge_toml_tests {
|
||||
use std::str;
|
||||
|
||||
use super::{current_working_dir, merge_toml_values, set_current_working_dir};
|
||||
use super::merge_toml_values;
|
||||
use toml::Value;
|
||||
|
||||
#[test]
|
||||
fn current_dir_is_set() {
|
||||
let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
|
||||
let cwd = current_working_dir();
|
||||
assert_ne!(cwd, new_path);
|
||||
|
||||
set_current_working_dir(&new_path).expect("Couldn't set new path");
|
||||
|
||||
let cwd = current_working_dir();
|
||||
assert_eq!(cwd, new_path);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn language_toml_map_merges() {
|
||||
const USER: &str = r#"
|
||||
|
@@ -13,6 +13,7 @@ homepage.workspace = true
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
helix-parsec = { path = "../helix-parsec" }
|
||||
@@ -28,5 +29,4 @@ serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1.35", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio-stream = "0.1.14"
|
||||
which = "5.0.0"
|
||||
parking_lot = "0.12.1"
|
||||
|
@@ -4,11 +4,12 @@ use crate::{
|
||||
Call, Error, OffsetEncoding, Result,
|
||||
};
|
||||
|
||||
use helix_core::{find_workspace, path, syntax::LanguageServerFeature, ChangeSet, Rope};
|
||||
use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope};
|
||||
use helix_loader::{self, VERSION_AND_GIT_HASH};
|
||||
use helix_stdx::path;
|
||||
use lsp::{
|
||||
notification::DidChangeWorkspaceFolders, CodeActionCapabilityResolveSupport,
|
||||
DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, WorkspaceFolder,
|
||||
DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, SignatureHelp, WorkspaceFolder,
|
||||
WorkspaceFoldersChangeEvent,
|
||||
};
|
||||
use lsp_types as lsp;
|
||||
@@ -68,7 +69,7 @@ impl Client {
|
||||
may_support_workspace: bool,
|
||||
) -> bool {
|
||||
let (workspace, workspace_is_cwd) = find_workspace();
|
||||
let workspace = path::get_normalized_path(&workspace);
|
||||
let workspace = path::normalize(workspace);
|
||||
let root = find_lsp_workspace(
|
||||
doc_path
|
||||
.and_then(|x| x.parent().and_then(|x| x.to_str()))
|
||||
@@ -182,7 +183,7 @@ impl Client {
|
||||
doc_path: Option<&std::path::PathBuf>,
|
||||
) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> {
|
||||
// Resolve path to the binary
|
||||
let cmd = which::which(cmd).map_err(|err| anyhow::anyhow!(err))?;
|
||||
let cmd = helix_stdx::env::which(cmd)?;
|
||||
|
||||
let process = Command::new(cmd)
|
||||
.envs(server_environment)
|
||||
@@ -204,7 +205,7 @@ impl Client {
|
||||
let (server_rx, server_tx, initialize_notify) =
|
||||
Transport::start(reader, writer, stderr, id, name.clone());
|
||||
let (workspace, workspace_is_cwd) = find_workspace();
|
||||
let workspace = path::get_normalized_path(&workspace);
|
||||
let workspace = path::normalize(workspace);
|
||||
let root = find_lsp_workspace(
|
||||
doc_path
|
||||
.and_then(|x| x.parent().and_then(|x| x.to_str()))
|
||||
@@ -998,6 +999,7 @@ impl Client {
|
||||
text_document: lsp::TextDocumentIdentifier,
|
||||
position: lsp::Position,
|
||||
work_done_token: Option<lsp::ProgressToken>,
|
||||
context: lsp::CompletionContext,
|
||||
) -> Option<impl Future<Output = Result<Value>>> {
|
||||
let capabilities = self.capabilities.get().unwrap();
|
||||
|
||||
@@ -1009,13 +1011,12 @@ impl Client {
|
||||
text_document,
|
||||
position,
|
||||
},
|
||||
context: Some(context),
|
||||
// TODO: support these tokens by async receiving and updating the choice list
|
||||
work_done_progress_params: lsp::WorkDoneProgressParams { work_done_token },
|
||||
partial_result_params: lsp::PartialResultParams {
|
||||
partial_result_token: None,
|
||||
},
|
||||
context: None,
|
||||
// lsp::CompletionContext { trigger_kind: , trigger_character: Some(), }
|
||||
};
|
||||
|
||||
Some(self.call::<lsp::request::Completion>(params))
|
||||
@@ -1062,7 +1063,7 @@ impl Client {
|
||||
text_document: lsp::TextDocumentIdentifier,
|
||||
position: lsp::Position,
|
||||
work_done_token: Option<lsp::ProgressToken>,
|
||||
) -> Option<impl Future<Output = Result<Value>>> {
|
||||
) -> Option<impl Future<Output = Result<Option<SignatureHelp>>>> {
|
||||
let capabilities = self.capabilities.get().unwrap();
|
||||
|
||||
// Return early if the server does not support signature help.
|
||||
@@ -1078,7 +1079,8 @@ impl Client {
|
||||
// lsp::SignatureHelpContext
|
||||
};
|
||||
|
||||
Some(self.call::<lsp::request::SignatureHelpRequest>(params))
|
||||
let res = self.call::<lsp::request::SignatureHelpRequest>(params);
|
||||
Some(async move { Ok(serde_json::from_value(res.await?)?) })
|
||||
}
|
||||
|
||||
pub fn text_document_range_inlay_hints(
|
||||
|
@@ -11,10 +11,10 @@ pub use lsp::{Position, Url};
|
||||
pub use lsp_types as lsp;
|
||||
|
||||
use futures_util::stream::select_all::SelectAll;
|
||||
use helix_core::{
|
||||
path,
|
||||
syntax::{LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures},
|
||||
use helix_core::syntax::{
|
||||
LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures,
|
||||
};
|
||||
use helix_stdx::path;
|
||||
use tokio::sync::mpsc::UnboundedReceiver;
|
||||
|
||||
use std::{
|
||||
@@ -44,6 +44,8 @@ pub enum Error {
|
||||
#[error("Unhandled")]
|
||||
Unhandled,
|
||||
#[error(transparent)]
|
||||
ExecutableNotFound(#[from] helix_stdx::env::ExecutableNotFoundError),
|
||||
#[error(transparent)]
|
||||
Other(#[from] anyhow::Error),
|
||||
}
|
||||
|
||||
@@ -549,6 +551,7 @@ pub enum MethodCall {
|
||||
WorkspaceConfiguration(lsp::ConfigurationParams),
|
||||
RegisterCapability(lsp::RegistrationParams),
|
||||
UnregisterCapability(lsp::UnregistrationParams),
|
||||
ShowDocument(lsp::ShowDocumentParams),
|
||||
}
|
||||
|
||||
impl MethodCall {
|
||||
@@ -576,6 +579,10 @@ impl MethodCall {
|
||||
let params: lsp::UnregistrationParams = params.parse()?;
|
||||
Self::UnregisterCapability(params)
|
||||
}
|
||||
lsp::request::ShowDocument::METHOD => {
|
||||
let params: lsp::ShowDocumentParams = params.parse()?;
|
||||
Self::ShowDocument(params)
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::Unhandled);
|
||||
}
|
||||
@@ -953,10 +960,10 @@ pub fn find_lsp_workspace(
|
||||
let mut file = if file.is_absolute() {
|
||||
file.to_path_buf()
|
||||
} else {
|
||||
let current_dir = helix_loader::current_working_dir();
|
||||
let current_dir = helix_stdx::env::current_working_dir();
|
||||
current_dir.join(file)
|
||||
};
|
||||
file = path::get_normalized_path(&file);
|
||||
file = path::normalize(&file);
|
||||
|
||||
if !file.starts_with(workspace) {
|
||||
return None;
|
||||
@@ -973,7 +980,7 @@ pub fn find_lsp_workspace(
|
||||
|
||||
if root_dirs
|
||||
.iter()
|
||||
.any(|root_dir| path::get_normalized_path(&workspace.join(root_dir)) == ancestor)
|
||||
.any(|root_dir| path::normalize(workspace.join(root_dir)) == ancestor)
|
||||
{
|
||||
// if the workspace is the cwd do not search any higher for workspaces
|
||||
// but specify
|
||||
|
@@ -270,7 +270,14 @@ impl Transport {
|
||||
}
|
||||
};
|
||||
}
|
||||
Err(Error::StreamClosed) => {
|
||||
Err(err) => {
|
||||
if !matches!(err, Error::StreamClosed) {
|
||||
error!(
|
||||
"Exiting {} after unexpected error: {err:?}",
|
||||
&transport.name
|
||||
);
|
||||
}
|
||||
|
||||
// Close any outstanding requests.
|
||||
for (id, tx) in transport.pending_requests.lock().await.drain() {
|
||||
match tx.send(Err(Error::StreamClosed)).await {
|
||||
@@ -300,10 +307,6 @@ impl Transport {
|
||||
}
|
||||
break;
|
||||
}
|
||||
Err(err) => {
|
||||
error!("{} err: <- {err:?}", transport.name);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
21
helix-stdx/Cargo.toml
Normal file
21
helix-stdx/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "helix-stdx"
|
||||
description = "Standard library extensions"
|
||||
include = ["src/**/*", "README.md"]
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[dependencies]
|
||||
dunce = "1.0"
|
||||
etcetera = "0.8"
|
||||
ropey = { version = "1.6.1", default-features = false }
|
||||
which = "6.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.9"
|
80
helix-stdx/src/env.rs
Normal file
80
helix-stdx/src/env.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use std::{
|
||||
ffi::OsStr,
|
||||
path::{Path, PathBuf},
|
||||
sync::RwLock,
|
||||
};
|
||||
|
||||
static CWD: RwLock<Option<PathBuf>> = RwLock::new(None);
|
||||
|
||||
// Get the current working directory.
|
||||
// This information is managed internally as the call to std::env::current_dir
|
||||
// might fail if the cwd has been deleted.
|
||||
pub fn current_working_dir() -> PathBuf {
|
||||
if let Some(path) = &*CWD.read().unwrap() {
|
||||
return path.clone();
|
||||
}
|
||||
|
||||
let path = std::env::current_dir()
|
||||
.map(crate::path::normalize)
|
||||
.expect("Couldn't determine current working directory");
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path.clone());
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
pub fn set_current_working_dir(path: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
let path = crate::path::canonicalize(path);
|
||||
std::env::set_current_dir(&path)?;
|
||||
let mut cwd = CWD.write().unwrap();
|
||||
*cwd = Some(path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn env_var_is_set(env_var_name: &str) -> bool {
|
||||
std::env::var_os(env_var_name).is_some()
|
||||
}
|
||||
|
||||
pub fn binary_exists<T: AsRef<OsStr>>(binary_name: T) -> bool {
|
||||
which::which(binary_name).is_ok()
|
||||
}
|
||||
|
||||
pub fn which<T: AsRef<OsStr>>(
|
||||
binary_name: T,
|
||||
) -> Result<std::path::PathBuf, ExecutableNotFoundError> {
|
||||
which::which(binary_name.as_ref()).map_err(|err| ExecutableNotFoundError {
|
||||
command: binary_name.as_ref().to_string_lossy().into_owned(),
|
||||
inner: err,
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExecutableNotFoundError {
|
||||
command: String,
|
||||
inner: which::Error,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ExecutableNotFoundError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "command '{}' not found: {}", self.command, self.inner)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for ExecutableNotFoundError {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{current_working_dir, set_current_working_dir};
|
||||
|
||||
#[test]
|
||||
fn current_dir_is_set() {
|
||||
let new_path = dunce::canonicalize(std::env::temp_dir()).unwrap();
|
||||
let cwd = current_working_dir();
|
||||
assert_ne!(cwd, new_path);
|
||||
|
||||
set_current_working_dir(&new_path).expect("Couldn't set new path");
|
||||
|
||||
let cwd = current_working_dir();
|
||||
assert_eq!(cwd, new_path);
|
||||
}
|
||||
}
|
3
helix-stdx/src/lib.rs
Normal file
3
helix-stdx/src/lib.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod env;
|
||||
pub mod path;
|
||||
pub mod rope;
|
@@ -1,6 +1,9 @@
|
||||
use etcetera::home_dir;
|
||||
pub use etcetera::home_dir;
|
||||
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
|
||||
use crate::env::current_working_dir;
|
||||
|
||||
/// Replaces users home directory from `path` with tilde `~` if the directory
|
||||
/// is available, otherwise returns the path unchanged.
|
||||
pub fn fold_home_dir(path: &Path) -> PathBuf {
|
||||
@@ -16,7 +19,8 @@ pub fn fold_home_dir(path: &Path) -> PathBuf {
|
||||
/// Expands tilde `~` into users home directory if available, otherwise returns the path
|
||||
/// unchanged. The tilde will only be expanded when present as the first component of the path
|
||||
/// and only slash follows it.
|
||||
pub fn expand_tilde(path: &Path) -> PathBuf {
|
||||
pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
|
||||
let path = path.as_ref();
|
||||
let mut components = path.components().peekable();
|
||||
if let Some(Component::Normal(c)) = components.peek() {
|
||||
if c == &"~" {
|
||||
@@ -30,32 +34,11 @@ pub fn expand_tilde(path: &Path) -> PathBuf {
|
||||
path.to_path_buf()
|
||||
}
|
||||
|
||||
/// Normalize a path, removing things like `.` and `..`.
|
||||
///
|
||||
/// CAUTION: This does not resolve symlinks (unlike
|
||||
/// [`std::fs::canonicalize`]). This may cause incorrect or surprising
|
||||
/// behavior at times. This should be used carefully. Unfortunately,
|
||||
/// [`std::fs::canonicalize`] can be hard to use correctly, since it can often
|
||||
/// fail, or on Windows returns annoying device paths. This is a problem Cargo
|
||||
/// needs to improve on.
|
||||
/// Copied from cargo: <https://github.com/rust-lang/cargo/blob/070e459c2d8b79c5b2ac5218064e7603329c92ae/crates/cargo-util/src/paths.rs#L81>
|
||||
pub fn get_normalized_path(path: &Path) -> PathBuf {
|
||||
// normalization strategy is to canonicalize first ancestor path that exists (i.e., canonicalize as much as possible),
|
||||
// then run handrolled normalization on the non-existent remainder
|
||||
let (base, path) = path
|
||||
.ancestors()
|
||||
.find_map(|base| {
|
||||
let canonicalized_base = dunce::canonicalize(base).ok()?;
|
||||
let remainder = path.strip_prefix(base).ok()?.into();
|
||||
Some((canonicalized_base, remainder))
|
||||
})
|
||||
.unwrap_or_else(|| (PathBuf::new(), PathBuf::from(path)));
|
||||
|
||||
if path.as_os_str().is_empty() {
|
||||
return base;
|
||||
}
|
||||
|
||||
let mut components = path.components().peekable();
|
||||
/// Normalize a path without resolving symlinks.
|
||||
// Strategy: start from the first component and move up. Canonicalize the previous path,
|
||||
// join the component, canonicalize the new path, strip the prefix and join it to the final result.
|
||||
pub fn normalize(path: impl AsRef<Path>) -> PathBuf {
|
||||
let mut components = path.as_ref().components().peekable();
|
||||
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
|
||||
components.next();
|
||||
PathBuf::from(c.as_os_str())
|
||||
@@ -70,37 +53,77 @@ pub fn get_normalized_path(path: &Path) -> PathBuf {
|
||||
ret.push(component.as_os_str());
|
||||
}
|
||||
Component::CurDir => {}
|
||||
#[cfg(not(windows))]
|
||||
Component::ParentDir => {
|
||||
ret.pop();
|
||||
}
|
||||
#[cfg(windows)]
|
||||
Component::ParentDir => {
|
||||
if let Some(head) = ret.components().next_back() {
|
||||
match head {
|
||||
Component::Prefix(_) | Component::RootDir => {}
|
||||
Component::CurDir => unreachable!(),
|
||||
// If the previous component was left as ".." it means we met a symlink earlier and we can't pop the path.
|
||||
Component::ParentDir => {
|
||||
ret.push("..");
|
||||
}
|
||||
Component::Normal(_) => {
|
||||
if ret.is_symlink() {
|
||||
ret.push("..");
|
||||
} else {
|
||||
ret.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
Component::Normal(c) => {
|
||||
ret.push(c);
|
||||
}
|
||||
#[cfg(windows)]
|
||||
Component::Normal(c) => 'normal: {
|
||||
use std::fs::canonicalize;
|
||||
|
||||
let new_path = ret.join(c);
|
||||
if new_path.is_symlink() {
|
||||
ret = new_path;
|
||||
break 'normal;
|
||||
}
|
||||
let (can_new, can_old) = (canonicalize(&new_path), canonicalize(&ret));
|
||||
match (can_new, can_old) {
|
||||
(Ok(can_new), Ok(can_old)) => {
|
||||
let striped = can_new.strip_prefix(can_old);
|
||||
ret.push(striped.unwrap_or_else(|_| c.as_ref()));
|
||||
}
|
||||
_ => ret.push(c),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
base.join(ret)
|
||||
dunce::simplified(&ret).to_path_buf()
|
||||
}
|
||||
|
||||
/// Returns the canonical, absolute form of a path with all intermediate components normalized.
|
||||
///
|
||||
/// This function is used instead of `std::fs::canonicalize` because we don't want to verify
|
||||
/// This function is used instead of [`std::fs::canonicalize`] because we don't want to verify
|
||||
/// here if the path exists, just normalize its components.
|
||||
pub fn get_canonicalized_path(path: &Path) -> PathBuf {
|
||||
pub fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
|
||||
let path = expand_tilde(path);
|
||||
let path = if path.is_relative() {
|
||||
helix_loader::current_working_dir().join(path)
|
||||
current_working_dir().join(path)
|
||||
} else {
|
||||
path
|
||||
};
|
||||
|
||||
get_normalized_path(path.as_path())
|
||||
normalize(path)
|
||||
}
|
||||
|
||||
pub fn get_relative_path(path: &Path) -> PathBuf {
|
||||
let path = PathBuf::from(path);
|
||||
pub fn get_relative_path(path: impl AsRef<Path>) -> PathBuf {
|
||||
let path = PathBuf::from(path.as_ref());
|
||||
let path = if path.is_absolute() {
|
||||
let cwdir = get_normalized_path(&helix_loader::current_working_dir());
|
||||
get_normalized_path(&path)
|
||||
let cwdir = normalize(current_working_dir());
|
||||
normalize(&path)
|
||||
.strip_prefix(cwdir)
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or(path)
|
||||
@@ -116,8 +139,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
|
||||
/// Also strip the current working directory from the beginning of the path.
|
||||
/// Note that this function does not check if the truncated path is unambiguous.
|
||||
///
|
||||
/// ```
|
||||
/// use helix_core::path::get_truncated_path;
|
||||
/// ```
|
||||
/// use helix_stdx::path::get_truncated_path;
|
||||
/// use std::path::Path;
|
||||
///
|
||||
/// assert_eq!(
|
||||
@@ -139,8 +162,8 @@ pub fn get_relative_path(path: &Path) -> PathBuf {
|
||||
/// assert_eq!(get_truncated_path("").as_path(), Path::new(""));
|
||||
/// ```
|
||||
///
|
||||
pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
|
||||
let cwd = helix_loader::current_working_dir();
|
||||
pub fn get_truncated_path(path: impl AsRef<Path>) -> PathBuf {
|
||||
let cwd = current_working_dir();
|
||||
let path = path
|
||||
.as_ref()
|
||||
.strip_prefix(cwd)
|
26
helix-stdx/src/rope.rs
Normal file
26
helix-stdx/src/rope.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use ropey::RopeSlice;
|
||||
|
||||
pub trait RopeSliceExt: Sized {
|
||||
fn ends_with(self, text: &str) -> bool;
|
||||
fn starts_with(self, text: &str) -> bool;
|
||||
}
|
||||
|
||||
impl RopeSliceExt for RopeSlice<'_> {
|
||||
fn ends_with(self, text: &str) -> bool {
|
||||
let len = self.len_bytes();
|
||||
if len < text.len() {
|
||||
return false;
|
||||
}
|
||||
self.get_byte_slice(len - text.len()..)
|
||||
.map_or(false, |end| end == text)
|
||||
}
|
||||
|
||||
fn starts_with(self, text: &str) -> bool {
|
||||
let len = self.len_bytes();
|
||||
if len < text.len() {
|
||||
return false;
|
||||
}
|
||||
self.get_byte_slice(..text.len())
|
||||
.map_or(false, |start| start == text)
|
||||
}
|
||||
}
|
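A short usage sketch for the extension trait (the helper and the comment prefixes are illustrative):

use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;

fn is_rust_doc_comment(line: &Rope) -> bool {
    let line = line.slice(..);
    // byte-wise comparison against the start of the slice, no allocation
    line.starts_with("///") || line.starts_with("//!")
}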
124
helix-stdx/tests/path.rs
Normal file
124
helix-stdx/tests/path.rs
Normal file
@@ -0,0 +1,124 @@
|
||||
#![cfg(windows)]
|
||||
|
||||
use std::{
|
||||
env::set_current_dir,
|
||||
error::Error,
|
||||
path::{Component, Path, PathBuf},
|
||||
};
|
||||
|
||||
use helix_stdx::path;
|
||||
use tempfile::Builder;
|
||||
|
||||
// Paths on Windows are almost always case-insensitive.
|
||||
// Normalization should return the original path.
|
||||
// E.g. mkdir `CaSe`, normalize(`case`) = `CaSe`.
|
||||
#[test]
|
||||
fn test_case_folding_windows() -> Result<(), Box<dyn Error>> {
|
||||
// tmp/root/case
|
||||
let tmp_prefix = std::env::temp_dir();
|
||||
set_current_dir(&tmp_prefix)?;
|
||||
|
||||
let root = Builder::new().prefix("root-").tempdir()?;
|
||||
let case = Builder::new().prefix("CaSe-").tempdir_in(&root)?;
|
||||
|
||||
let root_without_prefix = root.path().strip_prefix(&tmp_prefix)?;
|
||||
|
||||
let lowercase_case = format!(
|
||||
"case-{}",
|
||||
case.path()
|
||||
.file_name()
|
||||
.unwrap()
|
||||
.to_string_lossy()
|
||||
.split_at(5)
|
||||
.1
|
||||
);
|
||||
let test_path = root_without_prefix.join(lowercase_case);
|
||||
assert_eq!(
|
||||
path::normalize(&test_path),
|
||||
case.path().strip_prefix(&tmp_prefix)?
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_normalize_path() -> Result<(), Box<dyn Error>> {
|
||||
/*
|
||||
tmp/root/
|
||||
├── link -> dir1/orig_file
|
||||
├── dir1/
|
||||
│ └── orig_file
|
||||
└── dir2/
|
||||
└── dir_link -> ../dir1/
|
||||
*/
|
||||
|
||||
let tmp_prefix = std::env::temp_dir();
|
||||
set_current_dir(&tmp_prefix)?;
|
||||
|
||||
// Create a tree structure as shown above
|
||||
let root = Builder::new().prefix("root-").tempdir()?;
|
||||
let dir1 = Builder::new().prefix("dir1-").tempdir_in(&root)?;
|
||||
let orig_file = Builder::new().prefix("orig_file-").tempfile_in(&dir1)?;
|
||||
let dir2 = Builder::new().prefix("dir2-").tempdir_in(&root)?;
|
||||
|
||||
// Create path and delete existing file
|
||||
let dir_link = Builder::new()
|
||||
.prefix("dir_link-")
|
||||
.tempfile_in(&dir2)?
|
||||
.path()
|
||||
.to_owned();
|
||||
let link = Builder::new()
|
||||
.prefix("link-")
|
||||
.tempfile_in(&root)?
|
||||
.path()
|
||||
.to_owned();
|
||||
|
||||
use std::os::windows;
|
||||
windows::fs::symlink_dir(&dir1, &dir_link)?;
|
||||
windows::fs::symlink_file(&orig_file, &link)?;
|
||||
|
||||
// root/link
|
||||
let path = link.strip_prefix(&tmp_prefix)?;
|
||||
assert_eq!(
|
||||
path::normalize(path),
|
||||
path,
|
||||
"input {:?} and symlink last component shouldn't be resolved",
|
||||
path
|
||||
);
|
||||
|
||||
// root/dir2/dir_link/orig_file/../..
|
||||
let path = dir_link
|
||||
.strip_prefix(&tmp_prefix)
|
||||
.unwrap()
|
||||
.join(orig_file.path().file_name().unwrap())
|
||||
.join(Component::ParentDir)
|
||||
.join(Component::ParentDir);
|
||||
let expected = dir_link
|
||||
.strip_prefix(&tmp_prefix)
|
||||
.unwrap()
|
||||
.join(Component::ParentDir);
|
||||
assert_eq!(
|
||||
path::normalize(&path),
|
||||
expected,
|
||||
"input {:?} and \"..\" should not erase the simlink that goes ahead",
|
||||
&path
|
||||
);
|
||||
|
||||
// root/link/.././../dir2/../
|
||||
let path = link
|
||||
.strip_prefix(&tmp_prefix)
|
||||
.unwrap()
|
||||
.join(Component::ParentDir)
|
||||
.join(Component::CurDir)
|
||||
.join(Component::ParentDir)
|
||||
.join(dir2.path().file_name().unwrap())
|
||||
.join(Component::ParentDir);
|
||||
let expected = link
|
||||
.strip_prefix(&tmp_prefix)
|
||||
.unwrap()
|
||||
.join(Component::ParentDir)
|
||||
.join(Component::ParentDir);
|
||||
assert_eq!(path::normalize(&path), expected, "input {:?}", &path);
|
||||
|
||||
Ok(())
|
||||
}
|
@@ -15,7 +15,7 @@ homepage.workspace = true
|
||||
[features]
|
||||
default = ["git"]
|
||||
unicode-lines = ["helix-core/unicode-lines"]
|
||||
integration = []
|
||||
integration = ["helix-event/integration_test"]
|
||||
git = ["helix-vcs/git"]
|
||||
|
||||
[[bin]]
|
||||
@@ -23,6 +23,7 @@ name = "hx"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
helix-stdx = { path = "../helix-stdx" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-event = { path = "../helix-event" }
|
||||
helix-view = { path = "../helix-view" }
|
||||
@@ -34,8 +35,6 @@ helix-loader = { path = "../helix-loader" }
|
||||
anyhow = "1"
|
||||
once_cell = "1.19"
|
||||
|
||||
which = "5.0.0"
|
||||
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
|
||||
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
|
||||
crossterm = { version = "0.27", features = ["event-stream"] }
|
||||
@@ -82,6 +81,6 @@ crossterm = { version = "0.27", features = ["event-stream", "use-dev-tty"] }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
||||
[dev-dependencies]
|
||||
smallvec = "1.11"
|
||||
smallvec = "1.13"
|
||||
indoc = "2.0.4"
|
||||
tempfile = "3.9.0"
|
||||
|
@@ -1,10 +1,12 @@
|
||||
use arc_swap::{access::Map, ArcSwap};
|
||||
use futures_util::Stream;
|
||||
use helix_core::{path::get_relative_path, pos_at_coords, syntax, Selection};
|
||||
use helix_core::{diagnostic::Severity, pos_at_coords, syntax, Selection};
|
||||
use helix_lsp::{
|
||||
lsp::{self, notification::Notification},
|
||||
util::lsp_range_to_range,
|
||||
LspProgressMap,
|
||||
};
|
||||
use helix_stdx::path::get_relative_path;
|
||||
use helix_view::{
|
||||
align_view,
|
||||
document::DocumentSavedEventResult,
|
||||
@@ -22,12 +24,13 @@ use crate::{
|
||||
commands::apply_workspace_edit,
|
||||
compositor::{Compositor, Event},
|
||||
config::Config,
|
||||
handlers,
|
||||
job::Jobs,
|
||||
keymap::Keymaps,
|
||||
ui::{self, overlay::overlaid},
|
||||
};
|
||||
|
||||
use log::{debug, error, warn};
|
||||
use log::{debug, error, info, warn};
|
||||
#[cfg(not(feature = "integration"))]
|
||||
use std::io::stdout;
|
||||
use std::{collections::btree_map::Entry, io::stdin, path::Path, sync::Arc};
|
||||
@@ -136,6 +139,7 @@ impl Application {
|
||||
let area = terminal.size().expect("couldn't get terminal size");
|
||||
let mut compositor = Compositor::new(area);
|
||||
let config = Arc::new(ArcSwap::from_pointee(config));
|
||||
let handlers = handlers::setup(config.clone());
|
||||
let mut editor = Editor::new(
|
||||
area,
|
||||
theme_loader.clone(),
|
||||
@@ -143,6 +147,7 @@ impl Application {
|
||||
Arc::new(Map::new(Arc::clone(&config), |config: &Config| {
|
||||
&config.editor
|
||||
})),
|
||||
handlers,
|
||||
);
|
||||
|
||||
let keys = Box::new(Map::new(Arc::clone(&config), |config: &Config| {
|
||||
@@ -319,10 +324,21 @@ impl Application {
|
||||
Some(event) = input_stream.next() => {
|
||||
self.handle_terminal_events(event).await;
|
||||
}
|
||||
Some(callback) = self.jobs.futures.next() => {
|
||||
self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback);
|
||||
Some(callback) = self.jobs.callbacks.recv() => {
|
||||
self.jobs.handle_callback(&mut self.editor, &mut self.compositor, Ok(Some(callback)));
|
||||
self.render().await;
|
||||
}
|
||||
Some(msg) = self.jobs.status_messages.recv() => {
|
||||
let severity = match msg.severity{
|
||||
helix_event::status::Severity::Hint => Severity::Hint,
|
||||
helix_event::status::Severity::Info => Severity::Info,
|
||||
helix_event::status::Severity::Warning => Severity::Warning,
|
||||
helix_event::status::Severity::Error => Severity::Error,
|
||||
};
|
||||
// TODO: show multiple status messages at once to avoid clobbering
|
||||
self.editor.status_msg = Some((msg.message, severity));
|
||||
helix_event::request_redraw();
|
||||
}
|
||||
Some(callback) = self.jobs.wait_futures.next() => {
|
||||
self.jobs.handle_callback(&mut self.editor, &mut self.compositor, callback);
|
||||
self.render().await;
|
||||
@@ -683,9 +699,13 @@ impl Application {
|
||||
Call::Notification(helix_lsp::jsonrpc::Notification { method, params, .. }) => {
|
||||
let notification = match Notification::parse(&method, params) {
|
||||
Ok(notification) => notification,
|
||||
Err(helix_lsp::Error::Unhandled) => {
|
||||
info!("Ignoring Unhandled notification from Language Server");
|
||||
return;
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!(
|
||||
"received malformed notification from Language Server: {}",
|
||||
error!(
|
||||
"Ignoring unknown notification from Language Server: {}",
|
||||
err
|
||||
);
|
||||
return;
|
||||
@@ -1096,6 +1116,13 @@ impl Application {
|
||||
}
|
||||
Ok(serde_json::Value::Null)
|
||||
}
|
||||
Ok(MethodCall::ShowDocument(params)) => {
|
||||
let language_server = language_server!();
|
||||
let offset_encoding = language_server.offset_encoding();
|
||||
|
||||
let result = self.handle_show_document(params, offset_encoding);
|
||||
Ok(json!(result))
|
||||
}
|
||||
};
|
||||
|
||||
tokio::spawn(language_server!().reply(id, reply));
|
||||
@@ -1104,6 +1131,68 @@ impl Application {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_show_document(
|
||||
&mut self,
|
||||
params: lsp::ShowDocumentParams,
|
||||
offset_encoding: helix_lsp::OffsetEncoding,
|
||||
) -> lsp::ShowDocumentResult {
|
||||
if let lsp::ShowDocumentParams {
|
||||
external: Some(true),
|
||||
uri,
|
||||
..
|
||||
} = params
|
||||
{
|
||||
self.jobs.callback(crate::open_external_url_callback(uri));
|
||||
return lsp::ShowDocumentResult { success: true };
|
||||
};
|
||||
|
||||
let lsp::ShowDocumentParams {
|
||||
uri,
|
||||
selection,
|
||||
take_focus,
|
||||
..
|
||||
} = params;
|
||||
|
||||
let path = match uri.to_file_path() {
|
||||
Ok(path) => path,
|
||||
Err(err) => {
|
||||
log::error!("unsupported file URI: {}: {:?}", uri, err);
|
||||
return lsp::ShowDocumentResult { success: false };
|
||||
}
|
||||
};
|
||||
|
||||
let action = match take_focus {
|
||||
Some(true) => helix_view::editor::Action::Replace,
|
||||
_ => helix_view::editor::Action::VerticalSplit,
|
||||
};
|
||||
|
||||
let doc_id = match self.editor.open(&path, action) {
|
||||
Ok(id) => id,
|
||||
Err(err) => {
|
||||
log::error!("failed to open path: {:?}: {:?}", uri, err);
|
||||
return lsp::ShowDocumentResult { success: false };
|
||||
}
|
||||
};
|
||||
|
||||
let doc = doc_mut!(self.editor, &doc_id);
|
||||
if let Some(range) = selection {
|
||||
// TODO: convert inside server
|
||||
if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding) {
|
||||
let view = view_mut!(self.editor);
|
||||
|
||||
// we flip the range so that the cursor sits on the start of the symbol
|
||||
// (for example start of the function).
|
||||
doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor));
|
||||
if action.align_view(view, doc.id()) {
|
||||
align_view(doc, view, Align::Center);
|
||||
}
|
||||
} else {
|
||||
log::warn!("lsp position out of bounds - {:?}", range);
|
||||
};
|
||||
};
|
||||
lsp::ShowDocumentResult { success: true }
|
||||
}
|
||||
|
||||
async fn claim_term(&mut self) -> std::io::Result<()> {
|
||||
let terminal_config = self.config.load().editor.clone().into();
|
||||
self.terminal.claim(terminal_config)
|
||||
|
@@ -90,10 +90,9 @@ impl Args {
|
||||
}
|
||||
}
|
||||
arg if arg.starts_with('+') => {
|
||||
let arg = &arg[1..];
|
||||
line_number = match arg.parse::<usize>() {
|
||||
Ok(n) => n.saturating_sub(1),
|
||||
_ => anyhow::bail!("bad line number after +"),
|
||||
match arg[1..].parse::<usize>() {
|
||||
Ok(n) => line_number = n.saturating_sub(1),
|
||||
_ => args.files.push(parse_file(arg)),
|
||||
};
|
||||
}
|
||||
arg => args.files.push(parse_file(arg)),
|
||||
|
@@ -5,7 +5,6 @@ pub(crate) mod typed;
|
||||
pub use dap::*;
|
||||
use helix_vcs::Hunk;
|
||||
pub use lsp::*;
|
||||
use tokio::sync::oneshot;
|
||||
use tui::widgets::Row;
|
||||
pub use typed::*;
|
||||
|
||||
@@ -33,7 +32,7 @@ use helix_core::{
|
||||
};
|
||||
use helix_view::{
|
||||
document::{FormatterError, Mode, SCRATCH_BUFFER_NAME},
|
||||
editor::{Action, CompleteAction},
|
||||
editor::Action,
|
||||
info::Info,
|
||||
input::KeyEvent,
|
||||
keyboard::KeyCode,
|
||||
@@ -52,14 +51,10 @@ use crate::{
|
||||
filter_picker_entry,
|
||||
job::Callback,
|
||||
keymap::ReverseKeymap,
|
||||
ui::{
|
||||
self, editor::InsertEvent, lsp::SignatureHelp, overlay::overlaid, CompletionItem, Picker,
|
||||
Popup, Prompt, PromptEvent,
|
||||
},
|
||||
ui::{self, overlay::overlaid, Picker, Popup, Prompt, PromptEvent},
|
||||
};
|
||||
|
||||
use crate::job::{self, Jobs};
|
||||
use futures_util::{stream::FuturesUnordered, TryStreamExt};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt,
|
||||
@@ -88,7 +83,7 @@ pub struct Context<'a> {
|
||||
pub count: Option<NonZeroUsize>,
|
||||
pub editor: &'a mut Editor,
|
||||
|
||||
pub callback: Option<crate::compositor::Callback>,
|
||||
pub callback: Vec<crate::compositor::Callback>,
|
||||
pub on_next_key_callback: Option<OnKeyCallback>,
|
||||
pub jobs: &'a mut Jobs,
|
||||
}
|
||||
@@ -96,16 +91,18 @@ pub struct Context<'a> {
|
||||
impl<'a> Context<'a> {
|
||||
/// Push a new component onto the compositor.
|
||||
pub fn push_layer(&mut self, component: Box<dyn Component>) {
|
||||
self.callback = Some(Box::new(|compositor: &mut Compositor, _| {
|
||||
compositor.push(component)
|
||||
}));
|
||||
self.callback
|
||||
.push(Box::new(|compositor: &mut Compositor, _| {
|
||||
compositor.push(component)
|
||||
}));
|
||||
}
|
||||
|
||||
/// Call `replace_or_push` on the Compositor
|
||||
pub fn replace_or_push_layer<T: Component>(&mut self, id: &'static str, component: T) {
|
||||
self.callback = Some(Box::new(move |compositor: &mut Compositor, _| {
|
||||
compositor.replace_or_push(id, component);
|
||||
}));
|
||||
self.callback
|
||||
.push(Box::new(move |compositor: &mut Compositor, _| {
|
||||
compositor.replace_or_push(id, component);
|
||||
}));
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@@ -795,7 +792,7 @@ fn goto_buffer(editor: &mut Editor, direction: Direction) {
|
||||
let iter = editor.documents.keys();
|
||||
let mut iter = iter.rev().skip_while(|id| *id != ¤t);
|
||||
iter.next(); // skip current item
|
||||
iter.next().or_else(|| editor.documents.keys().rev().next())
|
||||
iter.next().or_else(|| editor.documents.keys().next_back())
|
||||
}
|
||||
}
|
||||
.unwrap();
|
||||
@@ -1227,7 +1224,7 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
|
||||
.unwrap_or_default();
|
||||
|
||||
if url.scheme() != "file" {
|
||||
return open_external_url(cx, url);
|
||||
return cx.jobs.callback(crate::open_external_url_callback(url));
|
||||
}
|
||||
|
||||
let content_type = std::fs::File::open(url.path()).and_then(|file| {
|
||||
@@ -1240,7 +1237,9 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
|
||||
// we attempt to open binary files - files that can't be open in helix - using external
|
||||
// program as well, e.g. pdf files or images
|
||||
match content_type {
|
||||
Ok(content_inspector::ContentType::BINARY) => open_external_url(cx, url),
|
||||
Ok(content_inspector::ContentType::BINARY) => {
|
||||
cx.jobs.callback(crate::open_external_url_callback(url))
|
||||
}
|
||||
Ok(_) | Err(_) => {
|
||||
let path = &rel_path.join(url.path());
|
||||
if path.is_dir() {
|
||||
@@ -1253,23 +1252,6 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Opens URL in external program.
|
||||
fn open_external_url(cx: &mut Context, url: Url) {
|
||||
let commands = open::commands(url.as_str());
|
||||
cx.jobs.callback(async {
|
||||
for cmd in commands {
|
||||
let mut command = tokio::process::Command::new(cmd.get_program());
|
||||
command.args(cmd.get_args());
|
||||
if command.output().await.is_ok() {
|
||||
return Ok(job::Callback::Editor(Box::new(|_| {})));
|
||||
}
|
||||
}
|
||||
Ok(job::Callback::Editor(Box::new(move |editor| {
|
||||
editor.set_error("Opening URL in external program failed")
|
||||
})))
|
||||
});
|
||||
}
|
||||
|
||||
fn extend_word_impl<F>(cx: &mut Context, extend_fn: F)
|
||||
where
|
||||
F: Fn(RopeSlice, Range, usize) -> Range,
|
||||
@@ -2184,7 +2166,7 @@ fn global_search(cx: &mut Context) {
|
||||
type Data = Option<PathBuf>;
|
||||
|
||||
fn format(&self, current_path: &Self::Data) -> Row {
|
||||
let relative_path = helix_core::path::get_relative_path(&self.path)
|
||||
let relative_path = helix_stdx::path::get_relative_path(&self.path)
|
||||
.to_string_lossy()
|
||||
.into_owned();
|
||||
if current_path
|
||||
@@ -2233,7 +2215,7 @@ fn global_search(cx: &mut Context) {
|
||||
.case_smart(smart_case)
|
||||
.build(regex.as_str())
|
||||
{
|
||||
let search_root = helix_loader::current_working_dir();
|
||||
let search_root = helix_stdx::env::current_working_dir();
|
||||
if !search_root.exists() {
|
||||
cx.editor
|
||||
.set_error("Current working directory does not exist");
|
||||
@@ -2606,7 +2588,6 @@ fn delete_by_selection_insert_mode(
|
||||
);
|
||||
}
|
||||
doc.apply(&transaction, view.id);
|
||||
lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic);
|
||||
}
|
||||
|
||||
fn delete_selection(cx: &mut Context) {
|
||||
@@ -2680,10 +2661,6 @@ fn insert_mode(cx: &mut Context) {
|
||||
.transform(|range| Range::new(range.to(), range.from()));
|
||||
|
||||
doc.set_selection(view.id, selection);
|
||||
|
||||
// [TODO] temporary workaround until we're not using the idle timer to
|
||||
// trigger auto completions any more
|
||||
cx.editor.clear_idle_timer();
|
||||
}
|
||||
|
||||
// inserts at the end of each selection
|
||||
@@ -2746,7 +2723,7 @@ fn file_picker_in_current_buffer_directory(cx: &mut Context) {
|
||||
}
|
||||
|
||||
fn file_picker_in_current_directory(cx: &mut Context) {
|
||||
let cwd = helix_loader::current_working_dir();
|
||||
let cwd = helix_stdx::env::current_working_dir();
|
||||
if !cwd.exists() {
|
||||
cx.editor
|
||||
.set_error("Current working directory does not exist");
|
||||
@@ -2774,7 +2751,7 @@ fn buffer_picker(cx: &mut Context) {
|
||||
let path = self
|
||||
.path
|
||||
.as_deref()
|
||||
.map(helix_core::path::get_relative_path);
|
||||
.map(helix_stdx::path::get_relative_path);
|
||||
let path = match path.as_deref().and_then(Path::to_str) {
|
||||
Some(path) => path,
|
||||
None => SCRATCH_BUFFER_NAME,
|
||||
@@ -2804,7 +2781,7 @@ fn buffer_picker(cx: &mut Context) {
|
||||
.editor
|
||||
.documents
|
||||
.values()
|
||||
.map(|doc| new_meta(doc))
|
||||
.map(new_meta)
|
||||
.collect::<Vec<BufferMeta>>();
|
||||
|
||||
// mru
|
||||
@@ -2841,7 +2818,7 @@ fn jumplist_picker(cx: &mut Context) {
|
||||
let path = self
|
||||
.path
|
||||
.as_deref()
|
||||
.map(helix_core::path::get_relative_path);
|
||||
.map(helix_stdx::path::get_relative_path);
|
||||
let path = match path.as_deref().and_then(Path::to_str) {
|
||||
Some(path) => path,
|
||||
None => SCRATCH_BUFFER_NAME,
|
||||
@@ -2949,7 +2926,7 @@ pub fn command_palette(cx: &mut Context) {
|
||||
let register = cx.register;
|
||||
let count = cx.count;
|
||||
|
||||
cx.callback = Some(Box::new(
|
||||
cx.callback.push(Box::new(
|
||||
move |compositor: &mut Compositor, cx: &mut compositor::Context| {
|
||||
let keymap = compositor.find::<ui::EditorView>().unwrap().keymaps.map()
|
||||
[&cx.editor.mode]
|
||||
@@ -2969,7 +2946,7 @@ pub fn command_palette(cx: &mut Context) {
|
||||
register,
|
||||
count,
|
||||
editor: cx.editor,
|
||||
callback: None,
|
||||
callback: Vec::new(),
|
||||
on_next_key_callback: None,
|
||||
jobs: cx.jobs,
|
||||
};
|
||||
@@ -2997,7 +2974,7 @@ pub fn command_palette(cx: &mut Context) {
|
||||
|
||||
fn last_picker(cx: &mut Context) {
|
||||
// TODO: last picker does not seem to work well with buffer_picker
|
||||
cx.callback = Some(Box::new(|compositor, cx| {
|
||||
cx.callback.push(Box::new(|compositor, cx| {
|
||||
if let Some(picker) = compositor.last_picker.take() {
|
||||
compositor.push(picker);
|
||||
} else {
|
||||
@@ -3509,9 +3486,10 @@ fn hunk_range(hunk: Hunk, text: RopeSlice) -> Range {
|
||||
}
|
||||
|
||||
pub mod insert {
|
||||
use crate::events::PostInsertChar;
|
||||
|
||||
use super::*;
|
||||
pub type Hook = fn(&Rope, &Selection, char) -> Option<Transaction>;
|
||||
pub type PostHook = fn(&mut Context, char);
|
||||
|
||||
/// Exclude the cursor in range.
|
||||
fn exclude_cursor(text: RopeSlice, range: Range, cursor: Range) -> Range {
|
||||
@@ -3525,88 +3503,6 @@ pub mod insert {
|
||||
}
|
||||
}
|
||||
|
||||
// It triggers completion when the idle timer reaches its deadline
|
||||
// Only trigger completion if the word under cursor is longer than n characters
|
||||
pub fn idle_completion(cx: &mut Context) {
|
||||
let config = cx.editor.config();
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let text = doc.text().slice(..);
|
||||
let cursor = doc.selection(view.id).primary().cursor(text);
|
||||
|
||||
use helix_core::chars::char_is_word;
|
||||
let mut iter = text.chars_at(cursor);
|
||||
iter.reverse();
|
||||
for _ in 0..config.completion_trigger_len {
|
||||
match iter.next() {
|
||||
Some(c) if char_is_word(c) => {}
|
||||
_ => return,
|
||||
}
|
||||
}
|
||||
super::completion(cx);
|
||||
}
|
||||
|
||||
fn language_server_completion(cx: &mut Context, ch: char) {
|
||||
let config = cx.editor.config();
|
||||
if !config.auto_completion {
|
||||
return;
|
||||
}
|
||||
|
||||
use helix_lsp::lsp;
|
||||
// if ch matches completion char, trigger completion
|
||||
let doc = doc_mut!(cx.editor);
|
||||
let trigger_completion = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::Completion)
|
||||
.any(|ls| {
|
||||
// TODO: what if trigger is multiple chars long
|
||||
matches!(&ls.capabilities().completion_provider, Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(triggers),
|
||||
..
|
||||
}) if triggers.iter().any(|trigger| trigger.contains(ch)))
|
||||
});
|
||||
|
||||
if trigger_completion {
|
||||
cx.editor.clear_idle_timer();
|
||||
super::completion(cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn signature_help(cx: &mut Context, ch: char) {
|
||||
use helix_lsp::lsp;
|
||||
// if ch matches signature_help char, trigger
|
||||
let doc = doc_mut!(cx.editor);
|
||||
// TODO support multiple language servers (not just the first that is found), likely by merging UI somehow
|
||||
let Some(language_server) = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::SignatureHelp)
|
||||
.next()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let capabilities = language_server.capabilities();
|
||||
|
||||
if let lsp::ServerCapabilities {
|
||||
signature_help_provider:
|
||||
Some(lsp::SignatureHelpOptions {
|
||||
trigger_characters: Some(triggers),
|
||||
// TODO: retrigger_characters
|
||||
..
|
||||
}),
|
||||
..
|
||||
} = capabilities
|
||||
{
|
||||
// TODO: what if trigger is multiple chars long
|
||||
let is_trigger = triggers.iter().any(|trigger| trigger.contains(ch));
|
||||
// lsp doesn't tell us when to close the signature help, so we request
|
||||
// the help information again after common close triggers which should
|
||||
// return None, which in turn closes the popup.
|
||||
let close_triggers = &[')', ';', '.'];
|
||||
|
||||
if is_trigger || close_triggers.contains(&ch) {
|
||||
super::signature_help_impl(cx, SignatureHelpInvoked::Automatic);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The default insert hook: simply insert the character
|
||||
#[allow(clippy::unnecessary_wraps)] // need to use Option<> because of the Hook signature
|
||||
fn insert(doc: &Rope, selection: &Selection, ch: char) -> Option<Transaction> {
|
||||
@@ -3636,12 +3532,7 @@ pub mod insert {
|
||||
doc.apply(&t, view.id);
|
||||
}
|
||||
|
||||
// TODO: need a post insert hook too for certain triggers (autocomplete, signature help, etc)
|
||||
// this could also generically look at Transaction, but it's a bit annoying to look at
|
||||
// Operation instead of Change.
|
||||
for hook in &[language_server_completion, signature_help] {
|
||||
hook(cx, c);
|
||||
}
|
||||
helix_event::dispatch(PostInsertChar { c, cx });
|
||||
}
|
||||
|
||||
pub fn smart_tab(cx: &mut Context) {
|
||||
@@ -3866,8 +3757,6 @@ pub mod insert {
|
||||
});
|
||||
let (view, doc) = current!(cx.editor);
|
||||
doc.apply(&transaction, view.id);
|
||||
|
||||
lsp::signature_help_impl(cx, SignatureHelpInvoked::Automatic);
|
||||
}
|
||||
|
||||
pub fn delete_char_forward(cx: &mut Context) {
|
||||
@@ -4521,151 +4410,14 @@ fn remove_primary_selection(cx: &mut Context) {
|
||||
}
|
||||
|
||||
pub fn completion(cx: &mut Context) {
|
||||
use helix_lsp::{lsp, util::pos_to_lsp_pos};
|
||||
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let range = doc.selection(view.id).primary();
|
||||
let text = doc.text().slice(..);
|
||||
let cursor = range.cursor(text);
|
||||
|
||||
let savepoint = if let Some(CompleteAction::Selected { savepoint }) = &cx.editor.last_completion
|
||||
{
|
||||
savepoint.clone()
|
||||
} else {
|
||||
doc.savepoint(view)
|
||||
};
|
||||
|
||||
let text = savepoint.text.clone();
|
||||
let cursor = savepoint.cursor();
|
||||
|
||||
let mut seen_language_servers = HashSet::new();
|
||||
|
||||
let mut futures: FuturesUnordered<_> = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::Completion)
|
||||
.filter(|ls| seen_language_servers.insert(ls.id()))
|
||||
.map(|language_server| {
|
||||
let language_server_id = language_server.id();
|
||||
let offset_encoding = language_server.offset_encoding();
|
||||
let pos = pos_to_lsp_pos(&text, cursor, offset_encoding);
|
||||
let doc_id = doc.identifier();
|
||||
let completion_request = language_server.completion(doc_id, pos, None).unwrap();
|
||||
|
||||
async move {
|
||||
let json = completion_request.await?;
|
||||
let response: Option<lsp::CompletionResponse> = serde_json::from_value(json)?;
|
||||
|
||||
let items = match response {
|
||||
Some(lsp::CompletionResponse::Array(items)) => items,
|
||||
// TODO: do something with is_incomplete
|
||||
Some(lsp::CompletionResponse::List(lsp::CompletionList {
|
||||
is_incomplete: _is_incomplete,
|
||||
items,
|
||||
})) => items,
|
||||
None => Vec::new(),
|
||||
}
|
||||
.into_iter()
|
||||
.map(|item| CompletionItem {
|
||||
item,
|
||||
language_server_id,
|
||||
resolved: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
anyhow::Ok(items)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
// setup a channel that allows the request to be canceled
|
||||
let (tx, rx) = oneshot::channel();
|
||||
// set completion_request so that this request can be canceled
|
||||
// by setting completion_request, the old channel stored there is dropped
|
||||
// and the associated request is automatically dropped
|
||||
cx.editor.completion_request_handle = Some(tx);
|
||||
let future = async move {
|
||||
let items_future = async move {
|
||||
let mut items = Vec::new();
|
||||
// TODO if one completion request errors, all other completion requests are discarded (even if they're valid)
|
||||
while let Some(mut lsp_items) = futures.try_next().await? {
|
||||
items.append(&mut lsp_items);
|
||||
}
|
||||
anyhow::Ok(items)
|
||||
};
|
||||
tokio::select! {
|
||||
biased;
|
||||
_ = rx => {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
res = items_future => {
|
||||
res
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let trigger_offset = cursor;
|
||||
|
||||
// TODO: trigger_offset should be the cursor offset but we also need a starting offset from where we want to apply
|
||||
// completion filtering. For example logger.te| should filter the initial suggestion list with "te".
|
||||
|
||||
use helix_core::chars;
|
||||
let mut iter = text.chars_at(cursor);
|
||||
iter.reverse();
|
||||
let offset = iter.take_while(|ch| chars::char_is_word(*ch)).count();
|
||||
let start_offset = cursor.saturating_sub(offset);
|
||||
|
||||
let trigger_doc = doc.id();
|
||||
let trigger_view = view.id;
|
||||
|
||||
// FIXME: The commands Context can only have a single callback
|
||||
// which means it gets overwritten when executing keybindings
|
||||
// with multiple commands or macros. This would mean that completion
|
||||
// might be incorrectly applied when repeating the insertmode action
|
||||
//
|
||||
// TODO: to solve this either make cx.callback a Vec of callbacks or
|
||||
// alternatively move `last_insert` to `helix_view::Editor`
|
||||
cx.callback = Some(Box::new(
|
||||
move |compositor: &mut Compositor, _cx: &mut compositor::Context| {
|
||||
let ui = compositor.find::<ui::EditorView>().unwrap();
|
||||
ui.last_insert.1.push(InsertEvent::RequestCompletion);
|
||||
},
|
||||
));
|
||||
|
||||
cx.jobs.callback(async move {
|
||||
let items = future.await?;
|
||||
let call = move |editor: &mut Editor, compositor: &mut Compositor| {
|
||||
let (view, doc) = current_ref!(editor);
|
||||
// check if the completion request is stale.
|
||||
//
|
||||
// Completions are completed asynchronously and therefore the user could
|
||||
// switch document/view or leave insert mode. In all of those cases the
|
||||
// completion should be discarded
|
||||
if editor.mode != Mode::Insert || view.id != trigger_view || doc.id() != trigger_doc {
|
||||
return;
|
||||
}
|
||||
|
||||
if items.is_empty() {
|
||||
// editor.set_error("No completion available");
|
||||
return;
|
||||
}
|
||||
let size = compositor.size();
|
||||
let ui = compositor.find::<ui::EditorView>().unwrap();
|
||||
let completion_area = ui.set_completion(
|
||||
editor,
|
||||
savepoint,
|
||||
items,
|
||||
start_offset,
|
||||
trigger_offset,
|
||||
size,
|
||||
);
|
||||
let size = compositor.size();
|
||||
let signature_help_area = compositor
|
||||
.find_id::<Popup<SignatureHelp>>(SignatureHelp::ID)
|
||||
.map(|signature_help| signature_help.area(size, editor));
|
||||
// Delete the signature help popup if they intersect.
|
||||
if matches!((completion_area, signature_help_area),(Some(a), Some(b)) if a.intersects(b))
|
||||
{
|
||||
compositor.remove(SignatureHelp::ID);
|
||||
}
|
||||
};
|
||||
Ok(Callback::EditorCompositor(Box::new(call)))
|
||||
});
|
||||
cx.editor
|
||||
.handlers
|
||||
.trigger_completions(cursor, doc.id(), view.id);
|
||||
}
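A rough sketch of what the `trigger_completions` call above amounts to on the handler side, using only the `CompletionEvent` variants and the `send_blocking` helper that appear later in this diff; the free function below is illustrative and not part of the change:

```rust
use helix_event::send_blocking;
use helix_view::handlers::lsp::CompletionEvent;
use helix_view::{DocumentId, ViewId};
use tokio::sync::mpsc::Sender;

// Illustrative only: a manual trigger skips the debounce timeout in the handler.
fn trigger_manual_completion(
    tx: &Sender<CompletionEvent>,
    cursor: usize,
    doc: DocumentId,
    view: ViewId,
) {
    send_blocking(tx, CompletionEvent::ManualTrigger { cursor, doc, view });
}
```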
// comments
|
||||
@@ -4844,10 +4596,6 @@ fn move_node_bound_impl(cx: &mut Context, dir: Direction, movement: Movement) {
|
||||
);
|
||||
|
||||
doc.set_selection(view.id, selection);
|
||||
|
||||
// [TODO] temporary workaround until we're not using the idle timer to
|
||||
// trigger auto completions any more
|
||||
editor.clear_idle_timer();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5835,7 +5583,7 @@ fn replay_macro(cx: &mut Context) {
|
||||
cx.editor.macro_replaying.push(reg);
|
||||
|
||||
let count = cx.count();
|
||||
cx.callback = Some(Box::new(move |compositor, cx| {
|
||||
cx.callback.push(Box::new(move |compositor, cx| {
|
||||
for _ in 0..count {
|
||||
for &key in keys.iter() {
|
||||
compositor.handle_event(&compositor::Event::Key(key), cx);
|
||||
|
@@ -78,7 +78,7 @@ fn thread_picker(
|
||||
})
|
||||
.with_preview(move |editor, thread| {
|
||||
let frames = editor.debugger.as_ref()?.stack_frames.get(&thread.id)?;
|
||||
let frame = frames.get(0)?;
|
||||
let frame = frames.first()?;
|
||||
let path = frame.source.as_ref()?.path.clone()?;
|
||||
let pos = Some((
|
||||
frame.line.saturating_sub(1),
|
||||
@@ -166,7 +166,7 @@ pub fn dap_start_impl(
|
||||
// TODO: avoid refetching all of this... pass a config in
|
||||
let template = match name {
|
||||
Some(name) => config.templates.iter().find(|t| t.name == name),
|
||||
None => config.templates.get(0),
|
||||
None => config.templates.first(),
|
||||
}
|
||||
.ok_or_else(|| anyhow!("No debug config with given name"))?;
|
||||
|
||||
@@ -217,7 +217,7 @@ pub fn dap_start_impl(
|
||||
}
|
||||
}
|
||||
|
||||
args.insert("cwd", to_value(helix_loader::current_working_dir())?);
|
||||
args.insert("cwd", to_value(helix_stdx::env::current_working_dir())?);
|
||||
|
||||
let args = to_value(args).unwrap();
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
use futures_util::{future::BoxFuture, stream::FuturesUnordered, FutureExt};
|
||||
use futures_util::{stream::FuturesUnordered, FutureExt};
|
||||
use helix_lsp::{
|
||||
block_on,
|
||||
lsp::{
|
||||
@@ -8,22 +8,21 @@ use helix_lsp::{
|
||||
util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range},
|
||||
Client, OffsetEncoding,
|
||||
};
|
||||
use serde_json::Value;
|
||||
use tokio_stream::StreamExt;
|
||||
use tui::{
|
||||
text::{Span, Spans},
|
||||
widgets::Row,
|
||||
};
|
||||
|
||||
use super::{align_view, push_jump, Align, Context, Editor, Open};
|
||||
use super::{align_view, push_jump, Align, Context, Editor};
|
||||
|
||||
use helix_core::{
|
||||
path, syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection,
|
||||
};
|
||||
use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation, Selection};
|
||||
use helix_stdx::path;
|
||||
use helix_view::{
|
||||
document::{DocumentInlayHints, DocumentInlayHintsId, Mode},
|
||||
document::{DocumentInlayHints, DocumentInlayHintsId},
|
||||
editor::Action,
|
||||
graphics::Margin,
|
||||
handlers::lsp::SignatureHelpInvoked,
|
||||
theme::Style,
|
||||
Document, View,
|
||||
};
|
||||
@@ -31,10 +30,7 @@ use helix_view::{
|
||||
use crate::{
|
||||
compositor::{self, Compositor},
|
||||
job::Callback,
|
||||
ui::{
|
||||
self, lsp::SignatureHelp, overlay::overlaid, DynamicPicker, FileLocation, Picker, Popup,
|
||||
PromptEvent,
|
||||
},
|
||||
ui::{self, overlay::overlaid, DynamicPicker, FileLocation, Picker, Popup, PromptEvent},
|
||||
};
|
||||
|
||||
use std::{
|
||||
@@ -43,7 +39,6 @@ use std::{
|
||||
fmt::Write,
|
||||
future::Future,
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
/// Gets the first language server that is attached to a document which supports a specific feature.
|
||||
@@ -1018,7 +1013,7 @@ fn goto_impl(
|
||||
locations: Vec<lsp::Location>,
|
||||
offset_encoding: OffsetEncoding,
|
||||
) {
|
||||
let cwdir = helix_loader::current_working_dir();
|
||||
let cwdir = helix_stdx::env::current_working_dir();
|
||||
|
||||
match locations.as_slice() {
|
||||
[location] => {
|
||||
@@ -1133,146 +1128,10 @@ pub fn goto_reference(cx: &mut Context) {
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Copy)]
|
||||
pub enum SignatureHelpInvoked {
|
||||
Manual,
|
||||
Automatic,
|
||||
}
|
||||
|
||||
pub fn signature_help(cx: &mut Context) {
|
||||
signature_help_impl(cx, SignatureHelpInvoked::Manual)
|
||||
}
|
||||
|
||||
pub fn signature_help_impl(cx: &mut Context, invoked: SignatureHelpInvoked) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
|
||||
// TODO merge multiple language server signature help into one instead of just taking the first language server that supports it
|
||||
let future = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::SignatureHelp)
|
||||
.find_map(|language_server| {
|
||||
let pos = doc.position(view.id, language_server.offset_encoding());
|
||||
language_server.text_document_signature_help(doc.identifier(), pos, None)
|
||||
});
|
||||
|
||||
let Some(future) = future else {
|
||||
// Do not show the message if signature help was invoked
|
||||
// automatically on backspace, trigger characters, etc.
|
||||
if invoked == SignatureHelpInvoked::Manual {
|
||||
cx.editor
|
||||
.set_error("No configured language server supports signature-help");
|
||||
}
|
||||
return;
|
||||
};
|
||||
signature_help_impl_with_future(cx, future.boxed(), invoked);
|
||||
}
|
||||
|
||||
pub fn signature_help_impl_with_future(
|
||||
cx: &mut Context,
|
||||
future: BoxFuture<'static, helix_lsp::Result<Value>>,
|
||||
invoked: SignatureHelpInvoked,
|
||||
) {
|
||||
cx.callback(
|
||||
future,
|
||||
move |editor, compositor, response: Option<lsp::SignatureHelp>| {
|
||||
let config = &editor.config();
|
||||
|
||||
if !(config.lsp.auto_signature_help
|
||||
|| SignatureHelp::visible_popup(compositor).is_some()
|
||||
|| invoked == SignatureHelpInvoked::Manual)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// If the signature help invocation is automatic, don't show it outside of Insert Mode:
|
||||
// it very probably means the server was a little slow to respond and the user has
|
||||
// already moved on to something else, making a signature help popup will just be an
|
||||
// annoyance, see https://github.com/helix-editor/helix/issues/3112
|
||||
if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert {
|
||||
return;
|
||||
}
|
||||
|
||||
let response = match response {
|
||||
// According to the spec the response should be None if there
|
||||
// are no signatures, but some servers don't follow this.
|
||||
Some(s) if !s.signatures.is_empty() => s,
|
||||
_ => {
|
||||
compositor.remove(SignatureHelp::ID);
|
||||
return;
|
||||
}
|
||||
};
|
||||
let doc = doc!(editor);
|
||||
let language = doc.language_name().unwrap_or("");
|
||||
|
||||
let signature = match response
|
||||
.signatures
|
||||
.get(response.active_signature.unwrap_or(0) as usize)
|
||||
{
|
||||
Some(s) => s,
|
||||
None => return,
|
||||
};
|
||||
let mut contents = SignatureHelp::new(
|
||||
signature.label.clone(),
|
||||
language.to_string(),
|
||||
Arc::clone(&editor.syn_loader),
|
||||
);
|
||||
|
||||
let signature_doc = if config.lsp.display_signature_help_docs {
|
||||
signature.documentation.as_ref().map(|doc| match doc {
|
||||
lsp::Documentation::String(s) => s.clone(),
|
||||
lsp::Documentation::MarkupContent(markup) => markup.value.clone(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
contents.set_signature_doc(signature_doc);
|
||||
|
||||
let active_param_range = || -> Option<(usize, usize)> {
|
||||
let param_idx = signature
|
||||
.active_parameter
|
||||
.or(response.active_parameter)
|
||||
.unwrap_or(0) as usize;
|
||||
let param = signature.parameters.as_ref()?.get(param_idx)?;
|
||||
match ¶m.label {
|
||||
lsp::ParameterLabel::Simple(string) => {
|
||||
let start = signature.label.find(string.as_str())?;
|
||||
Some((start, start + string.len()))
|
||||
}
|
||||
lsp::ParameterLabel::LabelOffsets([start, end]) => {
|
||||
// LS sends offsets based on utf-16 based string representation
|
||||
// but highlighting in helix is done using byte offset.
|
||||
use helix_core::str_utils::char_to_byte_idx;
|
||||
let from = char_to_byte_idx(&signature.label, *start as usize);
|
||||
let to = char_to_byte_idx(&signature.label, *end as usize);
|
||||
Some((from, to))
|
||||
}
|
||||
}
|
||||
};
|
||||
contents.set_active_param_range(active_param_range());
|
||||
|
||||
let old_popup = compositor.find_id::<Popup<SignatureHelp>>(SignatureHelp::ID);
|
||||
let mut popup = Popup::new(SignatureHelp::ID, contents)
|
||||
.position(old_popup.and_then(|p| p.get_position()))
|
||||
.position_bias(Open::Above)
|
||||
.ignore_escape_key(true);
|
||||
|
||||
// Don't create a popup if it intersects the auto-complete menu.
|
||||
let size = compositor.size();
|
||||
if compositor
|
||||
.find::<ui::EditorView>()
|
||||
.unwrap()
|
||||
.completion
|
||||
.as_mut()
|
||||
.map(|completion| completion.area(size, editor))
|
||||
.filter(|area| area.intersects(popup.area(size, editor)))
|
||||
.is_some()
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
compositor.replace_or_push(SignatureHelp::ID, popup);
|
||||
},
|
||||
);
|
||||
cx.editor
|
||||
.handlers
|
||||
.trigger_signature_help(SignatureHelpInvoked::Manual, cx.editor)
|
||||
}
|
||||
|
||||
pub fn hover(cx: &mut Context) {
|
||||
|
@@ -7,7 +7,7 @@ use super::*;
|
||||
|
||||
use helix_core::fuzzy::fuzzy_match;
|
||||
use helix_core::indent::MAX_INDENT;
|
||||
use helix_core::{encoding, line_ending, path::get_canonicalized_path, shellwords::Shellwords};
|
||||
use helix_core::{encoding, line_ending, shellwords::Shellwords};
|
||||
use helix_lsp::{OffsetEncoding, Url};
|
||||
use helix_view::document::DEFAULT_LANGUAGE_NAME;
|
||||
use helix_view::editor::{Action, CloseError, ConfigEvent};
|
||||
@@ -111,7 +111,7 @@ fn open(cx: &mut compositor::Context, args: &[Cow<str>], event: PromptEvent) ->
|
||||
ensure!(!args.is_empty(), "wrong argument count");
|
||||
for arg in args {
|
||||
let (path, pos) = args::parse_file(arg);
|
||||
let path = helix_core::path::expand_tilde(&path);
|
||||
let path = helix_stdx::path::expand_tilde(&path);
|
||||
// If the path is a directory, open a file picker on that directory and update the status
|
||||
// message
|
||||
if let Ok(true) = std::fs::canonicalize(&path).map(|p| p.is_dir()) {
|
||||
@@ -483,7 +483,7 @@ fn set_indent_style(
|
||||
}
|
||||
|
||||
// Attempt to parse argument as an indent style.
|
||||
let style = match args.get(0) {
|
||||
let style = match args.first() {
|
||||
Some(arg) if "tabs".starts_with(&arg.to_lowercase()) => Some(Tabs),
|
||||
Some(Cow::Borrowed("0")) => Some(Tabs),
|
||||
Some(arg) => arg
|
||||
@@ -535,7 +535,7 @@ fn set_line_ending(
|
||||
}
|
||||
|
||||
let arg = args
|
||||
.get(0)
|
||||
.first()
|
||||
.context("argument missing")?
|
||||
.to_ascii_lowercase();
|
||||
|
||||
@@ -1079,18 +1079,17 @@ fn change_current_directory(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let dir = helix_core::path::expand_tilde(
|
||||
let dir = helix_stdx::path::expand_tilde(
|
||||
args.first()
|
||||
.context("target directory not provided")?
|
||||
.as_ref()
|
||||
.as_ref(),
|
||||
);
|
||||
|
||||
helix_loader::set_current_working_dir(dir)?;
|
||||
helix_stdx::env::set_current_working_dir(dir)?;
|
||||
|
||||
cx.editor.set_status(format!(
|
||||
"Current working directory is now {}",
|
||||
helix_loader::current_working_dir().display()
|
||||
helix_stdx::env::current_working_dir().display()
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
@@ -1104,7 +1103,7 @@ fn show_current_directory(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let cwd = helix_loader::current_working_dir();
|
||||
let cwd = helix_stdx::env::current_working_dir();
|
||||
let message = format!("Current working directory is {}", cwd.display());
|
||||
|
||||
if cwd.exists() {
|
||||
@@ -2078,7 +2077,7 @@ fn reflow(
|
||||
// - The configured text-width for this language in languages.toml
|
||||
// - The configured text-width in the config.toml
|
||||
let text_width: usize = args
|
||||
.get(0)
|
||||
.first()
|
||||
.map(|num| num.parse::<usize>())
|
||||
.transpose()?
|
||||
.or_else(|| doc.language_config().and_then(|config| config.text_width))
|
||||
@@ -2117,11 +2116,7 @@ fn tree_sitter_subtree(
|
||||
let text = doc.text();
|
||||
let from = text.char_to_byte(primary_selection.from());
|
||||
let to = text.char_to_byte(primary_selection.to());
|
||||
if let Some(selected_node) = syntax
|
||||
.tree()
|
||||
.root_node()
|
||||
.descendant_for_byte_range(from, to)
|
||||
{
|
||||
if let Some(selected_node) = syntax.descendant_for_byte_range(from, to) {
|
||||
let mut contents = String::from("```tsq\n");
|
||||
helix_core::syntax::pretty_print_tree(&mut contents, selected_node)?;
|
||||
contents.push_str("\n```");
|
||||
@@ -2413,7 +2408,8 @@ fn move_buffer(
|
||||
ensure!(args.len() == 1, format!(":move takes one argument"));
|
||||
let doc = doc!(cx.editor);
|
||||
|
||||
let new_path = get_canonicalized_path(&PathBuf::from(args.first().unwrap().to_string()));
|
||||
let new_path =
|
||||
helix_stdx::path::canonicalize(&PathBuf::from(args.first().unwrap().to_string()));
|
||||
let old_path = doc
|
||||
.path()
|
||||
.ok_or_else(|| anyhow!("Scratch buffer cannot be moved. Use :write instead"))?
|
||||
|
@@ -7,6 +7,7 @@ use std::collections::HashMap;
|
||||
use std::fmt::Display;
|
||||
use std::fs;
|
||||
use std::io::Error as IOError;
|
||||
use std::path::Path;
|
||||
use toml::de::Error as TomlError;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
@@ -24,6 +25,59 @@ pub struct ConfigRaw {
|
||||
pub editor: Option<toml::Value>,
|
||||
}
|
||||
|
||||
impl ConfigRaw {
|
||||
fn merge(global: ConfigRaw, local: ConfigRaw) -> Result<ConfigRaw, ConfigLoadError> {
|
||||
let keys = match (global.keys, local.keys) {
|
||||
(None, None) => None,
|
||||
(Some(keys), None) | (None, Some(keys)) => Some(keys),
|
||||
(Some(mut global_keys), Some(local_keys)) => {
|
||||
merge_keys(&mut global_keys, local_keys);
|
||||
Some(global_keys)
|
||||
}
|
||||
};
|
||||
|
||||
let editor = match (global.editor, local.editor) {
|
||||
(None, None) => None,
|
||||
(None, Some(val)) | (Some(val), None) => {
|
||||
val.try_into().map_err(ConfigLoadError::BadConfig)?
|
||||
}
|
||||
(Some(global), Some(local)) => merge_toml_values(global, local, 3)
|
||||
.try_into()
|
||||
.map_err(ConfigLoadError::BadConfig)?,
|
||||
};
|
||||
|
||||
Ok(ConfigRaw {
|
||||
theme: local.theme.or(global.theme),
|
||||
keys,
|
||||
editor,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<ConfigRaw> for Config {
|
||||
type Error = ConfigLoadError;
|
||||
fn try_from(config: ConfigRaw) -> Result<Self, Self::Error> {
|
||||
// merge raw config into defaults
|
||||
let mut keys = keymap::default();
|
||||
if let Some(config_keys) = config.keys {
|
||||
merge_keys(&mut keys, config_keys)
|
||||
}
|
||||
let editor = config
|
||||
.editor
|
||||
.map(|value| value.try_into())
|
||||
.transpose()
|
||||
.map_err(ConfigLoadError::BadConfig)?
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Self {
|
||||
// workspace_config: config.workspace_config.unwrap_or_default(),
|
||||
theme: config.theme,
|
||||
keys,
|
||||
editor,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Config {
|
||||
Config {
|
||||
@@ -56,73 +110,22 @@ impl Display for ConfigLoadError {
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn load(
|
||||
global: Result<String, ConfigLoadError>,
|
||||
local: Result<String, ConfigLoadError>,
|
||||
) -> Result<Config, ConfigLoadError> {
|
||||
let global_config: Result<ConfigRaw, ConfigLoadError> =
|
||||
global.and_then(|file| toml::from_str(&file).map_err(ConfigLoadError::BadConfig));
|
||||
let local_config: Result<ConfigRaw, ConfigLoadError> =
|
||||
local.and_then(|file| toml::from_str(&file).map_err(ConfigLoadError::BadConfig));
|
||||
let res = match (global_config, local_config) {
|
||||
(Ok(global), Ok(local)) => {
|
||||
let mut keys = keymap::default();
|
||||
if let Some(global_keys) = global.keys {
|
||||
merge_keys(&mut keys, global_keys)
|
||||
}
|
||||
if let Some(local_keys) = local.keys {
|
||||
merge_keys(&mut keys, local_keys)
|
||||
}
|
||||
|
||||
let editor = match (global.editor, local.editor) {
|
||||
(None, None) => helix_view::editor::Config::default(),
|
||||
(None, Some(val)) | (Some(val), None) => {
|
||||
val.try_into().map_err(ConfigLoadError::BadConfig)?
|
||||
}
|
||||
(Some(global), Some(local)) => merge_toml_values(global, local, 3)
|
||||
.try_into()
|
||||
.map_err(ConfigLoadError::BadConfig)?,
|
||||
};
|
||||
|
||||
Config {
|
||||
theme: local.theme.or(global.theme),
|
||||
keys,
|
||||
editor,
|
||||
}
|
||||
}
|
||||
// if any configs are invalid return that first
|
||||
(_, Err(ConfigLoadError::BadConfig(err)))
|
||||
| (Err(ConfigLoadError::BadConfig(err)), _) => {
|
||||
return Err(ConfigLoadError::BadConfig(err))
|
||||
}
|
||||
(Ok(config), Err(_)) | (Err(_), Ok(config)) => {
|
||||
let mut keys = keymap::default();
|
||||
if let Some(keymap) = config.keys {
|
||||
merge_keys(&mut keys, keymap);
|
||||
}
|
||||
Config {
|
||||
theme: config.theme,
|
||||
keys,
|
||||
editor: config.editor.map_or_else(
|
||||
|| Ok(helix_view::editor::Config::default()),
|
||||
|val| val.try_into().map_err(ConfigLoadError::BadConfig),
|
||||
)?,
|
||||
}
|
||||
}
|
||||
|
||||
// these are just two io errors; return the one for the global config
|
||||
(Err(err), Err(_)) => return Err(err),
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn load_default() -> Result<Config, ConfigLoadError> {
|
||||
let global_config =
|
||||
fs::read_to_string(helix_loader::config_file()).map_err(ConfigLoadError::Error);
|
||||
let local_config = fs::read_to_string(helix_loader::workspace_config_file())
|
||||
.map_err(ConfigLoadError::Error);
|
||||
Config::load(global_config, local_config)
|
||||
fn load(path: &Path) -> Result<ConfigRaw, ConfigLoadError> {
|
||||
fs::read_to_string(path)
|
||||
.map_err(ConfigLoadError::Error)
|
||||
.and_then(|file| toml::from_str(&file).map_err(ConfigLoadError::BadConfig))
|
||||
}
|
||||
|
||||
let global = load(&helix_loader::config_file())?;
|
||||
let workspace = load(&helix_loader::workspace_config_file());
|
||||
|
||||
if let Ok(workspace) = workspace {
|
||||
let config = ConfigRaw::merge(global, workspace)?;
|
||||
config.try_into()
|
||||
} else {
|
||||
global.try_into()
|
||||
}
|
||||
}
|
||||
}
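A hypothetical illustration of the new merge precedence: the workspace file wins for scalar settings, keymaps are merged via `merge_keys`, and `[editor]` tables are merged with `merge_toml_values(global, local, 3)`. The toml snippets, field access, and assertion below are examples only and assume `ConfigRaw` deserializes a plain `theme` string as in the surrounding code:

```rust
// Example only, not part of this change.
let global: ConfigRaw = toml::from_str(
    r#"
    theme = "onedark"
    [editor]
    mouse = false
    "#,
)
.unwrap();
let workspace: ConfigRaw = toml::from_str(
    r#"
    theme = "gruvbox"
    [editor]
    mouse = true
    "#,
)
.unwrap();
let merged = ConfigRaw::merge(global, workspace).unwrap();
// the workspace theme overrides the global one
assert_eq!(merged.theme.as_deref(), Some("gruvbox"));
```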
@@ -132,7 +135,8 @@ mod tests {
|
||||
|
||||
impl Config {
|
||||
fn load_test(config: &str) -> Config {
|
||||
Config::load(Ok(config.to_owned()), Err(ConfigLoadError::default())).unwrap()
|
||||
let config: ConfigRaw = toml::from_str(config).unwrap();
|
||||
config.try_into().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
|
20
helix-term/src/events.rs
Normal file
20
helix-term/src/events.rs
Normal file
@@ -0,0 +1,20 @@
use helix_event::{events, register_event};
use helix_view::document::Mode;
use helix_view::events::{DocumentDidChange, SelectionDidChange};

use crate::commands;
use crate::keymap::MappableCommand;

events! {
    OnModeSwitch<'a, 'cx> { old_mode: Mode, new_mode: Mode, cx: &'a mut commands::Context<'cx> }
    PostInsertChar<'a, 'cx> { c: char, cx: &'a mut commands::Context<'cx> }
    PostCommand<'a, 'cx> { command: &'a MappableCommand, cx: &'a mut commands::Context<'cx> }
}

pub fn register() {
    register_event::<OnModeSwitch>();
    register_event::<PostInsertChar>();
    register_event::<PostCommand>();
    register_event::<DocumentDidChange>();
    register_event::<SelectionDidChange>();
}
30
helix-term/src/handlers.rs
Normal file
30
helix-term/src/handlers.rs
Normal file
@@ -0,0 +1,30 @@
use std::sync::Arc;

use arc_swap::ArcSwap;
use helix_event::AsyncHook;

use crate::config::Config;
use crate::events;
use crate::handlers::completion::CompletionHandler;
use crate::handlers::signature_help::SignatureHelpHandler;

pub use completion::trigger_auto_completion;
pub use helix_view::handlers::lsp::SignatureHelpInvoked;
pub use helix_view::handlers::Handlers;

mod completion;
mod signature_help;

pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
    events::register();

    let completions = CompletionHandler::new(config).spawn();
    let signature_hints = SignatureHelpHandler::new().spawn();
    let handlers = Handlers {
        completions,
        signature_hints,
    };
    completion::register_hooks(&handlers);
    signature_help::register_hooks(&handlers);
    handlers
}
465
helix-term/src/handlers/completion.rs
Normal file
465
helix-term/src/handlers/completion.rs
Normal file
@@ -0,0 +1,465 @@
|
||||
use std::collections::HashSet;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use arc_swap::ArcSwap;
|
||||
use futures_util::stream::FuturesUnordered;
|
||||
use helix_core::chars::char_is_word;
|
||||
use helix_core::syntax::LanguageServerFeature;
|
||||
use helix_event::{
|
||||
cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx,
|
||||
};
|
||||
use helix_lsp::lsp;
|
||||
use helix_lsp::util::pos_to_lsp_pos;
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
use helix_view::document::{Mode, SavePoint};
|
||||
use helix_view::handlers::lsp::CompletionEvent;
|
||||
use helix_view::{DocumentId, Editor, ViewId};
|
||||
use tokio::sync::mpsc::Sender;
|
||||
use tokio::time::Instant;
|
||||
use tokio_stream::StreamExt;
|
||||
|
||||
use crate::commands;
|
||||
use crate::compositor::Compositor;
|
||||
use crate::config::Config;
|
||||
use crate::events::{OnModeSwitch, PostCommand, PostInsertChar};
|
||||
use crate::job::{dispatch, dispatch_blocking};
|
||||
use crate::keymap::MappableCommand;
|
||||
use crate::ui::editor::InsertEvent;
|
||||
use crate::ui::lsp::SignatureHelp;
|
||||
use crate::ui::{self, CompletionItem, Popup};
|
||||
|
||||
use super::Handlers;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
enum TriggerKind {
|
||||
Auto,
|
||||
TriggerChar,
|
||||
Manual,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct Trigger {
|
||||
pos: usize,
|
||||
view: ViewId,
|
||||
doc: DocumentId,
|
||||
kind: TriggerKind,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct CompletionHandler {
|
||||
/// currently active trigger which will cause a
|
||||
/// completion request after the timeout
|
||||
trigger: Option<Trigger>,
|
||||
/// A handle for currently active completion request.
|
||||
/// This can be used to determine whether the current
|
||||
/// request is still active (and new triggers should be
|
||||
/// ignored) and can also be used to abort the current
|
||||
/// request (by dropping the handle)
|
||||
request: Option<CancelTx>,
|
||||
config: Arc<ArcSwap<Config>>,
|
||||
}
|
||||
|
||||
impl CompletionHandler {
|
||||
pub fn new(config: Arc<ArcSwap<Config>>) -> CompletionHandler {
|
||||
Self {
|
||||
config,
|
||||
request: None,
|
||||
trigger: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl helix_event::AsyncHook for CompletionHandler {
|
||||
type Event = CompletionEvent;
|
||||
|
||||
fn handle_event(
|
||||
&mut self,
|
||||
event: Self::Event,
|
||||
_old_timeout: Option<Instant>,
|
||||
) -> Option<Instant> {
|
||||
match event {
|
||||
CompletionEvent::AutoTrigger {
|
||||
cursor: trigger_pos,
|
||||
doc,
|
||||
view,
|
||||
} => {
|
||||
// technically it shouldn't be possible to switch views/documents in insert mode
// but people may create weird keymaps/use the mouse so let's be extra careful
|
||||
if self
|
||||
.trigger
|
||||
.as_ref()
|
||||
.map_or(true, |trigger| trigger.doc != doc || trigger.view != view)
|
||||
{
|
||||
self.trigger = Some(Trigger {
|
||||
pos: trigger_pos,
|
||||
view,
|
||||
doc,
|
||||
kind: TriggerKind::Auto,
|
||||
});
|
||||
}
|
||||
}
|
||||
CompletionEvent::TriggerChar { cursor, doc, view } => {
|
||||
// immediately request completions and drop all auto completion requests
|
||||
self.request = None;
|
||||
self.trigger = Some(Trigger {
|
||||
pos: cursor,
|
||||
view,
|
||||
doc,
|
||||
kind: TriggerKind::TriggerChar,
|
||||
});
|
||||
}
|
||||
CompletionEvent::ManualTrigger { cursor, doc, view } => {
|
||||
// immediately request completions and drop all auto completion requests
|
||||
self.request = None;
|
||||
self.trigger = Some(Trigger {
|
||||
pos: cursor,
|
||||
view,
|
||||
doc,
|
||||
kind: TriggerKind::Manual,
|
||||
});
|
||||
// stop debouncing immediately and request the completion
|
||||
self.finish_debounce();
|
||||
return None;
|
||||
}
|
||||
CompletionEvent::Cancel => {
|
||||
self.trigger = None;
|
||||
self.request = None;
|
||||
}
|
||||
CompletionEvent::DeleteText { cursor } => {
|
||||
// if we deleted the original trigger, abort the completion
|
||||
if matches!(self.trigger, Some(Trigger{ pos, .. }) if cursor < pos) {
|
||||
self.trigger = None;
|
||||
self.request = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
self.trigger.map(|trigger| {
|
||||
// if the current request was closed forget about it
|
||||
// otherwise immediately restart the completion request
|
||||
let cancel = self.request.take().map_or(false, |req| !req.is_closed());
|
||||
let timeout = if trigger.kind == TriggerKind::Auto && !cancel {
|
||||
self.config.load().editor.completion_timeout
|
||||
} else {
|
||||
// we want almost instant completions for trigger chars
|
||||
// and restarting completion requests. The small timeout here mainly
|
||||
// serves to better handle cases where the completion handler
|
||||
// may fall behind (so multiple events in the channel) and macros
|
||||
Duration::from_millis(5)
|
||||
};
|
||||
Instant::now() + timeout
|
||||
})
|
||||
}
|
||||
|
||||
fn finish_debounce(&mut self) {
|
||||
let trigger = self.trigger.take().expect("debounce always has a trigger");
|
||||
let (tx, rx) = cancelation();
|
||||
self.request = Some(tx);
|
||||
dispatch_blocking(move |editor, compositor| {
|
||||
request_completion(trigger, rx, editor, compositor)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn request_completion(
|
||||
mut trigger: Trigger,
|
||||
cancel: CancelRx,
|
||||
editor: &mut Editor,
|
||||
compositor: &mut Compositor,
|
||||
) {
|
||||
let (view, doc) = current!(editor);
|
||||
|
||||
if compositor
|
||||
.find::<ui::EditorView>()
|
||||
.unwrap()
|
||||
.completion
|
||||
.is_some()
|
||||
|| editor.mode != Mode::Insert
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
let text = doc.text();
|
||||
let cursor = doc.selection(view.id).primary().cursor(text.slice(..));
|
||||
if trigger.view != view.id || trigger.doc != doc.id() || cursor < trigger.pos {
|
||||
return;
|
||||
}
|
||||
// this looks odd... Why are we not using the trigger position from
|
||||
// the `trigger` here? Won't that mean that the trigger char doesn't get
|
||||
// sent to the LS if we type fast enough? Yes, that is true but it's
|
||||
// not actually a problem. The LSP will resolve the completion to the identifier
|
||||
// anyway (in fact sending the later position is necessary to get the right results
|
||||
// from LSPs that provide incomplete completion list). We rely on trigger offset
|
||||
// and primary cursor matching for multi-cursor completions so this is definitely
|
||||
// necessary from our side too.
|
||||
trigger.pos = cursor;
|
||||
let trigger_text = text.slice(..cursor);
|
||||
|
||||
let mut seen_language_servers = HashSet::new();
|
||||
let mut futures: FuturesUnordered<_> = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::Completion)
|
||||
.filter(|ls| seen_language_servers.insert(ls.id()))
|
||||
.map(|ls| {
|
||||
let language_server_id = ls.id();
|
||||
let offset_encoding = ls.offset_encoding();
|
||||
let pos = pos_to_lsp_pos(text, cursor, offset_encoding);
|
||||
let doc_id = doc.identifier();
|
||||
let context = if trigger.kind == TriggerKind::Manual {
|
||||
lsp::CompletionContext {
|
||||
trigger_kind: lsp::CompletionTriggerKind::INVOKED,
|
||||
trigger_character: None,
|
||||
}
|
||||
} else {
|
||||
let trigger_char =
|
||||
ls.capabilities()
|
||||
.completion_provider
|
||||
.as_ref()
|
||||
.and_then(|provider| {
|
||||
provider
|
||||
.trigger_characters
|
||||
.as_deref()?
|
||||
.iter()
|
||||
.find(|&trigger| trigger_text.ends_with(trigger))
|
||||
});
|
||||
lsp::CompletionContext {
|
||||
trigger_kind: lsp::CompletionTriggerKind::TRIGGER_CHARACTER,
|
||||
trigger_character: trigger_char.cloned(),
|
||||
}
|
||||
};
|
||||
|
||||
let completion_response = ls.completion(doc_id, pos, None, context).unwrap();
|
||||
async move {
|
||||
let json = completion_response.await?;
|
||||
let response: Option<lsp::CompletionResponse> = serde_json::from_value(json)?;
|
||||
let items = match response {
|
||||
Some(lsp::CompletionResponse::Array(items)) => items,
|
||||
// TODO: do something with is_incomplete
|
||||
Some(lsp::CompletionResponse::List(lsp::CompletionList {
|
||||
is_incomplete: _is_incomplete,
|
||||
items,
|
||||
})) => items,
|
||||
None => Vec::new(),
|
||||
}
|
||||
.into_iter()
|
||||
.map(|item| CompletionItem {
|
||||
item,
|
||||
language_server_id,
|
||||
resolved: false,
|
||||
})
|
||||
.collect();
|
||||
anyhow::Ok(items)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let future = async move {
|
||||
let mut items = Vec::new();
|
||||
while let Some(lsp_items) = futures.next().await {
|
||||
match lsp_items {
|
||||
Ok(mut lsp_items) => items.append(&mut lsp_items),
|
||||
Err(err) => {
|
||||
log::debug!("completion request failed: {err:?}");
|
||||
}
|
||||
};
|
||||
}
|
||||
items
|
||||
};
|
||||
|
||||
let savepoint = doc.savepoint(view);
|
||||
|
||||
let ui = compositor.find::<ui::EditorView>().unwrap();
|
||||
ui.last_insert.1.push(InsertEvent::RequestCompletion);
|
||||
tokio::spawn(async move {
|
||||
let items = cancelable_future(future, cancel).await.unwrap_or_default();
|
||||
if items.is_empty() {
|
||||
return;
|
||||
}
|
||||
dispatch(move |editor, compositor| {
|
||||
show_completion(editor, compositor, items, trigger, savepoint)
|
||||
})
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
fn show_completion(
|
||||
editor: &mut Editor,
|
||||
compositor: &mut Compositor,
|
||||
items: Vec<CompletionItem>,
|
||||
trigger: Trigger,
|
||||
savepoint: Arc<SavePoint>,
|
||||
) {
|
||||
let (view, doc) = current_ref!(editor);
|
||||
// check if the completion request is stale.
|
||||
//
|
||||
// Completions are completed asynchronously and therefore the user could
|
||||
// switch document/view or leave insert mode. In all of those cases the
|
||||
// completion should be discarded
|
||||
if editor.mode != Mode::Insert || view.id != trigger.view || doc.id() != trigger.doc {
|
||||
return;
|
||||
}
|
||||
|
||||
let size = compositor.size();
|
||||
let ui = compositor.find::<ui::EditorView>().unwrap();
|
||||
if ui.completion.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let completion_area = ui.set_completion(editor, savepoint, items, trigger.pos, size);
|
||||
let signature_help_area = compositor
|
||||
.find_id::<Popup<SignatureHelp>>(SignatureHelp::ID)
|
||||
.map(|signature_help| signature_help.area(size, editor));
|
||||
// Delete the signature help popup if they intersect.
|
||||
if matches!((completion_area, signature_help_area),(Some(a), Some(b)) if a.intersects(b)) {
|
||||
compositor.remove(SignatureHelp::ID);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn trigger_auto_completion(
|
||||
tx: &Sender<CompletionEvent>,
|
||||
editor: &Editor,
|
||||
trigger_char_only: bool,
|
||||
) {
|
||||
let config = editor.config.load();
|
||||
if !config.auto_completion {
|
||||
return;
|
||||
}
|
||||
let (view, doc): (&helix_view::View, &helix_view::Document) = current_ref!(editor);
|
||||
let mut text = doc.text().slice(..);
|
||||
let cursor = doc.selection(view.id).primary().cursor(text);
|
||||
text = doc.text().slice(..cursor);
|
||||
|
||||
let is_trigger_char = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::Completion)
|
||||
.any(|ls| {
|
||||
matches!(&ls.capabilities().completion_provider, Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(triggers),
|
||||
..
|
||||
}) if triggers.iter().any(|trigger| text.ends_with(trigger)))
|
||||
});
|
||||
if is_trigger_char {
|
||||
send_blocking(
|
||||
tx,
|
||||
CompletionEvent::TriggerChar {
|
||||
cursor,
|
||||
doc: doc.id(),
|
||||
view: view.id,
|
||||
},
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let is_auto_trigger = !trigger_char_only
|
||||
&& doc
|
||||
.text()
|
||||
.chars_at(cursor)
|
||||
.reversed()
|
||||
.take(config.completion_trigger_len as usize)
|
||||
.all(char_is_word);
|
||||
|
||||
if is_auto_trigger {
|
||||
send_blocking(
|
||||
tx,
|
||||
CompletionEvent::AutoTrigger {
|
||||
cursor,
|
||||
doc: doc.id(),
|
||||
view: view.id,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn update_completions(cx: &mut commands::Context, c: Option<char>) {
|
||||
cx.callback.push(Box::new(move |compositor, cx| {
|
||||
let editor_view = compositor.find::<ui::EditorView>().unwrap();
|
||||
if let Some(completion) = &mut editor_view.completion {
|
||||
completion.update_filter(c);
|
||||
if completion.is_empty() {
|
||||
editor_view.clear_completion(cx.editor);
|
||||
// clearing completions might mean we want to immediately rerequest them (usually
|
||||
// this occurs if typing a trigger char)
|
||||
if c.is_some() {
|
||||
trigger_auto_completion(&cx.editor.handlers.completions, cx.editor, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
fn clear_completions(cx: &mut commands::Context) {
|
||||
cx.callback.push(Box::new(|compositor, cx| {
|
||||
let editor_view = compositor.find::<ui::EditorView>().unwrap();
|
||||
editor_view.clear_completion(cx.editor);
|
||||
}))
|
||||
}
|
||||
|
||||
fn completion_post_command_hook(
|
||||
tx: &Sender<CompletionEvent>,
|
||||
PostCommand { command, cx }: &mut PostCommand<'_, '_>,
|
||||
) -> anyhow::Result<()> {
|
||||
if cx.editor.mode == Mode::Insert {
|
||||
if cx.editor.last_completion.is_some() {
|
||||
match command {
|
||||
MappableCommand::Static {
|
||||
name: "delete_word_forward" | "delete_char_forward" | "completion",
|
||||
..
|
||||
} => (),
|
||||
MappableCommand::Static {
|
||||
name: "delete_char_backward",
|
||||
..
|
||||
} => update_completions(cx, None),
|
||||
_ => clear_completions(cx),
|
||||
}
|
||||
} else {
|
||||
let event = match command {
|
||||
MappableCommand::Static {
|
||||
name: "delete_char_backward" | "delete_word_forward" | "delete_char_forward",
|
||||
..
|
||||
} => {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let primary_cursor = doc
|
||||
.selection(view.id)
|
||||
.primary()
|
||||
.cursor(doc.text().slice(..));
|
||||
CompletionEvent::DeleteText {
|
||||
cursor: primary_cursor,
|
||||
}
|
||||
}
|
||||
// hacks: some commands are handled elsewhere and we don't want to
|
||||
// cancel in that case
|
||||
MappableCommand::Static {
|
||||
name: "completion" | "insert_mode" | "append_mode",
|
||||
..
|
||||
} => return Ok(()),
|
||||
_ => CompletionEvent::Cancel,
|
||||
};
|
||||
send_blocking(tx, event);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(super) fn register_hooks(handlers: &Handlers) {
|
||||
let tx = handlers.completions.clone();
|
||||
register_hook!(move |event: &mut PostCommand<'_, '_>| completion_post_command_hook(&tx, event));
|
||||
|
||||
let tx = handlers.completions.clone();
|
||||
register_hook!(move |event: &mut OnModeSwitch<'_, '_>| {
|
||||
if event.old_mode == Mode::Insert {
|
||||
send_blocking(&tx, CompletionEvent::Cancel);
|
||||
clear_completions(event.cx);
|
||||
} else if event.new_mode == Mode::Insert {
|
||||
trigger_auto_completion(&tx, event.cx.editor, false)
|
||||
}
|
||||
Ok(())
|
||||
});
|
||||
|
||||
let tx = handlers.completions.clone();
|
||||
register_hook!(move |event: &mut PostInsertChar<'_, '_>| {
|
||||
if event.cx.editor.last_completion.is_some() {
|
||||
update_completions(event.cx, Some(event.c))
|
||||
} else {
|
||||
trigger_auto_completion(&tx, event.cx.editor, false);
|
||||
}
|
||||
Ok(())
|
||||
});
|
||||
}
|
335
helix-term/src/handlers/signature_help.rs
Normal file
335
helix-term/src/handlers/signature_help.rs
Normal file
@@ -0,0 +1,335 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use helix_core::syntax::LanguageServerFeature;
|
||||
use helix_event::{
|
||||
cancelable_future, cancelation, register_hook, send_blocking, CancelRx, CancelTx,
|
||||
};
|
||||
use helix_lsp::lsp;
|
||||
use helix_stdx::rope::RopeSliceExt;
|
||||
use helix_view::document::Mode;
|
||||
use helix_view::events::{DocumentDidChange, SelectionDidChange};
|
||||
use helix_view::handlers::lsp::{SignatureHelpEvent, SignatureHelpInvoked};
|
||||
use helix_view::Editor;
|
||||
use tokio::sync::mpsc::Sender;
|
||||
use tokio::time::Instant;
|
||||
|
||||
use crate::commands::Open;
|
||||
use crate::compositor::Compositor;
|
||||
use crate::events::{OnModeSwitch, PostInsertChar};
|
||||
use crate::handlers::Handlers;
|
||||
use crate::ui::lsp::SignatureHelp;
|
||||
use crate::ui::Popup;
|
||||
use crate::{job, ui};
|
||||
|
||||
#[derive(Debug)]
|
||||
enum State {
|
||||
Open,
|
||||
Closed,
|
||||
Pending { request: CancelTx },
|
||||
}
|
||||
|
||||
/// debounce timeout in ms, value taken from VSCode
|
||||
/// TODO: make this configurable?
|
||||
const TIMEOUT: u64 = 120;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct SignatureHelpHandler {
|
||||
trigger: Option<SignatureHelpInvoked>,
|
||||
state: State,
|
||||
}
|
||||
|
||||
impl SignatureHelpHandler {
|
||||
pub fn new() -> SignatureHelpHandler {
|
||||
SignatureHelpHandler {
|
||||
trigger: None,
|
||||
state: State::Closed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl helix_event::AsyncHook for SignatureHelpHandler {
|
||||
type Event = SignatureHelpEvent;
|
||||
|
||||
fn handle_event(
|
||||
&mut self,
|
||||
event: Self::Event,
|
||||
timeout: Option<tokio::time::Instant>,
|
||||
) -> Option<Instant> {
|
||||
match event {
|
||||
SignatureHelpEvent::Invoked => {
|
||||
self.trigger = Some(SignatureHelpInvoked::Manual);
|
||||
self.state = State::Closed;
|
||||
self.finish_debounce();
|
||||
return None;
|
||||
}
|
||||
SignatureHelpEvent::Trigger => {}
|
||||
SignatureHelpEvent::ReTrigger => {
|
||||
// don't retrigger if we aren't open/pending yet
|
||||
if matches!(self.state, State::Closed) {
|
||||
return timeout;
|
||||
}
|
||||
}
|
||||
SignatureHelpEvent::Cancel => {
|
||||
self.state = State::Closed;
|
||||
return None;
|
||||
}
|
||||
SignatureHelpEvent::RequestComplete { open } => {
|
||||
// don't cancel rerequest that was already triggered
|
||||
if let State::Pending { request } = &self.state {
|
||||
if !request.is_closed() {
|
||||
return timeout;
|
||||
}
|
||||
}
|
||||
self.state = if open { State::Open } else { State::Closed };
|
||||
return timeout;
|
||||
}
|
||||
}
|
||||
if self.trigger.is_none() {
|
||||
self.trigger = Some(SignatureHelpInvoked::Automatic)
|
||||
}
|
||||
Some(Instant::now() + Duration::from_millis(TIMEOUT))
|
||||
}
|
||||
|
||||
fn finish_debounce(&mut self) {
|
||||
let invocation = self.trigger.take().unwrap();
|
||||
let (tx, rx) = cancelation();
|
||||
self.state = State::Pending { request: tx };
|
||||
job::dispatch_blocking(move |editor, _| request_signature_help(editor, invocation, rx))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn request_signature_help(
|
||||
editor: &mut Editor,
|
||||
invoked: SignatureHelpInvoked,
|
||||
cancel: CancelRx,
|
||||
) {
|
||||
let (view, doc) = current!(editor);
|
||||
|
||||
// TODO merge multiple language server signature help into one instead of just taking the first language server that supports it
|
||||
let future = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::SignatureHelp)
|
||||
.find_map(|language_server| {
|
||||
let pos = doc.position(view.id, language_server.offset_encoding());
|
||||
language_server.text_document_signature_help(doc.identifier(), pos, None)
|
||||
});
|
||||
|
||||
let Some(future) = future else {
|
||||
// Do not show the message if signature help was invoked
|
||||
// automatically on backspace, trigger characters, etc.
|
||||
if invoked == SignatureHelpInvoked::Manual {
|
||||
editor
|
||||
.set_error("No configured language server supports signature-help");
|
||||
}
|
||||
return;
|
||||
};
|
||||
|
||||
tokio::spawn(async move {
|
||||
match cancelable_future(future, cancel).await {
|
||||
Some(Ok(res)) => {
|
||||
job::dispatch(move |editor, compositor| {
|
||||
show_signature_help(editor, compositor, invoked, res)
|
||||
})
|
||||
.await
|
||||
}
|
||||
Some(Err(err)) => log::error!("signature help request failed: {err}"),
|
||||
None => (),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn show_signature_help(
|
||||
editor: &mut Editor,
|
||||
compositor: &mut Compositor,
|
||||
invoked: SignatureHelpInvoked,
|
||||
response: Option<lsp::SignatureHelp>,
|
||||
) {
|
||||
let config = &editor.config();
|
||||
|
||||
if !(config.lsp.auto_signature_help
|
||||
|| SignatureHelp::visible_popup(compositor).is_some()
|
||||
|| invoked == SignatureHelpInvoked::Manual)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// If the signature help invocation is automatic, don't show it outside of Insert Mode:
|
||||
// it very probably means the server was a little slow to respond and the user has
|
||||
// already moved on to something else, making a signature help popup will just be an
|
||||
// annoyance, see https://github.com/helix-editor/helix/issues/3112
|
||||
// For the most part this should not be needed as the request gets canceled automatically now
|
||||
// but it's technically possible for the mode change to just preempt this callback so better safe than sorry
|
||||
if invoked == SignatureHelpInvoked::Automatic && editor.mode != Mode::Insert {
|
||||
return;
|
||||
}
|
||||
|
||||
let response = match response {
|
||||
// According to the spec the response should be None if there
|
||||
// are no signatures, but some servers don't follow this.
|
||||
Some(s) if !s.signatures.is_empty() => s,
|
||||
_ => {
|
||||
send_blocking(
|
||||
&editor.handlers.signature_hints,
|
||||
SignatureHelpEvent::RequestComplete { open: false },
|
||||
);
|
||||
compositor.remove(SignatureHelp::ID);
|
||||
return;
|
||||
}
|
||||
};
|
||||
send_blocking(
|
||||
&editor.handlers.signature_hints,
|
||||
SignatureHelpEvent::RequestComplete { open: true },
|
||||
);
|
||||
|
||||
let doc = doc!(editor);
|
||||
let language = doc.language_name().unwrap_or("");
|
||||
|
||||
let signature = match response
|
||||
.signatures
|
||||
.get(response.active_signature.unwrap_or(0) as usize)
|
||||
{
|
||||
Some(s) => s,
|
||||
None => return,
|
||||
};
|
||||
let mut contents = SignatureHelp::new(
|
||||
signature.label.clone(),
|
||||
language.to_string(),
|
||||
Arc::clone(&editor.syn_loader),
|
);

let signature_doc = if config.lsp.display_signature_help_docs {
signature.documentation.as_ref().map(|doc| match doc {
lsp::Documentation::String(s) => s.clone(),
lsp::Documentation::MarkupContent(markup) => markup.value.clone(),
})
} else {
None
};

contents.set_signature_doc(signature_doc);

let active_param_range = || -> Option<(usize, usize)> {
let param_idx = signature
.active_parameter
.or(response.active_parameter)
.unwrap_or(0) as usize;
let param = signature.parameters.as_ref()?.get(param_idx)?;
match &param.label {
lsp::ParameterLabel::Simple(string) => {
let start = signature.label.find(string.as_str())?;
Some((start, start + string.len()))
}
lsp::ParameterLabel::LabelOffsets([start, end]) => {
// LS sends offsets based on utf-16 based string representation
// but highlighting in helix is done using byte offset.
use helix_core::str_utils::char_to_byte_idx;
let from = char_to_byte_idx(&signature.label, *start as usize);
let to = char_to_byte_idx(&signature.label, *end as usize);
Some((from, to))
}
}
};
contents.set_active_param_range(active_param_range());

let old_popup = compositor.find_id::<Popup<SignatureHelp>>(SignatureHelp::ID);
let mut popup = Popup::new(SignatureHelp::ID, contents)
.position(old_popup.and_then(|p| p.get_position()))
.position_bias(Open::Above)
.ignore_escape_key(true);

// Don't create a popup if it intersects the auto-complete menu.
let size = compositor.size();
if compositor
.find::<ui::EditorView>()
.unwrap()
.completion
.as_mut()
.map(|completion| completion.area(size, editor))
.filter(|area| area.intersects(popup.area(size, editor)))
.is_some()
{
return;
}

compositor.replace_or_push(SignatureHelp::ID, popup);
}

fn signature_help_post_insert_char_hook(
tx: &Sender<SignatureHelpEvent>,
PostInsertChar { cx, .. }: &mut PostInsertChar<'_, '_>,
) -> anyhow::Result<()> {
if !cx.editor.config().lsp.auto_signature_help {
return Ok(());
}
let (view, doc) = current!(cx.editor);
// TODO support multiple language servers (not just the first that is found), likely by merging UI somehow
let Some(language_server) = doc
.language_servers_with_feature(LanguageServerFeature::SignatureHelp)
.next()
else {
return Ok(());
};

let capabilities = language_server.capabilities();

if let lsp::ServerCapabilities {
signature_help_provider:
Some(lsp::SignatureHelpOptions {
trigger_characters: Some(triggers),
// TODO: retrigger_characters
..
}),
..
} = capabilities
{
let mut text = doc.text().slice(..);
let cursor = doc.selection(view.id).primary().cursor(text);
text = text.slice(..cursor);
if triggers.iter().any(|trigger| text.ends_with(trigger)) {
send_blocking(tx, SignatureHelpEvent::Trigger)
}
}
Ok(())
}

pub(super) fn register_hooks(handlers: &Handlers) {
let tx = handlers.signature_hints.clone();
register_hook!(move |event: &mut OnModeSwitch<'_, '_>| {
match (event.old_mode, event.new_mode) {
(Mode::Insert, _) => {
send_blocking(&tx, SignatureHelpEvent::Cancel);
event.cx.callback.push(Box::new(|compositor, _| {
compositor.remove(SignatureHelp::ID);
}));
}
(_, Mode::Insert) => {
if event.cx.editor.config().lsp.auto_signature_help {
send_blocking(&tx, SignatureHelpEvent::Trigger);
}
}
_ => (),
}
Ok(())
});

let tx = handlers.signature_hints.clone();
register_hook!(
move |event: &mut PostInsertChar<'_, '_>| signature_help_post_insert_char_hook(&tx, event)
);

let tx = handlers.signature_hints.clone();
register_hook!(move |event: &mut DocumentDidChange<'_>| {
if event.doc.config.load().lsp.auto_signature_help {
send_blocking(&tx, SignatureHelpEvent::ReTrigger);
}
Ok(())
});

let tx = handlers.signature_hints.clone();
register_hook!(move |event: &mut SelectionDidChange<'_>| {
if event.doc.config.load().lsp.auto_signature_help {
send_blocking(&tx, SignatureHelpEvent::ReTrigger);
}
Ok(())
});
}
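As an aside on the `ParameterLabel::Simple` branch above, here is a minimal standalone sketch of the same range computation (a toy helper and a made-up signature string, not the helix API):

// Mirror of the Simple-label case: locate the parameter label inside the full
// signature string and return its byte range for highlighting.
fn simple_param_range(signature_label: &str, param_label: &str) -> Option<(usize, usize)> {
    let start = signature_label.find(param_label)?;
    Some((start, start + param_label.len()))
}

fn main() {
    // For `fn add(a: i32, b: i32)` the second parameter starts at byte 15.
    assert_eq!(simple_param_range("fn add(a: i32, b: i32)", "b: i32"), Some((15, 21)));
}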
@@ -182,7 +182,7 @@ pub fn languages_all() -> std::io::Result<()> {
.sort_unstable_by_key(|l| l.language_id.clone());

let check_binary = |cmd: Option<&str>| match cmd {
Some(cmd) => match which::which(cmd) {
Some(cmd) => match helix_stdx::env::which(cmd) {
Ok(_) => column(&format!("✓ {}", cmd), Color::Green),
Err(_) => column(&format!("✘ {}", cmd), Color::Red),
},
@@ -322,7 +322,7 @@ fn probe_protocols<'a, I: Iterator<Item = &'a str> + 'a>(
writeln!(stdout)?;

for cmd in server_cmds {
let (path, icon) = match which::which(cmd) {
let (path, icon) = match helix_stdx::env::which(cmd) {
Ok(path) => (path.display().to_string().green(), "✓".green()),
Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()),
};
@@ -344,7 +344,7 @@ fn probe_protocol(protocol_name: &str, server_cmd: Option<String>) -> std::io::R
writeln!(stdout, "Configured {}: {}", protocol_name, cmd_name)?;

if let Some(cmd) = server_cmd {
let path = match which::which(&cmd) {
let path = match helix_stdx::env::which(&cmd) {
Ok(path) => path.display().to_string().green(),
Err(_) => format!("'{}' not found in $PATH", cmd).red(),
};
@@ -1,13 +1,37 @@
use helix_event::status::StatusMessage;
use helix_event::{runtime_local, send_blocking};
use helix_view::Editor;
use once_cell::sync::OnceCell;

use crate::compositor::Compositor;

use futures_util::future::{BoxFuture, Future, FutureExt};
use futures_util::stream::{FuturesUnordered, StreamExt};
use tokio::sync::mpsc::{channel, Receiver, Sender};

pub type EditorCompositorCallback = Box<dyn FnOnce(&mut Editor, &mut Compositor) + Send>;
pub type EditorCallback = Box<dyn FnOnce(&mut Editor) + Send>;

runtime_local! {
static JOB_QUEUE: OnceCell<Sender<Callback>> = OnceCell::new();
}

pub async fn dispatch_callback(job: Callback) {
let _ = JOB_QUEUE.wait().send(job).await;
}

pub async fn dispatch(job: impl FnOnce(&mut Editor, &mut Compositor) + Send + 'static) {
let _ = JOB_QUEUE
.wait()
.send(Callback::EditorCompositor(Box::new(job)))
.await;
}

pub fn dispatch_blocking(job: impl FnOnce(&mut Editor, &mut Compositor) + Send + 'static) {
let jobs = JOB_QUEUE.wait();
send_blocking(jobs, Callback::EditorCompositor(Box::new(job)))
}

pub enum Callback {
EditorCompositor(EditorCompositorCallback),
Editor(EditorCallback),
@@ -21,11 +45,11 @@ pub struct Job {
pub wait: bool,
}

#[derive(Default)]
pub struct Jobs {
pub futures: FuturesUnordered<JobFuture>,
/// These are the ones that need to complete before we exit.
/// jobs that need to complete before we exit.
pub wait_futures: FuturesUnordered<JobFuture>,
pub callbacks: Receiver<Callback>,
pub status_messages: Receiver<StatusMessage>,
}

impl Job {
@@ -52,8 +76,16 @@ impl Job {
}

impl Jobs {
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
Self::default()
let (tx, rx) = channel(1024);
let _ = JOB_QUEUE.set(tx);
let status_messages = helix_event::status::setup();
Self {
wait_futures: FuturesUnordered::new(),
callbacks: rx,
status_messages,
}
}

pub fn spawn<F: Future<Output = anyhow::Result<()>> + Send + 'static>(&mut self, f: F) {
@@ -85,18 +117,17 @@ impl Jobs {
}
}

pub async fn next_job(&mut self) -> Option<anyhow::Result<Option<Callback>>> {
tokio::select! {
event = self.futures.next() => { event }
event = self.wait_futures.next() => { event }
}
}

pub fn add(&self, j: Job) {
if j.wait {
self.wait_futures.push(j.future);
} else {
self.futures.push(j.future);
tokio::spawn(async move {
match j.future.await {
Ok(Some(cb)) => dispatch_callback(cb).await,
Ok(None) => (),
Err(err) => helix_event::status::report(err).await,
}
});
}
}
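A minimal, self-contained sketch of the callback-queue pattern above, with toy types standing in for the real helix `Editor`/`Compositor` and tokio's `blocking_send` standing in for `helix_event::send_blocking`:

use tokio::sync::mpsc::{Receiver, Sender};

struct Editor {
    status: String,
}

type EditorCallback = Box<dyn FnOnce(&mut Editor) + Send>;

// A background (non-async) thread queues a closure for the owner of the receiver.
fn dispatch_blocking(queue: &Sender<EditorCallback>, job: impl FnOnce(&mut Editor) + Send + 'static) {
    let _ = queue.blocking_send(Box::new(job));
}

// The main loop drains the queue and applies each callback to its own state.
fn drain(editor: &mut Editor, callbacks: &mut Receiver<EditorCallback>) {
    while let Ok(cb) = callbacks.try_recv() {
        cb(editor);
    }
}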
@@ -319,7 +319,7 @@ impl Keymaps {
self.sticky = None;
}

let first = self.state.get(0).unwrap_or(&key);
let first = self.state.first().unwrap_or(&key);
let trie_node = match self.sticky {
Some(ref trie) => Cow::Owned(KeyTrie::Node(trie.clone())),
None => Cow::Borrowed(keymap),
@@ -6,13 +6,21 @@ pub mod args;
pub mod commands;
pub mod compositor;
pub mod config;
pub mod events;
pub mod health;
pub mod job;
pub mod keymap;
pub mod ui;

use std::path::Path;

use futures_util::Future;
mod handlers;

use ignore::DirEntry;
use url::Url;

pub use keymap::macros::*;

#[cfg(not(windows))]
fn true_color() -> bool {
@@ -46,3 +54,22 @@ fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> b

true
}

/// Opens URL in external program.
fn open_external_url_callback(
url: Url,
) -> impl Future<Output = Result<job::Callback, anyhow::Error>> + Send + 'static {
let commands = open::commands(url.as_str());
async {
for cmd in commands {
let mut command = tokio::process::Command::new(cmd.get_program());
command.args(cmd.get_args());
if command.output().await.is_ok() {
return Ok(job::Callback::Editor(Box::new(|_| {})));
}
}
Ok(job::Callback::Editor(Box::new(move |editor| {
editor.set_error("Opening URL in external program failed")
})))
}
}
@@ -118,16 +118,16 @@ FLAGS:

// Before setting the working directory, resolve all the paths in args.files
for (path, _) in args.files.iter_mut() {
*path = helix_core::path::get_canonicalized_path(path);
*path = helix_stdx::path::canonicalize(&path);
}

// NOTE: Set the working directory early so the correct configuration is loaded. Be aware that
// Application::new() depends on this logic so it must be updated if this changes.
if let Some(path) = &args.working_directory {
helix_loader::set_current_working_dir(path)?;
helix_stdx::env::set_current_working_dir(path)?;
} else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) {
// If the first file is a directory, it will be the working directory unless -w was specified
helix_loader::set_current_working_dir(path)?;
helix_stdx::env::set_current_working_dir(path)?;
}

let config = match Config::load_default() {
@@ -1,8 +1,12 @@
|
||||
use crate::compositor::{Component, Context, Event, EventResult};
|
||||
use crate::{
|
||||
compositor::{Component, Context, Event, EventResult},
|
||||
handlers::trigger_auto_completion,
|
||||
};
|
||||
use helix_view::{
|
||||
document::SavePoint,
|
||||
editor::CompleteAction,
|
||||
graphics::Margin,
|
||||
handlers::lsp::SignatureHelpInvoked,
|
||||
theme::{Modifier, Style},
|
||||
ViewId,
|
||||
};
|
||||
@@ -10,7 +14,7 @@ use tui::{buffer::Buffer as Surface, text::Span};
|
||||
|
||||
use std::{borrow::Cow, sync::Arc};
|
||||
|
||||
use helix_core::{Change, Transaction};
|
||||
use helix_core::{chars, Change, Transaction};
|
||||
use helix_view::{graphics::Rect, Document, Editor};
|
||||
|
||||
use crate::commands;
|
||||
@@ -95,10 +99,9 @@ pub struct CompletionItem {
|
||||
/// Wraps a Menu.
|
||||
pub struct Completion {
|
||||
popup: Popup<Menu<CompletionItem>>,
|
||||
start_offset: usize,
|
||||
#[allow(dead_code)]
|
||||
trigger_offset: usize,
|
||||
// TODO: maintain a completioncontext with trigger kind & trigger char
|
||||
filter: String,
|
||||
}
|
||||
|
||||
impl Completion {
|
||||
@@ -108,7 +111,6 @@ impl Completion {
|
||||
editor: &Editor,
|
||||
savepoint: Arc<SavePoint>,
|
||||
mut items: Vec<CompletionItem>,
|
||||
start_offset: usize,
|
||||
trigger_offset: usize,
|
||||
) -> Self {
|
||||
let preview_completion_insert = editor.config().preview_completion_insert;
|
||||
@@ -246,7 +248,7 @@ impl Completion {
|
||||
// (also without sending the transaction to the LS) *before any further transaction is applied*.
|
||||
// Otherwise incremental sync breaks (since the state of the LS doesn't match the state the transaction
|
||||
// is applied to).
|
||||
if editor.last_completion.is_none() {
|
||||
if matches!(editor.last_completion, Some(CompleteAction::Triggered)) {
|
||||
editor.last_completion = Some(CompleteAction::Selected {
|
||||
savepoint: doc.savepoint(view),
|
||||
})
|
||||
@@ -324,8 +326,18 @@ impl Completion {
|
||||
doc.apply(&transaction, view.id);
|
||||
}
|
||||
}
|
||||
// we could have just inserted a trigger char (like a `crate::` completion for rust
|
||||
// so we want to retrigger immediately when accepting a completion.
|
||||
trigger_auto_completion(&editor.handlers.completions, editor, true);
|
||||
}
|
||||
};
|
||||
|
||||
// In case the popup was deleted because of an intersection w/ the auto-complete menu.
|
||||
if event != PromptEvent::Update {
|
||||
editor
|
||||
.handlers
|
||||
.trigger_signature_help(SignatureHelpInvoked::Automatic, editor);
|
||||
}
|
||||
});
|
||||
|
||||
let margin = if editor.menu_border() {
|
||||
@@ -339,14 +351,30 @@ impl Completion {
|
||||
.ignore_escape_key(true)
|
||||
.margin(margin);
|
||||
|
||||
let (view, doc) = current_ref!(editor);
|
||||
let text = doc.text().slice(..);
|
||||
let cursor = doc.selection(view.id).primary().cursor(text);
|
||||
let offset = text
|
||||
.chars_at(cursor)
|
||||
.reversed()
|
||||
.take_while(|ch| chars::char_is_word(*ch))
|
||||
.count();
|
||||
let start_offset = cursor.saturating_sub(offset);
|
||||
|
||||
let fragment = doc.text().slice(start_offset..cursor);
|
||||
let mut completion = Self {
|
||||
popup,
|
||||
start_offset,
|
||||
trigger_offset,
|
||||
// TODO: expand nucleo api to allow moving straight to a Utf32String here
|
||||
// and avoid allocation during matching
|
||||
filter: String::from(fragment),
|
||||
};
|
||||
|
||||
// need to recompute immediately in case start_offset != trigger_offset
|
||||
completion.recompute_filter(editor);
|
||||
completion
|
||||
.popup
|
||||
.contents_mut()
|
||||
.score(&completion.filter, false);
|
||||
|
||||
completion
|
||||
}
|
||||
@@ -366,39 +394,22 @@ impl Completion {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn recompute_filter(&mut self, editor: &Editor) {
|
||||
/// Appends (`c: Some(c)`) or removes (`c: None`) a character to/from the filter
|
||||
/// this should be called whenever the user types or deletes a character in insert mode.
|
||||
pub fn update_filter(&mut self, c: Option<char>) {
|
||||
// recompute menu based on matches
|
||||
let menu = self.popup.contents_mut();
|
||||
let (view, doc) = current_ref!(editor);
|
||||
|
||||
// cx.hooks()
|
||||
// cx.add_hook(enum type, ||)
|
||||
// cx.trigger_hook(enum type, &str, ...) <-- there has to be enough to identify doc/view
|
||||
// callback with editor & compositor
|
||||
//
|
||||
// trigger_hook sends event into channel, that's consumed in the global loop and
|
||||
// triggers all registered callbacks
|
||||
// TODO: hooks should get processed immediately so maybe do it after select!(), before
|
||||
// looping?
|
||||
|
||||
let cursor = doc
|
||||
.selection(view.id)
|
||||
.primary()
|
||||
.cursor(doc.text().slice(..));
|
||||
if self.trigger_offset <= cursor {
|
||||
let fragment = doc.text().slice(self.start_offset..cursor);
|
||||
let text = Cow::from(fragment);
|
||||
// TODO: logic is same as ui/picker
|
||||
menu.score(&text);
|
||||
} else {
|
||||
// we backspaced before the start offset, clear the menu
|
||||
// this will cause the editor to remove the completion popup
|
||||
menu.clear();
|
||||
match c {
|
||||
Some(c) => self.filter.push(c),
|
||||
None => {
|
||||
self.filter.pop();
|
||||
if self.filter.is_empty() {
|
||||
menu.clear();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update(&mut self, cx: &mut commands::Context) {
|
||||
self.recompute_filter(cx.editor)
|
||||
menu.score(&self.filter, c.is_some());
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
use crate::{
|
||||
commands::{self, OnKeyCallback},
|
||||
compositor::{Component, Context, Event, EventResult},
|
||||
job::{self, Callback},
|
||||
events::{OnModeSwitch, PostCommand},
|
||||
key,
|
||||
keymap::{KeymapResult, Keymaps},
|
||||
ui::{
|
||||
@@ -33,8 +33,8 @@ use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc, sync::Arc};
|
||||
|
||||
use tui::{buffer::Buffer as Surface, text::Span};
|
||||
|
||||
use super::document::LineDecoration;
|
||||
use super::{completion::CompletionItem, statusline};
|
||||
use super::{document::LineDecoration, lsp::SignatureHelp};
|
||||
|
||||
pub struct EditorView {
|
||||
pub keymaps: Keymaps,
|
||||
@@ -835,35 +835,26 @@ impl EditorView {
|
||||
|
||||
let mut execute_command = |command: &commands::MappableCommand| {
|
||||
command.execute(cxt);
|
||||
let current_mode = cxt.editor.mode();
|
||||
match (last_mode, current_mode) {
|
||||
(Mode::Normal, Mode::Insert) => {
|
||||
// HAXX: if we just entered insert mode from normal, clear key buf
|
||||
// and record the command that got us into this mode.
|
||||
helix_event::dispatch(PostCommand { command, cx: cxt });
|
||||
|
||||
let current_mode = cxt.editor.mode();
|
||||
if current_mode != last_mode {
|
||||
helix_event::dispatch(OnModeSwitch {
|
||||
old_mode: last_mode,
|
||||
new_mode: current_mode,
|
||||
cx: cxt,
|
||||
});
|
||||
|
||||
// HAXX: if we just entered insert mode from normal, clear key buf
|
||||
// and record the command that got us into this mode.
|
||||
if current_mode == Mode::Insert {
|
||||
// how we entered insert mode is important, and we should track that so
|
||||
// we can repeat the side effect.
|
||||
self.last_insert.0 = command.clone();
|
||||
self.last_insert.1.clear();
|
||||
|
||||
commands::signature_help_impl(cxt, commands::SignatureHelpInvoked::Automatic);
|
||||
}
|
||||
(Mode::Insert, Mode::Normal) => {
|
||||
// if exiting insert mode, remove completion
|
||||
self.clear_completion(cxt.editor);
|
||||
cxt.editor.completion_request_handle = None;
|
||||
|
||||
// TODO: Use an on_mode_change hook to remove signature help
|
||||
cxt.jobs.callback(async {
|
||||
let call: job::Callback =
|
||||
Callback::EditorCompositor(Box::new(|_editor, compositor| {
|
||||
compositor.remove(SignatureHelp::ID);
|
||||
}));
|
||||
Ok(call)
|
||||
});
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
last_mode = current_mode;
|
||||
};
|
||||
|
||||
@@ -991,12 +982,10 @@ impl EditorView {
|
||||
editor: &mut Editor,
|
||||
savepoint: Arc<SavePoint>,
|
||||
items: Vec<CompletionItem>,
|
||||
start_offset: usize,
|
||||
trigger_offset: usize,
|
||||
size: Rect,
|
||||
) -> Option<Rect> {
|
||||
let mut completion =
|
||||
Completion::new(editor, savepoint, items, start_offset, trigger_offset);
|
||||
let mut completion = Completion::new(editor, savepoint, items, trigger_offset);
|
||||
|
||||
if completion.is_empty() {
|
||||
// skip if we got no completion results
|
||||
@@ -1004,7 +993,7 @@ impl EditorView {
|
||||
}
|
||||
|
||||
let area = completion.area(size, editor);
|
||||
editor.last_completion = None;
|
||||
editor.last_completion = Some(CompleteAction::Triggered);
|
||||
self.last_insert.1.push(InsertEvent::TriggerCompletion);
|
||||
|
||||
// TODO : propagate required size on resize to completion too
|
||||
@@ -1017,6 +1006,7 @@ impl EditorView {
|
||||
self.completion = None;
|
||||
if let Some(last_completion) = editor.last_completion.take() {
|
||||
match last_completion {
|
||||
CompleteAction::Triggered => (),
|
||||
CompleteAction::Applied {
|
||||
trigger_offset,
|
||||
changes,
|
||||
@@ -1030,9 +1020,6 @@ impl EditorView {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clear any savepoints
|
||||
editor.clear_idle_timer(); // don't retrigger
|
||||
}
|
||||
|
||||
pub fn handle_idle_timeout(&mut self, cx: &mut commands::Context) -> EventResult {
|
||||
@@ -1046,13 +1033,7 @@ impl EditorView {
|
||||
};
|
||||
}
|
||||
|
||||
if cx.editor.mode != Mode::Insert || !cx.editor.config().auto_completion {
|
||||
return EventResult::Ignored(None);
|
||||
}
|
||||
|
||||
crate::commands::insert::idle_completion(cx);
|
||||
|
||||
EventResult::Consumed(None)
|
||||
EventResult::Ignored(None)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1265,7 +1246,7 @@ impl Component for EditorView {
|
||||
editor: context.editor,
|
||||
count: None,
|
||||
register: None,
|
||||
callback: None,
|
||||
callback: Vec::new(),
|
||||
on_next_key_callback: None,
|
||||
jobs: context.jobs,
|
||||
};
|
||||
@@ -1338,12 +1319,6 @@ impl Component for EditorView {
|
||||
if callback.is_some() {
|
||||
// assume close_fn
|
||||
self.clear_completion(cx.editor);
|
||||
|
||||
// In case the popup was deleted because of an intersection w/ the auto-complete menu.
|
||||
commands::signature_help_impl(
|
||||
&mut cx,
|
||||
commands::SignatureHelpInvoked::Automatic,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1354,14 +1329,6 @@ impl Component for EditorView {
|
||||
|
||||
// record last_insert key
|
||||
self.last_insert.1.push(InsertEvent::Key(key));
|
||||
|
||||
// lastly we recalculate completion
|
||||
if let Some(completion) = &mut self.completion {
|
||||
completion.update(&mut cx);
|
||||
if completion.is_empty() {
|
||||
self.clear_completion(cx.editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
mode => self.command_mode(mode, &mut cx, key),
|
||||
@@ -1375,7 +1342,7 @@ impl Component for EditorView {
|
||||
}
|
||||
|
||||
// appease borrowck
|
||||
let callback = cx.callback.take();
|
||||
let callbacks = take(&mut cx.callback);
|
||||
|
||||
// if the command consumed the last view, skip the render.
|
||||
// on the next loop cycle the Application will then terminate.
|
||||
@@ -1394,6 +1361,16 @@ impl Component for EditorView {
|
||||
if mode != Mode::Insert {
|
||||
doc.append_changes_to_history(view);
|
||||
}
|
||||
let callback = if callbacks.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let callback: crate::compositor::Callback = Box::new(move |compositor, cx| {
|
||||
for callback in callbacks {
|
||||
callback(compositor, cx)
|
||||
}
|
||||
});
|
||||
Some(callback)
|
||||
};
|
||||
|
||||
EventResult::Consumed(callback)
|
||||
}
|
||||
|
@@ -96,20 +96,34 @@ impl<T: Item> Menu<T> {
}
}

pub fn score(&mut self, pattern: &str) {
// reuse the matches allocation
self.matches.clear();
pub fn score(&mut self, pattern: &str, incremental: bool) {
let mut matcher = MATCHER.lock();
matcher.config = Config::DEFAULT;
let pattern = Atom::new(pattern, CaseMatching::Ignore, AtomKind::Fuzzy, false);
let mut buf = Vec::new();
let matches = self.options.iter().enumerate().filter_map(|(i, option)| {
let text = option.filter_text(&self.editor_data);
pattern
.score(Utf32Str::new(&text, &mut buf), &mut matcher)
.map(|score| (i as u32, score as u32))
});
self.matches.extend(matches);
if incremental {
self.matches.retain_mut(|(index, score)| {
let option = &self.options[*index as usize];
let text = option.filter_text(&self.editor_data);
let new_score = pattern.score(Utf32Str::new(&text, &mut buf), &mut matcher);
match new_score {
Some(new_score) => {
*score = new_score as u32;
true
}
None => false,
}
})
} else {
self.matches.clear();
let matches = self.options.iter().enumerate().filter_map(|(i, option)| {
let text = option.filter_text(&self.editor_data);
pattern
.score(Utf32Str::new(&text, &mut buf), &mut matcher)
.map(|score| (i as u32, score as u32))
});
self.matches.extend(matches);
}
self.matches
.sort_unstable_by_key(|&(i, score)| (Reverse(score), i));
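The `incremental` flag above exists so that typing another character only re-scores the current matches instead of every option; a standalone sketch of that idea with a toy substring scorer instead of nucleo:

fn rescore(options: &[String], matches: &mut Vec<(usize, u32)>, pattern: &str, incremental: bool) {
    // toy scorer: earlier occurrences of a longer pattern score higher
    let score = |text: &str| -> Option<u32> {
        text.find(pattern)
            .map(|pos| ((pattern.len() * 100) as u32).saturating_sub(pos as u32))
    };
    if incremental {
        // the pattern only became more specific, so the previous match set can only shrink
        matches.retain_mut(|(index, s)| match score(&options[*index]) {
            Some(new) => {
                *s = new;
                true
            }
            None => false,
        });
    } else {
        matches.clear();
        matches.extend(
            options
                .iter()
                .enumerate()
                .filter_map(|(i, option)| score(option).map(|s| (i, s))),
        );
    }
    matches.sort_unstable_by_key(|&(i, s)| (std::cmp::Reverse(s), i));
}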
@@ -409,7 +409,7 @@ pub mod completers {
use std::path::Path;

let is_tilde = input == "~";
let path = helix_core::path::expand_tilde(Path::new(input));
let path = helix_stdx::path::expand_tilde(Path::new(input));

let (dir, file_name) = if input.ends_with(std::path::MAIN_SEPARATOR) {
(path, None)
@@ -430,7 +430,7 @@ pub mod completers {
match path.parent() {
Some(path) if !path.as_os_str().is_empty() => path.to_path_buf(),
// Path::new("h")'s parent is Some("")...
_ => helix_loader::current_working_dir(),
_ => helix_stdx::env::current_working_dir(),
}
};
@@ -63,7 +63,7 @@ impl PathOrId {
fn get_canonicalized(self) -> Self {
use PathOrId::*;
match self {
Path(path) => Path(helix_core::path::get_canonicalized_path(&path)),
Path(path) => Path(helix_stdx::path::canonicalize(path)),
Id(id) => Id(id),
}
}
@@ -11,7 +11,7 @@ impl ProgressSpinners {
}

pub fn get_or_create(&mut self, id: usize) -> &mut Spinner {
self.inner.entry(id).or_insert_with(Spinner::default)
self.inner.entry(id).or_default()
}
}
@@ -3,7 +3,8 @@ use std::{
|
||||
ops::RangeInclusive,
|
||||
};
|
||||
|
||||
use helix_core::{diagnostic::Severity, path::get_normalized_path};
|
||||
use helix_core::diagnostic::Severity;
|
||||
use helix_stdx::path;
|
||||
use helix_view::doc;
|
||||
|
||||
use super::*;
|
||||
@@ -23,7 +24,7 @@ async fn test_write_quit_fail() -> anyhow::Result<()> {
|
||||
assert_eq!(1, docs.len());
|
||||
|
||||
let doc = docs.pop().unwrap();
|
||||
assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
|
||||
assert_eq!(Some(&path::normalize(file.path())), doc.path());
|
||||
assert_eq!(&Severity::Error, app.editor.get_status().unwrap().1);
|
||||
}),
|
||||
false,
|
||||
@@ -269,7 +270,7 @@ async fn test_write_scratch_to_new_path() -> anyhow::Result<()> {
|
||||
assert_eq!(1, docs.len());
|
||||
|
||||
let doc = docs.pop().unwrap();
|
||||
assert_eq!(Some(&get_normalized_path(file.path())), doc.path());
|
||||
assert_eq!(Some(&path::normalize(file.path())), doc.path());
|
||||
}),
|
||||
false,
|
||||
)
|
||||
@@ -341,7 +342,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
|
||||
Some(&|app| {
|
||||
let doc = doc!(app.editor);
|
||||
assert!(!app.editor.is_err());
|
||||
assert_eq!(&get_normalized_path(file1.path()), doc.path().unwrap());
|
||||
assert_eq!(&path::normalize(file1.path()), doc.path().unwrap());
|
||||
}),
|
||||
),
|
||||
(
|
||||
@@ -349,7 +350,7 @@ async fn test_write_new_path() -> anyhow::Result<()> {
|
||||
Some(&|app| {
|
||||
let doc = doc!(app.editor);
|
||||
assert!(!app.editor.is_err());
|
||||
assert_eq!(&get_normalized_path(file2.path()), doc.path().unwrap());
|
||||
assert_eq!(&path::normalize(file2.path()), doc.path().unwrap());
|
||||
assert!(app.editor.document_by_path(file1.path()).is_none());
|
||||
}),
|
||||
),
|
||||
|
@@ -1,6 +1,6 @@
|
||||
use super::*;
|
||||
|
||||
use helix_core::path::get_normalized_path;
|
||||
use helix_stdx::path;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_split_write_quit_all() -> anyhow::Result<()> {
|
||||
@@ -27,21 +27,21 @@ async fn test_split_write_quit_all() -> anyhow::Result<()> {
|
||||
|
||||
let doc1 = docs
|
||||
.iter()
|
||||
.find(|doc| doc.path().unwrap() == &get_normalized_path(file1.path()))
|
||||
.find(|doc| doc.path().unwrap() == &path::normalize(file1.path()))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("hello1", doc1.text().to_string());
|
||||
|
||||
let doc2 = docs
|
||||
.iter()
|
||||
.find(|doc| doc.path().unwrap() == &get_normalized_path(file2.path()))
|
||||
.find(|doc| doc.path().unwrap() == &path::normalize(file2.path()))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("hello2", doc2.text().to_string());
|
||||
|
||||
let doc3 = docs
|
||||
.iter()
|
||||
.find(|doc| doc.path().unwrap() == &get_normalized_path(file3.path()))
|
||||
.find(|doc| doc.path().unwrap() == &path::normalize(file3.path()))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!("hello3", doc3.text().to_string());
|
||||
|
@@ -79,6 +79,7 @@ pub struct CrosstermBackend<W: Write> {
capabilities: Capabilities,
supports_keyboard_enhancement_protocol: OnceCell<bool>,
mouse_capture_enabled: bool,
supports_bracketed_paste: bool,
}

impl<W> CrosstermBackend<W>
@@ -91,6 +92,7 @@ where
capabilities: Capabilities::from_env_or_default(config),
supports_keyboard_enhancement_protocol: OnceCell::new(),
mouse_capture_enabled: false,
supports_bracketed_paste: true,
}
}

@@ -134,9 +136,16 @@ where
execute!(
self.buffer,
terminal::EnterAlternateScreen,
EnableBracketedPaste,
EnableFocusChange
)?;
match execute!(self.buffer, EnableBracketedPaste,) {
Err(err) if err.kind() == io::ErrorKind::Unsupported => {
log::warn!("Bracketed paste is not supported on this terminal.");
self.supports_bracketed_paste = false;
}
Err(err) => return Err(err),
Ok(_) => (),
};
execute!(self.buffer, terminal::Clear(terminal::ClearType::All))?;
if config.enable_mouse_capture {
execute!(self.buffer, EnableMouseCapture)?;
@@ -177,9 +186,11 @@ where
if self.supports_keyboard_enhancement_protocol() {
execute!(self.buffer, PopKeyboardEnhancementFlags)?;
}
if self.supports_bracketed_paste {
execute!(self.buffer, DisableBracketedPaste,)?;
}
execute!(
self.buffer,
DisableBracketedPaste,
DisableFocusChange,
terminal::LeaveAlternateScreen
)?;
@@ -195,12 +206,8 @@ where
// disable without calling enable previously
let _ = execute!(stdout, DisableMouseCapture);
let _ = execute!(stdout, PopKeyboardEnhancementFlags);
execute!(
stdout,
DisableBracketedPaste,
DisableFocusChange,
terminal::LeaveAlternateScreen
)?;
let _ = execute!(stdout, DisableBracketedPaste);
execute!(stdout, DisableFocusChange, terminal::LeaveAlternateScreen)?;
terminal::disable_raw_mode()
}
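The bracketed-paste change above follows a try-and-degrade pattern: attempt to enable the feature, remember whether it worked, and only treat other errors as fatal. A standalone sketch of that pattern (hypothetical helper, not part of the diff):

use std::io;

fn try_enable(mut enable: impl FnMut() -> io::Result<()>) -> io::Result<bool> {
    match enable() {
        Ok(()) => Ok(true),
        // the terminal simply lacks the feature: record it and carry on
        Err(err) if err.kind() == io::ErrorKind::Unsupported => Ok(false),
        // anything else is a real I/O failure
        Err(err) => Err(err),
    }
}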
@@ -19,7 +19,7 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p
parking_lot = "0.12"
arc-swap = { version = "1.6.0" }

gix = { version = "0.57.1", default-features = false , optional = true }
gix = { version = "0.58.0", default-features = false , optional = true }
imara-diff = "0.1.5"
anyhow = "1"
@@ -15,6 +15,7 @@ default = []
term = ["crossterm"]

[dependencies]
helix-stdx = { path = "../helix-stdx" }
helix-core = { path = "../helix-core" }
helix-event = { path = "../helix-event" }
helix-loader = { path = "../helix-loader" }
@@ -45,7 +46,6 @@ serde_json = "1.0"
toml = "0.7"
log = "~0.4"

which = "5.0.0"
parking_lot = "0.12.1"
@@ -73,7 +73,7 @@ pub fn get_clipboard_provider() -> Box<dyn ClipboardProvider> {

#[cfg(target_os = "macos")]
pub fn get_clipboard_provider() -> Box<dyn ClipboardProvider> {
use crate::env::{binary_exists, env_var_is_set};
use helix_stdx::env::{binary_exists, env_var_is_set};

if env_var_is_set("TMUX") && binary_exists("tmux") {
command_provider! {
@@ -98,7 +98,7 @@ pub fn get_clipboard_provider() -> Box<dyn ClipboardProvider> {

#[cfg(not(any(windows, target_os = "wasm32", target_os = "macos")))]
pub fn get_clipboard_provider() -> Box<dyn ClipboardProvider> {
use crate::env::{binary_exists, env_var_is_set};
use helix_stdx::env::{binary_exists, env_var_is_set};
use provider::command::is_exit_success;
// TODO: support for user-defined provider, probably when we have plugin support by setting a
// variable?
@@ -36,6 +36,7 @@ use helix_core::{
|
||||
};
|
||||
|
||||
use crate::editor::Config;
|
||||
use crate::events::{DocumentDidChange, SelectionDidChange};
|
||||
use crate::{DocumentId, Editor, Theme, View, ViewId};
|
||||
|
||||
/// 8kB of buffer space for encoding and decoding `Rope`s.
|
||||
@@ -114,19 +115,6 @@ pub struct SavePoint {
|
||||
/// The view this savepoint is associated with
|
||||
pub view: ViewId,
|
||||
revert: Mutex<Transaction>,
|
||||
pub text: Rope,
|
||||
}
|
||||
|
||||
impl SavePoint {
|
||||
pub fn cursor(&self) -> usize {
|
||||
// we always create transactions with selections
|
||||
self.revert
|
||||
.lock()
|
||||
.selection()
|
||||
.unwrap()
|
||||
.primary()
|
||||
.cursor(self.text.slice(..))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Document {
|
||||
@@ -738,7 +726,12 @@ impl Document {
|
||||
if let Some((fmt_cmd, fmt_args)) = self
|
||||
.language_config()
|
||||
.and_then(|c| c.formatter.as_ref())
|
||||
.and_then(|formatter| Some((which::which(&formatter.command).ok()?, &formatter.args)))
|
||||
.and_then(|formatter| {
|
||||
Some((
|
||||
helix_stdx::env::which(&formatter.command).ok()?,
|
||||
&formatter.args,
|
||||
))
|
||||
})
|
||||
{
|
||||
use std::process::Stdio;
|
||||
let text = self.text().clone();
|
||||
@@ -855,7 +848,7 @@ impl Document {
|
||||
let text = self.text().clone();
|
||||
|
||||
let path = match path {
|
||||
Some(path) => helix_core::path::get_canonicalized_path(&path),
|
||||
Some(path) => helix_stdx::path::canonicalize(path),
|
||||
None => {
|
||||
if self.path.is_none() {
|
||||
bail!("Can't save with no path set!");
|
||||
@@ -1049,7 +1042,7 @@ impl Document {
|
||||
}
|
||||
|
||||
pub fn set_path(&mut self, path: Option<&Path>) {
|
||||
let path = path.map(helix_core::path::get_canonicalized_path);
|
||||
let path = path.map(helix_stdx::path::canonicalize);
|
||||
|
||||
// if parent doesn't exist we still want to open the document
|
||||
// and error out when document is saved
|
||||
@@ -1096,6 +1089,10 @@ impl Document {
|
||||
// TODO: use a transaction?
|
||||
self.selections
|
||||
.insert(view_id, selection.ensure_invariants(self.text().slice(..)));
|
||||
helix_event::dispatch(SelectionDidChange {
|
||||
doc: self,
|
||||
view: view_id,
|
||||
})
|
||||
}
|
||||
|
||||
/// Find the origin selection of the text in a document, i.e. where
|
||||
@@ -1149,6 +1146,14 @@ impl Document {
|
||||
let success = transaction.changes().apply(&mut self.text);
|
||||
|
||||
if success {
|
||||
if emit_lsp_notification {
|
||||
helix_event::dispatch(DocumentDidChange {
|
||||
doc: self,
|
||||
view: view_id,
|
||||
old_text: &old_doc,
|
||||
});
|
||||
}
|
||||
|
||||
for selection in self.selections.values_mut() {
|
||||
*selection = selection
|
||||
.clone()
|
||||
@@ -1164,6 +1169,10 @@ impl Document {
|
||||
view_id,
|
||||
selection.clone().ensure_invariants(self.text.slice(..)),
|
||||
);
|
||||
helix_event::dispatch(SelectionDidChange {
|
||||
doc: self,
|
||||
view: view_id,
|
||||
});
|
||||
}
|
||||
|
||||
self.modified_since_accessed = true;
|
||||
@@ -1276,6 +1285,7 @@ impl Document {
|
||||
}
|
||||
|
||||
if emit_lsp_notification {
|
||||
// TODO: move to hook
|
||||
// emit lsp notification
|
||||
for language_server in self.language_servers() {
|
||||
let notify = language_server.text_document_did_change(
|
||||
@@ -1386,7 +1396,6 @@ impl Document {
|
||||
let savepoint = Arc::new(SavePoint {
|
||||
view: view.id,
|
||||
revert: Mutex::new(revert),
|
||||
text: self.text.clone(),
|
||||
});
|
||||
self.savepoints.push(Arc::downgrade(&savepoint));
|
||||
savepoint
|
||||
@@ -1672,7 +1681,7 @@ impl Document {
|
||||
pub fn relative_path(&self) -> Option<PathBuf> {
|
||||
self.path
|
||||
.as_deref()
|
||||
.map(helix_core::path::get_relative_path)
|
||||
.map(helix_stdx::path::get_relative_path)
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> Cow<'static, str> {
|
||||
|
@@ -2,6 +2,7 @@ use crate::{
|
||||
align_view,
|
||||
document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode, SavePoint},
|
||||
graphics::{CursorKind, Rect},
|
||||
handlers::Handlers,
|
||||
info::Info,
|
||||
input::KeyEvent,
|
||||
register::Registers,
|
||||
@@ -30,10 +31,7 @@ use std::{
|
||||
};
|
||||
|
||||
use tokio::{
|
||||
sync::{
|
||||
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
|
||||
oneshot,
|
||||
},
|
||||
sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
|
||||
time::{sleep, Duration, Instant, Sleep},
|
||||
};
|
||||
|
||||
@@ -243,12 +241,19 @@ pub struct Config {
|
||||
/// Set a global text_width
|
||||
pub text_width: usize,
|
||||
/// Time in milliseconds since last keypress before idle timers trigger.
|
||||
/// Used for autocompletion, set to 0 for instant. Defaults to 250ms.
|
||||
/// Used for various UI timeouts. Defaults to 250ms.
|
||||
#[serde(
|
||||
serialize_with = "serialize_duration_millis",
|
||||
deserialize_with = "deserialize_duration_millis"
|
||||
)]
|
||||
pub idle_timeout: Duration,
|
||||
/// Time in milliseconds after typing a word character before auto completions
|
||||
/// are shown, set to 5 for instant. Defaults to 250ms.
|
||||
#[serde(
|
||||
serialize_with = "serialize_duration_millis",
|
||||
deserialize_with = "deserialize_duration_millis"
|
||||
)]
|
||||
pub completion_timeout: Duration,
|
||||
/// Whether to insert the completion suggestion on hover. Defaults to true.
|
||||
pub preview_completion_insert: bool,
|
||||
pub completion_trigger_len: u8,
|
||||
@@ -324,7 +329,7 @@ pub struct TerminalConfig {
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn get_terminal_provider() -> Option<TerminalConfig> {
|
||||
use crate::env::binary_exists;
|
||||
use helix_stdx::env::binary_exists;
|
||||
|
||||
if binary_exists("wt") {
|
||||
return Some(TerminalConfig {
|
||||
@@ -347,7 +352,7 @@ pub fn get_terminal_provider() -> Option<TerminalConfig> {
|
||||
|
||||
#[cfg(not(any(windows, target_os = "wasm32")))]
|
||||
pub fn get_terminal_provider() -> Option<TerminalConfig> {
|
||||
use crate::env::{binary_exists, env_var_is_set};
|
||||
use helix_stdx::env::{binary_exists, env_var_is_set};
|
||||
|
||||
if env_var_is_set("TMUX") && binary_exists("tmux") {
|
||||
return Some(TerminalConfig {
|
||||
@@ -828,6 +833,7 @@ impl Default for Config {
|
||||
auto_format: true,
|
||||
auto_save: false,
|
||||
idle_timeout: Duration::from_millis(250),
|
||||
completion_timeout: Duration::from_millis(250),
|
||||
preview_completion_insert: true,
|
||||
completion_trigger_len: 2,
|
||||
auto_info: true,
|
||||
@@ -952,14 +958,7 @@ pub struct Editor {
|
||||
/// avoid calculating the cursor position multiple
|
||||
/// times during rendering and should not be set by other functions.
|
||||
pub cursor_cache: Cell<Option<Option<Position>>>,
|
||||
/// When a new completion request is sent to the server old
|
||||
/// unfinished request must be dropped. Each completion
|
||||
/// request is associated with a channel that cancels
|
||||
/// when the channel is dropped. That channel is stored
|
||||
/// here. When a new completion request is sent this
|
||||
/// field is set and any old requests are automatically
|
||||
/// canceled as a result
|
||||
pub completion_request_handle: Option<oneshot::Sender<()>>,
|
||||
pub handlers: Handlers,
|
||||
}
|
||||
|
||||
pub type Motion = Box<dyn Fn(&mut Editor)>;
|
||||
@@ -987,13 +986,16 @@ enum ThemeAction {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CompleteAction {
|
||||
Triggered,
|
||||
/// A savepoint of the currently selected completion. The savepoint
|
||||
/// MUST be restored before sending any event to the LSP
|
||||
Selected {
|
||||
savepoint: Arc<SavePoint>,
|
||||
},
|
||||
Applied {
|
||||
trigger_offset: usize,
|
||||
changes: Vec<Change>,
|
||||
},
|
||||
/// A savepoint of the currently selected completion. The savepoint
|
||||
/// MUST be restored before sending any event to the LSP
|
||||
Selected { savepoint: Arc<SavePoint> },
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
@@ -1027,6 +1029,7 @@ impl Editor {
|
||||
theme_loader: Arc<theme::Loader>,
|
||||
syn_loader: Arc<syntax::Loader>,
|
||||
config: Arc<dyn DynAccess<Config>>,
|
||||
handlers: Handlers,
|
||||
) -> Self {
|
||||
let language_servers = helix_lsp::Registry::new(syn_loader.clone());
|
||||
let conf = config.load();
|
||||
@@ -1071,7 +1074,7 @@ impl Editor {
|
||||
config_events: unbounded_channel(),
|
||||
needs_redraw: false,
|
||||
cursor_cache: Cell::new(None),
|
||||
completion_request_handle: None,
|
||||
handlers,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1144,7 +1147,7 @@ impl Editor {
|
||||
#[inline]
|
||||
pub fn set_error<T: Into<Cow<'static, str>>>(&mut self, error: T) {
|
||||
let error = error.into();
|
||||
log::error!("editor error: {}", error);
|
||||
log::debug!("editor error: {}", error);
|
||||
self.status_msg = Some((error, Severity::Error));
|
||||
}
|
||||
|
||||
@@ -1235,12 +1238,19 @@ impl Editor {
|
||||
.filter_map(|(lang, client)| match client {
|
||||
Ok(client) => Some((lang, client)),
|
||||
Err(err) => {
|
||||
log::error!(
|
||||
"Failed to initialize the language servers for `{}` - `{}` {{ {} }}",
|
||||
language.scope(),
|
||||
lang,
|
||||
err
|
||||
);
|
||||
if let helix_lsp::Error::ExecutableNotFound(err) = err {
|
||||
// Silence by default since some language servers might just not be installed
|
||||
log::debug!(
|
||||
"Language server not found for `{}` {} {}", language.scope(), lang, err,
|
||||
);
|
||||
} else {
|
||||
log::error!(
|
||||
"Failed to initialize the language servers for `{}` - `{}` {{ {} }}",
|
||||
language.scope(),
|
||||
lang,
|
||||
err
|
||||
);
|
||||
}
|
||||
None
|
||||
}
|
||||
})
|
||||
@@ -1464,7 +1474,7 @@ impl Editor {
|
||||
|
||||
// ??? possible use for integration tests
|
||||
pub fn open(&mut self, path: &Path, action: Action) -> Result<DocumentId, Error> {
|
||||
let path = helix_core::path::get_canonicalized_path(path);
|
||||
let path = helix_stdx::path::canonicalize(path);
|
||||
let id = self.document_by_path(&path).map(|doc| doc.id);
|
||||
|
||||
let id = if let Some(id) = id {
|
||||
|
@@ -1,8 +0,0 @@
pub fn binary_exists(binary_name: &str) -> bool {
which::which(binary_name).is_ok()
}

#[cfg(not(windows))]
pub fn env_var_is_set(env_var_name: &str) -> bool {
std::env::var_os(env_var_name).is_some()
}
helix-view/src/events.rs (new file, +9)
@@ -0,0 +1,9 @@
use helix_core::Rope;
use helix_event::events;

use crate::{Document, ViewId};

events! {
DocumentDidChange<'a> { doc: &'a mut Document, view: ViewId, old_text: &'a Rope }
SelectionDidChange<'a> { doc: &'a mut Document, view: ViewId }
}
helix-view/src/handlers.rs (new file, +41)
@@ -0,0 +1,41 @@
use helix_event::send_blocking;
use tokio::sync::mpsc::Sender;

use crate::handlers::lsp::SignatureHelpInvoked;
use crate::{DocumentId, Editor, ViewId};

pub mod dap;
pub mod lsp;

pub struct Handlers {
// only public because most of the actual implementation is in helix-term right now :/
pub completions: Sender<lsp::CompletionEvent>,
pub signature_hints: Sender<lsp::SignatureHelpEvent>,
}

impl Handlers {
/// Manually trigger completion (c-x)
pub fn trigger_completions(&self, trigger_pos: usize, doc: DocumentId, view: ViewId) {
send_blocking(
&self.completions,
lsp::CompletionEvent::ManualTrigger {
cursor: trigger_pos,
doc,
view,
},
);
}

pub fn trigger_signature_help(&self, invocation: SignatureHelpInvoked, editor: &Editor) {
let event = match invocation {
SignatureHelpInvoked::Automatic => {
if !editor.config().lsp.auto_signature_help {
return;
}
lsp::SignatureHelpEvent::Trigger
}
SignatureHelpInvoked::Manual => lsp::SignatureHelpEvent::Invoked,
};
send_blocking(&self.signature_hints, event)
}
}
@@ -1 +1,41 @@
use crate::{DocumentId, ViewId};

pub enum CompletionEvent {
/// Auto completion was triggered by typing a word char
AutoTrigger {
cursor: usize,
doc: DocumentId,
view: ViewId,
},
/// Auto completion was triggered by typing a trigger char
/// specified by the LSP
TriggerChar {
cursor: usize,
doc: DocumentId,
view: ViewId,
},
/// A completion was manually requested (c-x)
ManualTrigger {
cursor: usize,
doc: DocumentId,
view: ViewId,
},
/// Some text was deleted and the cursor is now at `pos`
DeleteText { cursor: usize },
/// Invalidate the current auto completion trigger
Cancel,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum SignatureHelpInvoked {
Automatic,
Manual,
}

pub enum SignatureHelpEvent {
Invoked,
Trigger,
ReTrigger,
Cancel,
RequestComplete { open: bool },
}
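For orientation, a hedged sketch of how a consumer might interpret each `CompletionEvent` variant (the function is hypothetical; only the enum comes from the file above):

fn describe(event: &CompletionEvent) -> &'static str {
    match event {
        CompletionEvent::AutoTrigger { .. } => "word char typed: debounce, then request completions",
        CompletionEvent::TriggerChar { .. } => "server trigger char typed: request immediately",
        CompletionEvent::ManualTrigger { .. } => "c-x pressed: request immediately",
        CompletionEvent::DeleteText { .. } => "text deleted: re-filter or close the menu",
        CompletionEvent::Cancel => "invalidate any in-flight trigger",
    }
}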
@@ -1,17 +1,14 @@
#[macro_use]
pub mod macros;

pub mod base64;
pub mod clipboard;
pub mod document;
pub mod editor;
pub mod env;
pub mod events;
pub mod graphics;
pub mod gutter;
pub mod handlers {
pub mod dap;
pub mod lsp;
}
pub mod base64;
pub mod handlers;
pub mod info;
pub mod input;
pub mod keyboard;
@@ -796,6 +796,7 @@ file-types = [
"sh",
"bash",
"zsh",
".bash_history",
".bash_login",
".bash_logout",
".bash_profile",
@@ -1119,7 +1120,7 @@ name = "purescript"
scope = "source.purescript"
injection-regex = "purescript"
file-types = ["purs"]
roots = ["spago.dhall", "bower.json"]
roots = ["spago.yaml", "spago.dhall", "bower.json"]
comment-token = "--"
language-servers = [ "purescript-language-server" ]
indent = { tab-width = 2, unit = " " }
@@ -1393,7 +1394,7 @@ language-servers = [ "metals" ]

[[grammar]]
name = "scala"
source = { git = "https://github.com/tree-sitter/tree-sitter-scala", rev = "23d21310fe4ab4b3273e7a6810e781224a3e7fe1" }
source = { git = "https://github.com/tree-sitter/tree-sitter-scala", rev = "7891815f42dca9ed6aeb464c2edc39d479ab965c" }

[[language]]
name = "dockerfile"
@@ -2864,7 +2865,7 @@ indent = { tab-width = 2, unit = " " }

[[grammar]]
name = "typst"
source = { git = "https://github.com/uben0/tree-sitter-typst", rev = "e35aa22395fdde82bbc4b5700c324ce346dfc9e5" }
source = { git = "https://github.com/uben0/tree-sitter-typst", rev = "ecf8596336857adfcd5f7cbb3b2aa11a67badc37" }

[[language]]
name = "nunjucks"
@@ -10,23 +10,37 @@
(variable_name) @variable.other.member

[
"if"
"then"
"else"
"elif"
"fi"
"case"
"in"
"esac"
] @keyword.control.conditional

[
"for"
"do"
"done"
"elif"
"else"
"esac"
"export"
"fi"
"for"
"function"
"if"
"in"
"unset"
"select"
"until"
"while"
"then"
] @keyword.control.repeat

[
"declare"
"typeset"
"export"
"readonly"
"local"
"unset"
"unsetenv"
] @keyword

"function" @keyword.function

(comment) @comment

(function_definition name: (word) @function)
runtime/queries/css/indents.scm (new file, +7)
@@ -0,0 +1,7 @@
[
(block)
] @indent

[
"}"
] @outdent
runtime/queries/dart/textobjects.scm (new file, +68)
@@ -0,0 +1,68 @@
|
||||
(class_definition
|
||||
body: (_) @class.inside) @class.around
|
||||
|
||||
(mixin_declaration
|
||||
(class_body) @class.inside) @class.around
|
||||
|
||||
(extension_declaration
|
||||
(extension_body) @class.inside) @class.around
|
||||
|
||||
(enum_declaration
|
||||
body: (_) @class.inside) @class.around
|
||||
|
||||
(type_alias) @class.around
|
||||
|
||||
(_
|
||||
(
|
||||
[
|
||||
(getter_signature)
|
||||
(setter_signature)
|
||||
(function_signature)
|
||||
(method_signature)
|
||||
(constructor_signature)
|
||||
]
|
||||
.
|
||||
(function_body) @function.inside @function.around
|
||||
) @function.around
|
||||
)
|
||||
|
||||
(declaration
|
||||
[
|
||||
(constant_constructor_signature)
|
||||
(constructor_signature)
|
||||
(factory_constructor_signature)
|
||||
(redirecting_factory_constructor_signature)
|
||||
(getter_signature)
|
||||
(setter_signature)
|
||||
(operator_signature)
|
||||
(function_signature)
|
||||
]
|
||||
) @function.around
|
||||
|
||||
(lambda_expression
|
||||
body: (_) @function.inside
|
||||
) @function.around
|
||||
|
||||
(function_expression
|
||||
body: (_) @function.inside
|
||||
) @function.around
|
||||
|
||||
[
|
||||
(comment)
|
||||
(documentation_comment)
|
||||
] @comment.inside
|
||||
|
||||
(comment)+ @comment.around
|
||||
|
||||
(documentation_comment)+ @comment.around
|
||||
|
||||
(formal_parameter) @parameter.inside
|
||||
|
||||
(formal_parameter_list) @parameter.around
|
||||
|
||||
(expression_statement
|
||||
((identifier) @_name (#any-of? @_name "test" "testWidgets"))
|
||||
.
|
||||
(selector (argument_part (arguments . (_) . (argument) @test.inside)))
|
||||
) @test.around
|
||||
|
runtime/queries/make/indents.scm (new file, +8)
@@ -0,0 +1,8 @@
[
(define_directive)
(rule)
] @indent

[
"endef"
] @outdent
@@ -263,7 +263,7 @@

"return" @keyword.control.return

(comment) @comment
[(comment) (block_comment)] @comment

;; `case` is a conditional keyword in case_block
@@ -51,8 +51,8 @@

; Comment queries

(comment) @comment.inside
(comment) @comment.around ; Does not match consecutive block comments
[(comment) (block_comment)] @comment.inside
[(comment) (block_comment)] @comment.around ; Does not match consecutive block comments


; Test queries
@@ -41,7 +41,7 @@
(capture) @label

((predicate_name) @function
(#match? @function "^#(eq\\?|match\\?|is\\?|is-not\\?|not-same-line\\?|not-kind-eq\\?|set!|select-adjacent!|strip!)$"))
(#any-of? @function "#eq?" "#match?" "#any-of?" "#not-any-of?" "#is?" "#is-not?" "#not-same-line?" "#not-kind-eq?" "#set!" "#select-adjacent!" "#strip!"))
(predicate_name) @error

(escape_sequence) @constant.character.escape
@@ -55,7 +55,12 @@
; MARKUP
(item "-" @markup.list)
(term ["/" ":"] @markup.list)
(heading ["=" "==" "===" "====" "====="] @markup.heading.marker) @markup.heading
(heading "=" @markup.heading.marker) @markup.heading.1
(heading "==" @markup.heading.marker) @markup.heading.2
(heading "===" @markup.heading.marker) @markup.heading.3
(heading "====" @markup.heading.marker) @markup.heading.4
(heading "=====" @markup.heading.marker) @markup.heading.5
(heading "======" @markup.heading.marker) @markup.heading.6
(url) @tag
(emph) @markup.italic
(strong) @markup.bold
@@ -3,4 +3,5 @@

(raw_blck
lang: (ident) @injection.language
(blob) @injection.content)
(blob) @injection.content)
@@ -77,6 +77,10 @@
"ui.statusline.insert" = { fg = "light-black", bg = "green" }
"ui.statusline.select" = { fg = "light-black", bg = "purple" }

"ui.bufferline" = { fg = "light-gray", bg = "light-black" }
"ui.bufferline.active" = { fg = "light-black", bg = "blue", underline = { color = "light-black", style = "line" } }
"ui.bufferline.background" = { bg = "light-black" }

"ui.text" = { fg = "white" }
"ui.text.focus" = { fg = "white", bg = "light-black", modifiers = ["bold"] }

@@ -75,6 +75,11 @@
"ui.statusline.normal" = { fg = "light-black", bg = "purple" }
"ui.statusline.insert" = { fg = "light-black", bg = "green" }
"ui.statusline.select" = { fg = "light-black", bg = "cyan" }

"ui.bufferline" = { fg = "light-gray", bg = "light-black" }
"ui.bufferline.active" = { fg = "light-black", bg = "blue", underline = { color = "light-black", style = "line" } }
"ui.bufferline.background" = { bg = "light-black" }

"ui.text" = { fg = "white" }
"ui.text.focus" = { fg = "white", bg = "light-black", modifiers = ["bold"] }

@@ -72,6 +72,7 @@
"ui.menu.selected" = { fg = "bg0", bg = "green" }
"ui.virtual.whitespace" = { fg = "grey_dim" }
"ui.virtual.ruler" = { bg = "grey_dim" }
"ui.virtual.inlay-hint" = { fg = "grey_dim" }

info = { fg = 'green', bg = 'bg2' }
hint = { fg = 'blue', bg = 'bg2', modifiers = ['bold'] }