Mirror of https://github.com/helix-editor/helix.git
Synced 2025-10-06 00:13:28 +02:00

Compare commits: inlay-hint...spellbook
193 commits (SHA1):

764097593a e6722d7a0e 95ad7fb850 35f3513abe 036729211a d3fb8fc9b8 684e108fd0 3c6c221d45 6b94d70f20 1491cbc8f3
e11794be37 fba644f2b4 24fe989596 fed3edcab7 4099465632 9100bce9aa f5dc8245ea 362e97e927 ba54b6afe4 837627dd8a
1246549afd ada8004ea5 205e7ece70 1315b7e2b1 52192ae29e fba1a6188a b90d8960a8 6e4ec96101 62f270e5d2 3b7aaddb13
ab97585b69 9dbfb9b4eb 091f19f67c ae3eac8aeb fbe9785613 7092e30f8d 705d467932 05a4d05646 2b26d27416 e773d6cc92
633c5fbf0f b75e95862c f4b488e380 7410fe35a3 637274c4d4 01341cbbf6 b1f4717356 25b299abc5 4dd4ba798c ca4ae7f287
d375f1e7f4 8d2870b94a f6878f62f7 6c43dc4962 1ea9050a5e 2baff46b25 921ca08e1b 17fb12bcf3 67f1fe20c3 8961ae1dc6
3366db0afb 733ebcdaeb 2bd7452fe0 7dcddf98c6 c9e7b0f84f 2fbe7fc5b5 12523cd126 8d58f6ce8d 702a961517 1023e8f964
223ceec10a cb1ec1b27e 4098151591 1edf98262c 237d875e7d b70b8df916 ae0dd313bd 76029e5840 3a6c9747b8 ebf96bd469
5a1dcc2429 be1bf2f909 05ae617e1c e606652a96 df02ef6a99 3ceae88c3a b4e51ef895 f157a918a3 a7c3a43069 702b1d0a0f
b0528bbac4 09bc67ad6d 6be38642f4 f46222ced3 9bb80a74e1 be1cf090c3 a3b64b6da2 aea53523dd 24e3ccc31b c94fde8d1c
84e95d35ee 447a6d3299 47547e94ad 908b9edf28 63a1a94d92 bfd2c72715 4c8600967c 313ef30f64 9bb370c91e cb1ecc9128
7a6bc53528 60a03a35c6 e4ef096945 9e7a6a5dbd 1460a086df 51d3b15557 e53462c78c fb45017a26 cbac427383 46cb177792
4784650ccf ece12dd74d 72932a391b 4c630c148a ac3c6ebaff 12139a4c30 aa3fad84ef 69b9db2fbb 1c32fb2d4d b42e1d20d2
949d9e4433 fbc4e5798e 47cdd23e50 9f3b193743 99b57181d5 448e3c2ef5 95c8d9c9e9 8580d35de9 c5e9dda269 01f7e636d5
2ec59f8ff6 23e16264b3 0354ceb95f 626407395a 52d4d775ce 0815b52e09 523e8aa781 3bfec18a45 0f1af75f76 37b5d8ba99
795040910b 0ca01a9649 8f30f39c6a 31cc2110ec bce166290a b1345b302d 340934db92 d0275a554a 4a7939928e 8b952bb1d5
d24e4fcf0f 34aa4d41c6 032c7b897d 5d16aae58e a799794623 0609b06638 4130b162a7 46f7cdb5a9 9cfb8afa99 29789f2a9f
efdcf34b79 e9a3dcd858 effe849cf4 130f725026 42de785779 6c42ed1bd5 5b72b59448 1bc45c8b3a f857a98671 5a671e65fd
994b750dd4 1fc19c6d8e d0c5a2044d 2bb0d52f3e 7ebf650029 db187c4870 e148d8b311 fb815e2c6f e735485277 bb96a535fc
01fce51c45 7929c0719d 68d7308e25
.gitattributes (vendored): 1 change

@@ -8,4 +8,5 @@
 *.md text diff=markdown
 book/theme/highlight.js linguist-vendored
 runtime/queries/**/*.scm linguist-language=Tree-sitter-Query
+Cargo.lock text
.github/ISSUE_TEMPLATE/blank_issue.md (vendored): 4 changes

@@ -1,4 +0,0 @@
----
-name: Blank Issue
-about: Create a blank issue.
----
.github/workflows/build.yml (vendored): 21 changes

@@ -9,7 +9,7 @@ on:
     - cron: "00 01 * * *"

 env:
-  MSRV: "1.76"
+  MSRV: "1.82"
   # This key can be changed to bust the cache of tree-sitter grammars.
   GRAMMAR_CACHE_VERSION: ""

@@ -35,8 +35,8 @@ jobs:
         uses: actions/cache@v4
         with:
           path: runtime/grammars
-          key: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
-          restore-keys: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-
+          key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-

       - name: Run cargo check
         run: cargo check

@@ -45,6 +45,7 @@ jobs:
     name: Test Suite
     runs-on: ${{ matrix.os }}
     if: github.repository == 'helix-editor/helix' || github.event_name != 'schedule'
+    timeout-minutes: 30
     env:
       RUST_BACKTRACE: 1
       HELIX_LOG_LEVEL: info

@@ -65,8 +66,8 @@ jobs:
         uses: actions/cache@v4
         with:
           path: runtime/grammars
-          key: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
-          restore-keys: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-
+          key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-

       - name: Run cargo test
         run: cargo test --workspace

@@ -76,7 +77,7 @@ jobs:

     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-latest, macos-latest, windows-latest, ubuntu-24.04-arm]

   lints:
     name: Lints

@@ -100,8 +101,8 @@ jobs:
         uses: actions/cache@v4
         with:
           path: runtime/grammars
-          key: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
-          restore-keys: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-
+          key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-

       - name: Run cargo fmt
         run: cargo fmt --all --check

@@ -135,8 +136,8 @@ jobs:
         uses: actions/cache@v4
         with:
           path: runtime/grammars
-          key: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
-          restore-keys: ${{ runner.os }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-
+          key: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-${{ hashFiles('languages.toml') }}
+          restore-keys: ${{ runner.os }}-${{ runner.arch }}-stable-v${{ env.GRAMMAR_CACHE_VERSION }}-tree-sitter-grammars-

       - name: Validate queries
         run: cargo xtask query-check
.github/workflows/release.yml (vendored): 8 changes

@@ -58,18 +58,18 @@ jobs:
     strategy:
       fail-fast: false # don't fail other jobs if one fails
       matrix:
-        build: [x86_64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc
+        build: [x86_64-linux, aarch64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc
         include:
           - build: x86_64-linux
-            os: ubuntu-22.04
+            os: ubuntu-24.04
             rust: stable
             target: x86_64-unknown-linux-gnu
             cross: false
           - build: aarch64-linux
-            os: ubuntu-22.04
+            os: ubuntu-24.04-arm
             rust: stable
             target: aarch64-unknown-linux-gnu
-            cross: true
+            cross: false
           # - build: riscv64-linux
           #   os: ubuntu-22.04
           #   rust: stable
Cargo.lock (generated): 705 changes; file diff suppressed because it is too large.
@@ -37,16 +37,17 @@ package.helix-tui.opt-level = 2
 package.helix-term.opt-level = 2

 [workspace.dependencies]
-tree-sitter = { version = "0.22" }
+tree-house = { version = "0.3.0", default-features = false }
 nucleo = "0.5.0"
 slotmap = "1.0.7"
 thiserror = "2.0"
-tempfile = "3.19.1"
+tempfile = "3.20.0"
 bitflags = "2.9"
 unicode-segmentation = "1.2"
 ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
 foldhash = "0.1"
 parking_lot = "0.12"
+spellbook = "0.3.1"

 [workspace.package]
 version = "25.1.1"

@@ -56,4 +57,4 @@ categories = ["editor"]
 repository = "https://github.com/helix-editor/helix"
 homepage = "https://helix-editor.com"
 license = "MPL-2.0"
-rust-version = "1.76"
+rust-version = "1.82"
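The `spellbook` crate added to the workspace dependencies above is the Hunspell-compatible spell checker that the `spellbook` branch integrates. A minimal sketch of the crate's check API, assuming Hunspell `en_US` dictionary files on disk (the paths are illustrative, not taken from this diff):

```rust
// Sketch only: parse a Hunspell dictionary pair and check words with
// spellbook 0.3. The en_US file paths are assumptions for illustration.
fn main() {
    let aff = std::fs::read_to_string("en_US.aff").expect("read .aff");
    let dic = std::fs::read_to_string("en_US.dic").expect("read .dic");
    let dict = spellbook::Dictionary::new(&aff, &dic).expect("parse dictionary");

    assert!(dict.check("hello"));
    assert!(!dict.check("helol"));
}
```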
@@ -1,7 +1,6 @@
 [book]
 authors = ["Blaž Hrastnik"]
 language = "en"
-multilingual = false
 src = "src"

 [output.html]
@@ -35,10 +35,19 @@ RUSTFLAGS="-C target-feature=-crt-static"
 2. Compile from source:

 ```sh
+# Reproducible
 cargo install --path helix-term --locked
 ```
+```sh
+# Optimized
+cargo install \
+    --profile opt \
+    --config 'build.rustflags="-C target-cpu=native"' \
+    --path helix-term \
+    --locked
+```

-This command will create the `hx` executable and construct the tree-sitter
+Either command will create the `hx` executable and construct the tree-sitter
 grammars in the local `runtime` folder.

 > 💡 If you do not want to fetch or build grammars, set an environment variable `HELIX_DISABLE_AUTO_GRAMMAR_BUILD`

@@ -182,13 +191,13 @@ cargo deb -- --locked
 ```

 > 💡 This locks you into the `--release` profile. But you can also build helix in any way you like.
-> As long as you leave a `target/release/hx` file, it will get packaged with `cargo deb --no-build`
+> As long as you leave a `target/release/hx` file, it will get packaged with `cargo deb --no-build`

-> 💡 Don't worry about the repeated
+> 💡 Don't worry about the following:
 > ```
 > warning: Failed to find dependency specification
 > ```
-> warnings. Cargo deb just reports which packaged files it didn't derive dependencies for. But
+> Cargo deb just reports which packaged files it didn't derive dependencies for. But
 > so far the dependency deriving seams very good, even if some of the grammar files are skipped.

 You can find the resulted `.deb` in `target/debian/`. It should contain everything it needs, including the
@@ -47,6 +47,8 @@ The following variables are supported:
 | `cursor_column` | The column number of the primary cursor in the currently focused document, starting at 1. This is counted as the number of grapheme clusters from the start of the line rather than bytes or codepoints. |
 | `buffer_name` | The relative path of the currently focused document. `[scratch]` is expanded instead for scratch buffers. |
 | `line_ending` | A string containing the line ending of the currently focused document. For example on Unix systems this is usually a line-feed character (`\n`) but on Windows systems this may be a carriage-return plus a line-feed (`\r\n`). The line ending kind of the currently focused document can be inspected with the `:line-ending` command. |
+| `language` | A string containing the language name of the currently focused document. |
+| `selection` | A string containing the contents of the primary selection of the currently focused document. |

 Aside from editor variables, the following expansions may be used:
@@ -104,6 +104,8 @@ separator = "│"
 mode.normal = "NORMAL"
 mode.insert = "INSERT"
 mode.select = "SELECT"
+diagnostics = ["warning", "error"]
+workspace-diagnostics = ["warning", "error"]
 ```

 The `[editor.statusline]` key takes the following sub-keys:

@@ -116,6 +118,8 @@ The `[editor.statusline]` key takes the following sub-keys:
 | `mode.normal` | The text shown in the `mode` element for normal mode | `"NOR"` |
 | `mode.insert` | The text shown in the `mode` element for insert mode | `"INS"` |
 | `mode.select` | The text shown in the `mode` element for select mode | `"SEL"` |
+| `diagnostics` | A list of severities which are displayed for the current buffer | `["warning", "error"]` |
+| `workspace-diagnostics` | A list of severities which are displayed for the workspace | `["warning", "error"]` |

 The following statusline elements can be configured:

@@ -129,12 +133,13 @@ The following statusline elements can be configured:
 | `file-modification-indicator` | The indicator to show whether the file is modified (a `[+]` appears when there are unsaved changes) |
 | `file-encoding` | The encoding of the opened file if it differs from UTF-8 |
 | `file-line-ending` | The file line endings (CRLF or LF) |
+| `file-indent-style` | The file indentation style |
 | `read-only-indicator` | An indicator that shows `[readonly]` when a file cannot be written |
 | `total-line-numbers` | The total line numbers of the opened file |
 | `file-type` | The type of the opened file |
 | `diagnostics` | The number of warnings and/or errors |
 | `workspace-diagnostics` | The number of warnings and/or errors on workspace |
-| `selections` | The number of active selections |
+| `selections` | The primary selection index out of the number of active selections |
 | `primary-selection-length` | The number of characters currently in primary selection |
 | `position` | The cursor position |
 | `position-percentage` | The cursor position as a percentage of the total number of lines |

@@ -152,6 +157,7 @@
 | `display-progress-messages` | Display LSP progress messages below statusline[^1] | `false` |
 | `auto-signature-help` | Enable automatic popup of signature help (parameter hints) | `true` |
 | `display-inlay-hints` | Display inlay hints[^2] | `false` |
+| `inlay-hints-length-limit` | Maximum displayed length (non-zero number) of inlay hints | Unset by default |
 | `display-color-swatches` | Show color swatches next to colors | `true` |
 | `display-signature-help-docs` | Display docs under signature help popup | `true` |
 | `snippets` | Enables snippet completions. Requires a server restart (`:lsp-restart`) to take effect after `:config-reload`/`:set`. | `true` |
@@ -3,7 +3,8 @@
 | ada | ✓ | ✓ | | `ada_language_server` |
 | adl | ✓ | ✓ | ✓ | |
 | agda | ✓ | | | |
-| amber | ✓ | | | |
+| alloy | ✓ | | | |
+| amber | ✓ | | | `amber-lsp` |
 | astro | ✓ | | | `astro-ls` |
 | awk | ✓ | ✓ | | `awk-language-server` |
 | bash | ✓ | ✓ | ✓ | `bash-language-server` |

@@ -21,8 +22,9 @@
 | capnp | ✓ | | ✓ | |
 | cel | ✓ | | | |
 | circom | ✓ | | | `circom-lsp` |
+| clarity | ✓ | | | `clarinet` |
 | clojure | ✓ | | | `clojure-lsp` |
-| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
+| cmake | ✓ | ✓ | ✓ | `neocmakelsp`, `cmake-language-server` |
 | codeql | ✓ | ✓ | | `codeql` |
 | comment | ✓ | | | |
 | common-lisp | ✓ | | ✓ | `cl-lsp` |

@@ -36,6 +38,7 @@
 | d | ✓ | ✓ | ✓ | `serve-d` |
 | dart | ✓ | ✓ | ✓ | `dart` |
 | dbml | ✓ | | | |
+| debian | ✓ | | | |
 | devicetree | ✓ | | | |
 | dhall | ✓ | ✓ | | `dhall-lsp-server` |
 | diff | ✓ | | | |

@@ -45,6 +48,7 @@
 | dot | ✓ | | | `dot-language-server` |
 | dtd | ✓ | | | |
 | dune | ✓ | | | |
+| dunstrc | ✓ | | | |
 | earthfile | ✓ | ✓ | ✓ | `earthlyls` |
 | edoc | ✓ | | | |
 | eex | ✓ | | | |

@@ -57,6 +61,7 @@
 | erb | ✓ | | | |
 | erlang | ✓ | ✓ | | `erlang_ls`, `elp` |
 | esdl | ✓ | | | |
 | fennel | ✓ | | | `fennel-ls` |
+| fga | ✓ | ✓ | ✓ | |
 | fidl | ✓ | | | |
 | fish | ✓ | ✓ | ✓ | `fish-lsp` |

@@ -118,7 +123,7 @@
 | jsonnet | ✓ | | | `jsonnet-language-server` |
 | jsx | ✓ | ✓ | ✓ | `typescript-language-server` |
 | julia | ✓ | ✓ | ✓ | `julia` |
-| just | ✓ | ✓ | ✓ | |
+| just | ✓ | ✓ | ✓ | `just-lsp` |
 | kdl | ✓ | ✓ | ✓ | |
 | koka | ✓ | | ✓ | `koka` |
 | kotlin | ✓ | ✓ | ✓ | `kotlin-language-server` |

@@ -134,16 +139,18 @@
 | log | ✓ | | | |
 | lpf | ✓ | | | |
 | lua | ✓ | ✓ | ✓ | `lua-language-server` |
+| luau | ✓ | ✓ | ✓ | `luau-lsp` |
 | mail | ✓ | ✓ | | |
 | make | ✓ | | ✓ | |
 | markdoc | ✓ | | | `markdoc-ls` |
 | markdown | ✓ | | | `marksman`, `markdown-oxide` |
+| markdown-rustdoc | ✓ | | | |
 | markdown.inline | ✓ | | | |
 | matlab | ✓ | ✓ | ✓ | |
 | mermaid | ✓ | | | |
 | meson | ✓ | | ✓ | `mesonlsp` |
 | mint | | | | `mint` |
-| mojo | ✓ | ✓ | ✓ | `magic` |
+| mojo | ✓ | ✓ | ✓ | `pixi` |
 | move | ✓ | | | |
 | msbuild | ✓ | | ✓ | |
 | nasm | ✓ | ✓ | | `asm-lsp` |

@@ -175,12 +182,14 @@
 | ponylang | ✓ | ✓ | ✓ | |
 | powershell | ✓ | | | |
 | prisma | ✓ | ✓ | | `prisma-language-server` |
-| prolog | | | | `swipl` |
+| prolog | ✓ | | ✓ | `swipl` |
 | protobuf | ✓ | ✓ | ✓ | `buf`, `pb`, `protols` |
 | prql | ✓ | | | |
 | pug | ✓ | | | |
 | purescript | ✓ | ✓ | | `purescript-language-server` |
-| python | ✓ | ✓ | ✓ | `ruff`, `jedi-language-server`, `pylsp` |
+| python | ✓ | ✓ | ✓ | `ty`, `ruff`, `jedi-language-server`, `pylsp` |
 | qml | ✓ | | ✓ | `qmlls` |
+| quarto | ✓ | | ✓ | |
+| quint | ✓ | | | `quint-language-server` |
 | r | ✓ | | | `R` |
 | racket | ✓ | | ✓ | `racket` |

@@ -193,10 +202,12 @@
 | rst | ✓ | | | |
 | ruby | ✓ | ✓ | ✓ | `ruby-lsp`, `solargraph` |
 | rust | ✓ | ✓ | ✓ | `rust-analyzer` |
+| rust-format-args | ✓ | | | |
 | sage | ✓ | ✓ | | |
 | scala | ✓ | ✓ | ✓ | `metals` |
 | scheme | ✓ | | ✓ | |
 | scss | ✓ | | | `vscode-css-language-server` |
+| slang | ✓ | ✓ | ✓ | `slangd` |
 | slint | ✓ | ✓ | ✓ | `slint-lsp` |
 | smali | ✓ | | ✓ | |
 | smithy | ✓ | | | `cs` |

@@ -227,7 +238,7 @@
 | thrift | ✓ | | | |
 | tlaplus | ✓ | | | |
 | todotxt | ✓ | | | |
-| toml | ✓ | ✓ | | `taplo` |
+| toml | ✓ | ✓ | | `taplo`, `tombi` |
 | tsq | ✓ | | | `ts_query_ls` |
 | tsx | ✓ | ✓ | ✓ | `typescript-language-server` |
 | twig | ✓ | | | |

@@ -248,6 +259,7 @@
 | wat | ✓ | | | `wat_server` |
 | webc | ✓ | | | |
 | werk | ✓ | | | |
+| wesl | ✓ | ✓ | | |
 | wgsl | ✓ | | | `wgsl-analyzer` |
 | wit | ✓ | | ✓ | |
 | wren | ✓ | ✓ | ✓ | |
@@ -126,8 +126,10 @@
 | `add_newline_below` | Add newline below | normal: `` ]<space> ``, select: `` ]<space> `` |
 | `goto_type_definition` | Goto type definition | normal: `` gy ``, select: `` gy `` |
 | `goto_implementation` | Goto implementation | normal: `` gi ``, select: `` gi `` |
-| `goto_file_start` | Goto line number <n> else file start | normal: `` gg ``, select: `` gg `` |
+| `goto_file_start` | Goto line number <n> else file start | normal: `` gg `` |
 | `goto_file_end` | Goto file end | |
+| `extend_to_file_start` | Extend to line number<n> else file start | select: `` gg `` |
+| `extend_to_file_end` | Extend to file end | |
 | `goto_file` | Goto files/URLs in selections | normal: `` gf ``, select: `` gf `` |
 | `goto_file_hsplit` | Goto files in selections (hsplit) | normal: `` <C-w>f ``, `` <space>wf ``, select: `` <C-w>f ``, `` <space>wf `` |
 | `goto_file_vsplit` | Goto files in selections (vsplit) | normal: `` <C-w>F ``, `` <space>wF ``, select: `` <C-w>F ``, `` <space>wF `` |

@@ -139,7 +141,8 @@
 | `goto_last_modified_file` | Goto last modified file | normal: `` gm ``, select: `` gm `` |
 | `goto_last_modification` | Goto last modification | normal: `` g. ``, select: `` g. `` |
 | `goto_line` | Goto line | normal: `` G ``, select: `` G `` |
-| `goto_last_line` | Goto last line | normal: `` ge ``, select: `` ge `` |
+| `goto_last_line` | Goto last line | normal: `` ge `` |
+| `extend_to_last_line` | Extend to last line | select: `` ge `` |
 | `goto_first_diag` | Goto first diagnostic | normal: `` [D ``, select: `` [D `` |
 | `goto_last_diag` | Goto last diagnostic | normal: `` ]D ``, select: `` ]D `` |
 | `goto_next_diag` | Goto next diagnostic | normal: `` ]d ``, select: `` ]d `` |

@@ -150,6 +153,8 @@
 | `goto_last_change` | Goto last change | normal: `` ]G ``, select: `` ]G `` |
 | `goto_line_start` | Goto line start | normal: `` gh ``, `` <home> ``, select: `` gh ``, insert: `` <home> `` |
 | `goto_line_end` | Goto line end | normal: `` gl ``, `` <end> ``, select: `` gl `` |
+| `goto_column` | Goto column | normal: `` g\| `` |
+| `extend_to_column` | Extend to column | select: `` g\| `` |
 | `goto_next_buffer` | Goto next buffer | normal: `` gn ``, select: `` gn `` |
 | `goto_previous_buffer` | Goto previous buffer | normal: `` gp ``, select: `` gp `` |
 | `goto_line_end_newline` | Goto newline at line end | insert: `` <end> `` |

@@ -296,5 +301,7 @@
 | `command_palette` | Open command palette | normal: `` <space>? ``, select: `` <space>? `` |
 | `goto_word` | Jump to a two-character label | normal: `` gw `` |
 | `extend_to_word` | Extend to a two-character label | select: `` gw `` |
-| `goto_next_tabstop` | goto next snippet placeholder | |
-| `goto_prev_tabstop` | goto next snippet placeholder | |
+| `goto_next_tabstop` | Goto next snippet placeholder | |
+| `goto_prev_tabstop` | Goto next snippet placeholder | |
+| `rotate_selections_first` | Make the first selection your primary one | |
+| `rotate_selections_last` | Make the last selection your primary one | |
@@ -78,9 +78,9 @@
 | `:log-open` | Open the helix log file. |
 | `:insert-output` | Run shell command, inserting output before each selection. |
 | `:append-output` | Run shell command, appending output after each selection. |
-| `:pipe` | Pipe each selection to the shell command. |
+| `:pipe`, `:|` | Pipe each selection to the shell command. |
 | `:pipe-to` | Pipe each selection to the shell command, ignoring output. |
-| `:run-shell-command`, `:sh` | Run a shell command |
+| `:run-shell-command`, `:sh`, `:!` | Run a shell command |
 | `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. |
 | `:clear-register` | Clear given register. If no argument is provided, clear all registers. |
 | `:redraw` | Clear and re-render the whole UI |
@@ -4,11 +4,16 @@ Writing language injection queries allows one to highlight a specific node as a
 In addition to the [standard][upstream-docs] language injection options used by tree-sitter, there
 are a few Helix specific extensions that allow for more control.

-And example of a simple query that would highlight all strings as bash in Nix:
+An example of a simple query that would highlight all strings as bash in Nix:
 ```scm
 ((string_expression (string_fragment) @injection.content)
   (#set! injection.language "bash"))
 ```
+Another example is this query, which highlights links in comments and keywords like "TODO", by reusing the dedicated "comment" language:
+```
+((comment) @injection.content
+  (#set! injection.language "comment"))
+```

 ## Capture Types
@@ -23,6 +23,7 @@ The following [captures][tree-sitter-captures] are recognized:
 | `test.inside` |
 | `test.around` |
 | `parameter.inside` |
+| `parameter.around` |
 | `comment.inside` |
 | `comment.around` |
 | `entry.inside` |
@@ -47,9 +47,9 @@ Normal mode is the default mode when you launch helix. You can return to it from
 | `W` | Move next WORD start | `move_next_long_word_start` |
 | `B` | Move previous WORD start | `move_prev_long_word_start` |
 | `E` | Move next WORD end | `move_next_long_word_end` |
-| `t` | Find 'till next char | `find_till_char` |
+| `t` | Find till next char | `find_till_char` |
 | `f` | Find next char | `find_next_char` |
-| `T` | Find 'till previous char | `till_prev_char` |
+| `T` | Find till previous char | `till_prev_char` |
 | `F` | Find previous char | `find_prev_char` |
 | `G` | Go to line number `<n>` | `goto_line` |
 | `Alt-.` | Repeat last motion (`f`, `t`, `m`, `[` or `]`) | `repeat_last_motion` |

@@ -213,6 +213,7 @@ Jumps to various locations.
 | Key | Description | Command |
 | ----- | ----------- | ------- |
 | `g` | Go to line number `<n>` else start of file | `goto_file_start` |
+| <code>|</code> | Go to column number `<n>` else start of line | `goto_column` |
 | `e` | Go to the end of the file | `goto_last_line` |
 | `f` | Go to files in the selections | `goto_file` |
 | `h` | Go to the start of the line | `goto_line_start` |
@@ -66,7 +66,7 @@ These configuration keys are available:
 | `indent` | The indent to use. Has sub keys `unit` (the text inserted into the document when indenting; usually set to N spaces or `"\t"` for tabs) and `tab-width` (the number of spaces rendered for a tab) |
 | `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) |
 | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
-| `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout |
+| `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout. The filename of the current buffer can be passed as argument by using the `%{buffer_name}` expansion variable. See below for more information in the [Configuring the formatter command](#configuring-the-formatter-command) |
 | `soft-wrap` | [editor.softwrap](./editor.md#editorsoft-wrap-section)
 | `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` |
 | `rulers` | Overrides the `editor.rulers` config key for the language. |

@@ -102,6 +102,16 @@ with the following priorities:
 the file extension of a given file wins. In the example above, the `"toml"`
 config matches files like `Cargo.toml` or `languages.toml`.

+### Configuring the formatter command
+
+[Command line expansions](./command-line.md#expansions) are supported in the arguments
+of the formatter command. In particular, the `%{buffer_name}` variable can be passed as
+argument to the formatter:
+
+```toml
+formatter = { command = "mylang-formatter" , args = ["--stdin", "--stdin-filename %{buffer_name}"] }
+```
+
 ## Language Server configuration

 Language servers are configured separately in the table `language-server` in the same file as the languages `languages.toml`
@@ -10,6 +10,7 @@ Helix' keymap and interaction model ([Using Helix](#usage.md)) is easier to adopt
 | --- | --- | --- |
 | [Vim](https://www.vim.org/) | [helix.vim](https://github.com/chtenb/helix.vim) config |
+| [IntelliJ IDEA](https://www.jetbrains.com/idea/) / [Android Studio](https://developer.android.com/studio)| [IdeaVim](https://plugins.jetbrains.com/plugin/164-ideavim) plugin + [helix.idea.vim](https://github.com/chtenb/helix.vim) config | Minimum recommended version is IdeaVim 2.19.0.
 | [Visual Studio](https://visualstudio.microsoft.com/) | [VsVim](https://marketplace.visualstudio.com/items?itemName=JaredParMSFT.VsVim) plugin + [helix.vs.vim](https://github.com/chtenb/helix.vim) config |
 | [Visual Studio Code](https://code.visualstudio.com/) | [Dance](https://marketplace.visualstudio.com/items?itemName=gregoire.dance) extension, or its [Helix fork](https://marketplace.visualstudio.com/items?itemName=kend.dancehelixkey) | The Helix fork has diverged. You can also use the original Dance and tweak its keybindings directly (try [this config](https://github.com/71/dance/issues/299#issuecomment-1655509531)).
 | [Visual Studio Code](https://code.visualstudio.com/) | [Helix for VS Code](https://marketplace.visualstudio.com/items?itemName=jasew.vscode-helix-emulation) extension|
 | [Zed](https://zed.dev/) | native via keybindings ([Bug](https://github.com/zed-industries/zed/issues/4642)) |

@@ -22,7 +23,7 @@ Helix' keymap and interaction model ([Using Helix](#usage.md)) is easier to adopt
 | --- | ---
-| Fish | [Feature Request](https://github.com/fish-shell/fish-shell/issues/7748)
+| Fish | [fish-helix](https://github.com/sshilovsky/fish-helix/tree/main)
-| Zsh | [helix-zsh](https://github.com/john-h-k/helix-zsh)
+| Zsh | [helix-zsh](https://github.com/john-h-k/helix-zsh) or [zsh-helix-mode](https://github.com/Multirious/zsh-helix-mode)
 | Nushell | [Feature Request](https://github.com/nushell/reedline/issues/639)

 ## Other software
@@ -12,7 +12,7 @@ There are three kinds of commands that can be used in keymaps:
   in [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs)
   at the invocation of `static_commands!` macro.
 * Typable commands: commands that can be executed from command mode (`:`), for
-  example `:write!`. See the [Commands](./commands.html) documentation for a
+  example `:write!`. See the [Commands](./commands.md) documentation for a
   list of available typeable commands or the `TypableCommandList` declaration in
   the source code at [`helix-term/src/commands/typed.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands/typed.rs).
 * Macros: sequences of keys that are executed in order. These keybindings
@@ -171,8 +171,9 @@ We use a similar set of scopes as

 - `comment` - Code comments
   - `line` - Single line comments (`//`)
+    - `documentation` - Line documentation comments (e.g. `///` in Rust)
   - `block` - Block comments (e.g. (`/* */`)
-    - `documentation` - Documentation comments (e.g. `///` in Rust)
+    - `documentation` - Block documentation comments (e.g. `/** */` in Rust)

 - `variable` - Variables
   - `builtin` - Reserved language variables (`self`, `this`, `super`, etc.)

@@ -181,7 +182,7 @@ We use a similar set of scopes as
   - `member` - Fields of composite data types (e.g. structs, unions)
     - `private` - Private fields that use a unique syntax (currently just ECMAScript-based languages)

-- `label`
+- `label` - `.class`, `#id` in CSS, etc.

 - `punctuation`
   - `delimiter` - Commas, colons

@@ -216,7 +217,7 @@ We use a similar set of scopes as

 - `namespace`

-- `special`
+- `special` - `derive` in Rust, etc.

 - `markup`
   - `heading`
@@ -86,6 +86,6 @@ Keywords[ru]=текст;текстовый редактор;
 Keywords[sr]=Текст;едитор;
 Keywords[tr]=Metin;düzenleyici;
 Icon=helix
-Categories=Utility;TextEditor;
+Categories=Utility;TextEditor;ConsoleOnly
 StartupNotify=false
 MimeType=text/english;text/plain;text/x-makefile;text/x-c++hdr;text/x-c++src;text/x-chdr;text/x-csrc;text/x-java;text/x-moc;text/x-pascal;text/x-tcl;text/x-tex;application/x-shellscript;text/x-c;text/x-c++;
@@ -6,7 +6,8 @@
   installShellFiles,
   git,
   gitRev ? null,
-  ...
+  grammarOverlays ? [],
+  includeGrammarIf ? _: true,
 }: let
   fs = lib.fileset;

@@ -28,7 +29,7 @@
   # that they reside in. It is built by calling the derivation in the
   # grammars.nix file, then taking the runtime directory in the git repo
   # and hooking symlinks up to it.
-  grammars = callPackage ./grammars.nix {};
+  grammars = callPackage ./grammars.nix {inherit grammarOverlays includeGrammarIf;};
   runtimeDir = runCommand "helix-runtime" {} ''
     mkdir -p $out
     ln -s ${./runtime}/* $out

@@ -75,9 +76,9 @@ in
     mkdir -p $out/lib
     installShellCompletion ${./contrib/completion}/hx.{bash,fish,zsh}
     mkdir -p $out/share/{applications,icons/hicolor/{256x256,scalable}/apps}
-    cp ${./contrib/Helix.desktop} $out/share/applications
+    cp ${./contrib/Helix.desktop} $out/share/applications/Helix.desktop
     cp ${./logo.svg} $out/share/icons/hicolor/scalable/apps/helix.svg
-    cp ${./contrib/helix.png} $out/share/icons/hicolor/256x256/apps
+    cp ${./contrib/helix.png} $out/share/icons/hicolor/256x256/apps/helix.png
   '';

   meta.mainProgram = "hx";
@@ -13,7 +13,7 @@ Some suggestions to get started:
 - Instead of running a release version of Helix, while developing you may want to run in debug mode with `cargo run` which is way faster to compile
 - Looking for even faster compile times? Give a try to [mold](https://github.com/rui314/mold)
 - If your preferred language is missing, integrating a tree-sitter grammar for
-  it and defining syntax highlight queries for it is straight forward and
+  it and defining syntax highlight queries for it is straightforward and
   doesn't require much knowledge of the internals.
 - If you don't use the Nix development shell and are getting your rust-analyzer binary from rustup, you may need to run `rustup component add rust-analyzer`.
   This is because `rust-toolchain.toml` selects our MSRV for the development toolchain but doesn't download the matching rust-analyzer automatically.
flake.lock (generated): 34 changes

@@ -1,23 +1,5 @@
 {
   "nodes": {
-    "flake-utils": {
-      "inputs": {
-        "systems": "systems"
-      },
-      "locked": {
-        "lastModified": 1731533236,
-        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
     "nixpkgs": {
       "locked": {
         "lastModified": 1740560979,

@@ -36,7 +18,6 @@
     },
     "root": {
       "inputs": {
-        "flake-utils": "flake-utils",
         "nixpkgs": "nixpkgs",
         "rust-overlay": "rust-overlay"
       }

@@ -60,21 +41,6 @@
         "repo": "rust-overlay",
         "type": "github"
       }
-    },
-    "systems": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-systems",
-        "repo": "default",
-        "type": "github"
-      }
     }
   },
   "root": "root",
flake.nix: 131 changes

@@ -3,7 +3,6 @@

   inputs = {
     nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
-    flake-utils.url = "github:numtide/flake-utils";
     rust-overlay = {
       url = "github:oxalica/rust-overlay";
       inputs.nixpkgs.follows = "nixpkgs";

@@ -13,77 +12,89 @@
   outputs = {
     self,
     nixpkgs,
-    flake-utils,
     rust-overlay,
     ...
   }: let
+    inherit (nixpkgs) lib;
+    systems = [
+      "x86_64-linux"
+      "aarch64-linux"
+      "x86_64-darwin"
+      "aarch64-darwin"
+    ];
+    eachSystem = lib.genAttrs systems;
+    pkgsFor = eachSystem (system:
+      import nixpkgs {
+        localSystem.system = system;
+        overlays = [(import rust-overlay) self.overlays.helix];
+      });
     gitRev = self.rev or self.dirtyRev or null;
-  in
-    flake-utils.lib.eachDefaultSystem (system: let
-      pkgs = import nixpkgs {
-        inherit system;
-        overlays = [(import rust-overlay)];
-      };
-
-      # Get Helix's MSRV toolchain to build with by default.
-      msrvToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
-      msrvPlatform = pkgs.makeRustPlatform {
-        cargo = msrvToolchain;
-        rustc = msrvToolchain;
-      };
-    in {
-      packages = rec {
-        helix = pkgs.callPackage ./default.nix {inherit gitRev;};
-
-        /*
-        The default Helix build. Uses the latest stable Rust toolchain, and unstable
-        nixpkgs.
-
-        The build inputs can be overridden with the following:
-
-        packages.${system}.default.override { rustPlatform = newPlatform; };
-
-        Overriding a derivation attribute can be done as well:
-
-        packages.${system}.default.overrideAttrs { buildType = "debug"; };
-        */
-        default = helix;
-      };
-
-      checks.helix = self.outputs.packages.${system}.helix.override {
-        buildType = "debug";
-        rustPlatform = msrvPlatform;
-      };
-
-      # Devshell behavior is preserved.
-      devShells.default = let
-        commonRustFlagsEnv = "-C link-arg=-fuse-ld=lld -C target-cpu=native --cfg tokio_unstable";
-        platformRustFlagsEnv = pkgs.lib.optionalString pkgs.stdenv.isLinux "-Clink-arg=-Wl,--no-rosegment";
-      in
-        pkgs.mkShell
-        {
-          inputsFrom = [self.checks.${system}.helix];
-          nativeBuildInputs = with pkgs;
-            [
-              lld
-              cargo-flamegraph
-              rust-bin.nightly.latest.rust-analyzer
-            ]
-            ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin)
-            ++ (lib.optional stdenv.isLinux lldb)
-            ++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation);
-          shellHook = ''
-            export RUST_BACKTRACE="1"
-            export RUSTFLAGS="''${RUSTFLAGS:-""} ${commonRustFlagsEnv} ${platformRustFlagsEnv}"
-          '';
-        };
-    })
-    // {
-      overlays.default = final: prev: {
-        helix = final.callPackage ./default.nix {inherit gitRev;};
-      };
-    };
+  in {
+    packages = eachSystem (system: {
+      inherit (pkgsFor.${system}) helix;
+      /**
+      The default Helix build. Uses the latest stable Rust toolchain, and unstable
+      nixpkgs.
+
+      The build inputs can be overriden with the following:
+
+      packages.${system}.default.override { rustPlatform = newPlatform; };
+
+      Overriding a derivation attribute can be done as well:
+
+      packages.${system}.default.overrideAttrs { buildType = "debug"; };
+      */
+      default = self.packages.${system}.helix;
+    });
+
+    checks =
+      lib.mapAttrs (system: pkgs: let
+        # Get Helix's MSRV toolchain to build with by default.
+        msrvToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
+        msrvPlatform = pkgs.makeRustPlatform {
+          cargo = msrvToolchain;
+          rustc = msrvToolchain;
+        };
+      in {
+        helix = self.packages.${system}.helix.override {
+          rustPlatform = msrvPlatform;
+        };
+      })
+      pkgsFor;
+
+    # Devshell behavior is preserved.
+    devShells =
+      lib.mapAttrs (system: pkgs: {
+        default = let
+          commonRustFlagsEnv = "-C link-arg=-fuse-ld=lld -C target-cpu=native --cfg tokio_unstable";
+          platformRustFlagsEnv = lib.optionalString pkgs.stdenv.isLinux "-Clink-arg=-Wl,--no-rosegment";
+        in
+          pkgs.mkShell {
+            inputsFrom = [self.checks.${system}.helix];
+            nativeBuildInputs = with pkgs;
+              [
+                lld
+                cargo-flamegraph
+                rust-bin.nightly.latest.rust-analyzer
+              ]
+              ++ (lib.optional (stdenv.isx86_64 && stdenv.isLinux) cargo-tarpaulin)
+              ++ (lib.optional stdenv.isLinux lldb)
+              ++ (lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.CoreFoundation);
+            shellHook = ''
+              export RUST_BACKTRACE="1"
+              export RUSTFLAGS="''${RUSTFLAGS:-""} ${commonRustFlagsEnv} ${platformRustFlagsEnv}"
+            '';
+          };
+      })
+      pkgsFor;
+
+    overlays = {
+      helix = final: prev: {
+        helix = final.callPackage ./default.nix {inherit gitRev;};
+      };
+
+      default = self.overlays.helix;
+    };
+  };

   nixConfig = {
     extra-substituters = ["https://helix.cachix.org"];
     extra-trusted-public-keys = ["helix.cachix.org-1:ejp9KQpR1FBI2onstMQ34yogDm4OgU2ru6lIwPvuCVs="];
grammars.nix: 11 changes

@@ -1,22 +1,13 @@
 {
   stdenv,
   lib,
-  runCommandLocal,
   runCommand,
-  yj,
   includeGrammarIf ? _: true,
+  grammarOverlays ? [],
-  ...
 }: let
-  # HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
-  # before parsing
-  languages-json = runCommandLocal "languages-toml-to-json" {} ''
-    ${yj}/bin/yj -t < ${./languages.toml} > $out
-  '';
   languagesConfig =
-    if lib.versionAtLeast builtins.nixVersion "2.6.0"
-    then builtins.fromTOML (builtins.readFile ./languages.toml)
-    else builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
+    builtins.fromTOML (builtins.readFile ./languages.toml);
   isGitGrammar = grammar:
     builtins.hasAttr "source" grammar
     && builtins.hasAttr "git" grammar.source
@@ -21,7 +21,7 @@ helix-loader = { path = "../helix-loader" }
 helix-parsec = { path = "../helix-parsec" }

 ropey.workspace = true
-smallvec = "1.14"
+smallvec = "1.15"
 smartstring = "1.0.1"
 unicode-segmentation.workspace = true
 # unicode-width is changing width definitions

@@ -32,13 +32,12 @@ unicode-segmentation.workspace = true
 unicode-width = "=0.1.12"
 unicode-general-category = "1.0"
 slotmap.workspace = true
-tree-sitter.workspace = true
+tree-house.workspace = true
 once_cell = "1.21"
 arc-swap = "1"
 regex = "1"
 bitflags.workspace = true
-ahash = "0.8.11"
-hashbrown = { version = "0.14.5", features = ["raw"] }
+foldhash.workspace = true
 url = "2.5.4"

 log = "0.4"

@@ -47,8 +46,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 toml = "0.8"

-imara-diff = "0.1.8"
-
+imara-diff = "0.2.0"
 encoding_rs = "0.8"

 chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
@@ -357,7 +357,7 @@ pub struct Token<'a> {
     pub is_terminated: bool,
 }

-impl Token<'_> {
+impl<'a> Token<'a> {
     pub fn empty_at(content_start: usize) -> Self {
         Self {
             kind: TokenKind::Unquoted,

@@ -366,6 +366,15 @@ impl Token<'_> {
             is_terminated: false,
         }
     }
+
+    pub fn expand(content: impl Into<Cow<'a, str>>) -> Self {
+        Self {
+            kind: TokenKind::Expand,
+            content_start: 0,
+            content: content.into(),
+            is_terminated: true,
+        }
+    }
 }

 #[derive(Debug)]
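A usage sketch for the `Token::expand` constructor added above: it wraps any string-ish value as an already-terminated `Expand` token, so a caller can hand expansion input to the command line machinery without going through the shellwords parser. This relies only on the fields and constructor shown in the diff; the `%{buffer_name}` string is borrowed from the expansion docs elsewhere in this compare:

```rust
// Sketch: build an Expand token directly from a string.
let token = Token::expand("%{buffer_name}");
assert!(matches!(token.kind, TokenKind::Expand));
assert!(token.is_terminated);
assert_eq!(token.content_start, 0);
```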
@@ -4,7 +4,8 @@
 use smallvec::SmallVec;

 use crate::{
-    syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction,
+    syntax::config::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril,
+    Transaction,
 };
 use helix_stdx::rope::RopeSliceExt;
 use std::borrow::Cow;
@@ -1,4 +1,4 @@
-use crate::syntax::{Configuration, Loader, LoaderError};
+use crate::syntax::{config::Configuration, Loader, LoaderError};

 /// Language configuration based on built-in languages.toml.
 pub fn default_lang_config() -> Configuration {
@@ -1,7 +1,6 @@
 //! LSP diagnostic utility types.
-use std::{fmt, sync::Arc};
+use std::fmt;

 pub use helix_stdx::range::Range;
 use serde::{Deserialize, Serialize};

 /// Describes the severity level of a [`Diagnostic`].

@@ -20,66 +19,6 @@ impl Default for Severity {
     }
 }

-#[derive(Debug, Eq, Hash, PartialEq, Clone, Deserialize, Serialize)]
-pub enum NumberOrString {
-    Number(i32),
-    String(String),
-}
-
-#[derive(Debug, Clone)]
-pub enum DiagnosticTag {
-    Unnecessary,
-    Deprecated,
-}
-
-/// Corresponds to [`lsp_types::Diagnostic`](https://docs.rs/lsp-types/0.94.0/lsp_types/struct.Diagnostic.html)
-#[derive(Debug, Clone)]
-pub struct Diagnostic {
-    pub range: Range,
-    // whether this diagnostic ends at the end of(or inside) a word
-    pub ends_at_word: bool,
-    pub starts_at_word: bool,
-    pub zero_width: bool,
-    pub line: usize,
-    pub message: String,
-    pub severity: Option<Severity>,
-    pub code: Option<NumberOrString>,
-    pub provider: DiagnosticProvider,
-    pub tags: Vec<DiagnosticTag>,
-    pub source: Option<String>,
-    pub data: Option<serde_json::Value>,
-}
-
-/// The source of a diagnostic.
-///
-/// This type is cheap to clone: all data is either `Copy` or wrapped in an `Arc`.
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
-pub enum DiagnosticProvider {
-    Lsp {
-        /// The ID of the language server which sent the diagnostic.
-        server_id: LanguageServerId,
-        /// An optional identifier under which diagnostics are managed by the client.
-        ///
-        /// `identifier` is a field from the LSP "Pull Diagnostics" feature meant to provide an
-        /// optional "namespace" for diagnostics: a language server can respond to a diagnostics
-        /// pull request with an identifier and these diagnostics should be treated as separate
-        /// from push diagnostics. Rust-analyzer uses this feature for example to provide Cargo
-        /// diagnostics with push and internal diagnostics with pull. The push diagnostics should
-        /// not clear the pull diagnostics and vice-versa.
-        identifier: Option<Arc<str>>,
-    },
-    // Future internal features can go here...
-}
-
-impl DiagnosticProvider {
-    pub fn language_server_id(&self) -> Option<LanguageServerId> {
-        match self {
-            Self::Lsp { server_id, .. } => Some(*server_id),
-            // _ => None,
-        }
-    }
-}
-
 // while I would prefer having this in helix-lsp that necessitates a bunch of
 // conversions I would rather not add. I think its fine since this just a very
 // trivial newtype wrapper and we would need something similar once we define

@@ -93,10 +32,3 @@ impl fmt::Display for LanguageServerId {
         write!(f, "{:?}", self.0)
     }
 }
-
-impl Diagnostic {
-    #[inline]
-    pub fn severity(&self) -> Severity {
-        self.severity.unwrap_or(Severity::Warning)
-    }
-}
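The `identifier` doc comment on the relocated `DiagnosticProvider` encodes an invariant: push and pull diagnostics from the same server live in separate namespaces and must not clear each other. A hedged sketch of what that means for a client of the type, using only the derives shown above (`Ord` makes the provider usable as a map key); the `server_id` parameter and the `"cargo"` identifier are illustrative, and `LanguageServerId` is assumed to be `Copy`:

```rust
use std::collections::BTreeMap;
use std::sync::Arc;

// Sketch: diagnostics keyed by provider. The same language server yields two
// distinct keys when a pull-diagnostics identifier is present, so pull
// results never overwrite push results (and vice versa).
fn namespaced(server_id: LanguageServerId) {
    let push = DiagnosticProvider::Lsp { server_id, identifier: None };
    let pull = DiagnosticProvider::Lsp {
        server_id,
        identifier: Some(Arc::from("cargo")),
    };

    let mut diagnostics: BTreeMap<DiagnosticProvider, Vec<Diagnostic>> = BTreeMap::new();
    diagnostics.insert(push, Vec::new());
    diagnostics.insert(pull, Vec::new());
    assert_eq!(diagnostics.len(), 2); // push and pull stay separate
}
```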
@@ -1,51 +1,22 @@
 use std::ops::Range;
 use std::time::Instant;

-use imara_diff::intern::InternedInput;
-use imara_diff::Algorithm;
+use imara_diff::{Algorithm, Diff, Hunk, IndentHeuristic, IndentLevel, InternedInput};
 use ropey::RopeSlice;

 use crate::{ChangeSet, Rope, Tendril, Transaction};

-/// A `imara_diff::Sink` that builds a `ChangeSet` for a character diff of a hunk
-struct CharChangeSetBuilder<'a> {
-    res: &'a mut ChangeSet,
-    hunk: &'a InternedInput<char>,
-    pos: u32,
-}
-
-impl imara_diff::Sink for CharChangeSetBuilder<'_> {
-    type Out = ();
-    fn process_change(&mut self, before: Range<u32>, after: Range<u32>) {
-        self.res.retain((before.start - self.pos) as usize);
-        self.res.delete(before.len());
-        self.pos = before.end;
-
-        let res = self.hunk.after[after.start as usize..after.end as usize]
-            .iter()
-            .map(|&token| self.hunk.interner[token])
-            .collect();
-
-        self.res.insert(res);
-    }
-
-    fn finish(self) -> Self::Out {
-        self.res.retain(self.hunk.before.len() - self.pos as usize);
-    }
-}
-
-struct LineChangeSetBuilder<'a> {
+struct ChangeSetBuilder<'a> {
     res: ChangeSet,
     after: RopeSlice<'a>,
     file: &'a InternedInput<RopeSlice<'a>>,
     current_hunk: InternedInput<char>,
+    char_diff: Diff,
     pos: u32,
 }

-impl imara_diff::Sink for LineChangeSetBuilder<'_> {
-    type Out = ChangeSet;
-
-    fn process_change(&mut self, before: Range<u32>, after: Range<u32>) {
+impl ChangeSetBuilder<'_> {
+    fn process_hunk(&mut self, before: Range<u32>, after: Range<u32>) {
         let len = self.file.before[self.pos as usize..before.start as usize]
             .iter()
             .map(|&it| self.file.interner[it].len_chars())

@@ -109,25 +80,36 @@
             .flat_map(|&it| self.file.interner[it].chars());
         self.current_hunk.update_before(hunk_before);
         self.current_hunk.update_after(hunk_after);

         // the histogram heuristic does not work as well
         // for characters because the same characters often reoccur
         // use myer diff instead
-        imara_diff::diff(
+        self.char_diff.compute_with(
             Algorithm::Myers,
-            &self.current_hunk,
-            CharChangeSetBuilder {
-                res: &mut self.res,
-                hunk: &self.current_hunk,
-                pos: 0,
-            },
+            &self.current_hunk.before,
+            &self.current_hunk.after,
+            self.current_hunk.interner.num_tokens(),
         );
+        let mut pos = 0;
+        for Hunk { before, after } in self.char_diff.hunks() {
+            self.res.retain((before.start - pos) as usize);
+            self.res.delete(before.len());
+            pos = before.end;
+
+            let res = self.current_hunk.after[after.start as usize..after.end as usize]
+                .iter()
+                .map(|&token| self.current_hunk.interner[token])
+                .collect();
+
+            self.res.insert(res);
+        }
+        self.res
+            .retain(self.current_hunk.before.len() - pos as usize);
         // reuse allocations
         self.current_hunk.clear();
     }
-}

-    fn finish(mut self) -> Self::Out {
+    fn finish(mut self) -> ChangeSet {
         let len = self.file.before[self.pos as usize..]
             .iter()
             .map(|&it| self.file.interner[it].len_chars())

@@ -140,7 +122,7 @@

 struct RopeLines<'a>(RopeSlice<'a>);

-impl<'a> imara_diff::intern::TokenSource for RopeLines<'a> {
+impl<'a> imara_diff::TokenSource for RopeLines<'a> {
     type Token = RopeSlice<'a>;
     type Tokenizer = ropey::iter::Lines<'a>;

@@ -161,15 +143,23 @@ pub fn compare_ropes(before: &Rope, after: &Rope) -> Transaction {
     let res = ChangeSet::with_capacity(32);
     let after = after.slice(..);
     let file = InternedInput::new(RopeLines(before.slice(..)), RopeLines(after));
-    let builder = LineChangeSetBuilder {
+    let mut builder = ChangeSetBuilder {
         res,
         file: &file,
         after,
         pos: 0,
         current_hunk: InternedInput::default(),
+        char_diff: Diff::default(),
     };

-    let res = imara_diff::diff(Algorithm::Histogram, &file, builder).into();
+    let mut diff = Diff::compute(Algorithm::Histogram, &file);
+    diff.postprocess_with_heuristic(
+        &file,
+        IndentHeuristic::new(|token| IndentLevel::for_ascii_line(file.interner[token].bytes(), 4)),
+    );
+    for hunk in diff.hunks() {
+        builder.process_hunk(hunk.before, hunk.after)
+    }
+    let res = builder.finish().into();

     log::debug!(
         "rope diff took {}s",
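The rewritten `compare_ropes` above now runs in two explicit stages: a Histogram line diff whose hunk boundaries are cleaned up by imara-diff's indent heuristic, then a Myers character diff inside each hunk. A minimal sketch of the outer stage on plain `&str` input; this is not helix API, just the imara-diff 0.2 calls used in the diff above, with `&str` interning by line and the same tab width of 4:

```rust
use imara_diff::{Algorithm, Diff, IndentHeuristic, IndentLevel, InternedInput};

// Sketch: line-level diff with the same post-processing as compare_ropes.
fn print_hunks(before: &str, after: &str) {
    let input = InternedInput::new(before, after);
    let mut diff = Diff::compute(Algorithm::Histogram, &input);
    // Slide hunk boundaries so they align with indentation structure,
    // mirroring the postprocess_with_heuristic call shown above.
    diff.postprocess_with_heuristic(
        &input,
        IndentHeuristic::new(|token| {
            IndentLevel::for_ascii_line(input.interner[token].bytes(), 4)
        }),
    );
    for hunk in diff.hunks() {
        println!("lines {:?} changed into {:?}", hunk.before, hunk.after);
    }
}
```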
@@ -21,7 +21,7 @@ use globset::{GlobBuilder, GlobMatcher};

 use crate::{
     indent::{IndentStyle, MAX_INDENT},
-    LineEnding,
+    LineEnding, SpellingLanguage,
 };

 /// Configuration declared for a path in `.editorconfig` files.

@@ -31,7 +31,7 @@ pub struct EditorConfig {
     pub tab_width: Option<NonZeroU8>,
     pub line_ending: Option<LineEnding>,
     pub encoding: Option<&'static Encoding>,
-    // pub spelling_language: Option<SpellingLanguage>,
+    pub spelling_language: Option<SpellingLanguage>,
     pub trim_trailing_whitespace: Option<bool>,
     pub insert_final_newline: Option<bool>,
     pub max_line_length: Option<NonZeroU16>,

@@ -144,6 +144,7 @@ impl EditorConfig {
             "utf-16be" => Some(encoding_rs::UTF_16BE),
             _ => None,
         });
+        let spelling_language = pairs.get("spelling_language").and_then(|s| s.parse().ok());
         let trim_trailing_whitespace =
             pairs
                 .get("trim_trailing_whitespace")

@@ -170,6 +171,7 @@ impl EditorConfig {
             tab_width,
             line_ending,
             encoding,
+            spelling_language,
             trim_trailing_whitespace,
             insert_final_newline,
             max_line_length,
@@ -242,34 +242,6 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize
     }
 }

-/// Returns whether the given char position is a grapheme boundary.
-#[must_use]
-pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
-    // Bounds check
-    debug_assert!(char_idx <= slice.len_chars());
-
-    // We work with bytes for this, so convert.
-    let byte_idx = slice.char_to_byte(char_idx);
-
-    // Get the chunk with our byte index in it.
-    let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
-
-    // Set up the grapheme cursor.
-    let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
-
-    // Determine if the given position is a grapheme cluster boundary.
-    loop {
-        match gc.is_boundary(chunk, chunk_byte_idx) {
-            Ok(n) => return n,
-            Err(GraphemeIncomplete::PreContext(n)) => {
-                let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
-                gc.provide_context(ctx_chunk, ctx_byte_start);
-            }
-            Err(_) => unreachable!(),
-        }
-    }
-}
-
 /// A highly compressed Cow<'a, str> that holds
 /// atmost u31::MAX bytes and is readonly
 pub struct GraphemeStr<'a> {
@@ -1,14 +1,18 @@
 use std::{borrow::Cow, collections::HashMap, iter};

 use helix_stdx::rope::RopeSliceExt;
-use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
+use tree_house::TREE_SITTER_MATCH_LIMIT;

 use crate::{
     chars::{char_is_line_ending, char_is_whitespace},
     graphemes::{grapheme_width, tab_width_at},
-    syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
-    tree_sitter::Node,
-    Position, Rope, RopeSlice, Tendril,
+    syntax::{self, config::IndentationHeuristic},
+    tree_sitter::{
+        self,
+        query::{InvalidPredicateError, UserPredicate},
+        Capture, Grammar, InactiveQueryCursor, Node, Pattern, Query, QueryMatch, RopeInput,
+    },
+    Position, Rope, RopeSlice, Syntax, Tendril,
 };

 /// Enum representing indentation style.
@@ -279,18 +283,164 @@ fn add_indent_level(

 /// Return true if only whitespace comes before the node on its line.
 /// If given, new_line_byte_pos is treated the same way as any existing newline.
-fn is_first_in_line(node: Node, text: RopeSlice, new_line_byte_pos: Option<usize>) -> bool {
-    let mut line_start_byte_pos = text.line_to_byte(node.start_position().row);
+fn is_first_in_line(node: &Node, text: RopeSlice, new_line_byte_pos: Option<u32>) -> bool {
+    let line = text.byte_to_line(node.start_byte() as usize);
+    let mut line_start_byte_pos = text.line_to_byte(line) as u32;
     if let Some(pos) = new_line_byte_pos {
         if line_start_byte_pos < pos && pos <= node.start_byte() {
             line_start_byte_pos = pos;
         }
     }
-    text.byte_slice(line_start_byte_pos..node.start_byte())
+    text.byte_slice(line_start_byte_pos as usize..node.start_byte() as usize)
         .chars()
         .all(|c| c.is_whitespace())
 }

+#[derive(Debug, Default)]
+pub struct IndentQueryPredicates {
+    not_kind_eq: Vec<(Capture, Box<str>)>,
+    same_line: Option<(Capture, Capture, bool)>,
+    one_line: Option<(Capture, bool)>,
+}
+
+impl IndentQueryPredicates {
+    fn are_satisfied(
+        &self,
+        match_: &QueryMatch,
+        text: RopeSlice,
+        new_line_byte_pos: Option<u32>,
+    ) -> bool {
+        for (capture, not_expected_kind) in self.not_kind_eq.iter() {
+            let node = match_.nodes_for_capture(*capture).next();
+            if node.is_some_and(|n| n.kind() == not_expected_kind.as_ref()) {
+                return false;
+            }
+        }
+
+        if let Some((capture1, capture2, negated)) = self.same_line {
+            let n1 = match_.nodes_for_capture(capture1).next();
+            let n2 = match_.nodes_for_capture(capture2).next();
+            let satisfied = n1.zip(n2).is_some_and(|(n1, n2)| {
+                let n1_line = get_node_start_line(text, n1, new_line_byte_pos);
+                let n2_line = get_node_start_line(text, n2, new_line_byte_pos);
+                let same_line = n1_line == n2_line;
+                same_line != negated
+            });
+
+            if !satisfied {
+                return false;
+            }
+        }
+
+        if let Some((capture, negated)) = self.one_line {
+            let node = match_.nodes_for_capture(capture).next();
+            let satisfied = node.is_some_and(|node| {
+                let start_line = get_node_start_line(text, node, new_line_byte_pos);
+                let end_line = get_node_end_line(text, node, new_line_byte_pos);
+                let one_line = end_line == start_line;
+                one_line != negated
+            });
+
+            if !satisfied {
+                return false;
+            }
+        }
+
+        true
+    }
+}
+
+#[derive(Debug)]
+pub struct IndentQuery {
+    query: Query,
+    properties: HashMap<Pattern, IndentScope>,
+    predicates: HashMap<Pattern, IndentQueryPredicates>,
+    indent_capture: Option<Capture>,
+    indent_always_capture: Option<Capture>,
+    outdent_capture: Option<Capture>,
+    outdent_always_capture: Option<Capture>,
+    align_capture: Option<Capture>,
+    anchor_capture: Option<Capture>,
+    extend_capture: Option<Capture>,
+    extend_prevent_once_capture: Option<Capture>,
+}
+
+impl IndentQuery {
+    pub fn new(grammar: Grammar, source: &str) -> Result<Self, tree_sitter::query::ParseError> {
+        let mut properties = HashMap::new();
+        let mut predicates: HashMap<Pattern, IndentQueryPredicates> = HashMap::new();
+        let query = Query::new(grammar, source, |pattern, predicate| match predicate {
+            UserPredicate::SetProperty { key: "scope", val } => {
+                let scope = match val {
+                    Some("all") => IndentScope::All,
+                    Some("tail") => IndentScope::Tail,
+                    Some(other) => {
+                        return Err(format!("unknown scope (#set! scope \"{other}\")").into())
+                    }
+                    None => return Err("missing scope value (#set! scope ...)".into()),
+                };
+
+                properties.insert(pattern, scope);
+
+                Ok(())
+            }
+            UserPredicate::Other(predicate) => {
+                let name = predicate.name();
+                match name {
+                    "not-kind-eq?" => {
+                        predicate.check_arg_count(2)?;
+                        let capture = predicate.capture_arg(0)?;
+                        let not_expected_kind = predicate.str_arg(1)?;
+
+                        predicates
+                            .entry(pattern)
+                            .or_default()
+                            .not_kind_eq
+                            .push((capture, not_expected_kind.into()));
+                        Ok(())
+                    }
+                    "same-line?" | "not-same-line?" => {
+                        predicate.check_arg_count(2)?;
+                        let capture1 = predicate.capture_arg(0)?;
+                        let capture2 = predicate.capture_arg(1)?;
+                        let negated = name == "not-same-line?";
+
+                        predicates.entry(pattern).or_default().same_line =
+                            Some((capture1, capture2, negated));
+                        Ok(())
+                    }
+                    "one-line?" | "not-one-line?" => {
+                        predicate.check_arg_count(1)?;
+                        let capture = predicate.capture_arg(0)?;
+                        let negated = name == "not-one-line?";
+
+                        predicates.entry(pattern).or_default().one_line = Some((capture, negated));
+                        Ok(())
+                    }
+                    _ => Err(InvalidPredicateError::unknown(UserPredicate::Other(
+                        predicate,
+                    ))),
+                }
+            }
+            _ => Err(InvalidPredicateError::unknown(predicate)),
+        })?;
+
+        Ok(Self {
+            properties,
+            predicates,
+            indent_capture: query.get_capture("indent"),
+            indent_always_capture: query.get_capture("indent.always"),
+            outdent_capture: query.get_capture("outdent"),
+            outdent_always_capture: query.get_capture("outdent.always"),
+            align_capture: query.get_capture("align"),
+            anchor_capture: query.get_capture("anchor"),
+            extend_capture: query.get_capture("extend"),
+            extend_prevent_once_capture: query.get_capture("extend.prevent-once"),
+            query,
+        })
+    }
+}
+
 /// The total indent for some line of code.
 /// This is usually constructed in one of 2 ways:
 /// - Successively add indent captures to get the (added) indent from a single line
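
To make the new predicate machinery concrete: the callback passed to `Query::new` in `IndentQuery::new` above runs once per `(#...)` directive in the query source. The sketch below is illustrative only (the query text is not taken from Helix's runtime `indents.scm` files); its first pattern hits the `UserPredicate::SetProperty` arm and its second hits the custom `"not-kind-eq?"` arm:

    // A minimal sketch, assuming a `Grammar` obtained from the language loader.
    fn load_indent_query(grammar: Grammar) -> Result<IndentQuery, tree_sitter::query::ParseError> {
        let source = r#"
    ((block) @indent
     (#set! scope "all"))

    ((if_expression) @indent
     (#not-kind-eq? @indent "else_clause"))
    "#;
        IndentQuery::new(grammar, source)
    }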
@@ -453,16 +603,16 @@ struct IndentQueryResult<'a> {
     extend_captures: HashMap<usize, Vec<ExtendCapture>>,
 }

-fn get_node_start_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
-    let mut node_line = node.start_position().row;
+fn get_node_start_line(text: RopeSlice, node: &Node, new_line_byte_pos: Option<u32>) -> usize {
+    let mut node_line = text.byte_to_line(node.start_byte() as usize);
     // Adjust for the new line that will be inserted
     if new_line_byte_pos.is_some_and(|pos| node.start_byte() >= pos) {
         node_line += 1;
     }
     node_line
 }
-fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
-    let mut node_line = node.end_position().row;
+fn get_node_end_line(text: RopeSlice, node: &Node, new_line_byte_pos: Option<u32>) -> usize {
+    let mut node_line = text.byte_to_line(node.end_byte() as usize);
     // Adjust for the new line that will be inserted (with a strict inequality since end_byte is exclusive)
     if new_line_byte_pos.is_some_and(|pos| node.end_byte() > pos) {
         node_line += 1;
@@ -471,175 +621,96 @@ fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
 }

 fn query_indents<'a>(
-    query: &Query,
+    query: &IndentQuery,
     syntax: &Syntax,
-    cursor: &mut QueryCursor,
     text: RopeSlice<'a>,
-    range: std::ops::Range<usize>,
-    new_line_byte_pos: Option<usize>,
+    range: std::ops::Range<u32>,
+    new_line_byte_pos: Option<u32>,
 ) -> IndentQueryResult<'a> {
     let mut indent_captures: HashMap<usize, Vec<IndentCapture>> = HashMap::new();
     let mut extend_captures: HashMap<usize, Vec<ExtendCapture>> = HashMap::new();
-    cursor.set_byte_range(range);
+
+    let mut cursor = InactiveQueryCursor::new(range, TREE_SITTER_MATCH_LIMIT).execute_query(
+        &query.query,
+        &syntax.tree().root_node(),
+        RopeInput::new(text),
+    );

     // Iterate over all captures from the query
-    for m in cursor.matches(query, syntax.tree().root_node(), RopeProvider(text)) {
+    while let Some(m) = cursor.next_match() {
         // Skip matches where not all custom predicates are fulfilled
-        if !query.general_predicates(m.pattern_index).iter().all(|pred| {
-            match pred.operator.as_ref() {
-                "not-kind-eq?" => match (pred.args.first(), pred.args.get(1)) {
-                    (
-                        Some(QueryPredicateArg::Capture(capture_idx)),
-                        Some(QueryPredicateArg::String(kind)),
-                    ) => {
-                        let node = m.nodes_for_capture_index(*capture_idx).next();
-                        match node {
-                            Some(node) => node.kind()!=kind.as_ref(),
-                            _ => true,
-                        }
-                    }
-                    _ => {
-                        panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string");
-                    }
-                },
-                "same-line?" | "not-same-line?" => {
-                    match (pred.args.first(), pred.args.get(1)) {
-                        (
-                            Some(QueryPredicateArg::Capture(capt1)),
-                            Some(QueryPredicateArg::Capture(capt2))
-                        ) => {
-                            let n1 = m.nodes_for_capture_index(*capt1).next();
-                            let n2 = m.nodes_for_capture_index(*capt2).next();
-                            match (n1, n2) {
-                                (Some(n1), Some(n2)) => {
-                                    let n1_line = get_node_start_line(n1, new_line_byte_pos);
-                                    let n2_line = get_node_start_line(n2, new_line_byte_pos);
-                                    let same_line = n1_line == n2_line;
-                                    same_line==(pred.operator.as_ref()=="same-line?")
-                                }
-                                _ => true,
-                            }
-                        }
-                        _ => {
-                            panic!("Invalid indent query: Arguments to \"{}\" must be 2 captures", pred.operator);
-                        }
-                    }
-                }
-                "one-line?" | "not-one-line?" => match pred.args.first() {
-                    Some(QueryPredicateArg::Capture(capture_idx)) => {
-                        let node = m.nodes_for_capture_index(*capture_idx).next();
-
-                        match node {
-                            Some(node) => {
-                                let (start_line, end_line) = (get_node_start_line(node,new_line_byte_pos), get_node_end_line(node, new_line_byte_pos));
-                                let one_line = end_line == start_line;
-                                one_line != (pred.operator.as_ref() == "not-one-line?")
-                            },
-                            _ => true,
-                        }
-                    }
-                    _ => {
-                        panic!("Invalid indent query: Arguments to \"not-kind-eq?\" must be a capture and a string");
-                    }
-                },
-                _ => {
-                    panic!(
-                        "Invalid indent query: Unknown predicate (\"{}\")",
-                        pred.operator
-                    );
-                }
-            }
-        }) {
+        if query
+            .predicates
+            .get(&m.pattern())
+            .is_some_and(|preds| !preds.are_satisfied(&m, text, new_line_byte_pos))
+        {
             continue;
         }
         // A list of pairs (node_id, indent_capture) that are added by this match.
         // They cannot be added to indent_captures immediately since they may depend on other captures (such as an @anchor).
         let mut added_indent_captures: Vec<(usize, IndentCapture)> = Vec::new();
         // The row/column position of the optional anchor in this query
-        let mut anchor: Option<tree_sitter::Node> = None;
-        for capture in m.captures {
-            let capture_name = query.capture_names()[capture.index as usize];
-            let capture_type = match capture_name {
-                "indent" => IndentCaptureType::Indent,
-                "indent.always" => IndentCaptureType::IndentAlways,
-                "outdent" => IndentCaptureType::Outdent,
-                "outdent.always" => IndentCaptureType::OutdentAlways,
-                // The alignment will be updated to the correct value at the end, when the anchor is known.
-                "align" => IndentCaptureType::Align(RopeSlice::from("")),
-                "anchor" => {
-                    if anchor.is_some() {
-                        log::error!("Invalid indent query: Encountered more than one @anchor in the same match.")
-                    } else {
-                        anchor = Some(capture.node);
-                    }
-                    continue;
-                }
-                "extend" => {
-                    extend_captures
-                        .entry(capture.node.id())
-                        .or_insert_with(|| Vec::with_capacity(1))
-                        .push(ExtendCapture::Extend);
-                    continue;
-                }
-                "extend.prevent-once" => {
-                    extend_captures
-                        .entry(capture.node.id())
-                        .or_insert_with(|| Vec::with_capacity(1))
-                        .push(ExtendCapture::PreventOnce);
-                    continue;
-                }
-                _ => {
-                    // Ignore any unknown captures (these may be needed for predicates such as #match?)
-                    continue;
-                }
+        let mut anchor: Option<&Node> = None;
+        for matched_node in m.matched_nodes() {
+            let node_id = matched_node.node.id();
+            let capture = Some(matched_node.capture);
+            let capture_type = if capture == query.indent_capture {
+                IndentCaptureType::Indent
+            } else if capture == query.indent_always_capture {
+                IndentCaptureType::IndentAlways
+            } else if capture == query.outdent_capture {
+                IndentCaptureType::Outdent
+            } else if capture == query.outdent_always_capture {
+                IndentCaptureType::OutdentAlways
+            } else if capture == query.align_capture {
+                IndentCaptureType::Align(RopeSlice::from(""))
+            } else if capture == query.anchor_capture {
+                if anchor.is_some() {
+                    log::error!("Invalid indent query: Encountered more than one @anchor in the same match.")
+                } else {
+                    anchor = Some(&matched_node.node);
+                }
+                continue;
+            } else if capture == query.extend_capture {
+                extend_captures
+                    .entry(node_id)
+                    .or_insert_with(|| Vec::with_capacity(1))
+                    .push(ExtendCapture::Extend);
+                continue;
+            } else if capture == query.extend_prevent_once_capture {
+                extend_captures
+                    .entry(node_id)
+                    .or_insert_with(|| Vec::with_capacity(1))
+                    .push(ExtendCapture::PreventOnce);
+                continue;
+            } else {
+                // Ignore any unknown captures (these may be needed for predicates such as #match?)
+                continue;
             };
-            let scope = capture_type.default_scope();
-            let mut indent_capture = IndentCapture {
+
+            // Apply additional settings for this capture
+            let scope = query
+                .properties
+                .get(&m.pattern())
+                .copied()
+                .unwrap_or_else(|| capture_type.default_scope());
+            let indent_capture = IndentCapture {
                 capture_type,
                 scope,
             };
-            // Apply additional settings for this capture
-            for property in query.property_settings(m.pattern_index) {
-                match property.key.as_ref() {
-                    "scope" => {
-                        indent_capture.scope = match property.value.as_deref() {
-                            Some("all") => IndentScope::All,
-                            Some("tail") => IndentScope::Tail,
-                            Some(s) => {
-                                panic!("Invalid indent query: Unknown value for \"scope\" property (\"{}\")", s);
-                            }
-                            None => {
-                                panic!(
-                                    "Invalid indent query: Missing value for \"scope\" property"
-                                );
-                            }
-                        }
-                    }
-                    _ => {
-                        panic!(
-                            "Invalid indent query: Unknown property \"{}\"",
-                            property.key
-                        );
-                    }
-                }
-            }
-            added_indent_captures.push((capture.node.id(), indent_capture))
+            added_indent_captures.push((node_id, indent_capture))
         }
         for (node_id, mut capture) in added_indent_captures {
             // Set the anchor for all align queries.
             if let IndentCaptureType::Align(_) = capture.capture_type {
-                let anchor = match anchor {
-                    None => {
-                        log::error!(
-                            "Invalid indent query: @align requires an accompanying @anchor."
-                        );
-                        continue;
-                    }
-                    Some(anchor) => anchor,
+                let Some(anchor) = anchor else {
+                    log::error!("Invalid indent query: @align requires an accompanying @anchor.");
+                    continue;
                 };
+                let line = text.byte_to_line(anchor.start_byte() as usize);
+                let line_start = text.line_to_byte(line);
                 capture.capture_type = IndentCaptureType::Align(
-                    text.line(anchor.start_position().row)
-                        .byte_slice(0..anchor.start_position().column),
+                    text.byte_slice(line_start..anchor.start_byte() as usize),
                 );
             }
             indent_captures
@@ -691,13 +762,15 @@ fn extend_nodes<'a>(
             // - the cursor is on the same line as the end of the node OR
             // - the line that the cursor is on is more indented than the
             //   first line of the node
-            if deepest_preceding.end_position().row == line {
+            if text.byte_to_line(deepest_preceding.end_byte() as usize) == line {
                 extend_node = true;
             } else {
                 let cursor_indent =
                     indent_level_for_line(text.line(line), tab_width, indent_width);
                 let node_indent = indent_level_for_line(
-                    text.line(deepest_preceding.start_position().row),
+                    text.line(
+                        text.byte_to_line(deepest_preceding.start_byte() as usize),
+                    ),
                     tab_width,
                     indent_width,
                 );
@@ -714,7 +787,7 @@ fn extend_nodes<'a>(
         if node_captured && stop_extend {
             stop_extend = false;
         } else if extend_node && !stop_extend {
-            *node = deepest_preceding;
+            *node = deepest_preceding.clone();
             break;
         }
         // If the tree contains a syntax error, `deepest_preceding` may not
@@ -731,17 +804,17 @@ fn extend_nodes<'a>(
 /// - The indent captures for all relevant nodes.
 #[allow(clippy::too_many_arguments)]
 fn init_indent_query<'a, 'b>(
-    query: &Query,
+    query: &IndentQuery,
     syntax: &'a Syntax,
     text: RopeSlice<'b>,
     tab_width: usize,
     indent_width: usize,
     line: usize,
-    byte_pos: usize,
-    new_line_byte_pos: Option<usize>,
+    byte_pos: u32,
+    new_line_byte_pos: Option<u32>,
 ) -> Option<(Node<'a>, HashMap<usize, Vec<IndentCapture<'b>>>)> {
     // The innermost tree-sitter node which is considered for the indent
-    // computation. It may change if some predeceding node is extended
+    // computation. It may change if some preceding node is extended
     let mut node = syntax
         .tree()
         .root_node()
@@ -751,37 +824,25 @@ fn init_indent_query<'a, 'b>(
         // The query range should intersect with all nodes directly preceding
         // the position of the indent query in case one of them is extended.
         let mut deepest_preceding = None; // The deepest node preceding the indent query position
-        let mut tree_cursor = node.walk();
-        for child in node.children(&mut tree_cursor) {
+        for child in node.children() {
            if child.byte_range().end <= byte_pos {
-                deepest_preceding = Some(child);
+                deepest_preceding = Some(child.clone());
            }
        }
        deepest_preceding = deepest_preceding.map(|mut prec| {
            // Get the deepest directly preceding node
            while prec.child_count() > 0 {
-                prec = prec.child(prec.child_count() - 1).unwrap();
+                prec = prec.child(prec.child_count() - 1).unwrap().clone();
            }
            prec
        });
        let query_range = deepest_preceding
+            .as_ref()
            .map(|prec| prec.byte_range().end - 1..byte_pos + 1)
            .unwrap_or(byte_pos..byte_pos + 1);

-        crate::syntax::PARSER.with(|ts_parser| {
-            let mut ts_parser = ts_parser.borrow_mut();
-            let mut cursor = ts_parser.cursors.pop().unwrap_or_default();
-            let query_result = query_indents(
-                query,
-                syntax,
-                &mut cursor,
-                text,
-                query_range,
-                new_line_byte_pos,
-            );
-            ts_parser.cursors.push(cursor);
-            (query_result, deepest_preceding)
-        })
+        let query_result = query_indents(query, syntax, text, query_range, new_line_byte_pos);
+        (query_result, deepest_preceding)
    };
    let extend_captures = query_result.extend_captures;

@@ -839,7 +900,7 @@ fn init_indent_query<'a, 'b>(
 /// ```
 #[allow(clippy::too_many_arguments)]
 pub fn treesitter_indent_for_pos<'a>(
-    query: &Query,
+    query: &IndentQuery,
     syntax: &Syntax,
     tab_width: usize,
     indent_width: usize,
@@ -848,7 +909,7 @@ pub fn treesitter_indent_for_pos<'a>(
     pos: usize,
     new_line: bool,
 ) -> Option<Indentation<'a>> {
-    let byte_pos = text.char_to_byte(pos);
+    let byte_pos = text.char_to_byte(pos) as u32;
     let new_line_byte_pos = new_line.then_some(byte_pos);
     let (mut node, mut indent_captures) = init_indent_query(
         query,
@@ -868,7 +929,7 @@ pub fn treesitter_indent_for_pos<'a>(
     let mut indent_for_line_below = Indentation::default();

     loop {
-        let is_first = is_first_in_line(node, text, new_line_byte_pos);
+        let is_first = is_first_in_line(&node, text, new_line_byte_pos);

         // Apply all indent definitions for this node.
         // Since we only iterate over each node once, we can remove the
@@ -891,8 +952,8 @@ pub fn treesitter_indent_for_pos<'a>(
         }

         if let Some(parent) = node.parent() {
-            let node_line = get_node_start_line(node, new_line_byte_pos);
-            let parent_line = get_node_start_line(parent, new_line_byte_pos);
+            let node_line = get_node_start_line(text, &node, new_line_byte_pos);
+            let parent_line = get_node_start_line(text, &parent, new_line_byte_pos);

             if node_line != parent_line {
                 // Don't add indent for the line below the line of the query
@@ -914,8 +975,9 @@ pub fn treesitter_indent_for_pos<'a>(
         } else {
             // Only add the indentation for the line below if that line
             // is not after the line that the indentation is calculated for.
-            if (node.start_position().row < line)
-                || (new_line && node.start_position().row == line && node.start_byte() < byte_pos)
+            let node_start_line = text.byte_to_line(node.start_byte() as usize);
+            if node_start_line < line
+                || (new_line && node_start_line == line && node.start_byte() < byte_pos)
             {
                 result.add_line(indent_for_line_below);
             }
@@ -930,7 +992,7 @@ pub fn treesitter_indent_for_pos<'a>(
 /// This is done either using treesitter, or if that's not available by copying the indentation from the current line
 #[allow(clippy::too_many_arguments)]
 pub fn indent_for_newline(
-    language_config: Option<&LanguageConfiguration>,
+    loader: &syntax::Loader,
     syntax: Option<&Syntax>,
     indent_heuristic: &IndentationHeuristic,
     indent_style: &IndentStyle,
@@ -947,7 +1009,7 @@ pub fn indent_for_newline(
         Some(syntax),
     ) = (
         indent_heuristic,
-        language_config.and_then(|config| config.indent_query()),
+        syntax.and_then(|syntax| loader.indent_query(syntax.root_language())),
         syntax,
     ) {
         if let Some(indent) = treesitter_indent_for_pos(
@@ -1015,10 +1077,10 @@ pub fn indent_for_newline(
     indent_style.as_str().repeat(indent_level)
 }

-pub fn get_scopes(syntax: Option<&Syntax>, text: RopeSlice, pos: usize) -> Vec<&'static str> {
+pub fn get_scopes<'a>(syntax: Option<&'a Syntax>, text: RopeSlice, pos: usize) -> Vec<&'a str> {
     let mut scopes = Vec::new();
     if let Some(syntax) = syntax {
-        let pos = text.char_to_byte(pos);
+        let pos = text.char_to_byte(pos) as u32;
         let mut node = match syntax
             .tree()
             .root_node()
@@ -1,3 +1,5 @@
+use std::fmt;
+
 pub use encoding_rs as encoding;

 pub mod auto_pairs;
@@ -53,7 +55,7 @@ pub use smartstring::SmartString;
 pub type Tendril = SmartString<smartstring::LazyCompact>;

 #[doc(inline)]
-pub use {regex, tree_sitter};
+pub use {regex, tree_house::tree_sitter};

 pub use position::{
     char_idx_at_visual_offset, coords_at_pos, pos_at_coords, softwrapped_dimensions,
@@ -67,9 +69,55 @@ pub use smallvec::{smallvec, SmallVec};
 pub use syntax::Syntax;

 pub use completion::CompletionItem;
 pub use diagnostic::Diagnostic;

 pub use line_ending::{LineEnding, NATIVE_LINE_ENDING};
 pub use transaction::{Assoc, Change, ChangeSet, Deletion, Operation, Transaction};

 pub use uri::Uri;

+pub use tree_house::Language;
+
+/// A language to use for spell checking.
+///
+/// This is defined in the form `"ab_CD"` where `a`, `b`, `C` and `D` are all ASCII alphanumeric.
+/// The first two letters declare the ISO 639 language code and the later two are the ISO 3166
+/// territory identifier. The territory identifier is optional, so a language may just be `"ab"`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct SpellingLanguage([u8; 5]);
+
+impl SpellingLanguage {
+    pub const EN_US: Self = Self(*b"en_US");
+
+    pub fn as_str(&self) -> &str {
+        // SAFETY: `.0` is all ASCII bytes which is valid UTF-8.
+        unsafe { std::str::from_utf8_unchecked(&self.0) }
+    }
+}
+
+impl fmt::Display for SpellingLanguage {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(self.as_str())
+    }
+}
+
+#[derive(Debug)]
+pub struct ParseSpellingLanguageError(String);
+
+impl std::str::FromStr for SpellingLanguage {
+    type Err = ParseSpellingLanguageError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        // TODO: some parsing.
+        if s.as_bytes() == Self::EN_US.0 {
+            Ok(Self::EN_US)
+        } else {
+            Err(ParseSpellingLanguageError(s.to_owned()))
+        }
+    }
+}
+
+impl fmt::Display for ParseSpellingLanguageError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "expected ISO639 language code and optional ISO3166 territory code ('ab' or 'ab-CD'), found '{}'", self.0)
+    }
+}
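
A quick round-trip sketch of the new type; as the `TODO` in `from_str` notes, only `en_US` actually parses at this point in the branch:

    use helix_core::SpellingLanguage;

    let lang: SpellingLanguage = "en_US".parse().expect("en_US is hard-coded to parse");
    assert_eq!(lang, SpellingLanguage::EN_US);
    assert_eq!(lang.as_str(), "en_US");
    assert_eq!(lang.to_string(), "en_US");

    // Everything else is rejected for now. Note the error text says 'ab-CD'
    // (hyphen) while the doc comment and `EN_US` itself use underscores.
    assert!("de_DE".parse::<SpellingLanguage>().is_err());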
@@ -1,7 +1,7 @@
 use std::iter;

+use crate::tree_sitter::Node;
 use ropey::RopeSlice;
-use tree_sitter::Node;

 use crate::movement::Direction::{self, Backward, Forward};
 use crate::Syntax;
@@ -75,7 +75,7 @@ fn find_pair(
     pos_: usize,
     traverse_parents: bool,
 ) -> Option<usize> {
-    let pos = doc.char_to_byte(pos_);
+    let pos = doc.char_to_byte(pos_) as u32;

     let root = syntax.tree_for_byte_range(pos, pos).root_node();
     let mut node = root.descendant_for_byte_range(pos, pos)?;
@@ -128,7 +128,7 @@ fn find_pair(
                 if find_pair_end(doc, sibling.prev_sibling(), start_char, end_char, Backward)
                     .is_some()
                 {
-                    return doc.try_byte_to_char(sibling.start_byte()).ok();
+                    return doc.try_byte_to_char(sibling.start_byte() as usize).ok();
                 }
             }
         } else if node.is_named() {
@@ -144,9 +144,9 @@ fn find_pair(
     if node.child_count() != 0 {
         return None;
     }
-    let node_start = doc.byte_to_char(node.start_byte());
-    find_matching_bracket_plaintext(doc.byte_slice(node.byte_range()), pos_ - node_start)
-        .map(|pos| pos + node_start)
+    let node_start = doc.byte_to_char(node.start_byte() as usize);
+    let node_text = doc.byte_slice(node.start_byte() as usize..node.end_byte() as usize);
+    find_matching_bracket_plaintext(node_text, pos_ - node_start).map(|pos| pos + node_start)
 }

 /// Returns the position of the matching bracket under cursor.
@@ -304,7 +304,7 @@ fn as_char(doc: RopeSlice, node: &Node) -> Option<(usize, char)> {
     if node.byte_range().len() != 1 {
         return None;
     }
-    let pos = doc.try_byte_to_char(node.start_byte()).ok()?;
+    let pos = doc.try_byte_to_char(node.start_byte() as usize).ok()?;
     Some((pos, doc.char(pos)))
 }
@@ -1,7 +1,6 @@
-use std::{cmp::Reverse, iter};
+use std::{borrow::Cow, cmp::Reverse, iter};

 use ropey::iter::Chars;
-use tree_sitter::{Node, QueryCursor};

 use crate::{
     char_idx_at_visual_offset,
@@ -13,9 +12,10 @@ use crate::{
     },
     line_ending::rope_is_line_ending,
     position::char_idx_at_visual_block_offset,
-    syntax::LanguageConfiguration,
+    syntax,
     text_annotations::TextAnnotations,
     textobject::TextObject,
+    tree_sitter::Node,
     visual_offset_from_block, Range, RopeSlice, Selection, Syntax,
 };

@@ -560,21 +560,23 @@ fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> bool {

 /// Finds the range of the next or previous textobject in the syntax sub-tree of `node`.
 /// Returns the range in the forwards direction.
+#[allow(clippy::too_many_arguments)]
 pub fn goto_treesitter_object(
     slice: RopeSlice,
     range: Range,
     object_name: &str,
     dir: Direction,
-    slice_tree: Node,
-    lang_config: &LanguageConfiguration,
+    slice_tree: &Node,
+    syntax: &Syntax,
+    loader: &syntax::Loader,
     count: usize,
 ) -> Range {
+    let textobject_query = loader.textobject_query(syntax.root_language());
     let get_range = move |range: Range| -> Option<Range> {
         let byte_pos = slice.char_to_byte(range.cursor(slice));

         let cap_name = |t: TextObject| format!("{}.{}", object_name, t);
-        let mut cursor = QueryCursor::new();
-        let nodes = lang_config.textobject_query()?.capture_nodes_any(
+        let nodes = textobject_query?.capture_nodes_any(
             &[
                 &cap_name(TextObject::Movement),
                 &cap_name(TextObject::Around),
@@ -582,7 +584,6 @@ pub fn goto_treesitter_object(
             ],
             slice_tree,
             slice,
-            &mut cursor,
         )?;

         let node = match dir {
@@ -617,14 +618,15 @@ pub fn goto_treesitter_object(
     last_range
 }

-fn find_parent_start(mut node: Node) -> Option<Node> {
+fn find_parent_start<'tree>(node: &Node<'tree>) -> Option<Node<'tree>> {
     let start = node.start_byte();
+    let mut node = Cow::Borrowed(node);

     while node.start_byte() >= start || !node.is_named() {
-        node = node.parent()?;
+        node = Cow::Owned(node.parent()?);
     }

-    Some(node)
+    Some(node.into_owned())
 }

 pub fn move_parent_node_end(
@@ -635,8 +637,8 @@ pub fn move_parent_node_end(
     movement: Movement,
 ) -> Selection {
     selection.transform(|range| {
-        let start_from = text.char_to_byte(range.from());
-        let start_to = text.char_to_byte(range.to());
+        let start_from = text.char_to_byte(range.from()) as u32;
+        let start_to = text.char_to_byte(range.to()) as u32;

         let mut node = match syntax.named_descendant_for_byte_range(start_from, start_to) {
             Some(node) => node,
@@ -654,18 +656,18 @@ pub fn move_parent_node_end(
             // moving forward, we always want to move one past the end of the
             // current node, so use the end byte of the current node, which is an exclusive
             // end of the range
-            Direction::Forward => text.byte_to_char(node.end_byte()),
+            Direction::Forward => text.byte_to_char(node.end_byte() as usize),

             // moving backward, we want the cursor to land on the start char of
             // the current node, or if it is already at the start of a node, to traverse up to
             // the parent
             Direction::Backward => {
-                let end_head = text.byte_to_char(node.start_byte());
+                let end_head = text.byte_to_char(node.start_byte() as usize);

                 // if we're already on the beginning, look up to the parent
                 if end_head == range.cursor(text) {
-                    node = find_parent_start(node).unwrap_or(node);
-                    text.byte_to_char(node.start_byte())
+                    node = find_parent_start(&node).unwrap_or(node);
+                    text.byte_to_char(node.start_byte() as usize)
                 } else {
                     end_head
                 }
@@ -4,8 +4,8 @@ pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection)
     let cursor = &mut syntax.walk();

     selection.transform(|range| {
-        let from = text.char_to_byte(range.from());
-        let to = text.char_to_byte(range.to());
+        let from = text.char_to_byte(range.from()) as u32;
+        let to = text.char_to_byte(range.to()) as u32;

         let byte_range = from..to;
         cursor.reset_to_byte_range(from, to);
@@ -17,8 +17,8 @@ pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection)
         }

         let node = cursor.node();
-        let from = text.byte_to_char(node.start_byte());
-        let to = text.byte_to_char(node.end_byte());
+        let from = text.byte_to_char(node.start_byte() as usize);
+        let to = text.byte_to_char(node.end_byte() as usize);

         Range::new(to, from).with_direction(range.direction())
     })
@@ -53,10 +53,10 @@ pub fn select_next_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
 }

 pub fn select_all_siblings(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
-    selection.transform_iter(|range| {
-        let mut cursor = syntax.walk();
+    let mut cursor = syntax.walk();
+    selection.transform_iter(move |range| {
         let (from, to) = range.into_byte_range(text);
-        cursor.reset_to_byte_range(from, to);
+        cursor.reset_to_byte_range(from as u32, to as u32);

         if !cursor.goto_parent_with(|parent| parent.child_count() > 1) {
             return vec![range].into_iter();
@@ -67,21 +67,18 @@ pub fn select_all_siblings(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
 }

 pub fn select_all_children(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
-    selection.transform_iter(|range| {
-        let mut cursor = syntax.walk();
+    let mut cursor = syntax.walk();
+    selection.transform_iter(move |range| {
         let (from, to) = range.into_byte_range(text);
-        cursor.reset_to_byte_range(from, to);
+        cursor.reset_to_byte_range(from as u32, to as u32);
         select_children(&mut cursor, text, range).into_iter()
     })
 }

-fn select_children<'n>(
-    cursor: &'n mut TreeCursor<'n>,
-    text: RopeSlice,
-    range: Range,
-) -> Vec<Range> {
+fn select_children(cursor: &mut TreeCursor, text: RopeSlice, range: Range) -> Vec<Range> {
     let children = cursor
-        .named_children()
+        .children()
+        .filter(|child| child.is_named())
         .map(|child| Range::from_node(child, text, range.direction()))
         .collect::<Vec<_>>();

@@ -98,7 +95,7 @@ pub fn select_prev_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
             text,
             selection,
             |cursor| {
-                while !cursor.goto_prev_sibling() {
+                while !cursor.goto_previous_sibling() {
                     if !cursor.goto_parent() {
                         break;
                     }
@@ -121,16 +118,16 @@ where
     let cursor = &mut syntax.walk();

     selection.transform(|range| {
-        let from = text.char_to_byte(range.from());
-        let to = text.char_to_byte(range.to());
+        let from = text.char_to_byte(range.from()) as u32;
+        let to = text.char_to_byte(range.to()) as u32;

         cursor.reset_to_byte_range(from, to);

         motion(cursor);

         let node = cursor.node();
-        let from = text.byte_to_char(node.start_byte());
-        let to = text.byte_to_char(node.end_byte());
+        let from = text.byte_to_char(node.start_byte() as usize);
+        let to = text.byte_to_char(node.end_byte() as usize);

         Range::new(from, to).with_direction(direction.unwrap_or_else(|| range.direction()))
     })
@@ -89,11 +89,6 @@ impl From<(usize, usize)> for Position {
     }
 }

-impl From<Position> for tree_sitter::Point {
-    fn from(pos: Position) -> Self {
-        Self::new(pos.row, pos.col)
-    }
-}
 /// Convert a character index to (line, column) coordinates.
 ///
 /// column in `char` count which can be used for row:column display in
@@ -9,13 +9,13 @@ use crate::{
     },
     line_ending::get_line_ending,
     movement::Direction,
+    tree_sitter::Node,
     Assoc, ChangeSet, RopeSlice,
 };
 use helix_stdx::range::is_subset;
 use helix_stdx::rope::{self, RopeSliceExt};
 use smallvec::{smallvec, SmallVec};
 use std::{borrow::Cow, iter, slice};
-use tree_sitter::Node;

 /// A single selection range.
 ///
@@ -76,8 +76,8 @@ impl Range {
     }

     pub fn from_node(node: Node, text: RopeSlice, direction: Direction) -> Self {
-        let from = text.byte_to_char(node.start_byte());
-        let to = text.byte_to_char(node.end_byte());
+        let from = text.byte_to_char(node.start_byte() as usize);
+        let to = text.byte_to_char(node.end_byte() as usize);
         Range::new(from, to).with_direction(direction)
     }
@@ -1,6 +1,6 @@
 use std::ops::{Index, IndexMut};

-use hashbrown::HashSet;
+use foldhash::HashSet;
 use helix_stdx::range::{is_exact_subset, is_subset};
 use helix_stdx::Range;
 use ropey::Rope;
@@ -35,7 +35,7 @@ impl ActiveSnippet {
         let snippet = Self {
             ranges: snippet.ranges,
             tabstops: snippet.tabstops,
-            active_tabstops: HashSet::new(),
+            active_tabstops: HashSet::default(),
             current_tabstop: TabstopIdx(0),
         };
         (snippet.tabstops.len() != 1).then_some(snippet)
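
The `HashSet::new()` to `HashSet::default()` change above is forced by the hasher swap: `std::collections::HashSet::new` is only defined for the standard `RandomState` hasher, and `foldhash::HashSet` is presumably an alias parameterized over foldhash's own hasher, so only `default()` (available for any hasher implementing `Default`) compiles. The same constraint reproduced with std types alone:

    use std::collections::hash_map::DefaultHasher;
    use std::collections::HashSet;
    use std::hash::BuildHasherDefault;

    // With a non-RandomState hasher parameter, `HashSet::new()` does not exist;
    // `HashSet::default()` is the constructor that works.
    let set: HashSet<u32, BuildHasherDefault<DefaultHasher>> = HashSet::default();
    assert!(set.is_empty());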
File diff suppressed because it is too large.

helix-core/src/syntax/config.rs (new file, 617 lines)
@@ -0,0 +1,617 @@
use crate::{auto_pairs::AutoPairs, diagnostic::Severity, Language};

use globset::GlobSet;
use helix_stdx::rope;
use serde::{ser::SerializeSeq as _, Deserialize, Serialize};

use std::{
    collections::{HashMap, HashSet},
    fmt::{self, Display},
    path::PathBuf,
    str::FromStr,
};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Configuration {
    pub language: Vec<LanguageConfiguration>,
    #[serde(default)]
    pub language_server: HashMap<String, LanguageServerConfiguration>,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct LanguageConfiguration {
    #[serde(skip)]
    pub(super) language: Option<Language>,

    #[serde(rename = "name")]
    pub language_id: String, // c-sharp, rust, tsx
    #[serde(rename = "language-id")]
    // see the table under https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem
    pub language_server_language_id: Option<String>, // csharp, rust, typescriptreact, for the language-server
    pub scope: String, // source.rust
    pub file_types: Vec<FileType>, // filename extension or ends_with? <Gemfile, rb, etc>
    #[serde(default)]
    pub shebangs: Vec<String>, // interpreter(s) associated with language
    #[serde(default)]
    pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
    #[serde(
        default,
        skip_serializing,
        deserialize_with = "from_comment_tokens",
        alias = "comment-token"
    )]
    pub comment_tokens: Option<Vec<String>>,
    #[serde(
        default,
        skip_serializing,
        deserialize_with = "from_block_comment_tokens"
    )]
    pub block_comment_tokens: Option<Vec<BlockCommentToken>>,
    pub text_width: Option<usize>,
    pub soft_wrap: Option<SoftWrap>,

    #[serde(default)]
    pub auto_format: bool,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub formatter: Option<FormatterConfiguration>,

    /// If set, overrides `editor.path-completion`.
    pub path_completion: Option<bool>,

    #[serde(default)]
    pub diagnostic_severity: Severity,

    pub grammar: Option<String>, // tree-sitter grammar name, defaults to language_id

    // content_regex
    #[serde(default, skip_serializing, deserialize_with = "deserialize_regex")]
    pub injection_regex: Option<rope::Regex>,
    // first_line_regex
    //
    #[serde(
        default,
        skip_serializing_if = "Vec::is_empty",
        serialize_with = "serialize_lang_features",
        deserialize_with = "deserialize_lang_features"
    )]
    pub language_servers: Vec<LanguageServerFeatures>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub indent: Option<IndentationConfiguration>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub debugger: Option<DebugAdapterConfig>,

    /// Automatic insertion of pairs to parentheses, brackets,
    /// etc. Defaults to true. Optionally, this can be a list of 2-tuples
    /// to specify a list of characters to pair. This overrides the
    /// global setting.
    #[serde(default, skip_serializing, deserialize_with = "deserialize_auto_pairs")]
    pub auto_pairs: Option<AutoPairs>,

    pub rulers: Option<Vec<u16>>, // if set, override editor's rulers

    /// Hardcoded LSP root directories relative to the workspace root, like `examples` or `tools/fuzz`.
    /// Falling back to the current working directory if none are configured.
    pub workspace_lsp_roots: Option<Vec<PathBuf>>,
    #[serde(default)]
    pub persistent_diagnostic_sources: Vec<String>,
}

impl LanguageConfiguration {
    pub fn language(&self) -> Language {
        // This value must be set by `super::Loader::new`.
        self.language.unwrap()
    }
}

#[derive(Debug, PartialEq, Eq, Hash)]
pub enum FileType {
    /// The extension of the file, either the `Path::extension` or the full
    /// filename if the file does not have an extension.
    Extension(String),
    /// A Unix-style path glob. This is compared to the file's absolute path, so
    /// it can be used to detect files based on their directories. If the glob
    /// is not an absolute path and does not already start with a glob pattern,
    /// a glob pattern will be prepended to it.
    Glob(globset::Glob),
}

impl Serialize for FileType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::ser::SerializeMap;

        match self {
            FileType::Extension(extension) => serializer.serialize_str(extension),
            FileType::Glob(glob) => {
                let mut map = serializer.serialize_map(Some(1))?;
                map.serialize_entry("glob", glob.glob())?;
                map.end()
            }
        }
    }
}

impl<'de> Deserialize<'de> for FileType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        struct FileTypeVisitor;

        impl<'de> serde::de::Visitor<'de> for FileTypeVisitor {
            type Value = FileType;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("string or table")
            }

            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(FileType::Extension(value.to_string()))
            }

            fn visit_map<M>(self, mut map: M) -> Result<Self::Value, M::Error>
            where
                M: serde::de::MapAccess<'de>,
            {
                match map.next_entry::<String, String>()? {
                    Some((key, mut glob)) if key == "glob" => {
                        // If the glob isn't an absolute path or already starts
                        // with a glob pattern, add a leading glob so we
                        // properly match relative paths.
                        if !glob.starts_with('/') && !glob.starts_with("*/") {
                            glob.insert_str(0, "*/");
                        }

                        globset::Glob::new(glob.as_str())
                            .map(FileType::Glob)
                            .map_err(|err| {
                                serde::de::Error::custom(format!("invalid `glob` pattern: {}", err))
                            })
                    }
                    Some((key, _value)) => Err(serde::de::Error::custom(format!(
                        "unknown key in `file-types` list: {}",
                        key
                    ))),
                    None => Err(serde::de::Error::custom(
                        "expected a `suffix` key in the `file-types` entry",
                    )),
                }
            }
        }

        deserializer.deserialize_any(FileTypeVisitor)
    }
}
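
// Hedged illustration, not part of the original file: `file-types` entries are
// either bare strings (extensions) or `{ glob = "..." }` tables, and relative
// globs get "*/" prepended so they match anywhere inside an absolute path.
// Assumes the `toml` crate this module already depends on.
#[cfg(test)]
mod file_type_example {
    use super::FileType;
    use toml::Value;

    #[test]
    fn file_type_deserialization_sketch() {
        // A bare string deserializes to FileType::Extension via `visit_str`.
        let ext: FileType = Value::String("rs".into()).try_into().unwrap();
        assert_eq!(ext, FileType::Extension("rs".to_string()));

        // A relative `{ glob = ... }` table gets "*/" prepended in `visit_map`.
        let mut table = toml::map::Map::new();
        table.insert("glob".into(), Value::String(".github/workflows/*.yaml".into()));
        let glob: FileType = Value::Table(table).try_into().unwrap();
        assert_eq!(
            glob,
            FileType::Glob(globset::Glob::new("*/.github/workflows/*.yaml").unwrap())
        );
    }
}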
fn from_comment_tokens<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum CommentTokens {
        Multiple(Vec<String>),
        Single(String),
    }
    Ok(
        Option::<CommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
            CommentTokens::Single(val) => vec![val],
            CommentTokens::Multiple(vals) => vals,
        }),
    )
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BlockCommentToken {
    pub start: String,
    pub end: String,
}

impl Default for BlockCommentToken {
    fn default() -> Self {
        BlockCommentToken {
            start: "/*".to_string(),
            end: "*/".to_string(),
        }
    }
}

fn from_block_comment_tokens<'de, D>(
    deserializer: D,
) -> Result<Option<Vec<BlockCommentToken>>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum BlockCommentTokens {
        Multiple(Vec<BlockCommentToken>),
        Single(BlockCommentToken),
    }
    Ok(
        Option::<BlockCommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
            BlockCommentTokens::Single(val) => vec![val],
            BlockCommentTokens::Multiple(vals) => vals,
        }),
    )
}

#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "kebab-case")]
pub enum LanguageServerFeature {
    Format,
    GotoDeclaration,
    GotoDefinition,
    GotoTypeDefinition,
    GotoReference,
    GotoImplementation,
    // Goto, use bitflags, combining previous Goto members?
    SignatureHelp,
    Hover,
    DocumentHighlight,
    Completion,
    CodeAction,
    WorkspaceCommand,
    DocumentSymbols,
    WorkspaceSymbols,
    // Symbols, use bitflags, see above?
    Diagnostics,
    RenameSymbol,
    InlayHints,
    DocumentColors,
}

impl Display for LanguageServerFeature {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use LanguageServerFeature::*;
        let feature = match self {
            Format => "format",
            GotoDeclaration => "goto-declaration",
            GotoDefinition => "goto-definition",
            GotoTypeDefinition => "goto-type-definition",
            GotoReference => "goto-reference",
            GotoImplementation => "goto-implementation",
            SignatureHelp => "signature-help",
            Hover => "hover",
            DocumentHighlight => "document-highlight",
            Completion => "completion",
            CodeAction => "code-action",
            WorkspaceCommand => "workspace-command",
            DocumentSymbols => "document-symbols",
            WorkspaceSymbols => "workspace-symbols",
            Diagnostics => "diagnostics",
            RenameSymbol => "rename-symbol",
            InlayHints => "inlay-hints",
            DocumentColors => "document-colors",
        };
        write!(f, "{feature}",)
    }
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged, rename_all = "kebab-case", deny_unknown_fields)]
enum LanguageServerFeatureConfiguration {
    #[serde(rename_all = "kebab-case")]
    Features {
        #[serde(default, skip_serializing_if = "HashSet::is_empty")]
        only_features: HashSet<LanguageServerFeature>,
        #[serde(default, skip_serializing_if = "HashSet::is_empty")]
        except_features: HashSet<LanguageServerFeature>,
        name: String,
    },
    Simple(String),
}

#[derive(Debug, Default)]
pub struct LanguageServerFeatures {
    pub name: String,
    pub only: HashSet<LanguageServerFeature>,
    pub excluded: HashSet<LanguageServerFeature>,
}

impl LanguageServerFeatures {
    pub fn has_feature(&self, feature: LanguageServerFeature) -> bool {
        (self.only.is_empty() || self.only.contains(&feature)) && !self.excluded.contains(&feature)
    }
}
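
// Illustration, not part of the original file, of the `has_feature` rule above:
// an empty `only` set means "every feature except the excluded ones", while a
// non-empty `only` set acts as an allow-list.
#[cfg(test)]
mod has_feature_example {
    use super::{LanguageServerFeature, LanguageServerFeatures};

    #[test]
    fn has_feature_sketch() {
        let mut features = LanguageServerFeatures {
            name: "rust-analyzer".to_string(),
            ..Default::default()
        };

        // Empty `only` and empty `excluded`: everything is enabled.
        assert!(features.has_feature(LanguageServerFeature::Hover));

        // Excluding a feature turns off just that feature.
        features.excluded.insert(LanguageServerFeature::Format);
        assert!(!features.has_feature(LanguageServerFeature::Format));
        assert!(features.has_feature(LanguageServerFeature::Hover));

        // A non-empty `only` set disables everything outside it.
        features.only.insert(LanguageServerFeature::Completion);
        assert!(features.has_feature(LanguageServerFeature::Completion));
        assert!(!features.has_feature(LanguageServerFeature::Hover));
    }
}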
fn deserialize_lang_features<'de, D>(
|
||||
deserializer: D,
|
||||
) -> Result<Vec<LanguageServerFeatures>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let raw: Vec<LanguageServerFeatureConfiguration> = Deserialize::deserialize(deserializer)?;
|
||||
let res = raw
|
||||
.into_iter()
|
||||
.map(|config| match config {
|
||||
LanguageServerFeatureConfiguration::Simple(name) => LanguageServerFeatures {
|
||||
name,
|
||||
..Default::default()
|
||||
},
|
||||
LanguageServerFeatureConfiguration::Features {
|
||||
only_features,
|
||||
except_features,
|
||||
name,
|
||||
} => LanguageServerFeatures {
|
||||
name,
|
||||
only: only_features,
|
||||
excluded: except_features,
|
||||
},
|
||||
})
|
||||
.collect();
|
||||
Ok(res)
|
||||
}
|
||||
fn serialize_lang_features<S>(
|
||||
map: &Vec<LanguageServerFeatures>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut serializer = serializer.serialize_seq(Some(map.len()))?;
|
||||
for features in map {
|
||||
let features = if features.only.is_empty() && features.excluded.is_empty() {
|
||||
LanguageServerFeatureConfiguration::Simple(features.name.to_owned())
|
||||
} else {
|
||||
LanguageServerFeatureConfiguration::Features {
|
||||
only_features: features.only.clone(),
|
||||
except_features: features.excluded.clone(),
|
||||
name: features.name.to_owned(),
|
||||
}
|
||||
};
|
||||
serializer.serialize_element(&features)?;
|
||||
}
|
||||
serializer.end()
|
||||
}
|
||||
|
||||
fn deserialize_required_root_patterns<'de, D>(deserializer: D) -> Result<Option<GlobSet>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let patterns = Vec::<String>::deserialize(deserializer)?;
|
||||
if patterns.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
let mut builder = globset::GlobSetBuilder::new();
|
||||
for pattern in patterns {
|
||||
let glob = globset::Glob::new(&pattern).map_err(serde::de::Error::custom)?;
|
||||
builder.add(glob);
|
||||
}
|
||||
builder.build().map(Some).map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct LanguageServerConfiguration {
|
||||
pub command: String,
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
pub args: Vec<String>,
|
||||
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
|
||||
pub environment: HashMap<String, String>,
|
||||
#[serde(default, skip_serializing, deserialize_with = "deserialize_lsp_config")]
|
||||
pub config: Option<serde_json::Value>,
|
||||
#[serde(default = "default_timeout")]
|
||||
pub timeout: u64,
|
||||
#[serde(
|
||||
default,
|
||||
skip_serializing,
|
||||
deserialize_with = "deserialize_required_root_patterns"
|
||||
)]
|
||||
pub required_root_patterns: Option<GlobSet>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct FormatterConfiguration {
|
||||
pub command: String,
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
pub args: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct AdvancedCompletion {
|
||||
pub name: Option<String>,
|
||||
pub completion: Option<String>,
|
||||
pub default: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case", untagged)]
|
||||
pub enum DebugConfigCompletion {
|
||||
Named(String),
|
||||
Advanced(AdvancedCompletion),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum DebugArgumentValue {
|
||||
String(String),
|
||||
Array(Vec<String>),
|
||||
Boolean(bool),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct DebugTemplate {
|
||||
pub name: String,
|
||||
pub request: String,
|
||||
#[serde(default)]
|
||||
pub completion: Vec<DebugConfigCompletion>,
|
||||
pub args: HashMap<String, DebugArgumentValue>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct DebugAdapterConfig {
|
||||
pub name: String,
|
||||
pub transport: String,
|
||||
#[serde(default)]
|
||||
pub command: String,
|
||||
#[serde(default)]
|
||||
pub args: Vec<String>,
|
||||
pub port_arg: Option<String>,
|
||||
pub templates: Vec<DebugTemplate>,
|
||||
#[serde(default)]
|
||||
pub quirks: DebuggerQuirks,
|
||||
}
|
||||
|
||||
// Different workarounds for adapters' differences
|
||||
#[derive(Debug, Default, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub struct DebuggerQuirks {
|
||||
#[serde(default)]
|
||||
pub absolute_paths: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct IndentationConfiguration {
|
||||
#[serde(deserialize_with = "deserialize_tab_width")]
|
||||
pub tab_width: usize,
|
||||
pub unit: String,
|
||||
}
|
||||
|
||||
/// How the indentation for a newly inserted line should be determined.
|
||||
/// If the selected heuristic is not available (e.g. because the current
|
||||
/// language has no tree-sitter indent queries), a simpler one will be used.
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum IndentationHeuristic {
|
||||
/// Just copy the indentation of the line that the cursor is currently on.
|
||||
Simple,
|
||||
/// Use tree-sitter indent queries to compute the expected absolute indentation level of the new line.
|
||||
TreeSitter,
|
||||
/// Use tree-sitter indent queries to compute the expected difference in indentation between the new line
|
||||
/// and the line before. Add this to the actual indentation level of the line before.
|
||||
#[default]
|
||||
Hybrid,
|
||||
}
|
||||
|
||||
/// Configuration for auto pairs
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields, untagged)]
pub enum AutoPairConfig {
    /// Enables or disables auto pairing. False means disabled. True means to use the default pairs.
    Enable(bool),

    /// The mappings of pairs.
    Pairs(HashMap<char, char>),
}

impl Default for AutoPairConfig {
    fn default() -> Self {
        AutoPairConfig::Enable(true)
    }
}

impl From<&AutoPairConfig> for Option<AutoPairs> {
    fn from(auto_pair_config: &AutoPairConfig) -> Self {
        match auto_pair_config {
            AutoPairConfig::Enable(false) => None,
            AutoPairConfig::Enable(true) => Some(AutoPairs::default()),
            AutoPairConfig::Pairs(pairs) => Some(AutoPairs::new(pairs.iter())),
        }
    }
}

impl From<AutoPairConfig> for Option<AutoPairs> {
    fn from(auto_pairs_config: AutoPairConfig) -> Self {
        (&auto_pairs_config).into()
    }
}

impl FromStr for AutoPairConfig {
    type Err = std::str::ParseBoolError;

    // only do bool parsing for runtime setting
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let enable: bool = s.parse()?;
        Ok(AutoPairConfig::Enable(enable))
    }
}

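The untagged enum above is what lets an `auto-pairs` key accept either a plain boolean or a table of custom pairs. A minimal standalone sketch of that behavior, assuming the `serde` and `toml` crates; the redeclared enum and the `Doc` wrapper struct are illustrative stand-ins, not part of the diff:

```rust
use std::collections::HashMap;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum AutoPairConfig {
    Enable(bool),
    Pairs(HashMap<char, char>),
}

#[derive(Debug, Deserialize)]
struct Doc {
    #[serde(rename = "auto-pairs")]
    auto_pairs: AutoPairConfig,
}

fn main() {
    // A bare boolean selects the `Enable` variant...
    let off: Doc = toml::from_str("auto-pairs = false").unwrap();
    // ...while a table of mappings selects the `Pairs` variant.
    let custom: Doc = toml::from_str("auto-pairs = { '(' = ')', '[' = ']' }").unwrap();
    println!("{off:?} {custom:?}");
}
```

Because the enum is untagged, serde simply tries each variant in order against the TOML value, which is why no explicit tag appears in the config file.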
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct SoftWrap {
    /// Soft wrap lines that exceed viewport width. Defaults to off
    // NOTE: Option on purpose because the struct is shared between language config and global config.
    // By default the option is None so that the language config falls back to the global config unless explicitly set.
    pub enable: Option<bool>,
    /// Maximum space left free at the end of the line.
    /// This space is used to wrap text at word boundaries. If that is not possible within this limit
    /// the word is simply split at the end of the line.
    ///
    /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
    ///
    /// Defaults to 20
    pub max_wrap: Option<u16>,
    /// Maximum amount of indentation that can be carried over from the previous line when softwrapping.
    /// If a line is indented further than this limit it is rendered at the start of the viewport instead.
    ///
    /// This is automatically hard-limited to a quarter of the viewport to ensure correct display on small views.
    ///
    /// Defaults to 40
    pub max_indent_retain: Option<u16>,
    /// Indicator placed at the beginning of softwrapped lines
    ///
    /// Defaults to ↪
    pub wrap_indicator: Option<String>,
    /// Softwrap at `text_width` instead of viewport width if it is shorter
    pub wrap_at_text_width: Option<bool>,
}

fn deserialize_regex<'de, D>(deserializer: D) -> Result<Option<rope::Regex>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    Option::<String>::deserialize(deserializer)?
        .map(|buf| rope::Regex::new(&buf).map_err(serde::de::Error::custom))
        .transpose()
}

fn deserialize_lsp_config<'de, D>(deserializer: D) -> Result<Option<serde_json::Value>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    Option::<toml::Value>::deserialize(deserializer)?
        .map(|toml| toml.try_into().map_err(serde::de::Error::custom))
        .transpose()
}

fn deserialize_tab_width<'de, D>(deserializer: D) -> Result<usize, D::Error>
where
    D: serde::Deserializer<'de>,
{
    usize::deserialize(deserializer).and_then(|n| {
        if n > 0 && n <= 16 {
            Ok(n)
        } else {
            Err(serde::de::Error::custom(
                "tab width must be a value from 1 to 16 inclusive",
            ))
        }
    })
}

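A small sketch of the validation path `deserialize_tab_width` implements, assuming `serde` and `toml`; the `Indent` struct is a hypothetical stand-in and the guard is restated with `RangeInclusive::contains`, which is equivalent to the `n > 0 && n <= 16` check above:

```rust
use serde::{Deserialize, Deserializer};

#[derive(Debug, Deserialize)]
struct Indent {
    #[serde(deserialize_with = "tab_width")]
    tab_width: usize,
}

fn tab_width<'de, D: Deserializer<'de>>(d: D) -> Result<usize, D::Error> {
    usize::deserialize(d).and_then(|n| {
        if (1..=16).contains(&n) {
            Ok(n)
        } else {
            Err(serde::de::Error::custom(
                "tab width must be a value from 1 to 16 inclusive",
            ))
        }
    })
}

fn main() {
    let ok: Indent = toml::from_str("tab_width = 4").unwrap();
    assert_eq!(ok.tab_width, 4);
    // 0 and 17 fall outside 1..=16 and surface as serde errors at load time.
    assert!(toml::from_str::<Indent>("tab_width = 0").is_err());
    assert!(toml::from_str::<Indent>("tab_width = 17").is_err());
    println!("{ok:?}");
}
```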
pub fn deserialize_auto_pairs<'de, D>(deserializer: D) -> Result<Option<AutoPairs>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    Ok(Option::<AutoPairConfig>::deserialize(deserializer)?.and_then(AutoPairConfig::into))
}

fn default_timeout() -> u64 {
    20
}
@@ -1,264 +0,0 @@
use std::{cmp::Reverse, ops::Range};

use super::{LanguageLayer, LayerId};

use slotmap::HopSlotMap;
use tree_sitter::Node;

/// The byte range of an injection layer.
///
/// Injection ranges may overlap, but all overlapping parts are subsets of their parent ranges.
/// This allows us to sort the ranges ahead of time in order to efficiently find a range that
/// contains a point with maximum depth.
#[derive(Debug)]
struct InjectionRange {
    start: usize,
    end: usize,
    layer_id: LayerId,
    depth: u32,
}

pub struct TreeCursor<'a> {
    layers: &'a HopSlotMap<LayerId, LanguageLayer>,
    root: LayerId,
    current: LayerId,
    injection_ranges: Vec<InjectionRange>,
    // TODO: Ideally this would be a `tree_sitter::TreeCursor<'a>` but
    // that returns very surprising results in testing.
    cursor: Node<'a>,
}

impl<'a> TreeCursor<'a> {
    pub(super) fn new(layers: &'a HopSlotMap<LayerId, LanguageLayer>, root: LayerId) -> Self {
        let mut injection_ranges = Vec::new();

        for (layer_id, layer) in layers.iter() {
            // Skip the root layer
            if layer.parent.is_none() {
                continue;
            }
            for byte_range in layer.ranges.iter() {
                let range = InjectionRange {
                    start: byte_range.start_byte,
                    end: byte_range.end_byte,
                    layer_id,
                    depth: layer.depth,
                };
                injection_ranges.push(range);
            }
        }

        injection_ranges.sort_unstable_by_key(|range| (range.end, Reverse(range.depth)));

        let cursor = layers[root].tree().root_node();

        Self {
            layers,
            root,
            current: root,
            injection_ranges,
            cursor,
        }
    }

    pub fn node(&self) -> Node<'a> {
        self.cursor
    }

    pub fn goto_parent(&mut self) -> bool {
        if let Some(parent) = self.node().parent() {
            self.cursor = parent;
            return true;
        }

        // If we are already on the root layer, we cannot ascend.
        if self.current == self.root {
            return false;
        }

        // Ascend to the parent layer.
        let range = self.node().byte_range();
        let parent_id = self.layers[self.current]
            .parent
            .expect("non-root layers have a parent");
        self.current = parent_id;
        let root = self.layers[self.current].tree().root_node();
        self.cursor = root
            .descendant_for_byte_range(range.start, range.end)
            .unwrap_or(root);

        true
    }

    pub fn goto_parent_with<P>(&mut self, predicate: P) -> bool
    where
        P: Fn(&Node) -> bool,
    {
        while self.goto_parent() {
            if predicate(&self.node()) {
                return true;
            }
        }

        false
    }

    /// Finds the injection layer that has exactly the same range as the given `range`.
    fn layer_id_of_byte_range(&self, search_range: Range<usize>) -> Option<LayerId> {
        let start_idx = self
            .injection_ranges
            .partition_point(|range| range.end < search_range.end);

        self.injection_ranges[start_idx..]
            .iter()
            .take_while(|range| range.end == search_range.end)
            .find_map(|range| (range.start == search_range.start).then_some(range.layer_id))
    }

    fn goto_first_child_impl(&mut self, named: bool) -> bool {
        // Check if the current node's range is an exact injection layer range.
        if let Some(layer_id) = self
            .layer_id_of_byte_range(self.node().byte_range())
            .filter(|&layer_id| layer_id != self.current)
        {
            // Switch to the child layer.
            self.current = layer_id;
            self.cursor = self.layers[self.current].tree().root_node();
            return true;
        }

        let child = if named {
            self.cursor.named_child(0)
        } else {
            self.cursor.child(0)
        };

        if let Some(child) = child {
            // Otherwise descend in the current tree.
            self.cursor = child;
            true
        } else {
            false
        }
    }

    pub fn goto_first_child(&mut self) -> bool {
        self.goto_first_child_impl(false)
    }

    pub fn goto_first_named_child(&mut self) -> bool {
        self.goto_first_child_impl(true)
    }

    fn goto_next_sibling_impl(&mut self, named: bool) -> bool {
        let sibling = if named {
            self.cursor.next_named_sibling()
        } else {
            self.cursor.next_sibling()
        };

        if let Some(sibling) = sibling {
            self.cursor = sibling;
            true
        } else {
            false
        }
    }

    pub fn goto_next_sibling(&mut self) -> bool {
        self.goto_next_sibling_impl(false)
    }

    pub fn goto_next_named_sibling(&mut self) -> bool {
        self.goto_next_sibling_impl(true)
    }

    fn goto_prev_sibling_impl(&mut self, named: bool) -> bool {
        let sibling = if named {
            self.cursor.prev_named_sibling()
        } else {
            self.cursor.prev_sibling()
        };

        if let Some(sibling) = sibling {
            self.cursor = sibling;
            true
        } else {
            false
        }
    }

    pub fn goto_prev_sibling(&mut self) -> bool {
        self.goto_prev_sibling_impl(false)
    }

    pub fn goto_prev_named_sibling(&mut self) -> bool {
        self.goto_prev_sibling_impl(true)
    }

    /// Finds the injection layer that contains the given start-end range.
    fn layer_id_containing_byte_range(&self, start: usize, end: usize) -> LayerId {
        let start_idx = self
            .injection_ranges
            .partition_point(|range| range.end < end);

        self.injection_ranges[start_idx..]
            .iter()
            .take_while(|range| range.start < end || range.depth > 1)
            .find_map(|range| (range.start <= start).then_some(range.layer_id))
            .unwrap_or(self.root)
    }

    pub fn reset_to_byte_range(&mut self, start: usize, end: usize) {
        self.current = self.layer_id_containing_byte_range(start, end);
        let root = self.layers[self.current].tree().root_node();
        self.cursor = root.descendant_for_byte_range(start, end).unwrap_or(root);
    }

    /// Returns an iterator over the children of the node the TreeCursor is on
    /// at the time this is called.
    pub fn children(&'a mut self) -> ChildIter<'a> {
        let parent = self.node();

        ChildIter {
            cursor: self,
            parent,
            named: false,
        }
    }

    /// Returns an iterator over the named children of the node the TreeCursor is on
    /// at the time this is called.
    pub fn named_children(&'a mut self) -> ChildIter<'a> {
        let parent = self.node();

        ChildIter {
            cursor: self,
            parent,
            named: true,
        }
    }
}

pub struct ChildIter<'n> {
    cursor: &'n mut TreeCursor<'n>,
    parent: Node<'n>,
    named: bool,
}

impl<'n> Iterator for ChildIter<'n> {
    type Item = Node<'n>;

    fn next(&mut self) -> Option<Self::Item> {
        // first iteration, just visit the first child
        if self.cursor.node() == self.parent {
            self.cursor
                .goto_first_child_impl(self.named)
                .then(|| self.cursor.node())
        } else {
            self.cursor
                .goto_next_sibling_impl(self.named)
                .then(|| self.cursor.node())
        }
    }
}
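The file removed above relied on one non-obvious trick: because injection ranges are pre-sorted by `(end, Reverse(depth))`, a `partition_point` binary search can skip every range that ends before the query, and a short forward scan then yields the deepest containing layer. A standalone sketch of that lookup on plain data (the types here are simplified stand-ins, not the deleted API):

```rust
use std::cmp::Reverse;

#[derive(Debug)]
struct InjectionRange {
    start: usize,
    end: usize,
    depth: u32,
}

fn layer_containing(ranges: &[InjectionRange], start: usize, end: usize) -> Option<&InjectionRange> {
    // Skip every range that ends strictly before the query's end.
    let first = ranges.partition_point(|r| r.end < end);
    // Deeper layers sort first among equal ends, so the first match is the deepest.
    ranges[first..]
        .iter()
        .take_while(|r| r.start < end || r.depth > 1)
        .find(|r| r.start <= start)
}

fn main() {
    let mut ranges = vec![
        InjectionRange { start: 10, end: 40, depth: 1 },
        InjectionRange { start: 15, end: 30, depth: 2 },
    ];
    ranges.sort_unstable_by_key(|r| (r.end, Reverse(r.depth)));
    // A query nested inside both ranges resolves to the deeper layer.
    let hit = layer_containing(&ranges, 16, 20).unwrap();
    assert_eq!(hit.depth, 2);
    println!("{hit:?}");
}
```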
@@ -65,7 +65,7 @@ pub fn print(s: &str) -> (String, Selection) {
    let head_at_beg = iter.next_if_eq(&"|").is_some();
    let last_grapheme = |s: &str| {
        UnicodeSegmentation::graphemes(s, true)
            .last()
            .next_back()
            .map(String::from)
    };

@@ -5,7 +5,7 @@ use std::ops::Range;
use std::ptr::NonNull;

use crate::doc_formatter::FormattedGrapheme;
use crate::syntax::Highlight;
use crate::syntax::{Highlight, OverlayHighlights};
use crate::{Position, Tendril};

/// An inline annotation is continuous text shown
@@ -300,10 +300,7 @@ impl<'a> TextAnnotations<'a> {
        }
    }

    pub fn collect_overlay_highlights(
        &self,
        char_range: Range<usize>,
    ) -> Vec<(usize, Range<usize>)> {
    pub fn collect_overlay_highlights(&self, char_range: Range<usize>) -> OverlayHighlights {
        let mut highlights = Vec::new();
        self.reset_pos(char_range.start);
        for char_idx in char_range {
@@ -311,11 +308,11 @@ impl<'a> TextAnnotations<'a> {
            // we don't know the number of chars the original grapheme takes
            // however it doesn't matter as highlight boundaries are automatically
            // aligned to grapheme boundaries in the rendering code
            highlights.push((highlight.0, char_idx..char_idx + 1))
            highlights.push((highlight, char_idx..char_idx + 1));
        }
    }

    highlights
    OverlayHighlights::Heterogenous { highlights }
    }

    /// Add new inline annotations.
@@ -1,13 +1,12 @@
use std::fmt::Display;

use ropey::RopeSlice;
use tree_sitter::{Node, QueryCursor};

use crate::chars::{categorize_char, char_is_whitespace, CharCategory};
use crate::graphemes::{next_grapheme_boundary, prev_grapheme_boundary};
use crate::line_ending::rope_is_line_ending;
use crate::movement::Direction;
use crate::syntax::LanguageConfiguration;
use crate::syntax;
use crate::Range;
use crate::{surround, Syntax};

@@ -260,18 +259,18 @@ pub fn textobject_treesitter(
    range: Range,
    textobject: TextObject,
    object_name: &str,
    slice_tree: Node,
    lang_config: &LanguageConfiguration,
    syntax: &Syntax,
    loader: &syntax::Loader,
    _count: usize,
) -> Range {
    let root = syntax.tree().root_node();
    let textobject_query = loader.textobject_query(syntax.root_language());
    let get_range = move || -> Option<Range> {
        let byte_pos = slice.char_to_byte(range.cursor(slice));

        let capture_name = format!("{}.{}", object_name, textobject); // eg. function.inner
        let mut cursor = QueryCursor::new();
        let node = lang_config
            .textobject_query()?
            .capture_nodes(&capture_name, slice_tree, slice, &mut cursor)?
        let node = textobject_query?
            .capture_nodes(&capture_name, &root, slice)?
            .filter(|node| node.byte_range().contains(&byte_pos))
            .min_by_key(|node| node.byte_range().len())?;

@@ -1,18 +1,44 @@
use std::{
    fmt,
    num::NonZeroUsize,
    path::{Path, PathBuf},
    sync::Arc,
};

// uses NonZeroUsize so Option<DocumentId> takes the same space
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct DocumentId(NonZeroUsize);

impl DocumentId {
    pub const MAX: Self = Self(unsafe { NonZeroUsize::new_unchecked(usize::MAX) });

    pub fn next(&self) -> Self {
        // Safety: adding 1 to a non-zero value keeps it non-zero, and reaching
        // usize::MAX here is practically impossible.
        Self(unsafe { NonZeroUsize::new_unchecked(self.0.get() + 1) })
    }
}

impl Default for DocumentId {
    fn default() -> DocumentId {
        // Safety: 1 is non-zero
        DocumentId(unsafe { NonZeroUsize::new_unchecked(1) })
    }
}

impl std::fmt::Display for DocumentId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

/// A generic pointer to a file location.
///
/// Currently this type only supports paths to local files.
///
/// Cloning this type is cheap: the internal representation uses an Arc.
/// Cloning this type is cheap: the internal representation uses an Arc or data which is Copy.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[non_exhaustive]
pub enum Uri {
    File(Arc<Path>),
    Scratch(DocumentId),
}

impl Uri {
@@ -21,12 +47,14 @@ impl Uri {
    pub fn to_url(&self) -> Result<url::Url, ()> {
        match self {
            Uri::File(path) => url::Url::from_file_path(path),
            Uri::Scratch(_) => Err(()),
        }
    }

    pub fn as_path(&self) -> Option<&Path> {
        match self {
            Self::File(path) => Some(path),
            Self::Scratch(_) => None,
        }
    }
}
@@ -41,6 +69,7 @@ impl fmt::Display for Uri {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::File(path) => write!(f, "{}", path.display()),
            Self::Scratch(id) => write!(f, "[scratch {id}]"),
        }
    }
}
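A standalone sketch of how the two `Uri` variants added above behave from a caller's perspective; the enum is redeclared here with a plain `usize` standing in for `DocumentId`, so this is illustrative rather than the crate's actual type:

```rust
use std::path::Path;
use std::sync::Arc;

#[derive(Debug, Clone, PartialEq, Eq)]
enum Uri {
    File(Arc<Path>),
    Scratch(usize), // stand-in for `DocumentId` to keep the sketch self-contained
}

impl Uri {
    fn as_path(&self) -> Option<&Path> {
        match self {
            Self::File(path) => Some(path),
            Self::Scratch(_) => None,
        }
    }
}

fn main() {
    let file = Uri::File(Arc::from(Path::new("/tmp/example.rs")));
    let scratch = Uri::Scratch(1);
    assert!(file.as_path().is_some());
    // Scratch buffers have no backing path, so path-based features skip them.
    assert!(scratch.as_path().is_none());
}
```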
@@ -1,12 +1,11 @@
use arc_swap::ArcSwap;
use helix_core::{
    indent::{indent_level_for_line, treesitter_indent_for_pos, IndentStyle},
    syntax::{Configuration, Loader},
    syntax::{config::Configuration, Loader},
    Syntax,
};
use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;
use std::{ops::Range, path::PathBuf, process::Command, sync::Arc};
use std::{ops::Range, path::PathBuf, process::Command};

#[test]
fn test_treesitter_indent_rust() {
@@ -196,17 +195,12 @@ fn test_treesitter_indent(
    runtime.push("../runtime");
    std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap());

    let language_config = loader.language_config_for_scope(lang_scope).unwrap();
    let language = loader.language_for_scope(lang_scope).unwrap();
    let language_config = loader.language(language).config();
    let indent_style = IndentStyle::from_str(&language_config.indent.as_ref().unwrap().unit);
    let highlight_config = language_config.highlight_config(&[]).unwrap();
    let text = doc.slice(..);
    let syntax = Syntax::new(
        text,
        highlight_config,
        Arc::new(ArcSwap::from_pointee(loader)),
    )
    .unwrap();
    let indent_query = language_config.indent_query().unwrap();
    let syntax = Syntax::new(text, language, &loader).unwrap();
    let indent_query = loader.indent_query(language).unwrap();

    for i in 0..doc.len_lines() {
        let line = text.line(i);
@@ -4,7 +4,7 @@ use crate::{
    types::*,
    Error, Result,
};
use helix_core::syntax::DebuggerQuirks;
use helix_core::syntax::config::DebuggerQuirks;

use serde_json::Value;

@@ -85,10 +85,11 @@ impl Transport {
    async fn recv_server_message(
        reader: &mut Box<dyn AsyncBufRead + Unpin + Send>,
        buffer: &mut String,
        content: &mut Vec<u8>,
    ) -> Result<Payload> {
        let mut content_length = None;
        loop {
            buffer.truncate(0);
            buffer.clear();
            if reader.read_line(buffer).await? == 0 {
                return Err(Error::StreamClosed);
            };
@@ -117,17 +118,17 @@ impl Transport {
        }

        let content_length = content_length.context("missing content length")?;

        //TODO: reuse vector
        let mut content = vec![0; content_length];
        reader.read_exact(&mut content).await?;
        let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?;
        content.resize(content_length, 0);
        reader.read_exact(content).await?;
        let msg = std::str::from_utf8(content).context("invalid utf8 from server")?;

        info!("<- DAP {}", msg);

        // try parsing as output (server response) or call (server request)
        let output: serde_json::Result<Payload> = serde_json::from_str(msg);

        content.clear();

        Ok(output?)
    }

@@ -242,8 +243,15 @@ impl Transport {
        client_tx: UnboundedSender<Payload>,
    ) {
        let mut recv_buffer = String::new();
        let mut content_buffer = Vec::new();
        loop {
            match Self::recv_server_message(&mut server_stdout, &mut recv_buffer).await {
            match Self::recv_server_message(
                &mut server_stdout,
                &mut recv_buffer,
                &mut content_buffer,
            )
            .await
            {
                Ok(msg) => match transport.process_server_message(&client_tx, msg).await {
                    Ok(_) => (),
                    Err(err) => {
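The hunks above replace a fresh `vec![0; content_length]` per message with one long-lived buffer that is resized before each read and cleared after it, so the allocation is reused across messages. A minimal synchronous sketch of the same pattern, using `std::io::Read` instead of the async reader (the function name is illustrative):

```rust
use std::io::Read;

fn read_message<R: Read>(reader: &mut R, content: &mut Vec<u8>, len: usize) -> std::io::Result<String> {
    // Resize reuses the existing allocation whenever the capacity suffices.
    content.resize(len, 0);
    reader.read_exact(content)?;
    let msg = String::from_utf8_lossy(content).into_owned();
    // Clearing keeps the capacity around for the next message.
    content.clear();
    Ok(msg)
}

fn main() -> std::io::Result<()> {
    let mut content = Vec::new();
    let mut input: &[u8] = b"helloworld";
    assert_eq!(read_message(&mut input, &mut content, 5)?, "hello");
    assert_eq!(read_message(&mut input, &mut content, 5)?, "world");
    Ok(())
}
```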
@@ -56,6 +56,7 @@ fn smoke_test() {
}

#[test]
#[allow(dead_code)]
fn dynamic() {
    events! {
        Event3 {}
@@ -21,7 +21,6 @@ anyhow = "1"
serde = { version = "1.0", features = ["derive"] }
toml = "0.8"
etcetera = "0.10"
tree-sitter.workspace = true
once_cell = "1.21"
log = "0.4"

@@ -32,5 +31,4 @@ cc = { version = "1" }
threadpool = { version = "1.0" }
tempfile.workspace = true

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
libloading = "0.8"
tree-house.workspace = true
@@ -23,22 +23,6 @@ pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
        .collect::<Result<Vec<_>, _>>()?
        .into_iter()
        .fold(default_lang_config(), |a, b| {
            // combines for example
            // b:
            // [[language]]
            // name = "toml"
            // language-server = { command = "taplo", args = ["lsp", "stdio"] }
            //
            // a:
            // [[language]]
            // language-server = { command = "/usr/bin/taplo" }
            //
            // into:
            // [[language]]
            // name = "toml"
            // language-server = { command = "/usr/bin/taplo" }
            //
            // thus it overrides the third depth-level of b with values of a if they exist, but otherwise merges their values
            crate::merge_toml_values(a, b, 3)
        });

@@ -9,7 +9,7 @@ use std::{
    sync::mpsc::channel,
};
use tempfile::TempPath;
use tree_sitter::Language;
use tree_house::tree_sitter::Grammar;

#[cfg(unix)]
const DYLIB_EXTENSION: &str = "so";
@@ -61,28 +61,21 @@ const BUILD_TARGET: &str = env!("BUILD_TARGET");
const REMOTE_NAME: &str = "origin";

#[cfg(target_arch = "wasm32")]
pub fn get_language(name: &str) -> Result<Language> {
pub fn get_language(name: &str) -> Result<Option<Grammar>> {
    unimplemented!()
}

#[cfg(not(target_arch = "wasm32"))]
pub fn get_language(name: &str) -> Result<Language> {
    use libloading::{Library, Symbol};
pub fn get_language(name: &str) -> Result<Option<Grammar>> {
    let mut rel_library_path = PathBuf::new().join("grammars").join(name);
    rel_library_path.set_extension(DYLIB_EXTENSION);
    let library_path = crate::runtime_file(&rel_library_path);
    if !library_path.exists() {
        return Ok(None);
    }

    let library = unsafe { Library::new(&library_path) }
        .with_context(|| format!("Error opening dynamic library {:?}", library_path))?;
    let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_"));
    let language = unsafe {
        let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
            .get(language_fn_name.as_bytes())
            .with_context(|| format!("Failed to load symbol {}", language_fn_name))?;
        language_fn()
    };
    std::mem::forget(library);
    Ok(language)
    let grammar = unsafe { Grammar::new(name, &library_path) }?;
    Ok(Some(grammar))
}

fn ensure_git_is_available() -> Result<()> {
@@ -451,7 +444,6 @@ fn build_tree_sitter_library(
        command
            .args(["/nologo", "/LD", "/I"])
            .arg(header_path)
            .arg("/Od")
            .arg("/utf-8")
            .arg("/std:c11");
        if let Some(scanner_path) = scanner_path.as_ref() {
@@ -469,7 +461,6 @@ fn build_tree_sitter_library(
            cpp_command
                .args(["/nologo", "/LD", "/I"])
                .arg(header_path)
                .arg("/Od")
                .arg("/utf-8")
                .arg("/std:c++14")
                .arg(format!("/Fo{}", object_file.display()))
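With the new `Result<Option<Grammar>>` signature above, a missing grammar dylib is an ordinary `None` rather than an error, so callers can degrade to plain-text rendering. A hypothetical caller-side sketch, assuming the `anyhow` crate the file already uses; both `Grammar` and `get_language` here are stand-ins that only mirror the signatures in the diff:

```rust
#[derive(Debug)]
struct Grammar;

fn get_language(name: &str) -> anyhow::Result<Option<Grammar>> {
    // The real code loads `grammars/{name}` with a platform dylib extension
    // from the runtime directory; here we pretend only "rust" is compiled.
    Ok((name == "rust").then_some(Grammar))
}

fn main() -> anyhow::Result<()> {
    match get_language("rust")? {
        Some(grammar) => println!("highlighting enabled: {grammar:?}"),
        None => println!("grammar not built; falling back to plain text"),
    }
    Ok(())
}
```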
@@ -132,6 +132,15 @@ pub fn cache_dir() -> PathBuf {
    path
}

pub fn state_dir() -> PathBuf {
    let strategy = choose_base_strategy().expect("could not determine XDG strategy");
    let mut path = strategy
        .state_dir()
        .expect("state_dir is always Some for default base strategy");
    path.push("helix");
    path
}

pub fn config_file() -> PathBuf {
    CONFIG_FILE.get().map(|path| path.to_path_buf()).unwrap()
}
@@ -152,19 +161,43 @@ pub fn default_log_file() -> PathBuf {
    cache_dir().join("helix.log")
}

// TODO: personal dictionary per language.
pub fn personal_dictionary_file() -> PathBuf {
    state_dir().join("personal-dictionary.txt")
}

/// Merge two TOML documents, merging values from `right` onto `left`
///
/// When an array exists in both `left` and `right`, `right`'s array is
/// used. When a table exists in both `left` and `right`, the merged table
/// consists of all keys in `left`'s table unioned with all keys in `right`
/// with the values of `right` being merged recursively onto values of
/// `left`.
/// `merge_depth` sets the nesting depth up to which values are merged instead
/// of overridden.
///
/// `merge_toplevel_arrays` controls whether a top-level array in the TOML
/// document is merged instead of overridden. This is useful for TOML
/// documents that use a top-level array of values like the `languages.toml`,
/// where one usually wants to override or add to the array instead of
/// replacing it altogether.
/// When a table exists in both `left` and `right`, the merged table consists of
/// all keys in `left`'s table unioned with all keys in `right` with the values
/// of `right` being merged recursively onto values of `left`.
///
/// `crate::merge_toml_values(a, b, 3)` combines, for example:
///
/// b:
/// ```toml
/// [[language]]
/// name = "toml"
/// language-server = { command = "taplo", args = ["lsp", "stdio"] }
/// ```
/// a:
/// ```toml
/// [[language]]
/// language-server = { command = "/usr/bin/taplo" }
/// ```
///
/// into:
/// ```toml
/// [[language]]
/// name = "toml"
/// language-server = { command = "/usr/bin/taplo" }
/// ```
///
/// thus it overrides the third depth-level of b with values of a if they exist,
/// but otherwise merges their values
pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usize) -> toml::Value {
    use toml::Value;

@@ -174,11 +207,6 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi

    match (left, right) {
        (Value::Array(mut left_items), Value::Array(right_items)) => {
            // The top-level arrays should be merged but nested arrays should
            // act as overrides. For the `languages.toml` config, this means
            // that you can specify a sub-set of languages in an overriding
            // `languages.toml` but that nested arrays like Language Server
            // arguments are replaced instead of merged.
            if merge_depth > 0 {
                left_items.reserve(right_items.len());
                for rvalue in right_items {
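A usage sketch exercising `merge_toml_values` exactly as its new doc comment describes; it assumes the `helix-loader` crate (where the function lives) and `toml` are available as dependencies, so treat the call path as illustrative:

```rust
fn main() {
    let base: toml::Value = toml::from_str(
        r#"
        [[language]]
        name = "toml"
        language-server = { command = "taplo", args = ["lsp", "stdio"] }
        "#,
    )
    .unwrap();
    let overlay: toml::Value = toml::from_str(
        r#"
        [[language]]
        language-server = { command = "/usr/bin/taplo" }
        "#,
    )
    .unwrap();
    // Depth 3 merges the `[[language]]` entries themselves while values below
    // that depth, such as the language-server table, are overridden wholesale.
    let merged = helix_loader::merge_toml_values(base, overlay, 3);
    println!("{merged}");
}
```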
@@ -260,7 +260,9 @@ impl Position {

/// A range in a text document expressed as (zero-based) start and end positions.
/// A range is comparable to a selection in an editor. Therefore the end position is exclusive.
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default, Deserialize, Serialize, Hash)]
#[derive(
    Debug, Eq, PartialEq, PartialOrd, Ord, Copy, Clone, Default, Deserialize, Serialize, Hash,
)]
pub struct Range {
    /// The range's start position.
    pub start: Position,
@@ -2568,9 +2570,9 @@ pub enum Documentation {
///
/// The pair of a language and a value is an equivalent to markdown:
///
/// ```${language}
/// <pre><code>```${language}
/// ${value}
/// ```
/// ```</code></pre>
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(untagged)]
pub enum MarkedString {
@@ -25,7 +25,7 @@ globset = "0.4.16"
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1.44", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio = { version = "1.45", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.17"
parking_lot.workspace = true
arc-swap = "1"
@@ -10,7 +10,7 @@ use crate::lsp::{
    DidChangeWorkspaceFoldersParams, OneOf, PositionEncodingKind, SignatureHelp, Url,
    WorkspaceFolder, WorkspaceFoldersChangeEvent,
};
use helix_core::{find_workspace, syntax::LanguageServerFeature, ChangeSet, Rope};
use helix_core::{find_workspace, syntax::config::LanguageServerFeature, ChangeSet, Rope};
use helix_loader::VERSION_AND_GIT_HASH;
use helix_stdx::path;
use parking_lot::Mutex;
@@ -39,7 +39,7 @@ fn workspace_for_uri(uri: lsp::Url) -> WorkspaceFolder {
    lsp::WorkspaceFolder {
        name: uri
            .path_segments()
            .and_then(|segments| segments.last())
            .and_then(|mut segments| segments.next_back())
            .map(|basename| basename.to_string())
            .unwrap_or_default(),
        uri,
@@ -176,6 +176,29 @@ impl Client {
        self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new())
    }

    /// Merge FormattingOptions with 'config.format' and return it
    fn get_merged_formatting_options(
        &self,
        options: lsp::FormattingOptions,
    ) -> lsp::FormattingOptions {
        let config_format = self
            .config
            .as_ref()
            .and_then(|cfg| cfg.get("format"))
            .and_then(|fmt| HashMap::<String, lsp::FormattingProperty>::deserialize(fmt).ok());

        if let Some(mut properties) = config_format {
            // passed in options take precedence over 'config.format'
            properties.extend(options.properties);
            lsp::FormattingOptions {
                properties,
                ..options
            }
        } else {
            options
        }
    }

    #[allow(clippy::type_complexity, clippy::too_many_arguments)]
    pub fn start(
        cmd: &str,
@@ -201,6 +224,7 @@ impl Client {
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .current_dir(&root_path)
            // make sure the process is reaped on drop
            .kill_on_drop(true)
            .spawn();
@@ -356,7 +380,14 @@ impl Client {
                capabilities.inlay_hint_provider,
                Some(OneOf::Left(true) | OneOf::Right(InlayHintServerCapabilities::Options(_)))
            ),
            LanguageServerFeature::DocumentColors => capabilities.color_provider.is_some(),
            LanguageServerFeature::DocumentColors => matches!(
                capabilities.color_provider,
                Some(
                    ColorProviderCapability::Simple(true)
                        | ColorProviderCapability::ColorProvider(_)
                        | ColorProviderCapability::Options(_)
                )
            ),
        }
    }

@@ -1160,23 +1191,7 @@ impl Client {
            _ => return None,
        };

        // merge FormattingOptions with 'config.format'
        let config_format = self
            .config
            .as_ref()
            .and_then(|cfg| cfg.get("format"))
            .and_then(|fmt| HashMap::<String, lsp::FormattingProperty>::deserialize(fmt).ok());

        let options = if let Some(mut properties) = config_format {
            // passed in options take precedence over 'config.format'
            properties.extend(options.properties);
            lsp::FormattingOptions {
                properties,
                ..options
            }
        } else {
            options
        };
        let options = self.get_merged_formatting_options(options);

        let params = lsp::DocumentFormattingParams {
            text_document,
@@ -1202,6 +1217,8 @@ impl Client {
            _ => return None,
        };

        let options = self.get_merged_formatting_options(options);

        let params = lsp::DocumentRangeFormattingParams {
            text_document,
            range,
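The precedence rule inside `get_merged_formatting_options` is just `HashMap::extend`: the `config.format` entries form the base map and the explicitly passed options overwrite matching keys. A standalone sketch with `String` values standing in for `lsp::FormattingProperty`:

```rust
use std::collections::HashMap;

fn merge(
    mut config_format: HashMap<String, String>,
    passed: HashMap<String, String>,
) -> HashMap<String, String> {
    // Passed-in options take precedence over 'config.format'.
    config_format.extend(passed);
    config_format
}

fn main() {
    let config = HashMap::from([
        ("tabSize".to_string(), "2".to_string()),
        ("insertSpaces".to_string(), "true".to_string()),
    ]);
    let passed = HashMap::from([("tabSize".to_string(), "4".to_string())]);
    let merged = merge(config, passed);
    assert_eq!(merged["tabSize"], "4"); // the explicit option wins
    assert_eq!(merged["insertSpaces"], "true"); // the config-only value is kept
}
```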
@@ -12,7 +12,7 @@ pub use jsonrpc::Call;
pub use lsp::{Position, Url};

use futures_util::stream::select_all::SelectAll;
use helix_core::syntax::{
use helix_core::syntax::config::{
    LanguageConfiguration, LanguageServerConfiguration, LanguageServerFeatures,
};
use helix_stdx::path;
@@ -52,7 +52,7 @@ pub enum Error {
    Other(#[from] anyhow::Error),
}

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
pub enum OffsetEncoding {
    /// UTF-8 code units aka bytes
    Utf8,
@@ -68,63 +68,7 @@ pub mod util {
    use helix_core::line_ending::{line_end_byte_index, line_end_char_index};
    use helix_core::snippets::{RenderedSnippet, Snippet, SnippetRenderCtx};
    use helix_core::{chars, RopeSlice};
    use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction};

    /// Converts a diagnostic in the document to [`lsp::Diagnostic`].
    ///
    /// Panics when [`pos_to_lsp_pos`] would panic for an invalid range on the diagnostic.
    pub fn diagnostic_to_lsp_diagnostic(
        doc: &Rope,
        diag: &helix_core::diagnostic::Diagnostic,
        offset_encoding: OffsetEncoding,
    ) -> lsp::Diagnostic {
        use helix_core::diagnostic::Severity::*;

        let range = Range::new(diag.range.start, diag.range.end);
        let severity = diag.severity.map(|s| match s {
            Hint => lsp::DiagnosticSeverity::HINT,
            Info => lsp::DiagnosticSeverity::INFORMATION,
            Warning => lsp::DiagnosticSeverity::WARNING,
            Error => lsp::DiagnosticSeverity::ERROR,
        });

        let code = match diag.code.clone() {
            Some(x) => match x {
                NumberOrString::Number(x) => Some(lsp::NumberOrString::Number(x)),
                NumberOrString::String(x) => Some(lsp::NumberOrString::String(x)),
            },
            None => None,
        };

        let new_tags: Vec<_> = diag
            .tags
            .iter()
            .map(|tag| match tag {
                helix_core::diagnostic::DiagnosticTag::Unnecessary => {
                    lsp::DiagnosticTag::UNNECESSARY
                }
                helix_core::diagnostic::DiagnosticTag::Deprecated => lsp::DiagnosticTag::DEPRECATED,
            })
            .collect();

        let tags = if !new_tags.is_empty() {
            Some(new_tags)
        } else {
            None
        };

        lsp::Diagnostic {
            range: range_to_lsp_range(doc, range, offset_encoding),
            severity,
            code,
            source: diag.source.clone(),
            message: diag.message.to_owned(),
            related_information: None,
            tags,
            data: diag.data.to_owned(),
            ..Default::default()
        }
    }
    use helix_core::{Range, Rope, Selection, Tendril, Transaction};

    /// Converts [`lsp::Position`] to a position in the document.
    ///
@@ -733,14 +677,17 @@ impl Registry {
#[derive(Debug)]
pub enum ProgressStatus {
    Created,
    Started(lsp::WorkDoneProgress),
    Started {
        title: String,
        progress: lsp::WorkDoneProgress,
    },
}

impl ProgressStatus {
    pub fn progress(&self) -> Option<&lsp::WorkDoneProgress> {
        match &self {
            ProgressStatus::Created => None,
            ProgressStatus::Started(progress) => Some(progress),
            ProgressStatus::Started { title: _, progress } => Some(progress),
        }
    }
}
@@ -777,6 +724,13 @@ impl LspProgressMap {
        self.0.get(&id).and_then(|values| values.get(token))
    }

    pub fn title(&self, id: LanguageServerId, token: &lsp::ProgressToken) -> Option<&String> {
        self.progress(id, token).and_then(|p| match p {
            ProgressStatus::Created => None,
            ProgressStatus::Started { title, .. } => Some(title),
        })
    }

    /// Checks if progress `token` for server with `id` is created.
    pub fn is_created(&mut self, id: LanguageServerId, token: &lsp::ProgressToken) -> bool {
        self.0
@@ -801,17 +755,39 @@ impl LspProgressMap {
        self.0.get_mut(&id).and_then(|vals| vals.remove(token))
    }

    /// Updates the progress of `token` for server with `id` to `status`, returns the value replaced or `None`.
    /// Updates the progress of `token` for server with `id` to begin state `status`
    pub fn begin(
        &mut self,
        id: LanguageServerId,
        token: lsp::ProgressToken,
        status: lsp::WorkDoneProgressBegin,
    ) {
        self.0.entry(id).or_default().insert(
            token,
            ProgressStatus::Started {
                title: status.title.clone(),
                progress: lsp::WorkDoneProgress::Begin(status),
            },
        );
    }

    /// Updates the progress of `token` for server with `id` to report state `status`.
    pub fn update(
        &mut self,
        id: LanguageServerId,
        token: lsp::ProgressToken,
        status: lsp::WorkDoneProgress,
    ) -> Option<ProgressStatus> {
        status: lsp::WorkDoneProgressReport,
    ) {
        self.0
            .entry(id)
            .or_default()
            .insert(token, ProgressStatus::Started(status))
            .entry(token)
            .and_modify(|e| match e {
                ProgressStatus::Created => (),
                ProgressStatus::Started { progress, .. } => {
                    *progress = lsp::WorkDoneProgress::Report(status)
                }
            });
    }
}

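The `begin`/`update` split above means the title from the `Begin` notification is recorded once and survives every subsequent `Report`. A standalone sketch of that flow with simplified stand-in types (a `u32` token and `String` payloads in place of the LSP types):

```rust
use std::collections::HashMap;

#[derive(Debug)]
enum ProgressStatus {
    Created,
    Started { title: String, payload: String },
}

#[derive(Default)]
struct ProgressMap(HashMap<u32, ProgressStatus>);

impl ProgressMap {
    fn begin(&mut self, token: u32, title: &str, payload: &str) {
        // The begin notification is the only place the title is stored.
        self.0.insert(
            token,
            ProgressStatus::Started { title: title.into(), payload: payload.into() },
        );
    }
    fn update(&mut self, token: u32, new_payload: &str) {
        // Report updates only swap the payload, mirroring `and_modify` above.
        if let Some(ProgressStatus::Started { payload, .. }) = self.0.get_mut(&token) {
            *payload = new_payload.into();
        }
    }
    fn title(&self, token: u32) -> Option<&str> {
        match self.0.get(&token)? {
            ProgressStatus::Created => None,
            ProgressStatus::Started { title, .. } => Some(title),
        }
    }
}

fn main() {
    let mut map = ProgressMap::default();
    map.begin(1, "indexing", "0%");
    map.update(1, "50%");
    assert_eq!(map.title(1), Some("indexing")); // the title survives report updates
}
```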
@@ -90,11 +90,12 @@ impl Transport {
    async fn recv_server_message(
        reader: &mut (impl AsyncBufRead + Unpin + Send),
        buffer: &mut String,
        content: &mut Vec<u8>,
        language_server_name: &str,
    ) -> Result<ServerMessage> {
        let mut content_length = None;
        loop {
            buffer.truncate(0);
            buffer.clear();
            if reader.read_line(buffer).await? == 0 {
                return Err(Error::StreamClosed);
            };
@@ -126,17 +127,17 @@ impl Transport {
        }

        let content_length = content_length.context("missing content length")?;

        //TODO: reuse vector
        let mut content = vec![0; content_length];
        reader.read_exact(&mut content).await?;
        let msg = std::str::from_utf8(&content).context("invalid utf8 from server")?;
        content.resize(content_length, 0);
        reader.read_exact(content).await?;
        let msg = std::str::from_utf8(content).context("invalid utf8 from server")?;

        info!("{language_server_name} <- {msg}");

        // try parsing as output (server response) or call (server request)
        let output: serde_json::Result<ServerMessage> = serde_json::from_str(msg);

        content.clear();

        Ok(output?)
    }

@@ -255,9 +256,15 @@ impl Transport {
        client_tx: UnboundedSender<(LanguageServerId, jsonrpc::Call)>,
    ) {
        let mut recv_buffer = String::new();
        let mut content_buffer = Vec::new();
        loop {
            match Self::recv_server_message(&mut server_stdout, &mut recv_buffer, &transport.name)
                .await
            match Self::recv_server_message(
                &mut server_stdout,
                &mut recv_buffer,
                &mut content_buffer,
                &transport.name,
            )
            .await
            {
                Ok(msg) => {
                    match transport
@@ -15,7 +15,7 @@ homepage.workspace = true
dunce = "1.0"
etcetera = "0.10"
ropey.workspace = true
which = "7.0"
which = "8.0"
regex-cursor = "0.1.5"
bitflags.workspace = true
once_cell = "1.21"
@@ -23,7 +23,7 @@ regex-automata = "0.4.9"
unicode-segmentation.workspace = true

[target.'cfg(windows)'.dependencies]
windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] }
windows-sys = { version = "0.60", features = ["Win32_Foundation", "Win32_Security", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Threading"] }

[target.'cfg(unix)'.dependencies]
rustix = { version = "1.0", features = ["fs"] }
@@ -135,7 +135,9 @@ pub trait RopeSliceExt<'a>: Sized {
    /// let graphemes: Vec<_> = text.graphemes().collect();
    /// assert_eq!(graphemes.as_slice(), &["😶‍🌫️", "🏴‍☠️", "🖼️"]);
    /// ```
    fn graphemes(self) -> RopeGraphemes<'a>;
    fn graphemes(self) -> RopeGraphemes<'a> {
        self.graphemes_at(0)
    }
    /// Returns an iterator over the grapheme clusters in the slice, reversed.
    ///
    /// The returned iterator starts at the end of the slice and ends at the beginning of the
@@ -150,7 +152,127 @@ pub trait RopeSliceExt<'a>: Sized {
    /// let graphemes: Vec<_> = text.graphemes_rev().collect();
    /// assert_eq!(graphemes.as_slice(), &["🖼️", "🏴‍☠️", "😶‍🌫️"]);
    /// ```
    fn graphemes_rev(self) -> RevRopeGraphemes<'a>;
    fn graphemes_rev(self) -> RopeGraphemes<'a>;
    /// Returns an iterator over the grapheme clusters in the slice at the given byte index.
    ///
    /// # Example
    ///
    /// ```
    /// # use ropey::Rope;
    /// # use helix_stdx::rope::RopeSliceExt;
    /// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
    /// // 14 is the byte index of the pirate flag's starting cluster boundary.
    /// let graphemes: Vec<_> = text.slice(..).graphemes_at(14).collect();
    /// assert_eq!(graphemes.as_slice(), &["🏴‍☠️", "🖼️"]);
    /// // 27 is the byte index of the pirate flag's ending cluster boundary.
    /// let graphemes: Vec<_> = text.slice(..).graphemes_at(27).reversed().collect();
    /// assert_eq!(graphemes.as_slice(), &["🏴‍☠️", "😶‍🌫️"]);
    /// ```
    fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a>;
    /// Returns an iterator over the grapheme clusters in a rope and the byte index where each
    /// grapheme cluster starts.
    ///
    /// # Example
    ///
    /// ```
    /// # use ropey::Rope;
    /// # use helix_stdx::rope::RopeSliceExt;
    /// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
    /// let slice = text.slice(..);
    /// let graphemes: Vec<_> = slice.grapheme_indices_at(0).collect();
    /// assert_eq!(
    ///     graphemes.as_slice(),
    ///     &[(0, "😶‍🌫️".into()), (14, "🏴‍☠️".into()), (27, "🖼️".into())]
    /// );
    /// let graphemes: Vec<_> = slice.grapheme_indices_at(slice.len_bytes()).reversed().collect();
    /// assert_eq!(
    ///     graphemes.as_slice(),
    ///     &[(27, "🖼️".into()), (14, "🏴‍☠️".into()), (0, "😶‍🌫️".into())]
    /// );
    /// ```
    fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a>;
    /// Finds the byte index of the next grapheme boundary after `byte_idx`.
    ///
    /// If the byte index lies on the last grapheme cluster in the slice then this function
    /// returns `RopeSlice::len_bytes`.
    ///
    /// # Example
    ///
    /// ```
    /// # use ropey::Rope;
    /// # use helix_stdx::rope::RopeSliceExt;
    /// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
    /// let slice = text.slice(..);
    /// let mut byte_idx = 0;
    /// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("😶‍🌫️".into()));
    /// byte_idx = slice.next_grapheme_boundary(byte_idx);
    /// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🏴‍☠️".into()));
    ///
    /// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
    /// // functionally the same as `ceil_grapheme_boundary`.
    /// assert_eq!(slice.next_grapheme_boundary(byte_idx - 1), byte_idx);
    /// assert_eq!(slice.next_grapheme_boundary(byte_idx - 2), byte_idx);
    /// assert_eq!(slice.next_grapheme_boundary(byte_idx + 1), slice.next_grapheme_boundary(byte_idx));
    /// assert_eq!(slice.next_grapheme_boundary(byte_idx + 2), slice.next_grapheme_boundary(byte_idx));
    ///
    /// byte_idx = slice.next_grapheme_boundary(byte_idx);
    /// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🖼️".into()));
    /// byte_idx = slice.next_grapheme_boundary(byte_idx);
    /// assert_eq!(slice.graphemes_at(byte_idx).next(), None);
    /// assert_eq!(byte_idx, slice.len_bytes());
    /// ```
    fn next_grapheme_boundary(self, byte_idx: usize) -> usize {
        self.nth_next_grapheme_boundary(byte_idx, 1)
    }
    /// Finds the byte index of the `n`th grapheme cluster after the given `byte_idx`.
    ///
    /// If there are fewer than `n` grapheme clusters after `byte_idx` in the rope then this
    /// function returns `RopeSlice::len_bytes`.
    ///
    /// This is functionally equivalent to calling `next_grapheme_boundary` `n` times but is more
    /// efficient.
    fn nth_next_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
    /// Finds the byte index of the previous grapheme boundary before `byte_idx`.
    ///
    /// If the byte index lies on the first grapheme cluster in the slice then this function
    /// returns zero.
    ///
    /// # Example
    ///
    /// ```
    /// # use ropey::Rope;
    /// # use helix_stdx::rope::RopeSliceExt;
    /// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
    /// let slice = text.slice(..);
    /// let mut byte_idx = text.len_bytes();
    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🖼️".into()));
    /// byte_idx = slice.prev_grapheme_boundary(byte_idx);
    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🏴‍☠️".into()));
    ///
    /// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
    /// // functionally the same as `floor_grapheme_boundary`.
    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 1), byte_idx);
    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 2), byte_idx);
    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 1), slice.prev_grapheme_boundary(byte_idx));
    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 2), slice.prev_grapheme_boundary(byte_idx));
    ///
    /// byte_idx = slice.prev_grapheme_boundary(byte_idx);
    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("😶‍🌫️".into()));
    /// byte_idx = slice.prev_grapheme_boundary(byte_idx);
    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), None);
    /// assert_eq!(byte_idx, 0);
    /// ```
    fn prev_grapheme_boundary(self, byte_idx: usize) -> usize {
        self.nth_prev_grapheme_boundary(byte_idx, 1)
    }
    /// Finds the byte index of the `n`th grapheme cluster before the given `byte_idx`.
    ///
    /// If there are fewer than `n` grapheme clusters before `byte_idx` in the rope then this
    /// function returns zero.
    ///
    /// This is functionally equivalent to calling `prev_grapheme_boundary` `n` times but is more
    /// efficient.
    fn nth_prev_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
}

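The contract `nth_next_grapheme_boundary` promises can be demonstrated on a plain `&str` with the same `unicode-segmentation` cursor the rope implementation wraps. A standalone sketch, assuming the `unicode-segmentation` crate; the free function is a simplified stand-in that treats the whole string as one chunk:

```rust
use unicode_segmentation::GraphemeCursor;

fn nth_next_boundary(text: &str, mut byte_idx: usize, n: usize) -> usize {
    let mut cursor = GraphemeCursor::new(byte_idx, text.len(), true);
    for _ in 0..n {
        match cursor.next_boundary(text, 0) {
            Ok(Some(boundary)) => byte_idx = boundary,
            // Fewer than `n` clusters remain: clamp to the end of the text.
            _ => return text.len(),
        }
    }
    byte_idx
}

fn main() {
    let text = "a\u{0301}bc"; // 'a' plus a combining accent is one cluster (3 bytes)
    assert_eq!(nth_next_boundary(text, 0, 1), 3); // past "á"
    assert_eq!(nth_next_boundary(text, 0, 5), 5); // clamped at len_bytes
}
```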
impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
@@ -335,31 +457,111 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
        }
    }

    fn graphemes(self) -> RopeGraphemes<'a> {
        let mut chunks = self.chunks();
        let first_chunk = chunks.next().unwrap_or("");
    fn graphemes_rev(self) -> RopeGraphemes<'a> {
        self.graphemes_at(self.len_bytes()).reversed()
    }

    fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a> {
        // Bounds check
        assert!(byte_idx <= self.len_bytes());

        let (mut chunks, chunk_byte_idx, _, _) = self.chunks_at_byte(byte_idx);
        let current_chunk = chunks.next().unwrap_or("");

        RopeGraphemes {
            text: self,
            chunks,
            cur_chunk: first_chunk,
            cur_chunk_start: 0,
            cursor: GraphemeCursor::new(0, self.len_bytes(), true),
            current_chunk,
            chunk_byte_idx,
            cursor: GraphemeCursor::new(byte_idx, self.len_bytes(), true),
            is_reversed: false,
        }
    }

    fn graphemes_rev(self) -> RevRopeGraphemes<'a> {
        let (mut chunks, mut cur_chunk_start, _, _) = self.chunks_at_byte(self.len_bytes());
        chunks.reverse();
        let first_chunk = chunks.next().unwrap_or("");
        cur_chunk_start -= first_chunk.len();
        RevRopeGraphemes {
            text: self,
            chunks,
            cur_chunk: first_chunk,
            cur_chunk_start,
            cursor: GraphemeCursor::new(self.len_bytes(), self.len_bytes(), true),
    fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a> {
        // Bounds check
        assert!(byte_idx <= self.len_bytes());
        RopeGraphemeIndices {
            front_offset: byte_idx,
            iter: self.graphemes_at(byte_idx),
            is_reversed: false,
        }
    }

    fn nth_next_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
        // Bounds check
        assert!(byte_idx <= self.len_bytes());

        byte_idx = self.floor_char_boundary(byte_idx);

        // Get the chunk with our byte index in it.
        let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);

        // Set up the grapheme cursor.
        let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);

        // Find the nth next grapheme cluster boundary.
        for _ in 0..n {
            loop {
                match gc.next_boundary(chunk, chunk_byte_idx) {
                    Ok(None) => return self.len_bytes(),
                    Ok(Some(boundary)) => {
                        byte_idx = boundary;
                        break;
                    }
                    Err(GraphemeIncomplete::NextChunk) => {
                        chunk_byte_idx += chunk.len();
                        let (a, _, _, _) = self.chunk_at_byte(chunk_byte_idx);
                        chunk = a;
                    }
                    Err(GraphemeIncomplete::PreContext(n)) => {
                        let ctx_chunk = self.chunk_at_byte(n - 1).0;
                        gc.provide_context(ctx_chunk, n - ctx_chunk.len());
                    }
                    _ => unreachable!(),
                }
            }
        }

        byte_idx
    }

    fn nth_prev_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
        // Bounds check
        assert!(byte_idx <= self.len_bytes());

        byte_idx = self.ceil_char_boundary(byte_idx);

        // Get the chunk with our byte index in it.
        let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);

        // Set up the grapheme cursor.
        let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);

        for _ in 0..n {
            loop {
                match gc.prev_boundary(chunk, chunk_byte_idx) {
                    Ok(None) => return 0,
                    Ok(Some(boundary)) => {
                        byte_idx = boundary;
                        break;
                    }
                    Err(GraphemeIncomplete::PrevChunk) => {
                        let (a, b, _, _) = self.chunk_at_byte(chunk_byte_idx - 1);
                        chunk = a;
                        chunk_byte_idx = b;
                    }
                    Err(GraphemeIncomplete::PreContext(n)) => {
                        let ctx_chunk = self.chunk_at_byte(n - 1).0;
                        gc.provide_context(ctx_chunk, n - ctx_chunk.len());
                    }
                    _ => unreachable!(),
                }
            }
        }

        byte_idx
    }
}

// copied from std
@@ -370,13 +572,19 @@ const fn is_utf8_char_boundary(b: u8) -> bool {
}

/// An iterator over the graphemes of a `RopeSlice`.
///
/// This iterator is cursor-like: rather than implementing DoubleEndedIterator it can be reversed
/// like a cursor. This style matches `Bytes` and `Chars` iterator types in Ropey and is more
/// natural and useful for wrapping `GraphemeCursor`.
#[derive(Clone)]
pub struct RopeGraphemes<'a> {
    text: RopeSlice<'a>,
    chunks: Chunks<'a>,
    cur_chunk: &'a str,
    cur_chunk_start: usize,
    current_chunk: &'a str,
    /// Byte index of the start of the current chunk.
    chunk_byte_idx: usize,
    cursor: GraphemeCursor,
    is_reversed: bool,
}

impl fmt::Debug for RopeGraphemes<'_> {
@@ -384,112 +592,178 @@ impl fmt::Debug for RopeGraphemes<'_> {
|
||||
f.debug_struct("RopeGraphemes")
|
||||
.field("text", &self.text)
|
||||
.field("chunks", &self.chunks)
|
||||
.field("cur_chunk", &self.cur_chunk)
|
||||
.field("cur_chunk_start", &self.cur_chunk_start)
|
||||
.field("current_chunk", &self.current_chunk)
|
||||
.field("chunk_byte_idx", &self.chunk_byte_idx)
|
||||
// .field("cursor", &self.cursor)
|
||||
.field("is_reversed", &self.is_reversed)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RopeGraphemes<'a> {
|
||||
#[allow(clippy::should_implement_trait)]
|
||||
pub fn next(&mut self) -> Option<RopeSlice<'a>> {
|
||||
if self.is_reversed {
|
||||
self.prev_impl()
|
||||
} else {
|
||||
self.next_impl()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prev(&mut self) -> Option<RopeSlice<'a>> {
|
||||
if self.is_reversed {
|
||||
self.next_impl()
|
||||
} else {
|
||||
self.prev_impl()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reverse(&mut self) {
|
||||
self.is_reversed = !self.is_reversed;
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn reversed(mut self) -> Self {
|
||||
self.reverse();
|
||||
self
|
||||
}
|
||||
|
||||
fn next_impl(&mut self) -> Option<RopeSlice<'a>> {
|
||||
let a = self.cursor.cur_cursor();
|
||||
let b;
|
||||
loop {
|
||||
match self
|
||||
.cursor
|
||||
.next_boundary(self.current_chunk, self.chunk_byte_idx)
|
||||
{
|
||||
Ok(None) => return None,
|
||||
Ok(Some(boundary)) => {
|
||||
b = boundary;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::NextChunk) => {
|
||||
self.chunk_byte_idx += self.current_chunk.len();
|
||||
self.current_chunk = self.chunks.next().unwrap_or("");
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(idx)) => {
|
||||
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
|
||||
self.cursor.provide_context(chunk, byte_idx);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
if a < self.chunk_byte_idx {
|
||||
Some(self.text.byte_slice(a..b))
|
||||
} else {
|
||||
let a2 = a - self.chunk_byte_idx;
|
||||
let b2 = b - self.chunk_byte_idx;
|
||||
Some((&self.current_chunk[a2..b2]).into())
|
||||
}
|
||||
}
|
||||
|
||||
fn prev_impl(&mut self) -> Option<RopeSlice<'a>> {
|
||||
let a = self.cursor.cur_cursor();
|
||||
let b;
|
||||
loop {
|
||||
match self
|
||||
.cursor
|
||||
.prev_boundary(self.current_chunk, self.chunk_byte_idx)
|
||||
{
|
||||
Ok(None) => return None,
|
||||
Ok(Some(boundary)) => {
|
||||
b = boundary;
|
||||
break;
|
||||
}
|
||||
Err(GraphemeIncomplete::PrevChunk) => {
|
||||
self.current_chunk = self.chunks.prev().unwrap_or("");
|
||||
self.chunk_byte_idx -= self.current_chunk.len();
|
||||
}
|
||||
Err(GraphemeIncomplete::PreContext(idx)) => {
|
||||
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
|
||||
self.cursor.provide_context(chunk, byte_idx);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
if a >= self.chunk_byte_idx + self.current_chunk.len() {
|
||||
Some(self.text.byte_slice(b..a))
|
||||
} else {
|
||||
let a2 = a - self.chunk_byte_idx;
|
||||
let b2 = b - self.chunk_byte_idx;
|
||||
Some((&self.current_chunk[b2..a2]).into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
 impl<'a> Iterator for RopeGraphemes<'a> {
     type Item = RopeSlice<'a>;

     fn next(&mut self) -> Option<Self::Item> {
-        let a = self.cursor.cur_cursor();
-        let b;
-        loop {
-            match self
-                .cursor
-                .next_boundary(self.cur_chunk, self.cur_chunk_start)
-            {
-                Ok(None) => {
-                    return None;
-                }
-                Ok(Some(n)) => {
-                    b = n;
-                    break;
-                }
-                Err(GraphemeIncomplete::NextChunk) => {
-                    self.cur_chunk_start += self.cur_chunk.len();
-                    self.cur_chunk = self.chunks.next().unwrap_or("");
-                }
-                Err(GraphemeIncomplete::PreContext(idx)) => {
-                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
-                    self.cursor.provide_context(chunk, byte_idx);
-                }
-                _ => unreachable!(),
-            }
-        }
-
-        if a < self.cur_chunk_start {
-            Some(self.text.byte_slice(a..b))
-        } else {
-            let a2 = a - self.cur_chunk_start;
-            let b2 = b - self.cur_chunk_start;
-            Some((&self.cur_chunk[a2..b2]).into())
-        }
+        RopeGraphemes::next(self)
     }
 }

+/// An iterator over the grapheme clusters in a rope and the byte indices where each grapheme
+/// cluster starts.
+///
+/// This iterator wraps `RopeGraphemes` and is also cursor-like. Use `reverse` or `reversed` to
+/// toggle the direction of the iterator. See [RopeGraphemes].
+#[derive(Debug, Clone)]
+pub struct RopeGraphemeIndices<'a> {
+    front_offset: usize,
+    iter: RopeGraphemes<'a>,
+    is_reversed: bool,
+}
+
+impl<'a> RopeGraphemeIndices<'a> {
+    #[allow(clippy::should_implement_trait)]
+    pub fn next(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        if self.is_reversed {
+            self.prev_impl()
+        } else {
+            self.next_impl()
+        }
+    }
+
+    pub fn prev(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        if self.is_reversed {
+            self.next_impl()
+        } else {
+            self.prev_impl()
+        }
+    }
+
+    pub fn reverse(&mut self) {
+        self.is_reversed = !self.is_reversed;
+    }
+
+    #[must_use]
+    pub fn reversed(mut self) -> Self {
+        self.reverse();
+        self
+    }
+
+    fn next_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        let slice = self.iter.next()?;
+        let idx = self.front_offset;
+        self.front_offset += slice.len_bytes();
+        Some((idx, slice))
+    }
+
+    fn prev_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        let slice = self.iter.prev()?;
+        self.front_offset -= slice.len_bytes();
+        Some((self.front_offset, slice))
+    }
+}
+
-/// An iterator over the graphemes of a `RopeSlice` in reverse.
-#[derive(Clone)]
-pub struct RevRopeGraphemes<'a> {
-    text: RopeSlice<'a>,
-    chunks: Chunks<'a>,
-    cur_chunk: &'a str,
-    cur_chunk_start: usize,
-    cursor: GraphemeCursor,
-}
-
-impl fmt::Debug for RevRopeGraphemes<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("RevRopeGraphemes")
-            .field("text", &self.text)
-            .field("chunks", &self.chunks)
-            .field("cur_chunk", &self.cur_chunk)
-            .field("cur_chunk_start", &self.cur_chunk_start)
-            // .field("cursor", &self.cursor)
-            .finish()
-    }
-}
-
-impl<'a> Iterator for RevRopeGraphemes<'a> {
-    type Item = RopeSlice<'a>;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let a = self.cursor.cur_cursor();
-        let b;
-        loop {
-            match self
-                .cursor
-                .prev_boundary(self.cur_chunk, self.cur_chunk_start)
-            {
-                Ok(None) => {
-                    return None;
-                }
-                Ok(Some(n)) => {
-                    b = n;
-                    break;
-                }
-                Err(GraphemeIncomplete::PrevChunk) => {
-                    self.cur_chunk = self.chunks.next().unwrap_or("");
-                    self.cur_chunk_start -= self.cur_chunk.len();
-                }
-                Err(GraphemeIncomplete::PreContext(idx)) => {
-                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
-                    self.cursor.provide_context(chunk, byte_idx);
-                }
-                _ => unreachable!(),
-            }
-        }
-
-        if a >= self.cur_chunk_start + self.cur_chunk.len() {
-            Some(self.text.byte_slice(b..a))
-        } else {
-            let a2 = a - self.cur_chunk_start;
-            let b2 = b - self.cur_chunk_start;
-            Some((&self.cur_chunk[b2..a2]).into())
-        }
-    }
-}
+impl<'a> Iterator for RopeGraphemeIndices<'a> {
+    type Item = (usize, RopeSlice<'a>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        RopeGraphemeIndices::next(self)
+    }
+}

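The cursor-like API is easiest to see in a short example. The sketch below is illustrative only: `graphemes()` and `graphemes_rev()` stand in for whatever constructors the crate actually exposes (they are not part of this excerpt), while `next`/`reversed` are the inherent methods from the listing above.

use ropey::Rope;

fn main() {
    let text = Rope::from_str("he\u{0301}y"); // "héy", with a combining accent
    let slice = text.slice(..);

    // Forward: "e\u{0301}" comes out as one grapheme cluster, not two chars.
    let mut iter = slice.graphemes(); // assumed constructor
    while let Some(g) = iter.next() {
        print!("[{g}]"); // [h][é][y]
    }

    // Backward: same iterator type with the direction toggled; no re-scan.
    let mut rev = slice.graphemes_rev(); // assumed constructor
    assert_eq!(rev.next().map(String::from), Some("y".to_string()));
}
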
@@ -1,10 +1,6 @@
 #![cfg(windows)]

-use std::{
-    env::set_current_dir,
-    error::Error,
-    path::{Component, Path, PathBuf},
-};
+use std::{env::set_current_dir, error::Error, path::Component};

 use helix_stdx::path;
 use tempfile::Builder;

@@ -61,7 +61,7 @@ tokio-stream = "0.1"
 futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
 arc-swap = { version = "1.7.1" }
 termini = "1"
-indexmap = "2.8"
+indexmap = "2.9"

 # Logging
 fern = "0.7"

@@ -91,9 +91,11 @@ serde = { version = "1.0", features = ["derive"] }
 grep-regex = "0.1.13"
 grep-searcher = "0.1.14"

+parking_lot.workspace = true
+
 [target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
-libc = "0.2.171"
+libc = "0.2.173"

 [target.'cfg(target_os = "macos")'.dependencies]
 crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc"] }

@@ -102,7 +104,7 @@ crossterm = { version = "0.28", features = ["event-stream", "use-dev-tty", "libc
 helix-loader = { path = "../helix-loader" }

 [dev-dependencies]
-smallvec = "1.14"
+smallvec = "1.15"
 indoc = "2.0.6"
 tempfile.workspace = true
 same-file = "1.0.1"

@@ -384,31 +384,30 @@ impl Application {
         }
     }

-    /// refresh language config after config change
-    fn refresh_language_config(&mut self) -> Result<(), Error> {
-        let lang_loader = helix_core::config::user_lang_loader()?;
-
-        self.editor.syn_loader.store(Arc::new(lang_loader));
-        for document in self.editor.documents.values_mut() {
-            document.detect_language(self.editor.syn_loader.clone());
-            let diagnostics = Editor::doc_diagnostics(
-                &self.editor.language_servers,
-                &self.editor.diagnostics,
-                document,
-            );
-            document.replace_diagnostics(diagnostics, &[], None);
-        }
-
-        Ok(())
-    }
-
     fn refresh_config(&mut self) {
         let mut refresh_config = || -> Result<(), Error> {
             let default_config = Config::load_default()
                 .map_err(|err| anyhow::anyhow!("Failed to load config: {}", err))?;
-            self.refresh_language_config()?;
-            // Refresh theme after config change
+
+            // Update the syntax language loader before setting the theme. Setting the theme will
+            // call `Loader::set_scopes` which must be done before the documents are re-parsed for
+            // the sake of locals highlighting.
+            let lang_loader = helix_core::config::user_lang_loader()?;
+            self.editor.syn_loader.store(Arc::new(lang_loader));
             Self::load_configured_theme(&mut self.editor, &default_config);
+
+            // Re-parse any open documents with the new language config.
+            let lang_loader = self.editor.syn_loader.load();
+            for document in self.editor.documents.values_mut() {
+                document.detect_language(&lang_loader);
+                let diagnostics = Editor::doc_diagnostics(
+                    &self.editor.language_servers,
+                    &self.editor.diagnostics,
+                    document,
+                );
+                document.replace_diagnostics(diagnostics, &[], None);
+            }
+
             self.terminal
                 .reconfigure(default_config.editor.clone().into())?;
             // Store new config

@@ -570,16 +569,24 @@
                 doc.set_last_saved_revision(doc_save_event.revision, doc_save_event.save_time);

                 let lines = doc_save_event.text.len_lines();
-                let bytes = doc_save_event.text.len_bytes();
+                let mut sz = doc_save_event.text.len_bytes() as f32;
+
+                const SUFFIX: [&str; 4] = ["B", "KiB", "MiB", "GiB"];
+                let mut i = 0;
+                while i < SUFFIX.len() - 1 && sz >= 1024.0 {
+                    sz /= 1024.0;
+                    i += 1;
+                }

                 self.editor
                     .set_doc_path(doc_save_event.doc_id, &doc_save_event.path);
                 // TODO: fix being overwritten by lsp
                 self.editor.set_status(format!(
-                    "'{}' written, {}L {}B",
+                    "'{}' written, {}L {:.1}{}",
                     get_relative_path(&doc_save_event.path).to_string_lossy(),
                     lines,
-                    bytes
+                    sz,
+                    SUFFIX[i],
                 ));
             }

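Extracted into a standalone function, the scaling loop above behaves as follows. This is a minimal sketch; the function name and the `main` harness are mine, not part of the change:

fn human_readable(bytes: usize) -> String {
    const SUFFIX: [&str; 4] = ["B", "KiB", "MiB", "GiB"];
    let mut sz = bytes as f32;
    let mut i = 0;
    // Divide by 1024 until the value fits the next unit, or we run out of units.
    while i < SUFFIX.len() - 1 && sz >= 1024.0 {
        sz /= 1024.0;
        i += 1;
    }
    format!("{sz:.1}{}", SUFFIX[i])
}

fn main() {
    assert_eq!(human_readable(512), "512.0B");
    assert_eq!(human_readable(2048), "2.0KiB");
    assert_eq!(human_readable(3 * 1024 * 1024), "3.0MiB");
}
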
@@ -722,16 +729,23 @@ impl Application {
                         log::error!("Discarding publishDiagnostic notification sent by an uninitialized server: {}", language_server.name());
                         return;
                     }
-                    let provider = helix_core::diagnostic::DiagnosticProvider::Lsp {
+                    let provider = helix_view::diagnostic::DiagnosticProvider::Lsp {
                         server_id,
                         identifier: None,
                     };
-                    self.editor.handle_lsp_diagnostics(
-                        &provider,
-                        uri,
-                        params.version,
-                        params.diagnostics,
-                    );
+                    let diagnostics = params
+                        .diagnostics
+                        .into_iter()
+                        .map(|diagnostic| {
+                            helix_view::Diagnostic::lsp(
+                                provider.clone(),
+                                language_server.offset_encoding(),
+                                diagnostic,
+                            )
+                        })
+                        .collect();
+                    self.editor
+                        .handle_diagnostics(&provider, uri, params.version, diagnostics);
                 }
                 Notification::ShowMessage(params) => {
                     if self.config.load().editor.lsp.display_messages {

@@ -756,10 +770,11 @@ impl Application {
                     .compositor
                     .find::<ui::EditorView>()
                     .expect("expected at least one EditorView");
-                let lsp::ProgressParams { token, value } = params;
-
-                let lsp::ProgressParamsValue::WorkDone(work) = value;
-                let parts = match &work {
+                let lsp::ProgressParams {
+                    token,
+                    value: lsp::ProgressParamsValue::WorkDone(work),
+                } = params;
+                let (title, message, percentage) = match &work {
                     lsp::WorkDoneProgress::Begin(lsp::WorkDoneProgressBegin {
                         title,
                         message,

@@ -787,47 +802,43 @@ impl Application {
                     }
                 };

-                let token_d: &dyn std::fmt::Display = match &token {
-                    lsp::NumberOrString::Number(n) => n,
-                    lsp::NumberOrString::String(s) => s,
-                };
-
-                let status = match parts {
-                    (Some(title), Some(message), Some(percentage)) => {
-                        format!("[{}] {}% {} - {}", token_d, percentage, title, message)
-                    }
-                    (Some(title), None, Some(percentage)) => {
-                        format!("[{}] {}% {}", token_d, percentage, title)
-                    }
-                    (Some(title), Some(message), None) => {
-                        format!("[{}] {} - {}", token_d, title, message)
-                    }
-                    (None, Some(message), Some(percentage)) => {
-                        format!("[{}] {}% {}", token_d, percentage, message)
-                    }
-                    (Some(title), None, None) => {
-                        format!("[{}] {}", token_d, title)
-                    }
-                    (None, Some(message), None) => {
-                        format!("[{}] {}", token_d, message)
-                    }
-                    (None, None, Some(percentage)) => {
-                        format!("[{}] {}%", token_d, percentage)
-                    }
-                    (None, None, None) => format!("[{}]", token_d),
-                };
-
-                if let lsp::WorkDoneProgress::End(_) = work {
-                    self.lsp_progress.end_progress(server_id, &token);
-                    if !self.lsp_progress.is_progressing(server_id) {
-                        editor_view.spinners_mut().get_or_create(server_id).stop();
-                    }
-                } else {
-                    self.lsp_progress.update(server_id, token, work);
-                }
-
-                if self.config.load().editor.lsp.display_progress_messages {
-                    self.editor.set_status(status);
-                }
+                if self.editor.config().lsp.display_progress_messages {
+                    let title =
+                        title.or_else(|| self.lsp_progress.title(server_id, &token));
+                    if title.is_some() || percentage.is_some() || message.is_some() {
+                        use std::fmt::Write as _;
+                        let mut status = format!("{}: ", language_server!().name());
+                        if let Some(percentage) = percentage {
+                            write!(status, "{percentage:>2}% ").unwrap();
+                        }
+                        if let Some(title) = title {
+                            status.push_str(title);
+                        }
+                        if title.is_some() && message.is_some() {
+                            status.push_str(" ⋅ ");
+                        }
+                        if let Some(message) = message {
+                            status.push_str(message);
+                        }
+                        self.editor.set_status(status);
+                    }
+                }
+
+                match work {
+                    lsp::WorkDoneProgress::Begin(begin_status) => {
+                        self.lsp_progress
+                            .begin(server_id, token.clone(), begin_status);
+                    }
+                    lsp::WorkDoneProgress::Report(report_status) => {
+                        self.lsp_progress
+                            .update(server_id, token.clone(), report_status);
+                    }
+                    lsp::WorkDoneProgress::End(_) => {
+                        self.lsp_progress.end_progress(server_id, &token);
+                        if !self.lsp_progress.is_progressing(server_id) {
+                            editor_view.spinners_mut().get_or_create(server_id).stop();
+                        };
+                    }
+                }
             }
             Notification::ProgressMessage(_params) => {

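The new status construction composes whichever pieces happen to be present instead of enumerating all eight `(title, message, percentage)` combinations. A reduced sketch of the same idea, with the server name passed in directly since the `language_server!()` macro is outside this excerpt:

use std::fmt::Write as _;

fn progress_status(
    server: &str,
    title: Option<&str>,
    message: Option<&str>,
    percentage: Option<u32>,
) -> String {
    let mut status = format!("{server}: ");
    if let Some(percentage) = percentage {
        write!(status, "{percentage:>2}% ").unwrap();
    }
    if let Some(title) = title {
        status.push_str(title);
    }
    // The separator only appears when both sides exist.
    if title.is_some() && message.is_some() {
        status.push_str(" ⋅ ");
    }
    if let Some(message) = message {
        status.push_str(message);
    }
    status
}

fn main() {
    assert_eq!(
        progress_status("rust-analyzer", Some("indexing"), Some("3/120"), Some(2)),
        "rust-analyzer:  2% indexing ⋅ 3/120"
    );
    assert_eq!(
        progress_status("gopls", None, Some("loading packages"), None),
        "gopls: loading packages"
    );
}
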
@@ -840,8 +851,8 @@ impl Application {
                 // we need to clear those and remove the entries from the list if this leads to
                 // an empty diagnostic list for said files
                 for diags in self.editor.diagnostics.values_mut() {
-                    diags.retain(|(_, provider)| {
-                        provider.language_server_id() != Some(server_id)
+                    diags.retain(|diag| {
+                        diag.provider.language_server_id() != Some(server_id)
                     });
                 }

@@ -34,7 +34,7 @@ use helix_core::{
     regex::{self, Regex},
     search::{self, CharMatcher},
     selection, surround,
-    syntax::{BlockCommentToken, LanguageServerFeature},
+    syntax::config::{BlockCommentToken, LanguageServerFeature},
     text_annotations::{Overlay, TextAnnotations},
     textobject,
     unicode::width::UnicodeWidthChar,

@@ -426,6 +426,8 @@ impl MappableCommand {
         goto_implementation, "Goto implementation",
         goto_file_start, "Goto line number <n> else file start",
         goto_file_end, "Goto file end",
+        extend_to_file_start, "Extend to line number<n> else file start",
+        extend_to_file_end, "Extend to file end",
         goto_file, "Goto files/URLs in selections",
         goto_file_hsplit, "Goto files in selections (hsplit)",
         goto_file_vsplit, "Goto files in selections (vsplit)",

@@ -438,6 +440,7 @@ impl MappableCommand {
         goto_last_modification, "Goto last modification",
         goto_line, "Goto line",
         goto_last_line, "Goto last line",
+        extend_to_last_line, "Extend to last line",
         goto_first_diag, "Goto first diagnostic",
         goto_last_diag, "Goto last diagnostic",
         goto_next_diag, "Goto next diagnostic",

@@ -448,6 +451,8 @@ impl MappableCommand {
         goto_last_change, "Goto last change",
         goto_line_start, "Goto line start",
         goto_line_end, "Goto line end",
+        goto_column, "Goto column",
+        extend_to_column, "Extend to column",
         goto_next_buffer, "Goto next buffer",
         goto_previous_buffer, "Goto previous buffer",
         goto_line_end_newline, "Goto newline at line end",

@@ -594,8 +599,10 @@ impl MappableCommand {
         command_palette, "Open command palette",
         goto_word, "Jump to a two-character label",
         extend_to_word, "Extend to a two-character label",
-        goto_next_tabstop, "goto next snippet placeholder",
-        goto_prev_tabstop, "goto next snippet placeholder",
+        goto_next_tabstop, "Goto next snippet placeholder",
+        goto_prev_tabstop, "Goto next snippet placeholder",
+        rotate_selections_first, "Make the first selection your primary one",
+        rotate_selections_last, "Make the last selection your primary one",
     );
 }

@@ -1253,28 +1260,44 @@ fn goto_next_paragraph(cx: &mut Context) {
 }

 fn goto_file_start(cx: &mut Context) {
+    goto_file_start_impl(cx, Movement::Move);
+}
+
+fn extend_to_file_start(cx: &mut Context) {
+    goto_file_start_impl(cx, Movement::Extend);
+}
+
+fn goto_file_start_impl(cx: &mut Context, movement: Movement) {
     if cx.count.is_some() {
-        goto_line(cx);
+        goto_line_impl(cx, movement);
     } else {
         let (view, doc) = current!(cx.editor);
         let text = doc.text().slice(..);
         let selection = doc
             .selection(view.id)
             .clone()
-            .transform(|range| range.put_cursor(text, 0, cx.editor.mode == Mode::Select));
+            .transform(|range| range.put_cursor(text, 0, movement == Movement::Extend));
         push_jump(view, doc);
         doc.set_selection(view.id, selection);
     }
 }

 fn goto_file_end(cx: &mut Context) {
+    goto_file_end_impl(cx, Movement::Move);
+}
+
+fn extend_to_file_end(cx: &mut Context) {
+    goto_file_end_impl(cx, Movement::Extend)
+}
+
+fn goto_file_end_impl(cx: &mut Context, movement: Movement) {
     let (view, doc) = current!(cx.editor);
     let text = doc.text().slice(..);
     let pos = doc.text().len_chars();
     let selection = doc
         .selection(view.id)
         .clone()
-        .transform(|range| range.put_cursor(text, pos, cx.editor.mode == Mode::Select));
+        .transform(|range| range.put_cursor(text, pos, movement == Movement::Extend));
     push_jump(view, doc);
     doc.set_selection(view.id, selection);
 }

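Each `goto_*`/`extend_to_*` pair above funnels into one `_impl` that differs only in the `Movement` flag handed to `put_cursor`. The toy model below shows the effect of that flag on an (anchor, head) range; types are simplified stand-ins, not helix's real `Range`:

#[derive(Clone, Copy, PartialEq)]
enum Movement {
    Move,
    Extend,
}

/// (anchor, head): extending keeps the anchor, moving collapses onto `pos`.
fn put_cursor(range: (usize, usize), pos: usize, extend: bool) -> (usize, usize) {
    if extend {
        (range.0, pos)
    } else {
        (pos, pos)
    }
}

fn goto_file_start(range: (usize, usize), movement: Movement) -> (usize, usize) {
    put_cursor(range, 0, movement == Movement::Extend)
}

fn main() {
    assert_eq!(goto_file_start((10, 14), Movement::Move), (0, 0));
    assert_eq!(goto_file_start((10, 14), Movement::Extend), (10, 0));
}
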
@@ -3482,12 +3505,12 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
     enter_insert_mode(cx);

     let (view, doc) = current!(cx.editor);
+    let loader = cx.editor.syn_loader.load();

     let text = doc.text().slice(..);
     let contents = doc.text();
     let selection = doc.selection(view.id);

-    let language_config = doc.language_config();
     let syntax = doc.syntax();
     let tab_width = doc.tab_width();

@@ -3503,7 +3526,7 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
             let line_end_index = cursor_line_start;

             let indent = indent::indent_for_newline(
-                language_config,
+                &loader,
                 syntax,
                 &doc.config.load().indent_heuristic,
                 &doc.indent_style,

@@ -3613,6 +3636,7 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation)
     enter_insert_mode(cx);
     let config = cx.editor.config();
     let (view, doc) = current!(cx.editor);
+    let loader = cx.editor.syn_loader.load();

     let text = doc.text().slice(..);
     let contents = doc.text();

@@ -3662,7 +3686,7 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation)
         let indent = match line.first_non_whitespace_char() {
             Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
             _ => indent::indent_for_newline(
-                doc.language_config(),
+                &loader,
                 doc.syntax(),
                 &config.indent_heuristic,
                 &doc.indent_style,

@@ -3740,21 +3764,30 @@ fn normal_mode(cx: &mut Context) {
 }

 // Store a jump on the jumplist.
-fn push_jump(view: &mut View, doc: &Document) {
+fn push_jump(view: &mut View, doc: &mut Document) {
+    doc.append_changes_to_history(view);
     let jump = (doc.id(), doc.selection(view.id).clone());
     view.jumps.push(jump);
 }

 fn goto_line(cx: &mut Context) {
+    goto_line_impl(cx, Movement::Move);
+}
+
+fn goto_line_impl(cx: &mut Context, movement: Movement) {
     if cx.count.is_some() {
         let (view, doc) = current!(cx.editor);
         push_jump(view, doc);

-        goto_line_without_jumplist(cx.editor, cx.count);
+        goto_line_without_jumplist(cx.editor, cx.count, movement);
     }
 }

-fn goto_line_without_jumplist(editor: &mut Editor, count: Option<NonZeroUsize>) {
+fn goto_line_without_jumplist(
+    editor: &mut Editor,
+    count: Option<NonZeroUsize>,
+    movement: Movement,
+) {
     if let Some(count) = count {
         let (view, doc) = current!(editor);
         let text = doc.text().slice(..);

|
||||
let selection = doc
|
||||
.selection(view.id)
|
||||
.clone()
|
||||
.transform(|range| range.put_cursor(text, pos, editor.mode == Mode::Select));
|
||||
.transform(|range| range.put_cursor(text, pos, movement == Movement::Extend));
|
||||
|
||||
doc.set_selection(view.id, selection);
|
||||
}
|
||||
}
|
||||
|
||||
fn goto_last_line(cx: &mut Context) {
|
||||
goto_last_line_impl(cx, Movement::Move)
|
||||
}
|
||||
|
||||
fn extend_to_last_line(cx: &mut Context) {
|
||||
goto_last_line_impl(cx, Movement::Extend)
|
||||
}
|
||||
|
||||
fn goto_last_line_impl(cx: &mut Context, movement: Movement) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let text = doc.text().slice(..);
|
||||
let line_idx = if text.line(text.len_lines() - 1).len_chars() == 0 {
|
||||
@@ -3788,12 +3829,34 @@ fn goto_last_line(cx: &mut Context) {
     let selection = doc
         .selection(view.id)
         .clone()
-        .transform(|range| range.put_cursor(text, pos, cx.editor.mode == Mode::Select));
+        .transform(|range| range.put_cursor(text, pos, movement == Movement::Extend));

     push_jump(view, doc);
     doc.set_selection(view.id, selection);
 }

+fn goto_column(cx: &mut Context) {
+    goto_column_impl(cx, Movement::Move);
+}
+
+fn extend_to_column(cx: &mut Context) {
+    goto_column_impl(cx, Movement::Extend);
+}
+
+fn goto_column_impl(cx: &mut Context, movement: Movement) {
+    let count = cx.count();
+    let (view, doc) = current!(cx.editor);
+    let text = doc.text().slice(..);
+    let selection = doc.selection(view.id).clone().transform(|range| {
+        let line = range.cursor_line(text);
+        let line_start = text.line_to_char(line);
+        let line_end = line_end_char_index(&text, line);
+        let pos = graphemes::nth_next_grapheme_boundary(text, line_start, count - 1).min(line_end);
+        range.put_cursor(text, pos, movement == Movement::Extend)
+    });
+    doc.set_selection(view.id, selection);
+}
+
 fn goto_last_accessed_file(cx: &mut Context) {
     let view = view_mut!(cx.editor);
     if let Some(alt) = view.docs_access_history.pop() {

@@ -4126,6 +4189,7 @@ pub mod insert {
     pub fn insert_newline(cx: &mut Context) {
         let config = cx.editor.config();
         let (view, doc) = current_ref!(cx.editor);
+        let loader = cx.editor.syn_loader.load();
         let text = doc.text().slice(..);
         let line_ending = doc.line_ending.as_str();

@@ -4144,6 +4208,7 @@ pub mod insert {
             None
         };

+        let mut last_pos = 0;
         let mut transaction = Transaction::change_by_selection(contents, selection, |range| {
             // Tracks the number of trailing whitespace characters deleted by this selection.
             let mut chars_deleted = 0;

@@ -4165,13 +4230,14 @@ pub mod insert {
             let (from, to, local_offs) = if let Some(idx) =
                 text.slice(line_start..pos).last_non_whitespace_char()
             {
-                let first_trailing_whitespace_char = (line_start + idx + 1).min(pos);
+                let first_trailing_whitespace_char = (line_start + idx + 1).clamp(last_pos, pos);
+                last_pos = pos;
                 let line = text.line(current_line);

                 let indent = match line.first_non_whitespace_char() {
                     Some(pos) if continue_comment_token.is_some() => line.slice(..pos).to_string(),
                     _ => indent::indent_for_newline(
-                        doc.language_config(),
+                        &loader,
                         doc.syntax(),
                         &config.indent_heuristic,
                         &doc.indent_style,

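The switch from `.min(pos)` to `.clamp(last_pos, pos)` is subtle: with several cursors inserting newlines on nearby text, the trailing-whitespace deletion computed for one cursor could previously start before the region already consumed by the previous cursor, producing overlapping changes. Clamping the start to `last_pos` keeps each deletion disjoint. A number-line sketch with made-up positions, just to show the arithmetic:

fn main() {
    // Two cursors on one stretch of text; their deletions must not overlap.
    let last_pos = 8; // end of the region handled by the previous selection
    let pos = 12;     // current cursor position
    let line_start = 2;
    let idx = 3;      // offset of the last non-whitespace char before the cursor

    let old_start = (line_start + idx + 1).min(pos);             // 6: overlaps!
    let new_start = (line_start + idx + 1).clamp(last_pos, pos); // 8: disjoint
    assert_eq!(old_start, 6);
    assert_eq!(new_start, 8);
}
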
@@ -5228,6 +5294,21 @@ fn rotate_selections_backward(cx: &mut Context) {
     rotate_selections(cx, Direction::Backward)
 }

+fn rotate_selections_first(cx: &mut Context) {
+    let (view, doc) = current!(cx.editor);
+    let mut selection = doc.selection(view.id).clone();
+    selection.set_primary_index(0);
+    doc.set_selection(view.id, selection);
+}
+
+fn rotate_selections_last(cx: &mut Context) {
+    let (view, doc) = current!(cx.editor);
+    let mut selection = doc.selection(view.id).clone();
+    let len = selection.len();
+    selection.set_primary_index(len - 1);
+    doc.set_selection(view.id, selection);
+}
+
 enum ReorderStrategy {
     RotateForward,
     RotateBackward,

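For context: a selection in helix is a list of ranges plus the index of the primary one, and the two new commands simply move that index to either end. A stripped-down model of that behaviour (this is not the real `Selection` API beyond the two methods shown):

struct Selection {
    ranges: Vec<(usize, usize)>,
    primary_index: usize,
}

impl Selection {
    fn len(&self) -> usize {
        self.ranges.len()
    }
    fn set_primary_index(&mut self, idx: usize) {
        assert!(idx < self.ranges.len());
        self.primary_index = idx;
    }
}

fn main() {
    let mut selection = Selection {
        ranges: vec![(0, 2), (5, 9), (12, 20)],
        primary_index: 1,
    };
    selection.set_primary_index(0); // rotate_selections_first
    assert_eq!(selection.primary_index, 0);
    let len = selection.len();
    selection.set_primary_index(len - 1); // rotate_selections_last
    assert_eq!(selection.primary_index, 2);
}
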
@@ -5728,19 +5809,14 @@ fn goto_ts_object_impl(cx: &mut Context, object: &'static str, direction: Direct
     let count = cx.count();
     let motion = move |editor: &mut Editor| {
         let (view, doc) = current!(editor);
-        if let Some((lang_config, syntax)) = doc.language_config().zip(doc.syntax()) {
+        let loader = editor.syn_loader.load();
+        if let Some(syntax) = doc.syntax() {
             let text = doc.text().slice(..);
             let root = syntax.tree().root_node();

             let selection = doc.selection(view.id).clone().transform(|range| {
                 let new_range = movement::goto_treesitter_object(
-                    text,
-                    range,
-                    object,
-                    direction,
-                    root,
-                    lang_config,
-                    count,
+                    text, range, object, direction, &root, syntax, &loader, count,
                 );

                 if editor.mode == Mode::Select {

@@ -5828,21 +5904,15 @@ fn select_textobject(cx: &mut Context, objtype: textobject::TextObject) {
     if let Some(ch) = event.char() {
         let textobject = move |editor: &mut Editor| {
             let (view, doc) = current!(editor);
+            let loader = editor.syn_loader.load();
             let text = doc.text().slice(..);

             let textobject_treesitter = |obj_name: &str, range: Range| -> Range {
-                let (lang_config, syntax) = match doc.language_config().zip(doc.syntax()) {
-                    Some(t) => t,
-                    None => return range,
+                let Some(syntax) = doc.syntax() else {
+                    return range;
                 };
                 textobject::textobject_treesitter(
-                    text,
-                    range,
-                    objtype,
-                    obj_name,
-                    syntax.tree().root_node(),
-                    lang_config,
-                    count,
+                    text, range, objtype, obj_name, syntax, &loader, count,
                 )
             };

@@ -6315,7 +6385,7 @@ fn shell_prompt(cx: &mut Context, prompt: Cow<'static, str>, behavior: ShellBeha
         cx,
         prompt,
         Some('|'),
-        ui::completers::filename,
+        ui::completers::shell,
         move |cx, input: &str, event: PromptEvent| {
             if event != PromptEvent::Validate {
                 return;

@@ -6747,3 +6817,44 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
     }
     jump_to_label(cx, words, behaviour)
 }
+
+pub fn code_action(cx: &mut Context) {
+    impl ui::menu::Item for helix_view::Action {
+        type Data = ();
+        fn format(&self, _data: &Self::Data) -> ui::menu::Row {
+            self.title().into()
+        }
+    }
+
+    let Some(future) = cx.editor.actions() else {
+        cx.editor.set_error("No code actions available");
+        return;
+    };
+
+    cx.jobs.callback(async move {
+        let actions = future.await;
+
+        let call = move |editor: &mut Editor, compositor: &mut Compositor| {
+            if actions.is_empty() {
+                editor.set_error("No code actions available");
+                return;
+            }
+            let mut picker = ui::Menu::new(actions, (), move |editor, action, event| {
+                if event != PromptEvent::Validate {
+                    return;
+                }
+
+                // always present here
+                let action = action.unwrap();
+                action.execute(editor);
+            });
+            picker.move_down(); // pre-select the first item
+
+            let popup = Popup::new("code-action", picker).with_scrollbar(false);
+
+            compositor.replace_or_push("code-action", popup);
+        };
+
+        Ok(Callback::EditorCompositor(Box::new(call)))
+    });
+}

@@ -5,7 +5,7 @@ use crate::{
     ui::{self, overlay::overlaid, Picker, Popup, Prompt, PromptEvent, Text},
 };
 use dap::{StackFrame, Thread, ThreadStates};
-use helix_core::syntax::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate};
+use helix_core::syntax::config::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate};
 use helix_dap::{self as dap, Client};
 use helix_lsp::block_on;
 use helix_view::editor::Breakpoint;

@@ -1,21 +1,14 @@
 use futures_util::{stream::FuturesOrdered, FutureExt};
 use helix_lsp::{
-    block_on,
-    lsp::{
-        self, CodeAction, CodeActionOrCommand, CodeActionTriggerKind, DiagnosticSeverity,
-        NumberOrString,
-    },
-    util::{diagnostic_to_lsp_diagnostic, lsp_range_to_range, range_to_lsp_range},
-    Client, LanguageServerId, OffsetEncoding,
+    block_on, lsp, util::lsp_range_to_range, Client, LanguageServerId, OffsetEncoding,
 };
 use tokio_stream::StreamExt;
-use tui::{text::Span, widgets::Row};
+use tui::text::Span;

 use super::{align_view, push_jump, Align, Context, Editor};

 use helix_core::{
-    diagnostic::DiagnosticProvider, syntax::LanguageServerFeature,
-    text_annotations::InlineAnnotation, Selection, Uri,
+    syntax::config::LanguageServerFeature, text_annotations::InlineAnnotation, Selection, Uri,
 };
 use helix_stdx::path;
 use helix_view::{

@@ -23,7 +16,7 @@ use helix_view::{
     editor::Action,
     handlers::lsp::SignatureHelpInvoked,
     theme::Style,
-    Document, View,
+    Diagnostic, Document, DocumentId, View,
 };

 use crate::{

@@ -32,7 +25,7 @@ use crate::{
     ui::{self, overlay::overlaid, FileLocation, Picker, Popup, PromptEvent},
 };

-use std::{cmp::Ordering, collections::HashSet, fmt::Display, future::Future, path::Path};
+use std::{collections::HashSet, fmt::Display, future::Future};

 /// Gets the first language server that is attached to a document which supports a specific feature.
 /// If there is no configured language server that supports the feature, this displays a status message.

@@ -46,7 +39,7 @@ macro_rules! language_server_with_feature {
     match language_server {
         Some(language_server) => language_server,
         None => {
-            $editor.set_status(format!(
+            $editor.set_error(format!(
                 "No configured language server supports {}",
                 $feature
             ));

@@ -56,31 +49,48 @@ macro_rules! language_server_with_feature {
     }};
 }

-/// A wrapper around `lsp::Location` that swaps out the LSP URI for `helix_core::Uri` and adds
-/// the server's offset encoding.
+/// A wrapper around `lsp::Location`.
 #[derive(Debug, Clone, PartialEq, Eq)]
-struct Location {
+pub struct Location {
     uri: Uri,
-    range: lsp::Range,
-    offset_encoding: OffsetEncoding,
+    range: helix_view::Range,
 }

-fn lsp_location_to_location(
-    location: lsp::Location,
-    offset_encoding: OffsetEncoding,
-) -> Option<Location> {
-    let uri = match location.uri.try_into() {
-        Ok(uri) => uri,
-        Err(err) => {
-            log::warn!("discarding invalid or unsupported URI: {err}");
-            return None;
-        }
-    };
-    Some(Location {
-        uri,
-        range: location.range,
-        offset_encoding,
-    })
+impl Location {
+    fn lsp(location: lsp::Location, offset_encoding: OffsetEncoding) -> Option<Self> {
+        let uri = match location.uri.try_into() {
+            Ok(uri) => uri,
+            Err(err) => {
+                log::warn!("discarding invalid or unsupported URI: {err}");
+                return None;
+            }
+        };
+        Some(Self {
+            uri,
+            range: helix_view::Range::Lsp {
+                range: location.range,
+                offset_encoding,
+            },
+        })
+    }
+
+    fn file_location<'a>(&'a self, editor: &Editor) -> Option<FileLocation<'a>> {
+        let (path_or_id, doc) = match &self.uri {
+            Uri::File(path) => ((&**path).into(), None),
+            Uri::Scratch(doc_id) => ((*doc_id).into(), editor.documents.get(doc_id)),
+            _ => return None,
+        };
+        let lines = match self.range {
+            helix_view::Range::Lsp { range, .. } => {
+                Some((range.start.line as usize, range.end.line as usize))
+            }
+            helix_view::Range::Document(range) => doc.map(|doc| {
+                let text = doc.text().slice(..);
+                (text.char_to_line(range.start), text.char_to_line(range.end))
+            }),
+        };
+        Some((path_or_id, lines))
+    }
 }

 struct SymbolInformationItem {

@@ -97,63 +107,57 @@ struct DiagnosticStyles {

 struct PickerDiagnostic {
     location: Location,
-    diag: lsp::Diagnostic,
-}
-
-fn location_to_file_location(location: &Location) -> Option<FileLocation> {
-    let path = location.uri.as_path()?;
-    let line = Some((
-        location.range.start.line as usize,
-        location.range.end.line as usize,
-    ));
-    Some((path.into(), line))
+    diag: Diagnostic,
 }

 fn jump_to_location(editor: &mut Editor, location: &Location, action: Action) {
     let (view, doc) = current!(editor);
     push_jump(view, doc);

-    let Some(path) = location.uri.as_path() else {
-        let err = format!("unable to convert URI to filepath: {:?}", location.uri);
-        editor.set_error(err);
-        return;
+    let doc_id = match &location.uri {
+        Uri::Scratch(doc_id) => {
+            editor.switch(*doc_id, action);
+            *doc_id
+        }
+        Uri::File(path) => match editor.open(path, action) {
+            Ok(doc_id) => doc_id,
+            Err(err) => {
+                editor.set_error(format!("failed to open path: {:?}: {:?}", path, err));
+                return;
+            }
+        },
+        _ => return,
     };
-    jump_to_position(
-        editor,
-        path,
-        location.range,
-        location.offset_encoding,
-        action,
-    );
+
+    jump_to_position(editor, doc_id, location.range, action);
 }

 fn jump_to_position(
     editor: &mut Editor,
-    path: &Path,
-    range: lsp::Range,
-    offset_encoding: OffsetEncoding,
+    doc_id: DocumentId,
+    range: helix_view::Range,
     action: Action,
 ) {
-    let doc = match editor.open(path, action) {
-        Ok(id) => doc_mut!(editor, &id),
-        Err(err) => {
-            let err = format!("failed to open path: {:?}: {:?}", path, err);
-            editor.set_error(err);
-            return;
-        }
+    let Some(doc) = editor.documents.get_mut(&doc_id) else {
+        return;
     };
     let view = view_mut!(editor);
-    // TODO: convert inside server
-    let new_range = if let Some(new_range) = lsp_range_to_range(doc.text(), range, offset_encoding)
-    {
-        new_range
-    } else {
-        log::warn!("lsp position out of bounds - {:?}", range);
-        return;
+    let selection = match range {
+        helix_view::Range::Lsp {
+            range,
+            offset_encoding,
+        } => {
+            let Some(range) = lsp_range_to_range(doc.text(), range, offset_encoding) else {
+                log::warn!("lsp position out of bounds - {:?}", range);
+                return;
+            };
+            range.into()
+        }
+        helix_view::Range::Document(range) => Selection::single(range.start, range.end),
     };
-    // we flip the range so that the cursor sits on the start of the symbol
-    // (for example start of the function).
-    doc.set_selection(view.id, Selection::single(new_range.head, new_range.anchor));
+    doc.set_selection(view.id, selection);
     if action.align_view(view, doc.id()) {
         align_view(doc, view, Align::Center);
     }

@@ -204,30 +208,22 @@ type DiagnosticsPicker = Picker<PickerDiagnostic, DiagnosticStyles>;

 fn diag_picker(
     cx: &Context,
-    diagnostics: impl IntoIterator<Item = (Uri, Vec<(lsp::Diagnostic, DiagnosticProvider)>)>,
+    diagnostics: impl IntoIterator<Item = (Uri, Vec<Diagnostic>)>,
     format: DiagnosticsFormat,
 ) -> DiagnosticsPicker {
     // TODO: drop current_path comparison and instead use workspace: bool flag?

     // flatten the map to a vec of (url, diag) pairs
     let mut flat_diag = Vec::new();
     for (uri, diags) in diagnostics {
         flat_diag.reserve(diags.len());

-        for (diag, provider) in diags {
-            if let Some(ls) = provider
-                .language_server_id()
-                .and_then(|id| cx.editor.language_server_by_id(id))
-            {
-                flat_diag.push(PickerDiagnostic {
-                    location: Location {
-                        uri: uri.clone(),
-                        range: diag.range,
-                        offset_encoding: ls.offset_encoding(),
-                    },
-                    diag,
-                });
-            }
+        for diag in diags {
+            flat_diag.push(PickerDiagnostic {
+                location: Location {
+                    uri: uri.clone(),
+                    range: diag.range,
+                },
+                diag,
+            });
         }
     }

@@ -242,33 +238,38 @@ fn diag_picker(
             ui::PickerColumn::new(
                 "severity",
                 |item: &PickerDiagnostic, styles: &DiagnosticStyles| {
+                    use helix_core::diagnostic::Severity::*;
                     match item.diag.severity {
-                        Some(DiagnosticSeverity::HINT) => Span::styled("HINT", styles.hint),
-                        Some(DiagnosticSeverity::INFORMATION) => Span::styled("INFO", styles.info),
-                        Some(DiagnosticSeverity::WARNING) => Span::styled("WARN", styles.warning),
-                        Some(DiagnosticSeverity::ERROR) => Span::styled("ERROR", styles.error),
+                        Some(Hint) => Span::styled("HINT", styles.hint),
+                        Some(Info) => Span::styled("INFO", styles.info),
+                        Some(Warning) => Span::styled("WARN", styles.warning),
+                        Some(Error) => Span::styled("ERROR", styles.error),
                         _ => Span::raw(""),
                     }
                     .into()
                 },
             ),
+            ui::PickerColumn::new("source", |item: &PickerDiagnostic, _| {
+                item.diag.source.as_deref().unwrap_or("").into()
+            }),
             ui::PickerColumn::new("code", |item: &PickerDiagnostic, _| {
-                match item.diag.code.as_ref() {
-                    Some(NumberOrString::Number(n)) => n.to_string().into(),
-                    Some(NumberOrString::String(s)) => s.as_str().into(),
-                    None => "".into(),
-                }
+                item.diag
+                    .code
+                    .as_ref()
+                    .map(|c| c.as_string())
+                    .unwrap_or_default()
+                    .into()
             }),
             ui::PickerColumn::new("message", |item: &PickerDiagnostic, _| {
                 item.diag.message.as_str().into()
            }),
         ];
-        let mut primary_column = 2; // message
+        let mut primary_column = 3; // message

         if format == DiagnosticsFormat::ShowSourcePath {
             columns.insert(
                 // between message code and message
-                2,
+                3,
                 ui::PickerColumn::new("path", |item: &PickerDiagnostic, _| {
                     if let Some(path) = item.location.uri.as_path() {
                         path::get_truncated_path(path)

@@ -295,7 +296,7 @@ fn diag_picker(
             .immediately_show_diagnostic(doc, view.id);
         },
     )
-    .with_preview(move |_editor, diag| location_to_file_location(&diag.location))
+    .with_preview(|editor, diag| diag.location.file_location(editor))
     .truncate_start(false)
 }

@@ -319,8 +320,10 @@ pub fn symbol_picker(cx: &mut Context) {
             },
             location: Location {
                 uri: uri.clone(),
-                range: symbol.selection_range,
-                offset_encoding,
+                range: helix_view::Range::Lsp {
+                    range: symbol.selection_range,
+                    offset_encoding,
+                },
             },
         });
         for child in symbol.children.into_iter().flatten() {

@@ -338,9 +341,7 @@ pub fn symbol_picker(cx: &mut Context) {
     let request = language_server.document_symbols(doc.identifier()).unwrap();
     let offset_encoding = language_server.offset_encoding();
     let doc_id = doc.identifier();
-    let doc_uri = doc
-        .uri()
-        .expect("docs with active language servers must be backed by paths");
+    let doc_uri = doc.uri();

     async move {
         let symbols = match request.await? {

@@ -355,8 +356,10 @@ pub fn symbol_picker(cx: &mut Context) {
                     .map(|symbol| SymbolInformationItem {
                         location: Location {
                             uri: doc_uri.clone(),
-                            range: symbol.location.range,
-                            offset_encoding,
+                            range: helix_view::Range::Lsp {
+                                range: symbol.location.range,
+                                offset_encoding,
+                            },
                         },
                         symbol,
                     })

@@ -423,7 +426,7 @@ pub fn symbol_picker(cx: &mut Context) {
             jump_to_location(cx.editor, &item.location, action);
         },
     )
-    .with_preview(move |_editor, item| location_to_file_location(&item.location))
+    .with_preview(|editor, item| item.location.file_location(editor))
     .truncate_start(false);

     compositor.push(Box::new(overlaid(picker)))

@@ -480,8 +483,10 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
             Some(SymbolInformationItem {
                 location: Location {
                     uri,
-                    range: symbol.location.range,
-                    offset_encoding,
+                    range: helix_view::Range::Lsp {
+                        range: symbol.location.range,
+                        offset_encoding,
+                    },
                 },
                 symbol,
             })

@@ -549,7 +554,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
             jump_to_location(cx.editor, &item.location, action);
         },
     )
-    .with_preview(|_editor, item| location_to_file_location(&item.location))
+    .with_preview(|editor, item| item.location.file_location(editor))
     .with_dynamic_query(get_symbols, None)
     .truncate_start(false);

@@ -558,11 +563,10 @@ pub fn workspace_symbol_picker(cx: &mut Context) {

 pub fn diagnostics_picker(cx: &mut Context) {
     let doc = doc!(cx.editor);
-    if let Some(uri) = doc.uri() {
-        let diagnostics = cx.editor.diagnostics.get(&uri).cloned().unwrap_or_default();
-        let picker = diag_picker(cx, [(uri, diagnostics)], DiagnosticsFormat::HideSourcePath);
-        cx.push_layer(Box::new(overlaid(picker)));
-    }
+    let uri = doc.uri();
+    let diagnostics = cx.editor.diagnostics.get(&uri).cloned().unwrap_or_default();
+    let picker = diag_picker(cx, [(uri, diagnostics)], DiagnosticsFormat::HideSourcePath);
+    cx.push_layer(Box::new(overlaid(picker)));
 }

 pub fn workspace_diagnostics_picker(cx: &mut Context) {

@@ -572,247 +576,6 @@ pub fn workspace_diagnostics_picker(cx: &mut Context) {
     cx.push_layer(Box::new(overlaid(picker)));
 }

-struct CodeActionOrCommandItem {
-    lsp_item: lsp::CodeActionOrCommand,
-    language_server_id: LanguageServerId,
-}
-
-impl ui::menu::Item for CodeActionOrCommandItem {
-    type Data = ();
-    fn format(&self, _data: &Self::Data) -> Row {
-        match &self.lsp_item {
-            lsp::CodeActionOrCommand::CodeAction(action) => action.title.as_str().into(),
-            lsp::CodeActionOrCommand::Command(command) => command.title.as_str().into(),
-        }
-    }
-}
-
-/// Determines the category of the `CodeAction` using the `CodeAction::kind` field.
-/// Returns a number that represent these categories.
-/// Categories with a lower number should be displayed first.
-///
-///
-/// While the `kind` field is defined as open ended in the LSP spec (any value may be used)
-/// in practice a closed set of common values (mostly suggested in the LSP spec) are used.
-/// VSCode displays each of these categories separately (separated by a heading in the codeactions picker)
-/// to make them easier to navigate. Helix does not display these headings to the user.
-/// However it does sort code actions by their categories to achieve the same order as the VScode picker,
-/// just without the headings.
-///
-/// The order used here is modeled after the [vscode sourcecode](https://github.com/microsoft/vscode/blob/eaec601dd69aeb4abb63b9601a6f44308c8d8c6e/src/vs/editor/contrib/codeAction/browser/codeActionWidget.ts>)
-fn action_category(action: &CodeActionOrCommand) -> u32 {
-    if let CodeActionOrCommand::CodeAction(CodeAction {
-        kind: Some(kind), ..
-    }) = action
-    {
-        let mut components = kind.as_str().split('.');
-        match components.next() {
-            Some("quickfix") => 0,
-            Some("refactor") => match components.next() {
-                Some("extract") => 1,
-                Some("inline") => 2,
-                Some("rewrite") => 3,
-                Some("move") => 4,
-                Some("surround") => 5,
-                _ => 7,
-            },
-            Some("source") => 6,
-            _ => 7,
-        }
-    } else {
-        7
-    }
-}
-
-fn action_preferred(action: &CodeActionOrCommand) -> bool {
-    matches!(
-        action,
-        CodeActionOrCommand::CodeAction(CodeAction {
-            is_preferred: Some(true),
-            ..
-        })
-    )
-}
-
-fn action_fixes_diagnostics(action: &CodeActionOrCommand) -> bool {
-    matches!(
-        action,
-        CodeActionOrCommand::CodeAction(CodeAction {
-            diagnostics: Some(diagnostics),
-            ..
-        }) if !diagnostics.is_empty()
-    )
-}
-
-pub fn code_action(cx: &mut Context) {
-    let (view, doc) = current!(cx.editor);
-
-    let selection_range = doc.selection(view.id).primary();
-
-    let mut seen_language_servers = HashSet::new();
-
-    let mut futures: FuturesOrdered<_> = doc
-        .language_servers_with_feature(LanguageServerFeature::CodeAction)
-        .filter(|ls| seen_language_servers.insert(ls.id()))
-        // TODO this should probably already been filtered in something like "language_servers_with_feature"
-        .filter_map(|language_server| {
-            let offset_encoding = language_server.offset_encoding();
-            let language_server_id = language_server.id();
-            let range = range_to_lsp_range(doc.text(), selection_range, offset_encoding);
-            // Filter and convert overlapping diagnostics
-            let code_action_context = lsp::CodeActionContext {
-                diagnostics: doc
-                    .diagnostics()
-                    .iter()
-                    .filter(|&diag| {
-                        selection_range
-                            .overlaps(&helix_core::Range::new(diag.range.start, diag.range.end))
-                    })
-                    .map(|diag| diagnostic_to_lsp_diagnostic(doc.text(), diag, offset_encoding))
-                    .collect(),
-                only: None,
-                trigger_kind: Some(CodeActionTriggerKind::INVOKED),
-            };
-            let code_action_request =
-                language_server.code_actions(doc.identifier(), range, code_action_context)?;
-            Some((code_action_request, language_server_id))
-        })
-        .map(|(request, ls_id)| async move {
-            let Some(mut actions) = request.await? else {
-                return anyhow::Ok(Vec::new());
-            };
-
-            // remove disabled code actions
-            actions.retain(|action| {
-                matches!(
-                    action,
-                    CodeActionOrCommand::Command(_)
-                        | CodeActionOrCommand::CodeAction(CodeAction { disabled: None, .. })
-                )
-            });
-
-            // Sort codeactions into a useful order. This behaviour is only partially described in the LSP spec.
-            // Many details are modeled after vscode because language servers are usually tested against it.
-            // VScode sorts the codeaction two times:
-            //
-            // First the codeactions that fix some diagnostics are moved to the front.
-            // If both codeactions fix some diagnostics (or both fix none) the codeaction
-            // that is marked with `is_preferred` is shown first. The codeactions are then shown in separate
-            // submenus that only contain a certain category (see `action_category`) of actions.
-            //
-            // Below this done in in a single sorting step
-            actions.sort_by(|action1, action2| {
-                // sort actions by category
-                let order = action_category(action1).cmp(&action_category(action2));
-                if order != Ordering::Equal {
-                    return order;
-                }
-                // within the categories sort by relevancy.
-                // Modeled after the `codeActionsComparator` function in vscode:
-                // https://github.com/microsoft/vscode/blob/eaec601dd69aeb4abb63b9601a6f44308c8d8c6e/src/vs/editor/contrib/codeAction/browser/codeAction.ts
-
-                // if one code action fixes a diagnostic but the other one doesn't show it first
-                let order = action_fixes_diagnostics(action1)
-                    .cmp(&action_fixes_diagnostics(action2))
-                    .reverse();
-                if order != Ordering::Equal {
-                    return order;
-                }
-
-                // if one of the codeactions is marked as preferred show it first
-                // otherwise keep the original LSP sorting
-                action_preferred(action1)
-                    .cmp(&action_preferred(action2))
-                    .reverse()
-            });
-
-            Ok(actions
-                .into_iter()
-                .map(|lsp_item| CodeActionOrCommandItem {
-                    lsp_item,
-                    language_server_id: ls_id,
-                })
-                .collect())
-        })
-        .collect();
-
-    if futures.is_empty() {
-        cx.editor
-            .set_error("No configured language server supports code actions");
-        return;
-    }
-
-    cx.jobs.callback(async move {
-        let mut actions = Vec::new();
-
-        while let Some(output) = futures.next().await {
-            match output {
-                Ok(mut lsp_items) => actions.append(&mut lsp_items),
-                Err(err) => log::error!("while gathering code actions: {err}"),
-            }
-        }
-
-        let call = move |editor: &mut Editor, compositor: &mut Compositor| {
-            if actions.is_empty() {
-                editor.set_error("No code actions available");
-                return;
-            }
-            let mut picker = ui::Menu::new(actions, (), move |editor, action, event| {
-                if event != PromptEvent::Validate {
-                    return;
-                }
-
-                // always present here
-                let action = action.unwrap();
-                let Some(language_server) = editor.language_server_by_id(action.language_server_id)
-                else {
-                    editor.set_error("Language Server disappeared");
-                    return;
-                };
-                let offset_encoding = language_server.offset_encoding();
-
-                match &action.lsp_item {
-                    lsp::CodeActionOrCommand::Command(command) => {
-                        log::debug!("code action command: {:?}", command);
-                        editor.execute_lsp_command(command.clone(), action.language_server_id);
-                    }
-                    lsp::CodeActionOrCommand::CodeAction(code_action) => {
-                        log::debug!("code action: {:?}", code_action);
-                        // we support lsp "codeAction/resolve" for `edit` and `command` fields
-                        let mut resolved_code_action = None;
-                        if code_action.edit.is_none() || code_action.command.is_none() {
-                            if let Some(future) = language_server.resolve_code_action(code_action) {
-                                if let Ok(code_action) = helix_lsp::block_on(future) {
-                                    resolved_code_action = Some(code_action);
-                                }
-                            }
-                        }
-                        let resolved_code_action =
-                            resolved_code_action.as_ref().unwrap_or(code_action);
-
-                        if let Some(ref workspace_edit) = resolved_code_action.edit {
-                            let _ = editor.apply_workspace_edit(offset_encoding, workspace_edit);
-                        }
-
-                        // if code action provides both edit and command first the edit
-                        // should be applied and then the command
-                        if let Some(command) = &code_action.command {
-                            editor.execute_lsp_command(command.clone(), action.language_server_id);
-                        }
-                    }
-                }
-            });
-            picker.move_down(); // pre-select the first item
-
-            let popup = Popup::new("code-action", picker).with_scrollbar(false);
-
-            compositor.replace_or_push("code-action", popup);
-        };
-
-        Ok(Callback::EditorCompositor(Box::new(call)))
-    });
-}

 #[derive(Debug)]
 pub struct ApplyEditError {
     pub kind: ApplyEditErrorKind,

@@ -853,20 +616,26 @@ fn goto_impl(editor: &mut Editor, compositor: &mut Compositor, locations: Vec<Lo
     let columns = [ui::PickerColumn::new(
         "location",
         |item: &Location, cwdir: &std::path::PathBuf| {
-            let path = if let Some(path) = item.uri.as_path() {
-                path.strip_prefix(cwdir).unwrap_or(path).to_string_lossy()
+            use std::fmt::Write;
+            let mut path = if let Some(path) = item.uri.as_path() {
+                path.strip_prefix(cwdir)
+                    .unwrap_or(path)
+                    .to_string_lossy()
+                    .to_string()
             } else {
-                item.uri.to_string().into()
+                item.uri.to_string()
             };

-            format!("{path}:{}", item.range.start.line + 1).into()
+            if let helix_view::Range::Lsp { range, .. } = item.range {
+                write!(path, ":{}", range.start.line + 1).unwrap();
+            }
+            path.into()
         },
     )];

     let picker = Picker::new(columns, 0, locations, cwdir, |cx, location, action| {
         jump_to_location(cx.editor, location, action)
     })
-    .with_preview(|_editor, location| location_to_file_location(location));
+    .with_preview(|editor, location| location.file_location(editor));
     compositor.push(Box::new(overlaid(picker)));
 }

@@ -894,12 +663,14 @@
             match response {
                 Ok((response, offset_encoding)) => match response {
                     Some(lsp::GotoDefinitionResponse::Scalar(lsp_location)) => {
-                        locations.extend(lsp_location_to_location(lsp_location, offset_encoding));
+                        locations.extend(Location::lsp(lsp_location, offset_encoding));
                     }
                     Some(lsp::GotoDefinitionResponse::Array(lsp_locations)) => {
-                        locations.extend(lsp_locations.into_iter().flat_map(|location| {
-                            lsp_location_to_location(location, offset_encoding)
-                        }));
+                        locations.extend(
+                            lsp_locations
+                                .into_iter()
+                                .flat_map(|location| Location::lsp(location, offset_encoding)),
+                        );
                     }
                     Some(lsp::GotoDefinitionResponse::Link(lsp_locations)) => {
                         locations.extend(

@@ -911,9 +682,7 @@ where
                                 location_link.target_range,
                             )
                         })
-                        .flat_map(|location| {
-                            lsp_location_to_location(location, offset_encoding)
-                        }),
+                        .flat_map(|location| Location::lsp(location, offset_encoding)),
                 );
             }
             None => (),

@@ -993,7 +762,7 @@ pub fn goto_reference(cx: &mut Context) {
                 lsp_locations
                     .into_iter()
                     .flatten()
-                    .flat_map(|location| lsp_location_to_location(location, offset_encoding)),
+                    .flat_map(|location| Location::lsp(location, offset_encoding)),
             ),
             Err(err) => log::error!("Error requesting references: {err}"),
         }

@@ -1357,6 +1126,7 @@ fn compute_inlay_hints_for_view(
     let mut padding_after_inlay_hints = Vec::new();

     let doc_text = doc.text();
+    let inlay_hints_length_limit = doc.config.load().lsp.inlay_hints_length_limit;

     for hint in hints {
         let char_idx =

|
||||
None => continue,
|
||||
};
|
||||
|
||||
let label = match hint.label {
|
||||
let mut label = match hint.label {
|
||||
lsp::InlayHintLabel::String(s) => s,
|
||||
lsp::InlayHintLabel::LabelParts(parts) => parts
|
||||
.into_iter()
|
||||
@@ -1375,6 +1145,31 @@ fn compute_inlay_hints_for_view(
                 .collect::<Vec<_>>()
                 .join(""),
         };
+        // Truncate the hint if too long
+        if let Some(limit) = inlay_hints_length_limit {
+            // Limit on displayed width
+            use helix_core::unicode::{
+                segmentation::UnicodeSegmentation, width::UnicodeWidthStr,
+            };
+
+            let width = label.width();
+            let limit = limit.get().into();
+            if width > limit {
+                let mut floor_boundary = 0;
+                let mut acc = 0;
+                for (i, grapheme_cluster) in label.grapheme_indices(true) {
+                    acc += grapheme_cluster.width();
+
+                    if acc > limit {
+                        floor_boundary = i;
+                        break;
+                    }
+                }
+
+                label.truncate(floor_boundary);
+                label.push('…');
+            }
+        }

         let inlay_hints_vec = match hint.kind {
             Some(lsp::InlayHintKind::TYPE) => &mut type_inlay_hints,

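The truncation above walks grapheme clusters and accumulates display width, so a hint is never cut inside a multi-byte or combining sequence. The same logic as a standalone sketch; here unicode-segmentation and unicode-width are used as direct dependencies, whereas in the tree they come in through helix_core:

use unicode_segmentation::UnicodeSegmentation;
use unicode_width::UnicodeWidthStr;

fn truncate_label(label: &mut String, limit: usize) {
    if label.width() <= limit {
        return;
    }
    let mut floor_boundary = 0;
    let mut acc = 0;
    // Stop at the last grapheme boundary whose accumulated width still fits.
    for (i, grapheme_cluster) in label.grapheme_indices(true) {
        acc += grapheme_cluster.width();
        if acc > limit {
            floor_boundary = i;
            break;
        }
    }
    label.truncate(floor_boundary);
    label.push('…');
}

fn main() {
    let mut label = String::from("impl Iterator for RopeGraphemes");
    truncate_label(&mut label, 10);
    assert_eq!(label, "impl Itera…");
}
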
|
@@ -339,8 +339,9 @@ fn write_impl(cx: &mut compositor::Context, path: Option<&str>, force: bool) ->
|
||||
// Save an undo checkpoint for any outstanding changes.
|
||||
doc.append_changes_to_history(view);
|
||||
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
let fmt = if config.auto_format {
|
||||
doc.auto_format().map(|fmt| {
|
||||
doc.auto_format(cx.editor).map(|fmt| {
|
||||
let callback = make_format_callback(
|
||||
doc.id(),
|
||||
doc.version(),
|
||||
@@ -483,8 +484,8 @@ fn format(cx: &mut compositor::Context, _args: Args, event: PromptEvent) -> anyh
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let format = doc.format().context(
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
let format = doc.format(cx.editor).context(
|
||||
"A formatter isn't available, and no language server provides formatting capabilities",
|
||||
)?;
|
||||
let callback = make_format_callback(doc.id(), doc.version(), view.id, format, None);
|
||||
@@ -752,7 +753,8 @@ pub fn write_all_impl(
|
||||
doc.append_changes_to_history(view);
|
||||
|
||||
let fmt = if options.auto_format && config.auto_format {
|
||||
doc.auto_format().map(|fmt| {
|
||||
let doc = doc!(cx.editor, &doc_id);
|
||||
doc.auto_format(cx.editor).map(|fmt| {
|
||||
let callback = make_format_callback(
|
||||
doc_id,
|
||||
doc.version(),
|
||||
@@ -1670,16 +1672,14 @@ fn tree_sitter_highlight_name(
     _args: Args,
     event: PromptEvent,
 ) -> anyhow::Result<()> {
-    fn find_highlight_at_cursor(
-        cx: &mut compositor::Context<'_>,
-    ) -> Option<helix_core::syntax::Highlight> {
-        use helix_core::syntax::HighlightEvent;
+    use helix_core::syntax::Highlight;

-        let (view, doc) = current!(cx.editor);
+    fn find_highlight_at_cursor(editor: &Editor) -> Option<Highlight> {
+        let (view, doc) = current_ref!(editor);
         let syntax = doc.syntax()?;
         let text = doc.text().slice(..);
         let cursor = doc.selection(view.id).primary().cursor(text);
-        let byte = text.char_to_byte(cursor);
+        let byte = text.char_to_byte(cursor) as u32;
         let node = syntax.descendant_for_byte_range(byte, byte)?;
         // Query the same range as the one used in syntax highlighting.
         let range = {

@@ -1689,25 +1689,22 @@ fn tree_sitter_highlight_name(
|
||||
let last_line = text.len_lines().saturating_sub(1);
|
||||
let height = view.inner_area(doc).height;
|
||||
let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
|
||||
let start = text.line_to_byte(row.min(last_line));
|
||||
let end = text.line_to_byte(last_visible_line + 1);
|
||||
let start = text.line_to_byte(row.min(last_line)) as u32;
|
||||
let end = text.line_to_byte(last_visible_line + 1) as u32;
|
||||
|
||||
start..end
|
||||
};
|
||||
|
||||
let mut highlight = None;
|
||||
let loader = editor.syn_loader.load();
|
||||
let mut highlighter = syntax.highlighter(text, &loader, range);
|
||||
|
||||
for event in syntax.highlight_iter(text, Some(range), None) {
|
||||
match event.unwrap() {
|
||||
HighlightEvent::Source { start, end }
|
||||
if start == node.start_byte() && end == node.end_byte() =>
|
||||
{
|
||||
return highlight;
|
||||
}
|
||||
HighlightEvent::HighlightStart(hl) => {
|
||||
highlight = Some(hl);
|
||||
}
|
||||
_ => (),
|
||||
while highlighter.next_event_offset() != u32::MAX {
|
||||
let start = highlighter.next_event_offset();
|
||||
highlighter.advance();
|
||||
let end = highlighter.next_event_offset();
|
||||
|
||||
if start <= node.start_byte() && end >= node.end_byte() {
|
||||
return highlighter.active_highlights().next_back();
|
||||
}
|
||||
}
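
The rewritten loop scans the highlighter's event stream for the span that covers the node under the cursor and takes the innermost active highlight. A simplified model of that scan — the `Highlighter` here is helix-internal, so this stand-in `Span` type and its fields are illustrative only:

use std::ops::Range;

/// One span of active highlights, in byte offsets. A stand-in for the
/// events produced by helix's internal `Highlighter`.
struct Span {
    range: Range<u32>,
    /// Innermost highlight last, matching `active_highlights().next_back()`.
    active: Vec<u32>,
}

/// Find the innermost highlight whose span fully covers `node`.
fn highlight_covering(spans: &[Span], node: Range<u32>) -> Option<u32> {
    spans
        .iter()
        .find(|span| span.range.start <= node.start && span.range.end >= node.end)
        .and_then(|span| span.active.last().copied())
}

fn main() {
    let spans = [
        Span { range: 0..10, active: vec![1] },
        Span { range: 10..25, active: vec![1, 4] }, // 4 is nested inside 1
    ];
    // A node at bytes 12..20 is fully covered by the second span; the
    // innermost highlight active there is 4.
    assert_eq!(highlight_covering(&spans, 12..20), Some(4));
}
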

@@ -1718,11 +1715,11 @@ fn tree_sitter_highlight_name(
return Ok(());
}

let Some(highlight) = find_highlight_at_cursor(cx) else {
let Some(highlight) = find_highlight_at_cursor(cx.editor) else {
return Ok(());
};

let content = cx.editor.theme.scope(highlight.0).to_string();
let content = cx.editor.theme.scope(highlight).to_string();

let callback = async move {
let call: job::Callback = Callback::EditorCompositor(Box::new(
@@ -1880,7 +1877,15 @@ fn update_goto_line_number_preview(cx: &mut compositor::Context, args: Args) ->

let scrolloff = cx.editor.config().scrolloff;
let line = args[0].parse::<usize>()?;
goto_line_without_jumplist(cx.editor, NonZeroUsize::new(line));
goto_line_without_jumplist(
cx.editor,
NonZeroUsize::new(line),
if cx.editor.mode == Mode::Select {
Movement::Extend
} else {
Movement::Move
},
);

let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, scrolloff);
@@ -2080,10 +2085,11 @@ fn language(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> any

let doc = doc_mut!(cx.editor);

let loader = cx.editor.syn_loader.load();
if &args[0] == DEFAULT_LANGUAGE_NAME {
doc.set_language(None, None)
doc.set_language(None, &loader)
} else {
doc.set_language_by_language_id(&args[0], cx.editor.syn_loader.clone())?;
doc.set_language_by_language_id(&args[0], &loader)?;
}
doc.detect_indent_and_line_ending();

@@ -2101,10 +2107,6 @@ fn sort(cx: &mut compositor::Context, args: Args, event: PromptEvent) -> anyhow:
return Ok(());
}

sort_impl(cx, args.has_flag("reverse"))
}

fn sort_impl(cx: &mut compositor::Context, reverse: bool) -> anyhow::Result<()> {
let scrolloff = cx.editor.config().scrolloff;
let (view, doc) = current!(cx.editor);
let text = doc.text().slice(..);
@@ -2120,10 +2122,14 @@ fn sort_impl(cx: &mut compositor::Context, reverse: bool) -> anyhow::Result<()>
.map(|fragment| fragment.chunks().collect())
.collect();

fragments.sort_by(match reverse {
true => |a: &Tendril, b: &Tendril| b.cmp(a),
false => |a: &Tendril, b: &Tendril| a.cmp(b),
});
fragments.sort_by(
match (args.has_flag("insensitive"), args.has_flag("reverse")) {
(true, true) => |a: &Tendril, b: &Tendril| b.to_lowercase().cmp(&a.to_lowercase()),
(true, false) => |a: &Tendril, b: &Tendril| a.to_lowercase().cmp(&b.to_lowercase()),
(false, true) => |a: &Tendril, b: &Tendril| b.cmp(a),
(false, false) => |a: &Tendril, b: &Tendril| a.cmp(b),
},
);

let transaction = Transaction::change(
doc.text(),
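
The comparator for the updated `:sort` is picked once from the flag pair, so the per-comparison path stays branch-free; the non-capturing closures in the match arms all coerce to the same `fn` pointer type. A minimal sketch with plain `String`s standing in for `Tendril`:

/// Pick a comparator up front from the (insensitive, reverse) flags.
fn sort_fragments(fragments: &mut [String], insensitive: bool, reverse: bool) {
    fragments.sort_by(match (insensitive, reverse) {
        (true, true) => |a: &String, b: &String| b.to_lowercase().cmp(&a.to_lowercase()),
        (true, false) => |a: &String, b: &String| a.to_lowercase().cmp(&b.to_lowercase()),
        (false, true) => |a: &String, b: &String| b.cmp(a),
        (false, false) => |a: &String, b: &String| a.cmp(b),
    });
}

fn main() {
    let mut v = vec!["Banana".into(), "apple".into(), "Cherry".into()];
    sort_fragments(&mut v, true, false);
    assert_eq!(v, ["apple", "Banana", "Cherry"]);
}

Note that `to_lowercase` allocates on every comparison; a production variant might precompute keys (e.g. with `sort_by_cached_key`), but the simple form is reasonable for the typically small selections being sorted.
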

@@ -2189,8 +2195,8 @@ fn tree_sitter_subtree(
if let Some(syntax) = doc.syntax() {
let primary_selection = doc.selection(view.id).primary();
let text = doc.text();
let from = text.char_to_byte(primary_selection.from());
let to = text.char_to_byte(primary_selection.to());
let from = text.char_to_byte(primary_selection.from()) as u32;
let to = text.char_to_byte(primary_selection.to()) as u32;
if let Some(selected_node) = syntax.descendant_for_byte_range(from, to) {
let mut contents = String::from("```tsq\n");
helix_core::syntax::pretty_print_tree(&mut contents, selected_node)?;
@@ -2491,7 +2497,7 @@ fn yank_diagnostic(
.diagnostics()
.iter()
.filter(|d| primary.overlaps(&helix_core::Range::new(d.range.start, d.range.end)))
.map(|d| d.message.clone())
.map(|d| d.inner.message.clone())
.collect();
let n = diag.len();
if n == 0 {
@@ -2558,6 +2564,9 @@ fn noop(_cx: &mut compositor::Context, _args: Args, _event: PromptEvent) -> anyh
Ok(())
}

// TODO: SHELL_SIGNATURE should specify var args for arguments, so that just completers::filename can be used,
// but Signature does not yet allow for var args.

/// This command handles all of its input as-is with no quoting or flags.
const SHELL_SIGNATURE: Signature = Signature {
positionals: (1, Some(2)),
@@ -2566,10 +2575,10 @@ const SHELL_SIGNATURE: Signature = Signature {
};

const SHELL_COMPLETER: CommandCompleter = CommandCompleter::positional(&[
// Command name (TODO: consider a command completer - Kakoune has prior art)
completers::none,
// Command name
completers::program,
// Shell argument(s)
completers::filename,
completers::repeating_filenames,
]);

pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
@@ -3348,6 +3357,12 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
signature: Signature {
positionals: (0, Some(0)),
flags: &[
Flag {
name: "insensitive",
alias: Some('i'),
doc: "sort the ranges case-insensitively",
..Flag::DEFAULT
},
Flag {
name: "reverse",
alias: Some('r'),
@@ -3442,7 +3457,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
},
TypableCommand {
name: "pipe",
aliases: &[],
aliases: &["|"],
doc: "Pipe each selection to the shell command.",
fun: pipe,
completer: SHELL_COMPLETER,
@@ -3458,7 +3473,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
},
TypableCommand {
name: "run-shell-command",
aliases: &["sh"],
aliases: &["sh", "!"],
doc: "Run a shell command",
fun: run_shell_command,
completer: SHELL_COMPLETER,
@@ -3852,10 +3867,12 @@ fn quote_completion<'a>(
span.content = Cow::Owned(format!(
"'{}{}'",
// Escape any inner single quotes by doubling them.
replace(token.content.as_ref().into(), '\'', "''"),
replace(token.content[..range.start].into(), '\'', "''"),
replace(span.content, '\'', "''")
));
// Ignore `range.start` here since we're replacing the entire token.
// Ignore `range.start` here since we're replacing the entire token. We used
// `range.start` above to emulate the replacement that using `range.start` would have
// done.
((offset + token.content_start).., span)
}
TokenKind::Quoted(quote) => {
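
The quoting fix above escapes an embedded `'` by doubling it, the shell-style convention for single-quoted strings. Reduced to a tiny hypothetical helper (not the helix `quote_completion` itself):

/// Wrap `s` in single quotes, doubling any embedded quote — inside a
/// single-quoted token, '' reads back as a literal '.
fn single_quote(s: &str) -> String {
    format!("'{}'", s.replace('\'', "''"))
}

fn main() {
    assert_eq!(single_quote("it's here"), "'it''s here'");
}
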

@@ -18,20 +18,25 @@ mod diagnostics;
mod document_colors;
mod signature_help;
mod snippet;
mod spelling;

pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
events::register();

let event_tx = completion::CompletionHandler::new(config).spawn();
let completion_tx = completion::CompletionHandler::new(config).spawn();
let signature_hints = SignatureHelpHandler::new().spawn();
let auto_save = AutoSaveHandler::new().spawn();
let document_colors = DocumentColorsHandler::default().spawn();
let spelling = helix_view::handlers::spelling::SpellingHandler::new(
spelling::SpellingHandler::default().spawn(),
);

let handlers = Handlers {
completions: helix_view::handlers::completion::CompletionHandler::new(event_tx),
completions: helix_view::handlers::completion::CompletionHandler::new(completion_tx),
signature_hints,
auto_save,
document_colors,
spelling,
};

helix_view::handlers::register_hooks(&handlers);
@@ -41,5 +46,6 @@ pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
diagnostics::register_hooks(&handlers);
snippet::register_hooks(&handlers);
document_colors::register_hooks(&handlers);
spelling::register_hooks(&handlers);
handlers
}
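
Each handler registered above follows the same shape: `spawn()` starts a long-lived task and hands back a channel sender that event hooks push into. A generic sketch of that spawn-and-return-sender pattern with a tokio mpsc channel — the real `helix_event` API differs in detail, and the names here are illustrative:

// Assumed Cargo deps: tokio = { version = "1", features = ["sync", "rt-multi-thread", "macros", "time"] }
use tokio::sync::mpsc;

#[derive(Debug)]
enum Event {
    DocumentChanged(usize),
}

/// Spawn a handler task and return the sender used to feed it events.
fn spawn_handler() -> mpsc::UnboundedSender<Event> {
    let (tx, mut rx) = mpsc::unbounded_channel();
    tokio::spawn(async move {
        while let Some(event) = rx.recv().await {
            // A real handler would debounce here before doing any work.
            println!("handling {event:?}");
        }
    });
    tx
}

#[tokio::main]
async fn main() {
    let tx = spawn_handler();
    tx.send(Event::DocumentChanged(1)).unwrap();
    // Give the handler task a moment to drain the channel before exiting.
    tokio::time::sleep(std::time::Duration::from_millis(50)).await;
}
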

@@ -2,7 +2,7 @@ use std::collections::HashMap;

use helix_core::chars::char_is_word;
use helix_core::completion::CompletionProvider;
use helix_core::syntax::LanguageServerFeature;
use helix_core::syntax::config::LanguageServerFeature;
use helix_event::{register_hook, TaskHandle};
use helix_lsp::lsp;
use helix_stdx::rope::RopeSliceExt;

@@ -5,7 +5,7 @@ use std::time::Duration;
use arc_swap::ArcSwap;
use futures_util::Future;
use helix_core::completion::CompletionProvider;
use helix_core::syntax::LanguageServerFeature;
use helix_core::syntax::config::LanguageServerFeature;
use helix_event::{cancelable_future, TaskController, TaskHandle};
use helix_lsp::lsp;
use helix_lsp::lsp::{CompletionContext, CompletionTriggerKind};

@@ -1,7 +1,7 @@
use std::{collections::HashSet, time::Duration};

use futures_util::{stream::FuturesOrdered, StreamExt};
use helix_core::{syntax::LanguageServerFeature, text_annotations::InlineAnnotation};
use helix_core::{syntax::config::LanguageServerFeature, text_annotations::InlineAnnotation};
use helix_event::{cancelable_future, register_hook};
use helix_lsp::lsp;
use helix_view::{
@@ -81,6 +81,10 @@ fn request_document_colors(editor: &mut Editor, doc_id: DocumentId) {
})
.collect();

if futures.is_empty() {
return;
}

tokio::spawn(async move {
let mut all_colors = Vec::new();
loop {

@@ -1,7 +1,7 @@
use std::sync::Arc;
use std::time::Duration;

use helix_core::syntax::LanguageServerFeature;
use helix_core::syntax::config::LanguageServerFeature;
use helix_event::{cancelable_future, register_hook, send_blocking, TaskController, TaskHandle};
use helix_lsp::lsp::{self, SignatureInformation};
use helix_stdx::rope::RopeSliceExt;

helix-term/src/handlers/spelling.rs (new file, 208 lines)
@@ -0,0 +1,208 @@
use std::{borrow::Cow, collections::HashSet, future::Future, sync::Arc, time::Duration};

use anyhow::Result;
use helix_core::{Rope, SpellingLanguage};
use helix_event::{cancelable_future, register_hook, send_blocking};
use helix_stdx::rope::{Regex, RopeSliceExt as _};
use helix_view::{
    diagnostic::DiagnosticProvider,
    editor::Severity,
    events::{DocumentDidChange, DocumentDidOpen},
    handlers::{spelling::SpellingEvent, Handlers},
    Diagnostic, Dictionary, DocumentId, Editor,
};
use once_cell::sync::Lazy;
use parking_lot::RwLock;
use tokio::time::Instant;

use crate::job;

const PROVIDER: DiagnosticProvider = DiagnosticProvider::Spelling;

#[derive(Debug, Default)]
pub(super) struct SpellingHandler {
    changed_docs: HashSet<DocumentId>,
}

impl helix_event::AsyncHook for SpellingHandler {
    type Event = SpellingEvent;

    fn handle_event(&mut self, event: Self::Event, timeout: Option<Instant>) -> Option<Instant> {
        match event {
            SpellingEvent::DictionaryLoaded { language } => {
                job::dispatch_blocking(move |editor, _compositor| {
                    let docs: Vec<_> = editor
                        .documents
                        .iter()
                        .filter_map(|(&doc_id, doc)| {
                            (doc.spelling_language() == Some(language)).then_some(doc_id)
                        })
                        .collect();
                    for doc in docs {
                        check_document(editor, doc);
                    }
                });
                timeout
            }
            SpellingEvent::DocumentOpened { doc } => {
                job::dispatch_blocking(move |editor, _compositor| {
                    check_document(editor, doc);
                });
                timeout
            }
            SpellingEvent::DocumentChanged { doc } => {
                self.changed_docs.insert(doc);
                Some(Instant::now() + Duration::from_secs(3))
            }
        }
    }

    fn finish_debounce(&mut self) {
        let docs = std::mem::take(&mut self.changed_docs);
        job::dispatch_blocking(move |editor, _compositor| {
            for doc in docs {
                check_document(editor, doc);
            }
        });
    }
}

fn check_document(editor: &mut Editor, doc_id: DocumentId) {
    let Some(doc) = editor.documents.get(&doc_id) else {
        return;
    };
    let Some(language) = doc.spelling_language() else {
        return;
    };
    let Some(dictionary) = editor.dictionaries.get(&language).cloned() else {
        if editor
            .handlers
            .spelling
            .loading_dictionaries
            .insert(language)
        {
            load_dictionary(language);
        }
        return;
    };

    let uri = doc.uri();
    let future = check_text(dictionary, doc.text().clone());
    let cancel = editor.handlers.spelling.open_request(doc_id);

    tokio::spawn(async move {
        match cancelable_future(future, cancel).await {
            Some(Ok(diagnostics)) => {
                job::dispatch_blocking(move |editor, _compositor| {
                    editor.handlers.spelling.requests.remove(&doc_id);
                    editor.handle_diagnostics(&PROVIDER, uri, None, diagnostics);
                });
            }
            Some(Err(err)) => log::error!("spelling background job failed: {err}"),
            None => (),
        }
    });
}

fn load_dictionary(language: SpellingLanguage) {
    tokio::task::spawn_blocking(move || {
        let aff = std::fs::read_to_string(helix_loader::runtime_file(format!(
            "dictionaries/{language}/{language}.aff"
        )))
        .unwrap();
        let dic = std::fs::read_to_string(helix_loader::runtime_file(format!(
            "dictionaries/{language}/{language}.dic"
        )))
        .unwrap();

        let mut dictionary = Dictionary::new(&aff, &dic).unwrap();
        // TODO: personal dictionaries should be namespaced under runtime directories under the
        // language.
        if let Ok(file) = std::fs::File::open(helix_loader::personal_dictionary_file()) {
            use std::io::{BufRead as _, BufReader};
            let reader = BufReader::with_capacity(8 * 1024, file);
            for line in reader.lines() {
                let line = line.unwrap();
                let line = line.trim();
                if line.is_empty() {
                    continue;
                }
                dictionary.add(line).unwrap();
            }
        }

        job::dispatch_blocking(move |editor, _compositor| {
            let was_removed = editor
                .handlers
                .spelling
                .loading_dictionaries
                .remove(&language);
            // Other processes should respect that a dictionary is loading and not change
            // `loading_dictionaries`. So this should always be true.
            debug_assert!(was_removed);
            editor
                .dictionaries
                .insert(language, Arc::new(RwLock::new(dictionary)));
            send_blocking(
                &editor.handlers.spelling.event_tx,
                SpellingEvent::DictionaryLoaded { language },
            );
        })
    });
}

fn check_text(
    dictionary: Arc<RwLock<Dictionary>>,
    text: Rope,
) -> impl Future<Output = Result<Vec<Diagnostic>, tokio::task::JoinError>> {
    tokio::task::spawn_blocking(move || {
        static WORDS: Lazy<Regex> = Lazy::new(|| Regex::new(r#"[0-9A-Z]*(['-]?[a-z]+)*"#).unwrap());

        let dict = dictionary.read();
        let text = text.slice(..);
        let mut diagnostics = Vec::new();
        for match_ in WORDS.find_iter(text.regex_input()) {
            let word = Cow::from(text.byte_slice(match_.range()));
            if !dict.check(&word) {
                diagnostics.push(Diagnostic {
                    range: helix_view::Range::Document(helix_stdx::Range {
                        start: text.byte_to_char(match_.start()),
                        end: text.byte_to_char(match_.end()),
                    }),
                    message: format!("Possible spelling issue '{word}'"),
                    severity: Some(Severity::Error),
                    code: None,
                    provider: PROVIDER,
                    tags: Default::default(),
                    source: None,
                    data: None,
                });
            }
        }
        diagnostics
    })
}

pub(super) fn register_hooks(handlers: &Handlers) {
    let tx = handlers.spelling.event_tx.clone();
    register_hook!(move |event: &mut DocumentDidOpen<'_>| {
        let doc = doc!(event.editor, &event.doc);
        if doc.spelling_language().is_some() {
            send_blocking(&tx, SpellingEvent::DocumentOpened { doc: event.doc });
        }
        Ok(())
    });

    let tx = handlers.spelling.event_tx.clone();
    register_hook!(move |event: &mut DocumentDidChange<'_>| {
        if event.doc.spelling_language().is_some() {
            send_blocking(
                &tx,
                SpellingEvent::DocumentChanged {
                    doc: event.doc.id(),
                },
            );
        }
        Ok(())
    });
}
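
The checker's core is the `WORDS` regex plus a dictionary membership test: the pattern accepts an optional run of digits or capitals followed by lowercase runs joined by `'` or `-`, and every non-empty match missing from the dictionary becomes a diagnostic. A self-contained approximation with the `regex` crate and a `HashSet` standing in for the `spellbook` `Dictionary`:

use regex::Regex;
use std::collections::HashSet;
use std::ops::Range;

/// Byte ranges and messages for misspelled words, mirroring the diagnostics above.
fn misspellings(text: &str, dict: &HashSet<&str>) -> Vec<(Range<usize>, String)> {
    // Same pattern as the handler's WORDS regex.
    let words = Regex::new(r#"[0-9A-Z]*(['-]?[a-z]+)*"#).unwrap();
    words
        .find_iter(text)
        // The pattern can match the empty string; skip those matches.
        .filter(|m| !m.as_str().is_empty() && !dict.contains(m.as_str()))
        .map(|m| (m.range(), format!("Possible spelling issue '{}'", m.as_str())))
        .collect()
}

fn main() {
    let dict: HashSet<&str> = ["the", "quick", "fox"].into();
    for (range, msg) in misspellings("the qvick fox", &dict) {
        println!("{range:?}: {msg}");
    }
}

The real handler runs this scan on a background blocking task over the rope's regex input, so typing latency is unaffected while a document is being checked.
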

@@ -185,14 +185,16 @@ pub fn languages_all() -> std::io::Result<()> {
.language
.sort_unstable_by_key(|l| l.language_id.clone());

let check_binary = |cmd: Option<&str>| match cmd {
Some(cmd) => match helix_stdx::env::which(cmd) {
Ok(_) => color(fit(&format!("✓ {}", cmd)), Color::Green),
Err(_) => color(fit(&format!("✘ {}", cmd)), Color::Red),
let check_binary_with_name = |cmd: Option<(&str, &str)>| match cmd {
Some((name, cmd)) => match helix_stdx::env::which(cmd) {
Ok(_) => color(fit(&format!("✓ {}", name)), Color::Green),
Err(_) => color(fit(&format!("✘ {}", name)), Color::Red),
},
None => color(fit("None"), Color::Yellow),
};

let check_binary = |cmd: Option<&str>| check_binary_with_name(cmd.map(|cmd| (cmd, cmd)));

for lang in &syn_loader_conf.language {
write!(stdout, "{}", fit(&lang.language_id))?;

@@ -200,9 +202,9 @@ pub fn languages_all() -> std::io::Result<()> {
syn_loader_conf
.language_server
.get(&ls.name)
.map(|config| config.command.as_str())
.map(|config| (ls.name.as_str(), config.command.as_str()))
});
write!(stdout, "{}", check_binary(cmds.next()))?;
write!(stdout, "{}", check_binary_with_name(cmds.next()))?;

let dap = lang.debugger.as_ref().map(|dap| dap.command.as_str());
write!(stdout, "{}", check_binary(dap))?;
@@ -224,7 +226,7 @@ pub fn languages_all() -> std::io::Result<()> {

for cmd in cmds {
write!(stdout, "{}", fit(""))?;
writeln!(stdout, "{}", check_binary(Some(cmd)))?;
writeln!(stdout, "{}", check_binary_with_name(Some(cmd)))?;
}
}

@@ -283,10 +285,12 @@ pub fn language(lang_str: String) -> std::io::Result<()> {

probe_protocols(
"language server",
lang.language_servers
.iter()
.filter_map(|ls| syn_loader_conf.language_server.get(&ls.name))
.map(|config| config.command.as_str()),
lang.language_servers.iter().filter_map(|ls| {
syn_loader_conf
.language_server
.get(&ls.name)
.map(|config| (ls.name.as_str(), config.command.as_str()))
}),
)?;

probe_protocol(
@@ -323,7 +327,7 @@ fn probe_parser(grammar_name: &str) -> std::io::Result<()> {
}

/// Display diagnostics about multiple LSPs and DAPs.
fn probe_protocols<'a, I: Iterator<Item = &'a str> + 'a>(
fn probe_protocols<'a, I: Iterator<Item = (&'a str, &'a str)> + 'a>(
protocol_name: &str,
server_cmds: I,
) -> std::io::Result<()> {
@@ -338,12 +342,12 @@ fn probe_protocols<'a, I: Iterator<Item = &'a str> + 'a>(
}
writeln!(stdout)?;

for cmd in server_cmds {
let (path, icon) = match helix_stdx::env::which(cmd) {
for (name, cmd) in server_cmds {
let (diag, icon) = match helix_stdx::env::which(cmd) {
Ok(path) => (path.display().to_string().green(), "✓".green()),
Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()),
};
writeln!(stdout, " {} {}: {}", icon, cmd, path)?;
writeln!(stdout, " {} {}: {}", icon, name, diag)?;
}

Ok(())
@@ -354,19 +358,18 @@ fn probe_protocol(protocol_name: &str, server_cmd: Option<String>) -> std::io::R
let stdout = std::io::stdout();
let mut stdout = stdout.lock();

let cmd_name = match server_cmd {
Some(ref cmd) => cmd.as_str().green(),
None => "None".yellow(),
write!(stdout, "Configured {}:", protocol_name)?;
let Some(cmd) = server_cmd else {
writeln!(stdout, "{}", " None".yellow())?;
return Ok(());
};
writeln!(stdout, "Configured {}: {}", protocol_name, cmd_name)?;
writeln!(stdout)?;

if let Some(cmd) = server_cmd {
let path = match helix_stdx::env::which(&cmd) {
Ok(path) => path.display().to_string().green(),
Err(_) => format!("'{}' not found in $PATH", cmd).red(),
};
writeln!(stdout, "Binary for {}: {}", protocol_name, path)?;
}
let (diag, icon) = match helix_stdx::env::which(&cmd) {
Ok(path) => (path.display().to_string().green(), "✓".green()),
Err(_) => (format!("'{}' not found in $PATH", cmd).red(), "✘".red()),
};
writeln!(stdout, " {} {}", icon, diag)?;

Ok(())
}
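
Both health views now reduce to the same primitive: resolve a command on `$PATH` and print ✓/✘ under the tool's display name rather than its binary name. A minimal sketch using the `which` crate in place of the in-tree `helix_stdx::env::which`, with plain text instead of colored output:

use which::which;

/// Report whether a named tool's binary resolves on $PATH.
fn probe(name: &str, cmd: &str) {
    match which(cmd) {
        Ok(path) => println!(" ✓ {name}: {}", path.display()),
        Err(_) => println!(" ✘ {name}: '{cmd}' not found in $PATH"),
    }
}

fn main() {
    // Illustrative entries: a server whose display name matches its binary,
    // and one whose configured command could differ.
    probe("rust-analyzer", "rust-analyzer");
    probe("typescript-language-server", "typescript-language-server");
}

Keying the output on the configured server name avoids confusing reports when one binary backs several named server configurations.
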

@@ -38,6 +38,7 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"G" => goto_line,
"g" => { "Goto"
"g" => goto_file_start,
"|" => goto_column,
"e" => goto_last_line,
"f" => goto_file,
"h" => goto_line_start,
@@ -367,6 +368,9 @@ pub fn default() -> HashMap<Mode, KeyTrie> {

"v" => normal_mode,
"g" => { "Goto"
"g" => extend_to_file_start,
"|" => extend_to_column,
"e" => extend_to_last_line,
"k" => extend_line_up,
"j" => extend_line_down,
"w" => extend_to_word,

@@ -90,7 +90,7 @@ macro_rules! keymap {
};

(@trie [$($cmd:ident),* $(,)?]) => {
$crate::keymap::KeyTrie::Sequence(vec![$($crate::commands::Command::$cmd),*])
$crate::keymap::KeyTrie::Sequence(vec![$($crate::commands::MappableCommand::$cmd),*])
};

(

@@ -76,8 +76,7 @@ fn open_external_url_callback(
let commands = open::commands(url.as_str());
async {
for cmd in commands {
let mut command = tokio::process::Command::new(cmd.get_program());
command.args(cmd.get_args());
let mut command: tokio::process::Command = cmd.into();
if command.output().await.is_ok() {
return Ok(job::Callback::Editor(Box::new(|_| {})));
}
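
The simplified loop above converts each candidate command from the `open` crate straight into a tokio command and returns on the first one that runs. A hedged sketch of the same first-success pattern — the opener name is illustrative; `open::commands` supplies the real per-platform candidates:

// Assumed Cargo deps: tokio = { version = "1", features = ["process", "rt-multi-thread", "macros"] }

/// Try each command in order; report whether any of them ran.
async fn try_openers(cmds: Vec<tokio::process::Command>) -> bool {
    for mut cmd in cmds {
        // Mirrors the diff: any command that produces output counts as success;
        // checking `output.status.success()` would be stricter.
        if cmd.output().await.is_ok() {
            return true;
        }
    }
    false
}

#[tokio::main]
async fn main() {
    let mut cmd = tokio::process::Command::new("xdg-open"); // illustrative opener
    cmd.arg("https://example.com");
    println!("opened: {}", try_openers(vec![cmd]).await);
}
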

@@ -3,8 +3,7 @@ use std::cmp::min;
use helix_core::doc_formatter::{DocumentFormatter, GraphemeSource, TextFormat};
use helix_core::graphemes::Grapheme;
use helix_core::str_utils::char_to_byte_idx;
use helix_core::syntax::Highlight;
use helix_core::syntax::HighlightEvent;
use helix_core::syntax::{self, HighlightEvent, Highlighter, OverlayHighlights};
use helix_core::text_annotations::TextAnnotations;
use helix_core::{visual_offset_from_block, Position, RopeSlice};
use helix_stdx::rope::RopeSliceExt;
@@ -17,61 +16,6 @@ use tui::buffer::Buffer as Surface;

use crate::ui::text_decorations::DecorationManager;

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum StyleIterKind {
/// base highlights (usually emitted by TS), byte indices (potentially not codepoint aligned)
BaseHighlights,
/// overlay highlights (emitted by custom code from selections), char indices
Overlay,
}

/// A wrapper around a HighlightIterator
/// that merges the layered highlights to create the final text style
/// and yields the active text style and the char_idx where the active
/// style will have to be recomputed.
///
/// TODO(ropey2): hopefully one day helix and ropey will operate entirely
/// on byte ranges and we can remove this
struct StyleIter<'a, H: Iterator<Item = HighlightEvent>> {
text_style: Style,
active_highlights: Vec<Highlight>,
highlight_iter: H,
kind: StyleIterKind,
text: RopeSlice<'a>,
theme: &'a Theme,
}

impl<H: Iterator<Item = HighlightEvent>> Iterator for StyleIter<'_, H> {
type Item = (Style, usize);
fn next(&mut self) -> Option<(Style, usize)> {
while let Some(event) = self.highlight_iter.next() {
match event {
HighlightEvent::HighlightStart(highlights) => {
self.active_highlights.push(highlights)
}
HighlightEvent::HighlightEnd => {
self.active_highlights.pop();
}
HighlightEvent::Source { mut end, .. } => {
let style = self
.active_highlights
.iter()
.fold(self.text_style, |acc, span| {
acc.patch(self.theme.highlight(span.0))
});
if self.kind == StyleIterKind::BaseHighlights {
// Move the end byte index to the nearest character boundary (rounding up)
// and convert it to a character index.
end = self.text.byte_to_char(self.text.ceil_char_boundary(end));
}
return Some((style, end));
}
}
}
None
}
}

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub struct LinePos {
/// Indicates whether the given visual line
@@ -90,8 +34,8 @@ pub fn render_document(
doc: &Document,
offset: ViewPosition,
doc_annotations: &TextAnnotations,
syntax_highlight_iter: impl Iterator<Item = HighlightEvent>,
overlay_highlight_iter: impl Iterator<Item = HighlightEvent>,
syntax_highlighter: Option<Highlighter<'_>>,
overlay_highlights: Vec<syntax::OverlayHighlights>,
theme: &Theme,
decorations: DecorationManager,
) {
@@ -108,8 +52,8 @@ pub fn render_document(
offset.anchor,
&doc.text_format(viewport.width, Some(theme)),
doc_annotations,
syntax_highlight_iter,
overlay_highlight_iter,
syntax_highlighter,
overlay_highlights,
theme,
decorations,
)
@@ -122,8 +66,8 @@ pub fn render_text(
anchor: usize,
text_fmt: &TextFormat,
text_annotations: &TextAnnotations,
syntax_highlight_iter: impl Iterator<Item = HighlightEvent>,
overlay_highlight_iter: impl Iterator<Item = HighlightEvent>,
syntax_highlighter: Option<Highlighter<'_>>,
overlay_highlights: Vec<syntax::OverlayHighlights>,
theme: &Theme,
mut decorations: DecorationManager,
) {
@@ -133,22 +77,9 @@ pub fn render_text(

let mut formatter =
DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, text_annotations, anchor);
let mut syntax_styles = StyleIter {
text_style: renderer.text_style,
active_highlights: Vec::with_capacity(64),
highlight_iter: syntax_highlight_iter,
kind: StyleIterKind::BaseHighlights,
theme,
text,
};
let mut overlay_styles = StyleIter {
text_style: Style::default(),
active_highlights: Vec::with_capacity(64),
highlight_iter: overlay_highlight_iter,
kind: StyleIterKind::Overlay,
theme,
text,
};
let mut syntax_highlighter =
SyntaxHighlighter::new(syntax_highlighter, text, theme, renderer.text_style);
let mut overlay_highlighter = OverlayHighlighter::new(overlay_highlights, theme);

let mut last_line_pos = LinePos {
first_visual_line: false,
@@ -158,12 +89,6 @@ pub fn render_text(
let mut last_line_end = 0;
let mut is_in_indent_area = true;
let mut last_line_indent_level = 0;
let mut syntax_style_span = syntax_styles
.next()
.unwrap_or_else(|| (Style::default(), usize::MAX));
let mut overlay_style_span = overlay_styles
.next()
.unwrap_or_else(|| (Style::default(), usize::MAX));
let mut reached_view_top = false;

loop {
@@ -207,21 +132,17 @@ pub fn render_text(
}

// acquire the correct grapheme style
while grapheme.char_idx >= syntax_style_span.1 {
syntax_style_span = syntax_styles
.next()
.unwrap_or((Style::default(), usize::MAX));
while grapheme.char_idx >= syntax_highlighter.pos {
syntax_highlighter.advance();
}
while grapheme.char_idx >= overlay_style_span.1 {
overlay_style_span = overlay_styles
.next()
.unwrap_or((Style::default(), usize::MAX));
while grapheme.char_idx >= overlay_highlighter.pos {
overlay_highlighter.advance();
}

let grapheme_style = if let GraphemeSource::VirtualText { highlight } = grapheme.source {
let mut style = renderer.text_style;
if let Some(highlight) = highlight {
style = style.patch(theme.highlight(highlight.0));
style = style.patch(theme.highlight(highlight));
}
GraphemeStyle {
syntax_style: style,
@@ -229,8 +150,8 @@ pub fn render_text(
}
} else {
GraphemeStyle {
syntax_style: syntax_style_span.0,
overlay_style: overlay_style_span.0,
syntax_style: syntax_highlighter.style,
overlay_style: overlay_highlighter.style,
}
};
decorations.decorate_grapheme(renderer, &grapheme);
@@ -549,3 +470,105 @@ impl<'a> TextRenderer<'a> {
)
}
}

struct SyntaxHighlighter<'h, 'r, 't> {
inner: Option<Highlighter<'h>>,
text: RopeSlice<'r>,
/// The character index of the next highlight event, or `usize::MAX` if the highlighter is
/// finished.
pos: usize,
theme: &'t Theme,
text_style: Style,
style: Style,
}

impl<'h, 'r, 't> SyntaxHighlighter<'h, 'r, 't> {
fn new(
inner: Option<Highlighter<'h>>,
text: RopeSlice<'r>,
theme: &'t Theme,
text_style: Style,
) -> Self {
let mut highlighter = Self {
inner,
text,
pos: 0,
theme,
style: text_style,
text_style,
};
highlighter.update_pos();
highlighter
}

fn update_pos(&mut self) {
self.pos = self
.inner
.as_ref()
.and_then(|highlighter| {
let next_byte_idx = highlighter.next_event_offset();
(next_byte_idx != u32::MAX).then(|| {
// Move the byte index to the nearest character boundary (rounding up) and
// convert it to a character index.
self.text
.byte_to_char(self.text.ceil_char_boundary(next_byte_idx as usize))
})
})
.unwrap_or(usize::MAX);
}

fn advance(&mut self) {
let Some(highlighter) = self.inner.as_mut() else {
return;
};

let (event, highlights) = highlighter.advance();
let base = match event {
HighlightEvent::Refresh => self.text_style,
HighlightEvent::Push => self.style,
};

self.style = highlights.fold(base, |acc, highlight| {
acc.patch(self.theme.highlight(highlight))
});
self.update_pos();
}
}

struct OverlayHighlighter<'t> {
inner: syntax::OverlayHighlighter,
pos: usize,
theme: &'t Theme,
style: Style,
}

impl<'t> OverlayHighlighter<'t> {
fn new(overlays: Vec<OverlayHighlights>, theme: &'t Theme) -> Self {
let inner = syntax::OverlayHighlighter::new(overlays);
let mut highlighter = Self {
inner,
pos: 0,
theme,
style: Style::default(),
};
highlighter.update_pos();
highlighter
}

fn update_pos(&mut self) {
self.pos = self.inner.next_event_offset();
}

fn advance(&mut self) {
let (event, highlights) = self.inner.advance();
let base = match event {
HighlightEvent::Refresh => Style::default(),
HighlightEvent::Push => self.style,
};

self.style = highlights.fold(base, |acc, highlight| {
acc.patch(self.theme.highlight(highlight))
});
self.update_pos();
}
}
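
`update_pos` above converts a byte offset from the highlighter into the char index the render loop compares against, rounding up to a character boundary first because tree-sitter byte offsets can land inside a multi-byte code point. The same conversion over a plain `&str` — the rope versions of `ceil_char_boundary` and `byte_to_char` come from helix_stdx and ropey; these helpers are stand-ins:

/// Round `idx` up to the next UTF-8 char boundary in `s` (a tiny stand-in
/// for helix_stdx's `RopeSliceExt::ceil_char_boundary`).
fn ceil_char_boundary(s: &str, mut idx: usize) -> usize {
    while idx < s.len() && !s.is_char_boundary(idx) {
        idx += 1;
    }
    idx.min(s.len())
}

/// Convert a (possibly mid-codepoint) byte offset into a char index.
fn byte_to_char_ceil(s: &str, byte_idx: usize) -> usize {
    let boundary = ceil_char_boundary(s, byte_idx);
    s[..boundary].chars().count()
}

fn main() {
    let s = "a→b"; // '→' occupies bytes 1..4
    assert_eq!(byte_to_char_ceil(s, 2), 2); // inside '→' rounds up past it
    assert_eq!(byte_to_char_ceil(s, 1), 1); // already on a boundary
}
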

@@ -14,10 +14,9 @@ use crate::{
};

use helix_core::{
diagnostic::NumberOrString,
graphemes::{next_grapheme_boundary, prev_grapheme_boundary},
movement::Direction,
syntax::{self, HighlightEvent},
syntax::{self, OverlayHighlights},
text_annotations::TextAnnotations,
unicode::width::UnicodeWidthStr,
visual_offset_from_block, Change, Position, Range, Selection, Transaction,
@@ -31,7 +30,7 @@ use helix_view::{
keyboard::{KeyCode, KeyModifiers},
Document, Editor, Theme, View,
};
use std::{mem::take, num::NonZeroUsize, path::PathBuf, rc::Rc};
use std::{mem::take, num::NonZeroUsize, ops, path::PathBuf, rc::Rc};

use tui::{buffer::Buffer as Surface, text::Span};

@@ -87,6 +86,7 @@ impl EditorView {
let area = view.area;
let theme = &editor.theme;
let config = editor.config();
let loader = editor.syn_loader.load();

let view_offset = doc.view_offset(view.id);

@@ -115,51 +115,33 @@ impl EditorView {
decorations.add_decoration(line_decoration);
}

let syntax_highlights =
Self::doc_syntax_highlights(doc, view_offset.anchor, inner.height, theme);
let syntax_highlighter =
Self::doc_syntax_highlighter(doc, view_offset.anchor, inner.height, &loader);
let mut overlays = Vec::new();

let mut overlay_highlights =
Self::empty_highlight_iter(doc, view_offset.anchor, inner.height);
let overlay_syntax_highlights = Self::overlay_syntax_highlights(
overlays.push(Self::overlay_syntax_highlights(
doc,
view_offset.anchor,
inner.height,
&text_annotations,
);
if !overlay_syntax_highlights.is_empty() {
overlay_highlights =
Box::new(syntax::merge(overlay_highlights, overlay_syntax_highlights));
}
));

for diagnostic in Self::doc_diagnostics_highlights(doc, theme) {
// Most of the `diagnostic` Vecs are empty most of the time. Skipping
// a merge for any empty Vec saves a significant amount of work.
if diagnostic.is_empty() {
continue;
}
overlay_highlights = Box::new(syntax::merge(overlay_highlights, diagnostic));
}
Self::doc_diagnostics_highlights_into(doc, theme, &mut overlays);

if is_focused {
if let Some(tabstops) = Self::tabstop_highlights(doc, theme) {
overlay_highlights = Box::new(syntax::merge(overlay_highlights, tabstops));
overlays.push(tabstops);
}
let highlights = syntax::merge(
overlay_highlights,
Self::doc_selection_highlights(
editor.mode(),
doc,
view,
theme,
&config.cursor_shape,
self.terminal_focused,
),
);
let focused_view_elements = Self::highlight_focused_view_elements(view, doc, theme);
if focused_view_elements.is_empty() {
overlay_highlights = Box::new(highlights)
} else {
overlay_highlights = Box::new(syntax::merge(highlights, focused_view_elements))
overlays.push(Self::doc_selection_highlights(
editor.mode(),
doc,
view,
theme,
&config.cursor_shape,
self.terminal_focused,
));
if let Some(overlay) = Self::highlight_focused_view_elements(view, doc, theme) {
overlays.push(overlay);
}
}

@@ -207,8 +189,8 @@ impl EditorView {
doc,
view_offset,
&text_annotations,
syntax_highlights,
overlay_highlights,
syntax_highlighter,
overlays,
theme,
decorations,
);
@@ -287,57 +269,23 @@ impl EditorView {
start..end
}

pub fn empty_highlight_iter(
doc: &Document,
anchor: usize,
height: u16,
) -> Box<dyn Iterator<Item = HighlightEvent>> {
let text = doc.text().slice(..);
let row = text.char_to_line(anchor.min(text.len_chars()));

// Calculate viewport byte ranges:
// Saturating subs to make it inclusive zero indexing.
let range = Self::viewport_byte_range(text, row, height);
Box::new(
[HighlightEvent::Source {
start: text.byte_to_char(range.start),
end: text.byte_to_char(range.end),
}]
.into_iter(),
)
}

/// Get syntax highlights for a document in a view represented by the first line
/// Get the syntax highlighter for a document in a view represented by the first line
/// and column (`offset`) and the last line. This is done instead of using a view
/// directly to enable rendering syntax highlighted docs anywhere (eg. picker preview)
pub fn doc_syntax_highlights<'doc>(
doc: &'doc Document,
pub fn doc_syntax_highlighter<'editor>(
doc: &'editor Document,
anchor: usize,
height: u16,
_theme: &Theme,
) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> {
loader: &'editor syntax::Loader,
) -> Option<syntax::Highlighter<'editor>> {
let syntax = doc.syntax()?;
let text = doc.text().slice(..);
let row = text.char_to_line(anchor.min(text.len_chars()));

let range = Self::viewport_byte_range(text, row, height);
let range = range.start as u32..range.end as u32;

match doc.syntax() {
Some(syntax) => {
let iter = syntax
// TODO: range doesn't actually restrict source, just highlight range
.highlight_iter(text.slice(..), Some(range), None)
.map(|event| event.unwrap());

Box::new(iter)
}
None => Box::new(
[HighlightEvent::Source {
start: range.start,
end: range.end,
}]
.into_iter(),
),
}
let highlighter = syntax.highlighter(text, loader, range);
Some(highlighter)
}

pub fn overlay_syntax_highlights(
@@ -345,7 +293,7 @@ impl EditorView {
anchor: usize,
height: u16,
text_annotations: &TextAnnotations,
) -> Vec<(usize, std::ops::Range<usize>)> {
) -> OverlayHighlights {
let text = doc.text().slice(..);
let row = text.char_to_line(anchor.min(text.len_chars()));

@@ -356,35 +304,31 @@ impl EditorView {
}

/// Get highlight spans for document diagnostics
pub fn doc_diagnostics_highlights(
pub fn doc_diagnostics_highlights_into(
doc: &Document,
theme: &Theme,
) -> [Vec<(usize, std::ops::Range<usize>)>; 7] {
use helix_core::diagnostic::{DiagnosticTag, Range, Severity};
overlay_highlights: &mut Vec<OverlayHighlights>,
) {
use helix_core::diagnostic::Severity;
use helix_stdx::Range;
use helix_view::diagnostic::DiagnosticTag;
let get_scope_of = |scope| {
theme
.find_scope_index_exact(scope)
// get one of the themes below as fallback values
.or_else(|| theme.find_scope_index_exact("diagnostic"))
.or_else(|| theme.find_scope_index_exact("ui.cursor"))
.or_else(|| theme.find_scope_index_exact("ui.selection"))
.expect(
"at least one of the following scopes must be defined in the theme: `diagnostic`, `ui.cursor`, or `ui.selection`",
)
.find_highlight_exact(scope)
// get one of the themes below as fallback values
.or_else(|| theme.find_highlight_exact("diagnostic"))
.or_else(|| theme.find_highlight_exact("ui.cursor"))
.or_else(|| theme.find_highlight_exact("ui.selection"))
.expect(
"at least one of the following scopes must be defined in the theme: `diagnostic`, `ui.cursor`, or `ui.selection`",
)
};

// basically just queries the theme color defined in the config
let hint = get_scope_of("diagnostic.hint");
let info = get_scope_of("diagnostic.info");
let warning = get_scope_of("diagnostic.warning");
let error = get_scope_of("diagnostic.error");
let r#default = get_scope_of("diagnostic"); // this is a bit redundant but should be fine

// Diagnostic tags
let unnecessary = theme.find_scope_index_exact("diagnostic.unnecessary");
let deprecated = theme.find_scope_index_exact("diagnostic.deprecated");
let unnecessary = theme.find_highlight_exact("diagnostic.unnecessary");
let deprecated = theme.find_highlight_exact("diagnostic.deprecated");

let mut default_vec: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
let mut default_vec = Vec::new();
let mut info_vec = Vec::new();
let mut hint_vec = Vec::new();
let mut warning_vec = Vec::new();
@@ -392,71 +336,95 @@ impl EditorView {
let mut unnecessary_vec = Vec::new();
let mut deprecated_vec = Vec::new();

let push_diagnostic =
|vec: &mut Vec<(usize, std::ops::Range<usize>)>, scope, range: Range| {
// If any diagnostic overlaps ranges with the prior diagnostic,
// merge the two together. Otherwise push a new span.
match vec.last_mut() {
Some((_, existing_range)) if range.start <= existing_range.end => {
// This branch merges overlapping diagnostics, assuming that the current
// diagnostic starts on range.start or later. If this assertion fails,
// we will discard some part of `diagnostic`. This implies that
// `doc.diagnostics()` is not sorted by `diagnostic.range`.
debug_assert!(existing_range.start <= range.start);
existing_range.end = range.end.max(existing_range.end)
}
_ => vec.push((scope, range.start..range.end)),
let push_diagnostic = |vec: &mut Vec<ops::Range<usize>>, range: Range| {
// If any diagnostic overlaps ranges with the prior diagnostic,
// merge the two together. Otherwise push a new span.
match vec.last_mut() {
Some(existing_range) if range.start <= existing_range.end => {
// This branch merges overlapping diagnostics, assuming that the current
// diagnostic starts on range.start or later. If this assertion fails,
// we will discard some part of `diagnostic`. This implies that
// `doc.diagnostics()` is not sorted by `diagnostic.range`.
debug_assert!(existing_range.start <= range.start);
existing_range.end = range.end.max(existing_range.end)
}
};
_ => vec.push(range.start..range.end),
}
};

for diagnostic in doc.diagnostics() {
// Separate diagnostics into different Vecs by severity.
let (vec, scope) = match diagnostic.severity {
Some(Severity::Info) => (&mut info_vec, info),
Some(Severity::Hint) => (&mut hint_vec, hint),
Some(Severity::Warning) => (&mut warning_vec, warning),
Some(Severity::Error) => (&mut error_vec, error),
_ => (&mut default_vec, r#default),
let vec = match diagnostic.inner.severity {
Some(Severity::Info) => &mut info_vec,
Some(Severity::Hint) => &mut hint_vec,
Some(Severity::Warning) => &mut warning_vec,
Some(Severity::Error) => &mut error_vec,
_ => &mut default_vec,
};

// If the diagnostic has tags and a non-warning/error severity, skip rendering
// the diagnostic as info/hint/default and only render it as unnecessary/deprecated
// instead. For warning/error diagnostics, render both the severity highlight and
// the tag highlight.
if diagnostic.tags.is_empty()
if diagnostic.inner.tags.is_empty()
|| matches!(
diagnostic.severity,
diagnostic.inner.severity,
Some(Severity::Warning | Severity::Error)
)
{
push_diagnostic(vec, scope, diagnostic.range);
push_diagnostic(vec, diagnostic.range);
}

for tag in &diagnostic.tags {
for tag in &diagnostic.inner.tags {
match tag {
DiagnosticTag::Unnecessary => {
if let Some(scope) = unnecessary {
push_diagnostic(&mut unnecessary_vec, scope, diagnostic.range)
if unnecessary.is_some() {
push_diagnostic(&mut unnecessary_vec, diagnostic.range)
}
}
DiagnosticTag::Deprecated => {
if let Some(scope) = deprecated {
push_diagnostic(&mut deprecated_vec, scope, diagnostic.range)
if deprecated.is_some() {
push_diagnostic(&mut deprecated_vec, diagnostic.range)
}
}
}
}
}

[
default_vec,
unnecessary_vec,
deprecated_vec,
info_vec,
hint_vec,
warning_vec,
error_vec,
]
overlay_highlights.push(OverlayHighlights::Homogeneous {
highlight: get_scope_of("diagnostic"),
ranges: default_vec,
});
if let Some(highlight) = unnecessary {
overlay_highlights.push(OverlayHighlights::Homogeneous {
highlight,
ranges: unnecessary_vec,
});
}
if let Some(highlight) = deprecated {
overlay_highlights.push(OverlayHighlights::Homogeneous {
highlight,
ranges: deprecated_vec,
});
}
overlay_highlights.extend([
OverlayHighlights::Homogeneous {
highlight: get_scope_of("diagnostic.info"),
ranges: info_vec,
},
OverlayHighlights::Homogeneous {
highlight: get_scope_of("diagnostic.hint"),
ranges: hint_vec,
},
OverlayHighlights::Homogeneous {
highlight: get_scope_of("diagnostic.warning"),
ranges: warning_vec,
},
OverlayHighlights::Homogeneous {
highlight: get_scope_of("diagnostic.error"),
ranges: error_vec,
},
]);
}
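
`push_diagnostic` above exploits the fact that `doc.diagnostics()` is sorted by start position: merging overlapping ranges then only ever needs to inspect the last span pushed. The merge in isolation:

use std::ops::Range;

/// Merge `range` into `spans`, assuming ranges arrive sorted by `start`.
/// Overlapping or touching ranges are coalesced into the last span.
fn push_merged(spans: &mut Vec<Range<usize>>, range: Range<usize>) {
    match spans.last_mut() {
        Some(last) if range.start <= last.end => {
            debug_assert!(last.start <= range.start); // input must be sorted
            last.end = last.end.max(range.end);
        }
        _ => spans.push(range),
    }
}

fn main() {
    let mut spans = Vec::new();
    for r in [0..4, 2..6, 8..9] {
        push_merged(&mut spans, r);
    }
    assert_eq!(spans, vec![0..6, 8..9]);
}
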
|
||||
|
||||
/// Get highlight spans for selections in a document view.
|
||||
@@ -467,7 +435,7 @@ impl EditorView {
|
||||
theme: &Theme,
|
||||
cursor_shape_config: &CursorShapeConfig,
|
||||
is_terminal_focused: bool,
|
||||
) -> Vec<(usize, std::ops::Range<usize>)> {
|
||||
) -> OverlayHighlights {
|
||||
let text = doc.text().slice(..);
|
||||
let selection = doc.selection(view.id);
|
||||
let primary_idx = selection.primary_index();
|
||||
@@ -476,34 +444,34 @@ impl EditorView {
|
||||
let cursor_is_block = cursorkind == CursorKind::Block;
|
||||
|
||||
let selection_scope = theme
|
||||
.find_scope_index_exact("ui.selection")
|
||||
.find_highlight_exact("ui.selection")
|
||||
.expect("could not find `ui.selection` scope in the theme!");
|
||||
let primary_selection_scope = theme
|
||||
.find_scope_index_exact("ui.selection.primary")
|
||||
.find_highlight_exact("ui.selection.primary")
|
||||
.unwrap_or(selection_scope);
|
||||
|
||||
let base_cursor_scope = theme
|
||||
.find_scope_index_exact("ui.cursor")
|
||||
.find_highlight_exact("ui.cursor")
|
||||
.unwrap_or(selection_scope);
|
||||
let base_primary_cursor_scope = theme
|
||||
.find_scope_index("ui.cursor.primary")
|
||||
.find_highlight("ui.cursor.primary")
|
||||
.unwrap_or(base_cursor_scope);
|
||||
|
||||
let cursor_scope = match mode {
|
||||
Mode::Insert => theme.find_scope_index_exact("ui.cursor.insert"),
|
||||
Mode::Select => theme.find_scope_index_exact("ui.cursor.select"),
|
||||
Mode::Normal => theme.find_scope_index_exact("ui.cursor.normal"),
|
||||
Mode::Insert => theme.find_highlight_exact("ui.cursor.insert"),
|
||||
Mode::Select => theme.find_highlight_exact("ui.cursor.select"),
|
||||
Mode::Normal => theme.find_highlight_exact("ui.cursor.normal"),
|
||||
}
|
||||
.unwrap_or(base_cursor_scope);
|
||||
|
||||
let primary_cursor_scope = match mode {
|
||||
Mode::Insert => theme.find_scope_index_exact("ui.cursor.primary.insert"),
|
||||
Mode::Select => theme.find_scope_index_exact("ui.cursor.primary.select"),
|
||||
Mode::Normal => theme.find_scope_index_exact("ui.cursor.primary.normal"),
|
||||
Mode::Insert => theme.find_highlight_exact("ui.cursor.primary.insert"),
|
||||
Mode::Select => theme.find_highlight_exact("ui.cursor.primary.select"),
|
||||
Mode::Normal => theme.find_highlight_exact("ui.cursor.primary.normal"),
|
||||
}
|
||||
.unwrap_or(base_primary_cursor_scope);
|
||||
|
||||
let mut spans: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
|
||||
let mut spans = Vec::new();
|
||||
for (i, range) in selection.iter().enumerate() {
|
||||
let selection_is_primary = i == primary_idx;
|
||||
let (cursor_scope, selection_scope) = if selection_is_primary {
|
||||
@@ -563,7 +531,7 @@ impl EditorView {
|
||||
}
|
||||
}
|
||||
|
||||
spans
|
||||
OverlayHighlights::Heterogenous { highlights: spans }
|
||||
}
|
||||
|
||||
/// Render brace match, etc (meant for the focused view only)
|
||||
@@ -571,41 +539,24 @@ impl EditorView {
|
||||
view: &View,
|
||||
doc: &Document,
|
||||
theme: &Theme,
|
||||
) -> Vec<(usize, std::ops::Range<usize>)> {
|
||||
) -> Option<OverlayHighlights> {
|
||||
// Highlight matching braces
|
||||
if let Some(syntax) = doc.syntax() {
|
||||
let text = doc.text().slice(..);
|
||||
use helix_core::match_brackets;
|
||||
let pos = doc.selection(view.id).primary().cursor(text);
|
||||
|
||||
if let Some(pos) =
|
||||
match_brackets::find_matching_bracket(syntax, doc.text().slice(..), pos)
|
||||
{
|
||||
// ensure col is on screen
|
||||
if let Some(highlight) = theme.find_scope_index_exact("ui.cursor.match") {
|
||||
return vec![(highlight, pos..pos + 1)];
|
||||
}
|
||||
}
|
||||
}
|
||||
Vec::new()
|
||||
let syntax = doc.syntax()?;
|
||||
let highlight = theme.find_highlight_exact("ui.cursor.match")?;
|
||||
let text = doc.text().slice(..);
|
||||
let pos = doc.selection(view.id).primary().cursor(text);
|
||||
let pos = helix_core::match_brackets::find_matching_bracket(syntax, text, pos)?;
|
||||
Some(OverlayHighlights::single(highlight, pos..pos + 1))
|
||||
}
|
||||
|
||||
pub fn tabstop_highlights(
|
||||
doc: &Document,
|
||||
theme: &Theme,
|
||||
) -> Option<Vec<(usize, std::ops::Range<usize>)>> {
|
||||
pub fn tabstop_highlights(doc: &Document, theme: &Theme) -> Option<OverlayHighlights> {
|
||||
let snippet = doc.active_snippet.as_ref()?;
|
||||
let highlight = theme.find_scope_index_exact("tabstop")?;
|
||||
let mut highlights = Vec::new();
|
||||
let highlight = theme.find_highlight_exact("tabstop")?;
|
||||
let mut ranges = Vec::new();
|
||||
for tabstop in snippet.tabstops() {
|
||||
highlights.extend(
|
||||
tabstop
|
||||
.ranges
|
||||
.iter()
|
||||
.map(|range| (highlight, range.start..range.end)),
|
||||
);
|
||||
ranges.extend(tabstop.ranges.iter().map(|range| range.start..range.end));
|
||||
}
|
||||
(!highlights.is_empty()).then_some(highlights)
|
||||
Some(OverlayHighlights::Homogeneous { highlight, ranges })
|
||||
}
|
||||
|
||||
/// Render bufferline at the top
|
||||
@@ -734,6 +685,7 @@ impl EditorView {
|
||||
theme: &Theme,
|
||||
) {
|
||||
use helix_core::diagnostic::Severity;
|
||||
use helix_view::diagnostic::NumberOrString;
|
||||
use tui::{
|
||||
layout::Alignment,
|
||||
text::Text,
|
||||
@@ -757,17 +709,18 @@ impl EditorView {
|
||||
let mut lines = Vec::new();
|
||||
let background_style = theme.get("ui.background");
|
||||
for diagnostic in diagnostics {
|
||||
let style = Style::reset()
|
||||
.patch(background_style)
|
||||
.patch(match diagnostic.severity {
|
||||
Some(Severity::Error) => error,
|
||||
Some(Severity::Warning) | None => warning,
|
||||
Some(Severity::Info) => info,
|
||||
Some(Severity::Hint) => hint,
|
||||
});
|
||||
let text = Text::styled(&diagnostic.message, style);
|
||||
let style =
|
||||
Style::reset()
|
||||
.patch(background_style)
|
||||
.patch(match diagnostic.inner.severity {
|
||||
Some(Severity::Error) => error,
|
||||
Some(Severity::Warning) | None => warning,
|
||||
Some(Severity::Info) => info,
|
||||
Some(Severity::Hint) => hint,
|
||||
});
|
||||
let text = Text::styled(&diagnostic.inner.message, style);
|
||||
lines.extend(text.lines);
|
||||
let code = diagnostic.code.as_ref().map(|x| match x {
|
||||
let code = diagnostic.inner.code.as_ref().map(|x| match x {
|
||||
NumberOrString::Number(n) => format!("({n})"),
|
||||
NumberOrString::String(s) => format!("({s})"),
|
||||
});
|
||||
|
@@ -1,7 +1,7 @@
 use std::sync::Arc;

 use arc_swap::ArcSwap;
-use helix_core::syntax;
+use helix_core::syntax::{self, OverlayHighlights};
 use helix_view::graphics::{Margin, Rect, Style};
 use helix_view::input::Event;
 use tui::buffer::Buffer;

@@ -94,7 +94,8 @@ impl Component for SignatureHelp {
     }

     fn render(&mut self, area: Rect, surface: &mut Buffer, cx: &mut Context) {
-        let margin = Margin::horizontal(1);
+        let margin = Margin::all(1);
         let area = area.inner(margin);

         let signature = self
             .signatures

@@ -102,13 +103,12 @@ impl Component for SignatureHelp {
             .unwrap_or_else(|| &self.signatures[0]);

         let active_param_span = signature.active_param_range.map(|(start, end)| {
-            vec![(
-                cx.editor
-                    .theme
-                    .find_scope_index_exact("ui.selection")
-                    .unwrap(),
-                start..end,
-            )]
+            let highlight = cx
+                .editor
+                .theme
+                .find_highlight_exact("ui.selection")
+                .unwrap();
+            OverlayHighlights::single(highlight, start..end)
         });

         let signature = self

@@ -120,7 +120,7 @@ impl Component for SignatureHelp {
             signature.signature.as_str(),
             &self.language,
             Some(&cx.editor.theme),
-            Arc::clone(&self.config_loader),
+            &self.config_loader.load(),
             active_param_span,
         );

@@ -128,13 +128,15 @@ impl Component for SignatureHelp {
             let signature_index = self.signature_index();
             let text = Text::from(signature_index);
             let paragraph = Paragraph::new(&text).alignment(Alignment::Right);
-            paragraph.render(area.clip_top(1).with_height(1).clip_right(1), surface);
+            paragraph.render(area.with_height(1).clip_right(1), surface);
         }

-        let (_, sig_text_height) = crate::ui::text::required_size(&sig_text, area.width);
-        let sig_text_area = area.clip_top(1).with_height(sig_text_height);
-        let sig_text_area = sig_text_area.inner(margin).intersection(surface.area);
-        let sig_text_para = Paragraph::new(&sig_text).wrap(Wrap { trim: false });
+        let sig_text_para = Paragraph::new(&sig_text)
+            .wrap(Wrap { trim: false })
+            .scroll((cx.scroll.unwrap_or_default() as u16, 0));
+        let (_, sig_text_height) = sig_text_para.required_size(area.width);
+        let sig_text_area = area.with_height(sig_text_height.min(area.height));
+        let sig_text_area = sig_text_area.intersection(surface.area);
         sig_text_para.render(sig_text_area, surface);

         if signature.signature_doc.is_none() {

@@ -160,7 +162,7 @@ impl Component for SignatureHelp {
             let sig_doc_para = Paragraph::new(&sig_doc)
                 .wrap(Wrap { trim: false })
                 .scroll((cx.scroll.unwrap_or_default() as u16, 0));
-            sig_doc_para.render(sig_doc_area.inner(margin), surface);
+            sig_doc_para.render(sig_doc_area, surface);
     }

     fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {

@@ -178,11 +180,11 @@ impl Component for SignatureHelp {
             signature.signature.as_str(),
             &self.language,
             None,
-            Arc::clone(&self.config_loader),
+            &self.config_loader.load(),
             None,
         );
-        let (sig_width, sig_height) =
-            crate::ui::text::required_size(&signature_text, max_text_width);
+        let sig_text_para = Paragraph::new(&signature_text).wrap(Wrap { trim: false });
+        let (sig_width, sig_height) = sig_text_para.required_size(max_text_width);

         let (width, height) = match signature.signature_doc {
             Some(ref doc) => {
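Aside: the signature-help hunks above wire a scroll offset into the wrapped paragraph. A minimal stand-in for that behavior in plain Rust (names and the line-based wrapping are illustrative assumptions, not the tui implementation): render a wrapped text starting `scroll` lines down, so a long signature doc can be scrolled inside a fixed-height popup.

// Return the slice of lines visible in a popup of `height` rows after
// scrolling down by `scroll` lines.
fn visible_lines(text: &str, scroll: usize, height: usize) -> Vec<&str> {
    text.lines().skip(scroll).take(height).collect()
}

fn main() {
    let doc = "line 1\nline 2\nline 3\nline 4";
    assert_eq!(visible_lines(doc, 1, 2), vec!["line 2", "line 3"]);
}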
@@ -10,8 +10,8 @@ use std::sync::Arc;
 use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Options, Parser, Tag, TagEnd};

 use helix_core::{
-    syntax::{self, HighlightEvent, InjectionLanguageMarker, Syntax},
-    RopeSlice,
+    syntax::{self, HighlightEvent, OverlayHighlights},
+    RopeSlice, Syntax,
 };
 use helix_view::{
     graphics::{Margin, Rect, Style},

@@ -32,8 +32,12 @@ pub fn highlighted_code_block<'a>(
     text: &str,
     language: &str,
     theme: Option<&Theme>,
-    config_loader: Arc<ArcSwap<syntax::Loader>>,
-    additional_highlight_spans: Option<Vec<(usize, std::ops::Range<usize>)>>,
+    loader: &syntax::Loader,
+    // Optional overlay highlights to mix in with the syntax highlights.
+    //
+    // Note that `OverlayHighlights` is typically used with char indexing but the only caller
+    // which passes this parameter currently passes **byte indices** instead.
+    additional_highlight_spans: Option<OverlayHighlights>,
 ) -> Text<'a> {
     let mut spans = Vec::new();
     let mut lines = Vec::new();

@@ -48,67 +52,74 @@ pub fn highlighted_code_block<'a>(
     };

     let ropeslice = RopeSlice::from(text);
-    let syntax = config_loader
-        .load()
-        .language_configuration_for_injection_string(&InjectionLanguageMarker::Name(
-            language.into(),
-        ))
-        .and_then(|config| config.highlight_config(theme.scopes()))
-        .and_then(|config| Syntax::new(ropeslice, config, Arc::clone(&config_loader)));
-
-    let syntax = match syntax {
-        Some(s) => s,
-        None => return styled_multiline_text(text, code_style),
+    let Some(syntax) = loader
+        .language_for_match(RopeSlice::from(language))
+        .and_then(|lang| Syntax::new(ropeslice, lang, loader).ok())
+    else {
+        return styled_multiline_text(text, code_style);
     };

-    let highlight_iter = syntax
-        .highlight_iter(ropeslice, None, None)
-        .map(|e| e.unwrap());
-    let highlight_iter: Box<dyn Iterator<Item = HighlightEvent>> =
-        if let Some(spans) = additional_highlight_spans {
-            Box::new(helix_core::syntax::merge(highlight_iter, spans))
-        } else {
-            Box::new(highlight_iter)
-        };
+    let mut syntax_highlighter = syntax.highlighter(ropeslice, loader, ..);
+    let mut syntax_highlight_stack = Vec::new();
+    let mut overlay_highlight_stack = Vec::new();
+    let mut overlay_highlighter = syntax::OverlayHighlighter::new(additional_highlight_spans);
+    let mut pos = 0;

-    let mut highlights = Vec::new();
-    for event in highlight_iter {
-        match event {
-            HighlightEvent::HighlightStart(span) => {
-                highlights.push(span);
-            }
-            HighlightEvent::HighlightEnd => {
-                highlights.pop();
-            }
-            HighlightEvent::Source { start, end } => {
-                let style = highlights
-                    .iter()
-                    .fold(text_style, |acc, span| acc.patch(theme.highlight(span.0)));
-
-                let mut slice = &text[start..end];
-                // TODO: do we need to handle all unicode line endings
-                // here, or is just '\n' okay?
-                while let Some(end) = slice.find('\n') {
-                    // emit span up to newline
-                    let text = &slice[..end];
-                    let text = text.replace('\t', "    "); // replace tabs
-                    let span = Span::styled(text, style);
-                    spans.push(span);
-
-                    // truncate slice to after newline
-                    slice = &slice[end + 1..];
-
-                    // make a new line
-                    let spans = std::mem::take(&mut spans);
-                    lines.push(Spans::from(spans));
-                }
-
-                // if there's anything left, emit it too
-                if !slice.is_empty() {
-                    let span = Span::styled(slice.replace('\t', "    "), style);
-                    spans.push(span);
-                }
-            }
-        }
-    }
+    while pos < ropeslice.len_bytes() as u32 {
+        if pos == syntax_highlighter.next_event_offset() {
+            let (event, new_highlights) = syntax_highlighter.advance();
+            if event == HighlightEvent::Refresh {
+                syntax_highlight_stack.clear();
+            }
+            syntax_highlight_stack.extend(new_highlights);
+        } else if pos == overlay_highlighter.next_event_offset() as u32 {
+            let (event, new_highlights) = overlay_highlighter.advance();
+            if event == HighlightEvent::Refresh {
+                overlay_highlight_stack.clear();
+            }
+            overlay_highlight_stack.extend(new_highlights)
+        }
+
+        let start = pos;
+        pos = syntax_highlighter
+            .next_event_offset()
+            .min(overlay_highlighter.next_event_offset() as u32);
+        if pos == u32::MAX {
+            pos = ropeslice.len_bytes() as u32;
+        }
+        if pos == start {
+            continue;
+        }
+        assert!(pos > start);
+
+        let style = syntax_highlight_stack
+            .iter()
+            .chain(overlay_highlight_stack.iter())
+            .fold(text_style, |acc, highlight| {
+                acc.patch(theme.highlight(*highlight))
+            });
+
+        let mut slice = &text[start as usize..pos as usize];
+        // TODO: do we need to handle all unicode line endings
+        // here, or is just '\n' okay?
+        while let Some(end) = slice.find('\n') {
+            // emit span up to newline
+            let text = &slice[..end];
+            let text = text.replace('\t', "    "); // replace tabs
+            let span = Span::styled(text, style);
+            spans.push(span);
+
+            // truncate slice to after newline
+            slice = &slice[end + 1..];
+
+            // make a new line
+            let spans = std::mem::take(&mut spans);
+            lines.push(Spans::from(spans));
+        }
+
+        if !slice.is_empty() {
+            let span = Span::styled(slice.replace('\t', "    "), style);
+            spans.push(span);
+        }
+    }

@@ -286,7 +297,7 @@ impl Markdown {
                         &text,
                         language,
                         theme,
-                        Arc::clone(&self.config_loader),
+                        &self.config_loader.load(),
                         None,
                     );
                     lines.extend(tui_text.lines.into_iter());
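Aside: the new loop above merges two highlighters by always advancing whichever one has the nearest `next_event_offset()`, then styling the span `[start, pos)` with the union of both active stacks. A standalone Rust sketch of that two-cursor merge; plain `usize` arrays stand in for the Highlighter/OverlayHighlighter APIs, which are assumptions outside this sketch:

// Return the next event offset for a cursor into a sorted event list,
// or usize::MAX when the list is exhausted.
fn next_event(events: &[usize], cursor: usize) -> usize {
    events.get(cursor).copied().unwrap_or(usize::MAX)
}

fn main() {
    let syntax_events = [0, 4, 9];
    let overlay_events = [2, 6];
    let (mut i, mut j, mut pos, len) = (0, 0, 0, 12);
    while pos < len {
        if pos == next_event(&syntax_events, i) {
            i += 1; // advance the syntax highlighter, update its stack here
        } else if pos == next_event(&overlay_events, j) {
            j += 1; // advance the overlay highlighter, update its stack here
        }
        let start = pos;
        pos = next_event(&syntax_events, i).min(next_event(&overlay_events, j));
        if pos == usize::MAX {
            pos = len;
        }
        assert!(pos > start); // each iteration emits a non-empty span
        println!("span {start}..{pos}");
    }
}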
@@ -371,13 +371,15 @@ fn directory_content(path: &Path) -> Result<Vec<(PathBuf, bool)>, std::io::Error
 pub mod completers {
     use super::Utf8PathBuf;
     use crate::ui::prompt::Completion;
+    use helix_core::command_line::{self, Tokenizer};
     use helix_core::fuzzy::fuzzy_match;
-    use helix_core::syntax::LanguageServerFeature;
+    use helix_core::syntax::config::LanguageServerFeature;
     use helix_view::document::SCRATCH_BUFFER_NAME;
     use helix_view::theme;
     use helix_view::{editor::Config, Editor};
     use once_cell::sync::Lazy;
     use std::borrow::Cow;
+    use std::collections::BTreeSet;
     use tui::text::Span;

     pub type Completer = fn(&Editor, &str) -> Vec<Completion>;

@@ -677,4 +679,63 @@ pub mod completers {
             .map(|(name, _)| ((0..), name.into()))
             .collect()
     }

+    pub fn program(_editor: &Editor, input: &str) -> Vec<Completion> {
+        static PROGRAMS_IN_PATH: Lazy<BTreeSet<String>> = Lazy::new(|| {
+            // Go through the entire PATH and read all files into a set.
+            let Some(path) = std::env::var_os("PATH") else {
+                return Default::default();
+            };
+
+            std::env::split_paths(&path)
+                .filter_map(|path| std::fs::read_dir(path).ok())
+                .flatten()
+                .filter_map(|res| {
+                    let entry = res.ok()?;
+                    if entry.metadata().ok()?.is_file() {
+                        entry.file_name().into_string().ok()
+                    } else {
+                        None
+                    }
+                })
+                .collect()
+        });
+
+        fuzzy_match(input, PROGRAMS_IN_PATH.iter(), false)
+            .into_iter()
+            .map(|(name, _)| ((0..), name.clone().into()))
+            .collect()
+    }
+
+    /// This expects input to be a raw string of arguments, because this is what Signature's raw_after does.
+    pub fn repeating_filenames(editor: &Editor, input: &str) -> Vec<Completion> {
+        let token = match Tokenizer::new(input, false).last() {
+            Some(token) => token.unwrap(),
+            None => return filename(editor, input),
+        };
+
+        let offset = token.content_start;
+
+        let mut completions = filename(editor, &input[offset..]);
+        for completion in completions.iter_mut() {
+            completion.0.start += offset;
+        }
+        completions
+    }
+
+    pub fn shell(editor: &Editor, input: &str) -> Vec<Completion> {
+        let (command, args, complete_command) = command_line::split(input);
+
+        if complete_command {
+            return program(editor, command);
+        }
+
+        let mut completions = repeating_filenames(editor, args);
+        for completion in completions.iter_mut() {
+            // + 1 for separator between `command` and `args`
+            completion.0.start += command.len() + 1;
+        }
+
+        completions
+    }
 }
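Aside: the `shell` completer above computes completions against the argument sub-slice and then shifts the resulting ranges back into full-input coordinates. A minimal Rust sketch of that range-shifting idea (the full-Range shape here is a simplification of the completer's actual range type):

fn main() {
    let input = "grep src/";
    let command_len = "grep".len();
    let args = &input[command_len + 1..]; // "src/"
    // Pretend a filename completer returned a range relative to `args`:
    let mut range = 0..args.len();
    // Shift by the command plus 1 for the separator so the range indexes `input`.
    range.start += command_len + 1;
    range.end += command_len + 1;
    assert_eq!(&input[range], "src/");
}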
@@ -585,8 +585,7 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
                 // retrieve the `Arc<Path>` key. The `path` in scope here is a `&Path` and
                 // we can cheaply clone the key for the preview highlight handler.
                 let (path, preview) = self.preview_cache.get_key_value(path).unwrap();
-                if matches!(preview, CachedPreview::Document(doc) if doc.language_config().is_none())
-                {
+                if matches!(preview, CachedPreview::Document(doc) if doc.syntax().is_none()) {
                     helix_event::send_blocking(&self.preview_highlight_handler, path.clone());
                 }
                 return Some((Preview::Cached(preview), range));

@@ -624,20 +623,27 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
                     if content_type.is_binary() {
                         return Ok(CachedPreview::Binary);
                     }
-                    Document::open(&path, None, None, editor.config.clone()).map_or(
-                        Err(std::io::Error::new(
-                            std::io::ErrorKind::NotFound,
-                            "Cannot open document",
-                        )),
-                        |doc| {
-                            // Asynchronously highlight the new document
-                            helix_event::send_blocking(
-                                &self.preview_highlight_handler,
-                                path.clone(),
-                            );
-                            Ok(CachedPreview::Document(Box::new(doc)))
-                        },
-                    )
+                    let mut doc = Document::open(
+                        &path,
+                        None,
+                        false,
+                        editor.config.clone(),
+                        editor.syn_loader.clone(),
+                    )
+                    .or(Err(std::io::Error::new(
+                        std::io::ErrorKind::NotFound,
+                        "Cannot open document",
+                    )))?;
+                    let loader = editor.syn_loader.load();
+                    if let Some(language_config) = doc.detect_language_config(&loader) {
+                        doc.language = Some(language_config);
+                        // Asynchronously highlight the new document
+                        helix_event::send_blocking(
+                            &self.preview_highlight_handler,
+                            path.clone(),
+                        );
+                    }
+                    Ok(CachedPreview::Document(Box::new(doc)))
                 } else {
                     Err(std::io::Error::new(
                         std::io::ErrorKind::NotFound,

@@ -933,21 +939,18 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
             }
         }

-        let syntax_highlights = EditorView::doc_syntax_highlights(
+        let loader = cx.editor.syn_loader.load();
+
+        let syntax_highlighter =
+            EditorView::doc_syntax_highlighter(doc, offset.anchor, area.height, &loader);
+        let mut overlay_highlights = Vec::new();
+
+        EditorView::doc_diagnostics_highlights_into(
             doc,
             offset.anchor,
             area.height,
             &cx.editor.theme,
+            &mut overlay_highlights,
         );

-        let mut overlay_highlights =
-            EditorView::empty_highlight_iter(doc, offset.anchor, area.height);
-        for spans in EditorView::doc_diagnostics_highlights(doc, &cx.editor.theme) {
-            if spans.is_empty() {
-                continue;
-            }
-            overlay_highlights = Box::new(helix_core::syntax::merge(overlay_highlights, spans));
-        }
         let mut decorations = DecorationManager::default();

         if let Some((start, end)) = range {

@@ -977,7 +980,7 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
                 offset,
                 // TODO: compute text annotations asynchronously here (like inlay hints)
                 &TextAnnotations::default(),
-                syntax_highlights,
+                syntax_highlighter,
                 overlay_highlights,
                 &cx.editor.theme,
                 decorations,
@@ -66,27 +66,24 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> AsyncHook
             return;
         };

-        if doc.language_config().is_some() {
+        if doc.syntax().is_some() {
             return;
         }

-        let Some(language_config) = doc.detect_language_config(&editor.syn_loader.load())
-        else {
+        let Some(language) = doc.language_config().map(|config| config.language()) else {
             return;
         };
-        doc.language = Some(language_config.clone());

+        let loader = editor.syn_loader.load();
         let text = doc.text().clone();
-        let loader = editor.syn_loader.clone();

         tokio::task::spawn_blocking(move || {
-            let Some(syntax) = language_config
-                .highlight_config(&loader.load().scopes())
-                .and_then(|highlight_config| {
-                    helix_core::Syntax::new(text.slice(..), highlight_config, loader)
-                })
-            else {
-                log::info!("highlighting picker item failed");
-                return;
+            let syntax = match helix_core::Syntax::new(text.slice(..), language, &loader) {
+                Ok(syntax) => syntax,
+                Err(err) => {
+                    log::info!("highlighting picker preview failed: {err}");
+                    return;
+                }
             };

             job::dispatch_blocking(move |editor, compositor| {
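Aside: the hunk above moves the parse onto a blocking task and reports the error instead of failing silently. A self-contained sketch of that pattern using plain threads as a stand-in for tokio's `spawn_blocking` (the parse itself is faked; everything here is illustrative):

fn main() {
    let text = String::from("fn main() {}");
    // Move owned, cheaply-cloned data into the worker thread.
    let handle = std::thread::spawn(move || -> Result<usize, String> {
        // Stand-in for the real syntax parse, which can fail.
        if text.is_empty() {
            return Err("empty document".to_string());
        }
        Ok(text.len())
    });
    match handle.join().unwrap() {
        Ok(n) => println!("parsed {n} bytes"),
        Err(err) => eprintln!("highlighting picker preview failed: {err}"),
    }
}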
@@ -12,7 +12,9 @@ use tui::text::Span;
 use tui::widgets::{Block, Widget};

 use helix_core::{
-    unicode::segmentation::GraphemeCursor, unicode::width::UnicodeWidthStr, Position,
+    unicode::segmentation::{GraphemeCursor, UnicodeSegmentation},
+    unicode::width::UnicodeWidthStr,
+    Position,
 };
 use helix_view::{
     graphics::{CursorKind, Margin, Rect},

@@ -529,27 +531,57 @@ impl Prompt {
                 &self.line,
                 language,
                 Some(&cx.editor.theme),
-                loader.clone(),
+                &loader.load(),
                 None,
             )
             .into();
             text.render(self.line_area, surface, cx);
         } else {
-            if self.line.len() < self.line_area.width as usize {
+            let line_width = self.line_area.width as usize;
+
+            if self.line.width() < line_width {
                 self.anchor = 0;
-            } else if self.cursor < self.anchor {
-                self.anchor = self.cursor;
-            } else if self.cursor - self.anchor > self.line_area.width as usize {
-                self.anchor = self.cursor - self.line_area.width as usize;
+            } else if self.cursor <= self.anchor {
+                // Ensure the grapheme under the cursor is in view.
+                self.anchor = self.line[..self.cursor]
+                    .grapheme_indices(true)
+                    .next_back()
+                    .map(|(i, _)| i)
+                    .unwrap_or_default();
+            } else if self.line[self.anchor..self.cursor].width() > line_width {
+                // Set the anchor to the last grapheme cluster before the width is exceeded.
+                let mut width = 0;
+                self.anchor = self.line[..self.cursor]
+                    .grapheme_indices(true)
+                    .rev()
+                    .find_map(|(idx, g)| {
+                        width += g.width();
+                        if width > line_width {
+                            Some(idx + g.len())
+                        } else {
+                            None
+                        }
+                    })
+                    .unwrap();
             }

             self.truncate_start = self.anchor > 0;
-            self.truncate_end = self.line.len() - self.anchor > self.line_area.width as usize;
+            self.truncate_end = self.line[self.anchor..].width() > line_width;

             // if we keep inserting characters just before the end elipsis, we move the anchor
             // so that those new characters are displayed
-            if self.truncate_end && self.cursor - self.anchor >= self.line_area.width as usize {
-                self.anchor += 1;
+            if self.truncate_end && self.line[self.anchor..self.cursor].width() >= line_width {
+                // Move the anchor forward by one non-zero-width grapheme.
+                self.anchor += self.line[self.anchor..]
+                    .grapheme_indices(true)
+                    .find_map(|(idx, g)| {
+                        if g.width() > 0 {
+                            Some(idx + g.len())
+                        } else {
+                            None
+                        }
+                    })
+                    .unwrap();
             }

             surface.set_string_anchored(

@@ -558,7 +590,7 @@ impl Prompt {
             self.truncate_start,
             self.truncate_end,
             &self.line.as_str()[self.anchor..],
-            self.line_area.width as usize - self.truncate_end as usize,
+            line_width,
             |_| prompt_color,
         );
     }

@@ -732,19 +764,23 @@ impl Component for Prompt {
     fn cursor(&self, area: Rect, editor: &Editor) -> (Option<Position>, CursorKind) {
         let area = area
             .clip_left(self.prompt.len() as u16)
-            .clip_right(if self.prompt.len() > 0 { 0 } else { 2 });
+            .clip_right(if self.prompt.is_empty() { 2 } else { 0 });

-        let anchor = self.anchor.min(self.line.len().saturating_sub(1));
-        let mut col = area.left() as usize
-            + UnicodeWidthStr::width(&self.line[anchor..self.cursor.max(anchor)]);
+        let mut col = area.left() as usize + self.line[self.anchor..self.cursor].width();

         // ensure the cursor does not go beyond elipses
-        if self.truncate_end && self.cursor - self.anchor >= self.line_area.width as usize {
+        if self.truncate_end
+            && self.line[self.anchor..self.cursor].width() >= self.line_area.width as usize
+        {
             col -= 1;
         }

         if self.truncate_start && self.cursor == self.anchor {
-            col += 1;
+            col += self.line[self.cursor..]
+                .graphemes(true)
+                .next()
+                .unwrap()
+                .width();
         }

         let line = area.height as usize - 1;
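Aside: the prompt hunks above replace byte-length arithmetic with a backwards grapheme scan that accumulates display width. A simplified, self-contained Rust sketch of that scan; chars and a crude width function stand in for the unicode-segmentation and unicode-width crates the real code uses:

// Walk backwards from the cursor accumulating display width until the
// viewport width is exceeded, and anchor just after that point.
fn anchor_for(line: &str, cursor: usize, viewport_width: usize) -> usize {
    let mut width = 0;
    line[..cursor]
        .char_indices()
        .rev()
        .find_map(|(idx, c)| {
            width += c.len_utf8().min(2); // crude per-char width stand-in
            (width > viewport_width).then(|| idx + c.len_utf8())
        })
        .unwrap_or(0)
}

fn main() {
    let line = "abcdefghij";
    // With a 4-column viewport, the anchor lands so "ghij" stays visible.
    assert_eq!(anchor_for(line, line.len(), 4), 6);
}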
@@ -1,5 +1,7 @@
-use helix_core::{coords_at_pos, encoding, Position};
-use helix_lsp::lsp::DiagnosticSeverity;
+use std::borrow::Cow;
+
+use helix_core::indent::IndentStyle;
+use helix_core::{coords_at_pos, diagnostic::Severity, encoding, Position};
 use helix_view::document::DEFAULT_LANGUAGE_NAME;
 use helix_view::{
     document::{Mode, SCRATCH_BUFFER_NAME},

@@ -58,25 +60,16 @@ pub fn render(context: &mut RenderContext, viewport: Rect, surface: &mut Surface
     surface.set_style(viewport.with_height(1), base_style);

-    let write_left = |context: &mut RenderContext, text, style| {
-        append(&mut context.parts.left, text, &base_style, style)
-    };
-    let write_center = |context: &mut RenderContext, text, style| {
-        append(&mut context.parts.center, text, &base_style, style)
-    };
-    let write_right = |context: &mut RenderContext, text, style| {
-        append(&mut context.parts.right, text, &base_style, style)
-    };
-
     // Left side of the status line.

     let config = context.editor.config();

-    let element_ids = &config.statusline.left;
-    element_ids
-        .iter()
-        .map(|element_id| get_render_function(*element_id))
-        .for_each(|render| render(context, write_left));
+    for element_id in &config.statusline.left {
+        let render = get_render_function(*element_id);
+        (render)(context, |context, span| {
+            append(&mut context.parts.left, span, base_style)
+        });
+    }

     surface.set_spans(
         viewport.x,

@@ -87,11 +80,12 @@ pub fn render(context: &mut RenderContext, viewport: Rect, surface: &mut Surface
     // Right side of the status line.

-    let element_ids = &config.statusline.right;
-    element_ids
-        .iter()
-        .map(|element_id| get_render_function(*element_id))
-        .for_each(|render| render(context, write_right));
+    for element_id in &config.statusline.right {
+        let render = get_render_function(*element_id);
+        (render)(context, |context, span| {
+            append(&mut context.parts.right, span, base_style)
+        })
+    }

     surface.set_spans(
         viewport.x

@@ -105,11 +99,12 @@ pub fn render(context: &mut RenderContext, viewport: Rect, surface: &mut Surface
     // Center of the status line.

-    let element_ids = &config.statusline.center;
-    element_ids
-        .iter()
-        .map(|element_id| get_render_function(*element_id))
-        .for_each(|render| render(context, write_center));
+    for element_id in &config.statusline.center {
+        let render = get_render_function(*element_id);
+        (render)(context, |context, span| {
+            append(&mut context.parts.center, span, base_style)
+        })
+    }

     // Width of the empty space between the left and center area and between the center and right area.
     let spacing = 1u16;

@@ -126,16 +121,14 @@ pub fn render(context: &mut RenderContext, viewport: Rect, surface: &mut Surface
     );
 }

-fn append(buffer: &mut Spans, text: String, base_style: &Style, style: Option<Style>) {
-    buffer.0.push(Span::styled(
-        text,
-        style.map_or(*base_style, |s| (*base_style).patch(s)),
-    ));
+fn append<'a>(buffer: &mut Spans<'a>, mut span: Span<'a>, base_style: Style) {
+    span.style = base_style.patch(span.style);
+    buffer.0.push(span);
 }

-fn get_render_function<F>(element_id: StatusLineElementID) -> impl Fn(&mut RenderContext, F)
+fn get_render_function<'a, F>(element_id: StatusLineElementID) -> impl Fn(&mut RenderContext<'a>, F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     match element_id {
         helix_view::editor::StatusLineElement::Mode => render_mode,

@@ -149,6 +142,7 @@ where
         helix_view::editor::StatusLineElement::ReadOnlyIndicator => render_read_only_indicator,
         helix_view::editor::StatusLineElement::FileEncoding => render_file_encoding,
         helix_view::editor::StatusLineElement::FileLineEnding => render_file_line_ending,
+        helix_view::editor::StatusLineElement::FileIndentStyle => render_file_indent_style,
         helix_view::editor::StatusLineElement::FileType => render_file_type,
         helix_view::editor::StatusLineElement::Diagnostics => render_diagnostics,
         helix_view::editor::StatusLineElement::WorkspaceDiagnostics => render_workspace_diagnostics,

@@ -166,44 +160,42 @@ where
 }

-fn render_mode<F>(context: &mut RenderContext, write: F)
+fn render_mode<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let visible = context.focused;
     let config = context.editor.config();
     let modenames = &config.statusline.mode;
-    write(
-        context,
-        format!(
-            " {} ",
-            if visible {
-                match context.editor.mode() {
-                    Mode::Insert => &modenames.insert,
-                    Mode::Select => &modenames.select,
-                    Mode::Normal => &modenames.normal,
-                }
-            } else {
-                // If not focused, explicitly leave an empty space instead of returning None.
-                " "
-            }
-        ),
-        if visible && config.color_modes {
-            match context.editor.mode() {
-                Mode::Insert => Some(context.editor.theme.get("ui.statusline.insert")),
-                Mode::Select => Some(context.editor.theme.get("ui.statusline.select")),
-                Mode::Normal => Some(context.editor.theme.get("ui.statusline.normal")),
-            }
-        } else {
-            None
-        },
-    );
+    let content = if visible {
+        Cow::Owned(format!(
+            " {} ",
+            match context.editor.mode() {
+                Mode::Insert => &modenames.insert,
+                Mode::Select => &modenames.select,
+                Mode::Normal => &modenames.normal,
+            }
+        ))
+    } else {
+        // If not focused, explicitly leave an empty space instead of returning None.
+        Cow::Borrowed(" ")
+    };
+    let style = if visible && config.color_modes {
+        match context.editor.mode() {
+            Mode::Insert => context.editor.theme.get("ui.statusline.insert"),
+            Mode::Select => context.editor.theme.get("ui.statusline.select"),
+            Mode::Normal => context.editor.theme.get("ui.statusline.normal"),
+        }
+    } else {
+        Style::default()
+    };
+    write(context, Span::styled(content, style));
 }

 // TODO think about handling multiple language servers
-fn render_lsp_spinner<F>(context: &mut RenderContext, write: F)
+fn render_lsp_spinner<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let language_server = context.doc.language_servers().next();
     write(

@@ -217,111 +209,149 @@ where
             })
             // Even if there's no spinner; reserve its space to avoid elements frequently shifting.
             .unwrap_or(" ")
-            .to_string(),
-        None,
+            .into(),
     );
 }

-fn render_diagnostics<F>(context: &mut RenderContext, write: F)
-where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
-{
-    let (warnings, errors) = context
-        .doc
-        .diagnostics()
-        .iter()
-        .fold((0, 0), |mut counts, diag| {
-            use helix_core::diagnostic::Severity;
-            match diag.severity {
-                Some(Severity::Warning) => counts.0 += 1,
-                Some(Severity::Error) | None => counts.1 += 1,
-                _ => {}
-            }
-            counts
-        });
-
-    if warnings > 0 {
-        write(
-            context,
-            "●".to_string(),
-            Some(context.editor.theme.get("warning")),
-        );
-        write(context, format!(" {} ", warnings), None);
-    }
-
-    if errors > 0 {
-        write(
-            context,
-            "●".to_string(),
-            Some(context.editor.theme.get("error")),
-        );
-        write(context, format!(" {} ", errors), None);
-    }
-}
-
-fn render_workspace_diagnostics<F>(context: &mut RenderContext, write: F)
-where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
-{
-    let (warnings, errors) =
-        context
-            .editor
-            .diagnostics
-            .values()
-            .flatten()
-            .fold((0, 0), |mut counts, (diag, _)| {
-                match diag.severity {
-                    Some(DiagnosticSeverity::WARNING) => counts.0 += 1,
-                    Some(DiagnosticSeverity::ERROR) | None => counts.1 += 1,
-                    _ => {}
-                }
-                counts
-            });
-
-    if warnings > 0 || errors > 0 {
-        write(context, " W ".into(), None);
-    }
-
-    if warnings > 0 {
-        write(
-            context,
-            "●".to_string(),
-            Some(context.editor.theme.get("warning")),
-        );
-        write(context, format!(" {} ", warnings), None);
-    }
-
-    if errors > 0 {
-        write(
-            context,
-            "●".to_string(),
-            Some(context.editor.theme.get("error")),
-        );
-        write(context, format!(" {} ", errors), None);
-    }
-}
+fn render_diagnostics<'a, F>(context: &mut RenderContext<'a>, write: F)
+where
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
+{
+    let (hints, info, warnings, errors) = context
+        .doc
+        .diagnostics()
+        .iter()
+        .fold((0, 0, 0, 0), |mut counts, diag| {
+            match diag.inner.severity {
+                Some(Severity::Hint) | None => counts.0 += 1,
+                Some(Severity::Info) => counts.1 += 1,
+                Some(Severity::Warning) => counts.2 += 1,
+                Some(Severity::Error) => counts.3 += 1,
+            }
+            counts
+        });
+
+    for sev in &context.editor.config().statusline.diagnostics {
+        match sev {
+            Severity::Hint if hints > 0 => {
+                write(context, Span::styled("●", context.editor.theme.get("hint")));
+                write(context, format!(" {} ", hints).into());
+            }
+            Severity::Info if info > 0 => {
+                write(context, Span::styled("●", context.editor.theme.get("info")));
+                write(context, format!(" {} ", info).into());
+            }
+            Severity::Warning if warnings > 0 => {
+                write(
+                    context,
+                    Span::styled("●", context.editor.theme.get("warning")),
+                );
+                write(context, format!(" {} ", warnings).into());
+            }
+            Severity::Error if errors > 0 => {
+                write(
+                    context,
+                    Span::styled("●", context.editor.theme.get("error")),
+                );
+                write(context, format!(" {} ", errors).into());
+            }
+            _ => {}
+        }
+    }
+}
+
+fn render_workspace_diagnostics<'a, F>(context: &mut RenderContext<'a>, write: F)
+where
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
+{
+    use helix_core::diagnostic::Severity;
+    let (hints, info, warnings, errors) = context.editor.diagnostics.values().flatten().fold(
+        (0u32, 0u32, 0u32, 0u32),
+        |mut counts, diag| {
+            match diag.severity {
+                // PERF: For large workspace diagnostics, this loop can be very tight.
+                //
+                // Most often the diagnostics will be for warnings and errors.
+                // Errors should tend to be fixed fast, leaving warnings as the most common.
+                Some(Severity::Warning) => counts.2 += 1,
+                Some(Severity::Error) => counts.3 += 1,
+                Some(Severity::Hint) => counts.0 += 1,
+                Some(Severity::Info) => counts.1 += 1,
+                // Fallback to `hint`.
+                _ => counts.0 += 1,
+            }
+            counts
+        },
+    );
+
+    let sevs_to_show = &context.editor.config().statusline.workspace_diagnostics;
+
+    // Avoid showing the " W " if no diagnostic counts will be shown.
+    if !sevs_to_show.iter().any(|sev| match sev {
+        Severity::Hint => hints != 0,
+        Severity::Info => info != 0,
+        Severity::Warning => warnings != 0,
+        Severity::Error => errors != 0,
+    }) {
+        return;
+    }
+
+    write(context, " W ".into());
+
+    for sev in sevs_to_show {
+        match sev {
+            Severity::Hint if hints > 0 => {
+                write(context, Span::styled("●", context.editor.theme.get("hint")));
+                write(context, format!(" {} ", hints).into());
+            }
+            Severity::Info if info > 0 => {
+                write(context, Span::styled("●", context.editor.theme.get("info")));
+                write(context, format!(" {} ", info).into());
+            }
+            Severity::Warning if warnings > 0 => {
+                write(
+                    context,
+                    Span::styled("●", context.editor.theme.get("warning")),
+                );
+                write(context, format!(" {} ", warnings).into());
+            }
+            Severity::Error if errors > 0 => {
+                write(
+                    context,
+                    Span::styled("●", context.editor.theme.get("error")),
+                );
+                write(context, format!(" {} ", errors).into());
+            }
+            _ => {}
+        }
+    }
+}

-fn render_selections<F>(context: &mut RenderContext, write: F)
+fn render_selections<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
-    let count = context.doc.selection(context.view.id).len();
+    let selection = context.doc.selection(context.view.id);
+    let count = selection.len();
     write(
         context,
-        format!(" {} sel{} ", count, if count == 1 { "" } else { "s" }),
-        None,
+        if count == 1 {
+            " 1 sel ".into()
+        } else {
+            format!(" {}/{count} sels ", selection.primary_index() + 1).into()
+        },
     );
 }

-fn render_primary_selection_length<F>(context: &mut RenderContext, write: F)
+fn render_primary_selection_length<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let tot_sel = context.doc.selection(context.view.id).primary().len();
     write(
         context,
-        format!(" {} char{} ", tot_sel, if tot_sel == 1 { "" } else { "s" }),
-        None,
+        format!(" {} char{} ", tot_sel, if tot_sel == 1 { "" } else { "s" }).into(),
     );
 }
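Aside: the statusline refactor above changes each element renderer from pushing `(String, Option<Style>)` to pushing a pre-styled segment that `append` patches onto the base style. A small standalone sketch of that design; the toy `Style`, `Span`, and patch rule below are illustrative stand-ins for the tui types:

#[derive(Clone, Copy, Default, Debug, PartialEq)]
struct Style(u8);
impl Style {
    // Toy patch rule: a non-default style overrides the base.
    fn patch(self, other: Style) -> Style {
        if other == Style::default() { self } else { other }
    }
}
struct Span { text: String, style: Style }

// Each renderer calls this through a closure; the base style is applied once
// here instead of being threaded through every call site as an Option.
fn append(buffer: &mut Vec<Span>, mut span: Span, base: Style) {
    span.style = base.patch(span.style);
    buffer.push(span);
}

fn main() {
    let mut left = Vec::new();
    let base = Style(1);
    append(&mut left, Span { text: " 1 sel ".into(), style: Style::default() }, base);
    append(&mut left, Span { text: "●".into(), style: Style(9) }, base);
    assert_eq!(left[0].style, Style(1)); // inherits the base style
    assert_eq!(left[1].style, Style(9)); // keeps its own style
    assert_eq!(left[1].text, "●");
}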
@@ -336,54 +366,52 @@ fn get_position(context: &RenderContext) -> Position {
     )
 }

-fn render_position<F>(context: &mut RenderContext, write: F)
+fn render_position<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let position = get_position(context);
     write(
         context,
-        format!(" {}:{} ", position.row + 1, position.col + 1),
-        None,
+        format!(" {}:{} ", position.row + 1, position.col + 1).into(),
     );
 }

-fn render_total_line_numbers<F>(context: &mut RenderContext, write: F)
+fn render_total_line_numbers<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let total_line_numbers = context.doc.text().len_lines();

-    write(context, format!(" {} ", total_line_numbers), None);
+    write(context, format!(" {} ", total_line_numbers).into());
 }

-fn render_position_percentage<F>(context: &mut RenderContext, write: F)
+fn render_position_percentage<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let position = get_position(context);
     let maxrows = context.doc.text().len_lines();
     write(
         context,
-        format!("{}%", (position.row + 1) * 100 / maxrows),
-        None,
+        format!("{}%", (position.row + 1) * 100 / maxrows).into(),
     );
 }

-fn render_file_encoding<F>(context: &mut RenderContext, write: F)
+fn render_file_encoding<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let enc = context.doc.encoding();

     if enc != encoding::UTF_8 {
-        write(context, format!(" {} ", enc.name()), None);
+        write(context, format!(" {} ", enc.name()).into());
     }
 }

-fn render_file_line_ending<F>(context: &mut RenderContext, write: F)
+fn render_file_line_ending<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     use helix_core::LineEnding::*;
     let line_ending = match context.doc.line_ending {

@@ -403,21 +431,21 @@ where
         PS => "PS", // U+2029 -- ParagraphSeparator
     };

-    write(context, format!(" {} ", line_ending), None);
+    write(context, format!(" {} ", line_ending).into());
 }

-fn render_file_type<F>(context: &mut RenderContext, write: F)
+fn render_file_type<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let file_type = context.doc.language_name().unwrap_or(DEFAULT_LANGUAGE_NAME);

-    write(context, format!(" {} ", file_type), None);
+    write(context, format!(" {} ", file_type).into());
 }

-fn render_file_name<F>(context: &mut RenderContext, write: F)
+fn render_file_name<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let title = {
         let rel_path = context.doc.relative_path();

@@ -428,12 +456,12 @@ where
         format!(" {} ", path)
     };

-    write(context, title, None);
+    write(context, title.into());
 }

-fn render_file_absolute_path<F>(context: &mut RenderContext, write: F)
+fn render_file_absolute_path<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let title = {
         let path = context.doc.path();

@@ -444,39 +472,37 @@ where
         format!(" {} ", path)
     };

-    write(context, title, None);
+    write(context, title.into());
 }

-fn render_file_modification_indicator<F>(context: &mut RenderContext, write: F)
+fn render_file_modification_indicator<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
-    let title = (if context.doc.is_modified() {
+    let title = if context.doc.is_modified() {
         "[+]"
     } else {
         " "
-    })
-    .to_string();
+    };

-    write(context, title, None);
+    write(context, title.into());
 }

-fn render_read_only_indicator<F>(context: &mut RenderContext, write: F)
+fn render_read_only_indicator<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let title = if context.doc.readonly {
         " [readonly] "
     } else {
         ""
-    }
-    .to_string();
-    write(context, title, None);
+    };
+    write(context, title.into());
 }

-fn render_file_base_name<F>(context: &mut RenderContext, write: F)
+fn render_file_base_name<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let title = {
         let rel_path = context.doc.relative_path();

@@ -487,32 +513,29 @@ where
         format!(" {} ", path)
     };

-    write(context, title, None);
+    write(context, title.into());
 }

-fn render_separator<F>(context: &mut RenderContext, write: F)
+fn render_separator<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let sep = &context.editor.config().statusline.separator;
+    let style = context.editor.theme.get("ui.statusline.separator");

-    write(
-        context,
-        sep.to_string(),
-        Some(context.editor.theme.get("ui.statusline.separator")),
-    );
+    write(context, Span::styled(sep.to_string(), style));
 }

-fn render_spacer<F>(context: &mut RenderContext, write: F)
+fn render_spacer<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
-    write(context, String::from(" "), None);
+    write(context, " ".into());
 }

-fn render_version_control<F>(context: &mut RenderContext, write: F)
+fn render_version_control<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let head = context
         .doc

@@ -520,14 +543,31 @@ where
         .unwrap_or_default()
         .to_string();

-    write(context, head, None);
+    write(context, head.into());
 }

-fn render_register<F>(context: &mut RenderContext, write: F)
+fn render_register<'a, F>(context: &mut RenderContext<'a>, write: F)
 where
-    F: Fn(&mut RenderContext, String, Option<Style>) + Copy,
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     if let Some(reg) = context.editor.selected_register {
-        write(context, format!(" reg={} ", reg), None)
+        write(context, format!(" reg={} ", reg).into())
     }
 }
+
+fn render_file_indent_style<'a, F>(context: &mut RenderContext<'a>, write: F)
+where
+    F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
+{
+    let style = context.doc.indent_style;
+
+    write(
+        context,
+        match style {
+            IndentStyle::Tabs => " tabs ".into(),
+            IndentStyle::Spaces(indent) => {
+                format!(" {} space{} ", indent, if indent == 1 { "" } else { "s" }).into()
+            }
+        },
+    );
+}
@@ -4,13 +4,13 @@ use helix_core::diagnostic::Severity;
 use helix_core::doc_formatter::{DocumentFormatter, FormattedGrapheme};
 use helix_core::graphemes::Grapheme;
 use helix_core::text_annotations::TextAnnotations;
-use helix_core::{Diagnostic, Position};
+use helix_core::Position;
 use helix_view::annotations::diagnostics::{
     DiagnosticFilter, InlineDiagnosticAccumulator, InlineDiagnosticsConfig,
 };

 use helix_view::theme::Style;
-use helix_view::{Document, Theme};
+use helix_view::{document::Diagnostic, Document, Theme};

 use crate::ui::document::{LinePos, TextRenderer};
 use crate::ui::text_decorations::Decoration;

@@ -102,7 +102,7 @@ impl Renderer<'_, '_> {
         let mut end_col = start_col;
         let mut draw_col = (col + 1) as u16;

-        for line in diag.message.lines() {
+        for line in diag.inner.message.lines() {
             if !self.renderer.column_in_bounds(draw_col as usize, 1) {
                 break;
             }

@@ -139,7 +139,7 @@ impl Renderer<'_, '_> {
         let text_fmt = self.config.text_fmt(text_col, self.renderer.viewport.width);
         let annotations = TextAnnotations::default();
         let formatter = DocumentFormatter::new_at_prev_checkpoint(
-            diag.message.as_str().trim().into(),
+            diag.inner.message.as_str().trim().into(),
             &text_fmt,
             &annotations,
             0,

@@ -262,9 +262,9 @@ impl Decoration for InlineDiagnostics<'_> {
             match filter {
                 DiagnosticFilter::Enable(filter) => eol_diganogistcs
                     .filter(|(diag, _)| filter > diag.severity())
-                    .max_by_key(|(diagnostic, _)| diagnostic.severity),
+                    .max_by_key(|(diagnostic, _)| diagnostic.inner.severity),
                 DiagnosticFilter::Disable => {
-                    eol_diganogistcs.max_by_key(|(diagnostic, _)| diagnostic.severity)
+                    eol_diganogistcs.max_by_key(|(diagnostic, _)| diagnostic.inner.severity)
                 }
             }
         }
     }
@@ -2,7 +2,7 @@
 mod test {
     mod helpers;

-    use helix_core::{syntax::AutoPairConfig, Selection};
+    use helix_core::{syntax::config::AutoPairConfig, Selection};
     use helix_term::config::Config;

     use indoc::indoc;
@@ -734,7 +734,7 @@ async fn surround_replace_ts() -> anyhow::Result<()> {
     const INPUT: &str = r#"\
 fn foo() {
     if let Some(_) = None {
-        todo!("f#[|o]#o)");
+        testing!("f#[|o]#o)");
     }
 }
 "#;

@@ -744,7 +744,7 @@ fn foo() {
         r#"\
 fn foo() {
     if let Some(_) = None {
-        todo!('f#[|o]#o)');
+        testing!('f#[|o]#o)');
     }
 }
 "#,

@@ -757,7 +757,7 @@ fn foo() {
         r#"\
 fn foo() {
     if let Some(_) = None [
-        todo!("f#[|o]#o)");
+        testing!("f#[|o]#o)");
     ]
 }
 "#,

@@ -770,7 +770,7 @@ fn foo() {
         r#"\
 fn foo() {
     if let Some(_) = None {
-        todo!{"f#[|o]#o)"};
+        testing!{"f#[|o]#o)"};
     }
 }
 "#,

@@ -820,3 +820,25 @@ async fn macro_play_within_macro_record() -> anyhow::Result<()> {

     Ok(())
 }
+
+#[tokio::test(flavor = "multi_thread")]
+async fn global_search_with_multibyte_chars() -> anyhow::Result<()> {
+    // Assert that `helix_term::commands::global_search` handles multibyte characters correctly.
+    test((
+        indoc! {"\
+            // Hello world!
+            // #[|
+            ]#
+        "},
+        // start global search
+        " /«十分に長い マルチバイトキャラクター列» で検索<ret><esc>",
+        indoc! {"\
+            // Hello world!
+            // #[|
+            ]#
+        "},
+    ))
+    .await?;
+
+    Ok(())
+}
@@ -172,6 +172,18 @@ async fn insert_newline_trim_trailing_whitespace() -> anyhow::Result<()> {
     Ok(())
 }

+#[tokio::test(flavor = "multi_thread")]
+async fn insert_newline_trim_whitespace_to_previous_selection() -> anyhow::Result<()> {
+    test((
+        indoc! {"\"#[a|]# #(a|)# #(a|)#\""},
+        "c<ret>",
+        indoc! {"\"\n#[\n|]##(\n|)##(\"|)#"},
+    ))
+    .await?;
+
+    Ok(())
+}
+
 #[tokio::test(flavor = "multi_thread")]
 async fn insert_newline_continue_line_comment() -> anyhow::Result<()> {
     // `insert_newline` continues a single line comment

@@ -512,3 +524,62 @@ async fn test_open_above_with_comments() -> anyhow::Result<()> {

     Ok(())
 }
+
+#[tokio::test(flavor = "multi_thread")]
+async fn try_restore_indent() -> anyhow::Result<()> {
+    // Assert that `helix_view::editor::try_restore_indent` handles line endings correctly.
+    test((
+        indoc! {"\
+            if true #[|{]#
+            }
+        "},
+        // `try_restore_indent` should remove the indentation when adding a blank line.
+        ":lang rust<ret>o<esc>",
+        indoc! {"\
+            if true {
+            #[
+            |]#}
+        "},
+    ))
+    .await?;
+
+    Ok(())
+}
+
+// Tests being able to jump in insert mode, then undo the write performed by the jump
+// https://github.com/helix-editor/helix/issues/13480
+#[tokio::test(flavor = "multi_thread")]
+async fn test_jump_undo_redo() -> anyhow::Result<()> {
+    use helix_core::hashmap;
+    use helix_term::keymap;
+    use helix_view::document::Mode;
+
+    let mut config = Config::default();
+    config.keys.insert(
+        Mode::Insert,
+        keymap!({"Insert Mode"
+            "C-i" => goto_file_start,
+            "C-o" => goto_file_end,
+        }),
+    );
+
+    // Undo
+    test_with_config(
+        AppBuilder::new().with_config(config.clone()),
+        ("#[|]#", "iworld<C-i>Hello, <esc>u", "#[w|]#orld"),
+    )
+    .await?;
+
+    // Redo
+    test_with_config(
+        AppBuilder::new().with_config(config),
+        (
+            "#[|]#",
+            "iworld<C-i>Hello, <esc>ui<C-o><esc>U",
+            "Hello, #[w|]#orld",
+        ),
+    )
+    .await?;
+    Ok(())
+}
@@ -379,9 +379,9 @@ async fn match_around_closest_ts() -> anyhow::Result<()> {
     test_with_config(
         AppBuilder::new().with_file("foo.rs", None),
         (
-            r#"fn main() {todo!{"f#[|oo]#)"};}"#,
+            r#"fn main() {testing!{"f#[|oo]#)"};}"#,
             "mam",
-            r#"fn main() {todo!{#[|"foo)"]#};}"#,
+            r#"fn main() {testing!{#[|"foo)"]#};}"#,
         ),
     )
     .await?;
@@ -326,43 +326,44 @@ impl Buffer {
             return (x, y);
         }

-        let max_offset = min(
-            self.area.right() as usize - 1,
-            width.saturating_add(x as usize),
-        );
-        let mut start_index = self.index_of(x, y);
-        let mut end_index = self.index_of(max_offset as u16, y);
-
-        if truncate_end {
-            self.content[end_index].set_symbol("…");
-            end_index -= 1;
-        }
+        let mut index = self.index_of(x, y);
+        let mut rendered_width = 0;
+        let mut graphemes = string.grapheme_indices(true);

         if truncate_start {
-            self.content[start_index].set_symbol("…");
-            start_index += 1;
+            for _ in 0..graphemes.next().map(|(_, g)| g.width()).unwrap_or_default() {
+                self.content[index].set_symbol("…");
+                index += 1;
+                rendered_width += 1;
+            }
         }

-        let graphemes = string.grapheme_indices(true);
-
-        for (byte_offset, s) in graphemes.skip(truncate_start as usize) {
-            if start_index > end_index {
+        for (byte_offset, s) in graphemes {
+            let grapheme_width = s.width();
+            if truncate_end && rendered_width + grapheme_width >= width {
                 break;
             }
-            let width = s.width();
-            if width == 0 {
+            if grapheme_width == 0 {
                 continue;
             }

-            self.content[start_index].set_symbol(s);
-            self.content[start_index].set_style(style(byte_offset));
+            self.content[index].set_symbol(s);
+            self.content[index].set_style(style(byte_offset));

             // Reset following cells if multi-width (they would be hidden by the grapheme):
-            for i in start_index + 1..start_index + width {
+            for i in index + 1..index + grapheme_width {
                 self.content[i].reset();
             }

-            start_index += width;
+            index += grapheme_width;
+            rendered_width += grapheme_width;
         }

+        if truncate_end {
+            for _ in 0..width.saturating_sub(rendered_width) {
+                self.content[index].set_symbol("…");
+                index += 1;
+            }
+        }
+
         (x, y)
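Aside: the buffer hunk above switches ellipsis truncation from cell-index bookkeeping to accumulated grapheme widths. A minimal self-contained Rust sketch of end-truncation with an ellipsis; chars stand in for grapheme clusters and every char is assumed to be one column wide:

// Render at most `width` columns, replacing the final column with an
// ellipsis when the string is cut off.
fn truncate_end(s: &str, width: usize) -> String {
    if s.chars().count() <= width {
        return s.to_string();
    }
    let mut out: String = s.chars().take(width.saturating_sub(1)).collect();
    out.push('…');
    out
}

fn main() {
    assert_eq!(truncate_end("hello world", 6), "hello…");
    assert_eq!(truncate_end("hi", 6), "hi");
}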
@@ -127,6 +127,38 @@ impl<'a> Paragraph<'a> {
         self.alignment = alignment;
         self
     }

+    pub fn required_size(&self, max_text_width: u16) -> (u16, u16) {
+        let style = self.style;
+        let mut styled = self.text.lines.iter().flat_map(|spans| {
+            spans
+                .0
+                .iter()
+                .flat_map(|span| span.styled_graphemes(style))
+                // Required given the way composers work but might be refactored out if we change
+                // composers to operate on lines instead of a stream of graphemes.
+                .chain(iter::once(StyledGrapheme {
+                    symbol: "\n",
+                    style: self.style,
+                }))
+        });
+        let mut line_composer: Box<dyn LineComposer> = if let Some(Wrap { trim }) = self.wrap {
+            Box::new(WordWrapper::new(&mut styled, max_text_width, trim))
+        } else {
+            let mut line_composer = Box::new(LineTruncator::new(&mut styled, max_text_width));
+            if self.alignment == Alignment::Left {
+                line_composer.set_horizontal_offset(self.scroll.1);
+            }
+            line_composer
+        };
+        let mut text_width = 0;
+        let mut text_height = 0;
+        while let Some((_, line_width)) = line_composer.next_line() {
+            text_width = line_width.max(text_width);
+            text_height += 1;
+        }
+        (text_width, text_height)
+    }
 }

 impl Widget for Paragraph<'_> {
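Aside: `required_size` above runs the line composer once to measure how large the wrapped text will be, and callers (like the signature-help popup earlier) clamp the height to the viewport. A standalone Rust sketch of that measurement; naive character wrapping stands in for the tui line composer:

// Wrap each line at `max_width` columns and report the resulting
// (width, height), mirroring the shape of Paragraph::required_size.
fn required_size(text: &str, max_width: usize) -> (usize, usize) {
    let mut width = 0;
    let mut height = 0;
    for line in text.lines() {
        let cols = line.chars().count().max(1);
        width = width.max(cols.min(max_width));
        height += cols.div_ceil(max_width).max(1);
    }
    (width, height)
}

fn main() {
    let (w, h) = required_size("fn foo(a: usize, b: usize) -> usize", 20);
    assert_eq!((w, h), (20, 2)); // 35 columns wrap onto two 20-wide lines
}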
@@ -19,8 +19,8 @@ tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "p
 parking_lot.workspace = true
 arc-swap = { version = "1.7.1" }

-gix = { version = "0.70.0", features = ["attributes", "status"], default-features = false, optional = true }
-imara-diff = "0.1.8"
+gix = { version = "0.72.1", features = ["attributes", "status"], default-features = false, optional = true }
+imara-diff = "0.2.0"
 anyhow = "1"

 log = "0.4"
@@ -1,5 +1,4 @@
 use std::iter::Peekable;
-use std::ops::Range;
 use std::sync::Arc;

 use helix_core::Rope;

@@ -12,6 +11,8 @@ use tokio::time::Instant;

 use crate::diff::worker::DiffWorker;

+pub use imara_diff::Hunk;
+
 mod line_cache;
 mod worker;

@@ -52,8 +53,8 @@ impl DiffHandle {
         let worker = DiffWorker {
             channel: receiver,
             diff: diff.clone(),
-            new_hunks: Vec::default(),
             diff_finished_notify: Arc::default(),
+            diff_alloc: imara_diff::Diff::default(),
         };
         let handle = tokio::spawn(worker.run(diff_base, doc));
         let differ = DiffHandle {

@@ -118,48 +119,6 @@ const MAX_DIFF_LINES: usize = 64 * u16::MAX as usize;
 // cap average line length to 128 for files with MAX_DIFF_LINES
 const MAX_DIFF_BYTES: usize = MAX_DIFF_LINES * 128;

-/// A single change in a file potentially spanning multiple lines
-/// Hunks produced by the differs are always ordered by their position
-/// in the file and non-overlapping.
-/// Specifically for any two hunks `x` and `y` the following properties hold:
-///
-/// ``` no_compile
-/// assert!(x.before.end <= y.before.start);
-/// assert!(x.after.end <= y.after.start);
-/// ```
-#[derive(PartialEq, Eq, Clone, Debug)]
-pub struct Hunk {
-    pub before: Range<u32>,
-    pub after: Range<u32>,
-}
-
-impl Hunk {
-    /// Can be used instead of `Option::None` for better performance
-    /// because lines larger then `i32::MAX` are not supported by `imara-diff` anyways.
-    /// Has some nice properties where it usually is not necessary to check for `None` separately:
-    /// Empty ranges fail contains checks and also fails smaller then checks.
-    pub const NONE: Hunk = Hunk {
-        before: u32::MAX..u32::MAX,
-        after: u32::MAX..u32::MAX,
-    };
-
-    /// Inverts a change so that `before`
-    pub fn invert(&self) -> Hunk {
-        Hunk {
-            before: self.after.clone(),
-            after: self.before.clone(),
-        }
-    }
-
-    pub fn is_pure_insertion(&self) -> bool {
-        self.before.is_empty()
-    }
-
-    pub fn is_pure_removal(&self) -> bool {
-        self.after.is_empty()
-    }
-}
-
 /// A list of changes in a file sorted in ascending
 /// non-overlapping order
 #[derive(Debug)]
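Aside: the local `Hunk` type above is deleted in favor of `pub use imara_diff::Hunk;`. A standalone Rust sketch of the ordering invariant its removed docs stated, with an illustrative stand-in struct rather than the imara-diff type:

use std::ops::Range;

struct Hunk { before: Range<u32>, after: Range<u32> }

// Hunks are sorted by position and non-overlapping: for any adjacent pair,
// x.before.end <= y.before.start and x.after.end <= y.after.start.
fn ordered(hunks: &[Hunk]) -> bool {
    hunks.windows(2).all(|w| {
        w[0].before.end <= w[1].before.start && w[0].after.end <= w[1].after.start
    })
}

fn main() {
    let hunks = [
        Hunk { before: 0..1, after: 0..2 },
        Hunk { before: 4..4, after: 5..7 }, // pure insertion: empty `before`
    ];
    assert!(ordered(&hunks));
}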
@@ -13,7 +13,7 @@
 use std::mem::transmute;

 use helix_core::{Rope, RopeSlice};
-use imara_diff::intern::{InternedInput, Interner};
+use imara_diff::{InternedInput, Interner};

 use super::{MAX_DIFF_BYTES, MAX_DIFF_LINES};