Mirror of https://github.com/helix-editor/helix.git (synced 2025-10-06 00:13:28 +02:00)

Compare commits: 23.10...get-lang-c (194 commits)
Commits in this range (abbreviated SHAs only):

13b76ea797, 3014a2ae9b, 10b9c38ed9, 4e1aeb1b99, 17dd102e5c, 7739d3ece1, 84e24b33dc, 65d0412880,
305d6e9c89, 41ca46cf8c, 46ecc102ba, 7af78c7788, 48c49f0227, 0cbd8d3df1, 4da6191a1c, 20b91fd99a,
97145eaae6, bad10a5ddd, f4212421da, e2e8d2739a, 77ab792ac7, 918bd9c2b0, e46fb58595, 00d681cc69,
c8e58304bf, 154d9b6ed1, 73deba7044, a32d537d0a, f8ae2bc61b, 7e389b67c2, da4afaf3da, 8f2e611b7e,
2e3f330b12, b908abae2d, 78d85eb13f, 5876b763e1, a6ed104ea2, efc4865c78, a680b2e409, 7fd266efa9,
85fce2f5b6, 783ff27b1b, 515ef17207, 8653e1b02f, c874a896a5, 0036782059, 2ec4d5004d, 1fc20cd02b,
d5e6749fa2, 7b0f92bb3a, ab50299efa, 585402d9ff, 5f04d09f03, a98b8ddd1a, 63218a5126, 8736ce3889,
9ba691cd3a, 06d7dc628e, 970f9e6333, 0a83d85124, 80dd585966, c1ab94bbef, f27fdb2bf4, a1a20d231f,
c56cd6ee8b, 914c83420b, 33d85606cf, 723a132bdf, 3e79a35656, a5acfdbf10, 938a710904, 559bfc1f5e,
d29a66f267, 23fd145a56, e332c7d875, f1e34ce5a2, 11856329bf, b4571c292e, 437fbee425, 7c55190806,
c2591445c9, 49dffa7d24, f036451a0e, 7f44a6ad50, 3e249829ee, 53ad0f72a5, 510928618d, b81aacc5e1,
c3cb1795bf, 4c2bd4905e, bf7c4e1659, 44c3d48a94, ab763b3111, 9fcfb88132, 8532cec01c, fa7a8ffc50,
0d890ef0f7, 00d565bf74, 58daa31523, aad44f6dd0, 79965a238d, fcd564fddf, 455b206a8c, 466b87c8e5,
86023cf1e6, 0c81ef73e1, f8d261cd20, 0739d13b03, 0d9145a1bf, b023faacf8, 6d168eda27, 3e451f0d53,
008208fcfb, b30451f776, 4bc43347a1, 8de8a66182, 71fd85894b, b7f98d1d99, 3f9788daaa, 8c68074fa6,
8b0ae3d279, 090ed97e00, ff095ebd9b, f1b9c19fa9, db83eb0c50, 811d62d3b3, 41b307b673, 47b6c4bc78,
3052050ee0, bfd60a5b39, a0e5bb8520, b16752306c, b306b25e82, 6bf5548dbd, 09c78e8b4e, d4a0eba1a7,
5913073733, 1271a50a82, 34de1cab62, 2acf5e365e, 39aa6fa646, 2579bca21c, 7868136a18, 5889b81fc7,
73ca2d5f84, 3c8bf9df4a, 2040444da9, 40959bb449, e5d02cd4bd, 69a0df929a, 8b2d97eb56, 13386a4786,
5ec53c0222, 23fea46815, 172ef2fa9f, 6ab774da0b, 4229583631, aac7bd9b08, 91bdceb8b6, 676ab0c1f3,
cb0bc25a9f, e868678139, 7bc564d3dc, f73e9a8d15, a98ad137f9, 2fddc2a4fc, 10b178e94b, 8dc197721b,
5c325fe342, 70bbbd7d19, ae6a0a9cfd, 1755c61d08, a069b92897, 53bb62b318, d32e052e0e, d171e23f72,
403a1739cf, ccc3085ad0, 566f41635e, d0430f1c81, 7d7ed78681, 992b7a0c39, 44e03fa414, ef0c31db02,
f992c3b597, ab266b99e6, 99bf62a560, 4f1d414d9c, 553ffbcaa0, 9eec9adb8f, 2906660119, ef1f4f31b6,
5ce1c30f77, c24a67c0e4
.envrc (1 changed line)

@@ -1,5 +1,6 @@
watch_file shell.nix
watch_file flake.lock
watch_file rust-toolchain.toml

# try to use flakes, if it fails use normal nix (ie. shell.nix)
use flake || use nix

.github/ISSUE_TEMPLATE/bug_report.yaml (vendored, 10 changed lines)

@@ -55,6 +55,16 @@ body:
placeholder: wezterm 20220101-133340-7edc5b5a
validations:
required: true
- type: input
id: installation-method
attributes:
label: Installation Method
description: >
How you installed Helix - from a package manager like Homebrew or the
AUR, built from source, downloaded a binary from the releases page, etc.
placeholder: "source / brew / nixpkgs / flake / releases page"
validations:
required: true
- type: input
id: helix-version
attributes:

.github/workflows/build.yml (vendored, 14 changed lines)

@@ -22,6 +22,8 @@ jobs:
override: true

- uses: Swatinem/rust-cache@v2
with:
shared-key: "build"

- name: Run cargo check
run: cargo check
@@ -37,9 +39,11 @@
uses: actions/checkout@v4

- name: Install stable toolchain
uses: dtolnay/rust-toolchain@1.65
uses: dtolnay/rust-toolchain@1.70

- uses: Swatinem/rust-cache@v2
with:
shared-key: "build"

- name: Cache test tree-sitter grammar
uses: actions/cache@v3
@@ -66,11 +70,13 @@
uses: actions/checkout@v4

- name: Install stable toolchain
uses: dtolnay/rust-toolchain@1.65
uses: dtolnay/rust-toolchain@1.70
with:
components: rustfmt, clippy

- uses: Swatinem/rust-cache@v2
with:
shared-key: "build"

- name: Run cargo fmt
run: cargo fmt --all --check
@@ -91,9 +97,11 @@
uses: actions/checkout@v4

- name: Install stable toolchain
uses: dtolnay/rust-toolchain@1.65
uses: dtolnay/rust-toolchain@1.70

- uses: Swatinem/rust-cache@v2
with:
shared-key: "build"

- name: Validate queries
run: cargo xtask query-check

.github/workflows/cachix.yml (vendored, 4 changed lines)

@@ -14,10 +14,10 @@ jobs:
uses: actions/checkout@v4

- name: Install nix
uses: cachix/install-nix-action@v23
uses: cachix/install-nix-action@v24

- name: Authenticate with Cachix
uses: cachix/cachix-action@v12
uses: cachix/cachix-action@v13
with:
name: helix
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

.github/workflows/gh-pages.yml (vendored, 14 changed lines)

@@ -26,16 +26,16 @@ jobs:
OUTDIR=$(basename ${{ github.ref }})
echo "OUTDIR=$OUTDIR" >> $GITHUB_ENV

- name: Deploy
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book/book
destination_dir: ./${{ env.OUTDIR }}

- name: Deploy stable
uses: peaceiris/actions-gh-pages@v3
if: startswith(github.ref, 'refs/tags/')
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book/book

- name: Deploy
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book/book
destination_dir: ./${{ env.OUTDIR }}

.github/workflows/release.yml (vendored, 10 changed lines)

@@ -36,7 +36,7 @@ jobs:
- name: Bundle grammars
run: tar cJf grammars.tar.xz -C runtime/grammars/sources .

- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: grammars
path: grammars.tar.xz
@@ -106,7 +106,7 @@
uses: actions/checkout@v4

- name: Download grammars
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4

- name: Move grammars under runtime
if: "!startsWith(matrix.os, 'windows')"
@@ -220,7 +220,7 @@
fi
cp -r runtime dist

- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: bins-${{ matrix.build }}
path: dist
@@ -233,7 +233,7 @@
- name: Checkout sources
uses: actions/checkout@v4

- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4

- name: Build archive
shell: bash
@@ -288,7 +288,7 @@
overwrite: true

- name: Upload binaries as artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: env.preview == 'true'
with:
name: release

Cargo.lock (generated, 688 changed lines)

File diff suppressed because it is too large.

Cargo.toml (13 changed lines)

@@ -1,4 +1,5 @@
[workspace]
resolver = "2"
members = [
"helix-core",
"helix-view",
@@ -35,5 +36,15 @@ package.helix-tui.opt-level = 2
package.helix-term.opt-level = 2

[workspace.dependencies]
tree-sitter = { version = "0.20" }
tree-sitter = { version = "0.20", git = "https://github.com/tree-sitter/tree-sitter", rev = "ab09ae20d640711174b8da8a654f6b3dec93da1a" }
nucleo = "0.2.0"

[workspace.package]
version = "23.10.0"
edition = "2021"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
license = "MPL-2.0"
rust-version = "1.70"

@@ -18,7 +18,7 @@


A Kakoune / Neovim inspired editor, written in Rust.
A [Kakoune](https://github.com/mawww/kakoune) / [Neovim](https://github.com/neovim/neovim) inspired editor, written in Rust.

The editing model is very heavily based on Kakoune; during development I found
myself agreeing with most of Kakoune's design decisions.

@@ -51,7 +51,7 @@ Its settings will be merged with the configuration directory `config.toml` and t
| `auto-completion` | Enable automatic pop up of auto-completion | `true` |
| `auto-format` | Enable automatic formatting on save | `true` |
| `auto-save` | Enable automatic saving on the focus moving away from Helix. Requires [focus event support](https://github.com/helix-editor/helix/wiki/Terminal-Support) from your terminal | `false` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `400` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant | `250` |
| `preview-completion-insert` | Whether to apply completion item instantly when selected | `true` |
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
| `completion-replace` | Set to `true` to make completions always replace the entire word and not just the part before the cursor | `false` |
@@ -65,6 +65,8 @@ Its settings will be merged with the configuration directory `config.toml` and t
| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml` | `[]` |
| `default-line-ending` | The line ending to use for new documents. Can be `native`, `lf`, `crlf`, `ff`, `cr` or `nel`. `native` uses the platform's native line ending (`crlf` on Windows, otherwise `lf`). | `native` |
| `insert-final-newline` | Whether to automatically insert a trailing line-ending on write if missing | `true` |
| `popup-border` | Draw border around `popup`, `menu`, `all`, or `none` | `none` |
| `indent-heuristic` | How the indentation for a newly inserted line is computed: `simple` just copies the indentation level from the previous line, `tree-sitter` computes the indentation based on the syntax tree and `hybrid` combines both approaches. If the chosen heuristic is not available, a different one will be used as a fallback (the fallback order being `hybrid` -> `tree-sitter` -> `simple`). | `hybrid`

### `[editor.statusline]` Section

@@ -166,16 +168,31 @@ All git related options are only enabled in a git repository.

| Key | Description | Default |
|--|--|---------|
|`hidden` | Enables ignoring hidden files | true
|`follow-symlinks` | Follow symlinks instead of ignoring them | true
|`deduplicate-links` | Ignore symlinks that point at files already shown in the picker | true
|`parents` | Enables reading ignore files from parent directories | true
|`ignore` | Enables reading `.ignore` files | true
|`git-ignore` | Enables reading `.gitignore` files | true
|`git-global` | Enables reading global `.gitignore`, whose path is specified in git's config: `core.excludefile` option | true
|`git-exclude` | Enables reading `.git/info/exclude` files | true
|`hidden` | Enables ignoring hidden files | `true`
|`follow-symlinks` | Follow symlinks instead of ignoring them | `true`
|`deduplicate-links` | Ignore symlinks that point at files already shown in the picker | `true`
|`parents` | Enables reading ignore files from parent directories | `true`
|`ignore` | Enables reading `.ignore` files | `true`
|`git-ignore` | Enables reading `.gitignore` files | `true`
|`git-global` | Enables reading global `.gitignore`, whose path is specified in git's config: `core.excludesfile` option | `true`
|`git-exclude` | Enables reading `.git/info/exclude` files | `true`
|`max-depth` | Set with an integer value for maximum depth to recurse | Defaults to `None`.

Ignore files can be placed locally as `.ignore` or put in your home directory as `~/.ignore`. They support the usual ignore and negative ignore (unignore) rules used in `.gitignore` files.

Additionally, you can use Helix-specific ignore files by creating a local `.helix/ignore` file in the current workspace or a global `ignore` file located in your Helix config directory:
- Linux and Mac: `~/.config/helix/ignore`
- Windows: `%AppData%\helix\ignore`

Example:

```ini
# unignore in file picker and global search
!.github/
!.gitignore
!.gitattributes
```

### `[editor.auto-pairs]` Section

Enables automatic insertion of pairs to parentheses, brackets, etc. Can be a

@@ -1,5 +1,6 @@
|
||||
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| agda | ✓ | | | |
|
||||
| astro | ✓ | | | |
|
||||
| awk | ✓ | ✓ | | `awk-language-server` |
|
||||
| bash | ✓ | ✓ | ✓ | `bash-language-server` |
|
||||
@@ -10,13 +11,13 @@
|
||||
| blueprint | ✓ | | | `blueprint-compiler` |
|
||||
| c | ✓ | ✓ | ✓ | `clangd` |
|
||||
| c-sharp | ✓ | ✓ | | `OmniSharp` |
|
||||
| cabal | | | | |
|
||||
| cabal | | | | `haskell-language-server-wrapper` |
|
||||
| cairo | ✓ | ✓ | ✓ | `cairo-language-server` |
|
||||
| capnp | ✓ | | ✓ | |
|
||||
| clojure | ✓ | | | `clojure-lsp` |
|
||||
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
|
||||
| comment | ✓ | | | |
|
||||
| common-lisp | ✓ | | | `cl-lsp` |
|
||||
| common-lisp | ✓ | | ✓ | `cl-lsp` |
|
||||
| cpon | ✓ | | ✓ | |
|
||||
| cpp | ✓ | ✓ | ✓ | `clangd` |
|
||||
| crystal | ✓ | ✓ | | `crystalline` |
|
||||
@@ -24,6 +25,7 @@
|
||||
| cue | ✓ | | | `cuelsp` |
|
||||
| d | ✓ | ✓ | ✓ | `serve-d` |
|
||||
| dart | ✓ | | ✓ | `dart` |
|
||||
| dbml | ✓ | | | |
|
||||
| devicetree | ✓ | | | |
|
||||
| dhall | ✓ | ✓ | | `dhall-lsp-server` |
|
||||
| diff | ✓ | | | |
|
||||
@@ -54,7 +56,8 @@
|
||||
| git-rebase | ✓ | | | |
|
||||
| gleam | ✓ | ✓ | | `gleam` |
|
||||
| glsl | ✓ | ✓ | ✓ | |
|
||||
| go | ✓ | ✓ | ✓ | `gopls` |
|
||||
| gn | ✓ | | | |
|
||||
| go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
|
||||
| godot-resource | ✓ | | | |
|
||||
| gomod | ✓ | | | `gopls` |
|
||||
| gotmpl | ✓ | | | `gopls` |
|
||||
@@ -65,12 +68,14 @@
|
||||
| haskell-persistent | ✓ | | | |
|
||||
| hcl | ✓ | | ✓ | `terraform-ls` |
|
||||
| heex | ✓ | ✓ | | `elixir-ls` |
|
||||
| hocon | ✓ | | ✓ | |
|
||||
| hosts | ✓ | | | |
|
||||
| html | ✓ | | | `vscode-html-language-server` |
|
||||
| hurl | ✓ | | ✓ | |
|
||||
| idris | | | | `idris2-lsp` |
|
||||
| iex | ✓ | | | |
|
||||
| ini | ✓ | | | |
|
||||
| janet | ✓ | | | |
|
||||
| java | ✓ | ✓ | ✓ | `jdtls` |
|
||||
| javascript | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| jinja | ✓ | | | |
|
||||
@@ -81,7 +86,7 @@
|
||||
| jsx | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| julia | ✓ | ✓ | ✓ | `julia` |
|
||||
| just | ✓ | ✓ | ✓ | |
|
||||
| kdl | ✓ | | | |
|
||||
| kdl | ✓ | ✓ | ✓ | |
|
||||
| kotlin | ✓ | | | `kotlin-language-server` |
|
||||
| latex | ✓ | ✓ | | `texlab` |
|
||||
| lean | ✓ | | | `lean` |
|
||||
@@ -89,6 +94,8 @@
|
||||
| llvm | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir | ✓ | ✓ | ✓ | |
|
||||
| llvm-mir-yaml | ✓ | | ✓ | |
|
||||
| log | ✓ | | | |
|
||||
| lpf | ✓ | | | |
|
||||
| lua | ✓ | ✓ | ✓ | `lua-language-server` |
|
||||
| make | ✓ | | | |
|
||||
| markdoc | ✓ | | | `markdoc-ls` |
|
||||
@@ -103,7 +110,7 @@
|
||||
| nickel | ✓ | | ✓ | `nls` |
|
||||
| nim | ✓ | ✓ | ✓ | `nimlangserver` |
|
||||
| nix | ✓ | | | `nil` |
|
||||
| nu | ✓ | | | |
|
||||
| nu | ✓ | | | `nu` |
|
||||
| nunjucks | ✓ | | | |
|
||||
| ocaml | ✓ | | ✓ | `ocamllsp` |
|
||||
| ocaml-interface | ✓ | | | `ocamllsp` |
|
||||
@@ -121,13 +128,13 @@
|
||||
| ponylang | ✓ | ✓ | ✓ | |
|
||||
| prisma | ✓ | | | `prisma-language-server` |
|
||||
| prolog | | | | `swipl` |
|
||||
| protobuf | ✓ | | ✓ | `bufls`, `pb` |
|
||||
| protobuf | ✓ | ✓ | ✓ | `bufls`, `pb` |
|
||||
| prql | ✓ | | | |
|
||||
| purescript | ✓ | ✓ | | `purescript-language-server` |
|
||||
| python | ✓ | ✓ | ✓ | `pylsp` |
|
||||
| qml | ✓ | | ✓ | `qmlls` |
|
||||
| r | ✓ | | | `R` |
|
||||
| racket | ✓ | | | `racket` |
|
||||
| racket | ✓ | | ✓ | `racket` |
|
||||
| regex | ✓ | | | |
|
||||
| rego | ✓ | | | `regols` |
|
||||
| rescript | ✓ | ✓ | | `rescript-language-server` |
|
||||
@@ -138,10 +145,11 @@
|
||||
| ruby | ✓ | ✓ | ✓ | `solargraph` |
|
||||
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
|
||||
| sage | ✓ | ✓ | | |
|
||||
| scala | ✓ | | ✓ | `metals` |
|
||||
| scheme | ✓ | | | |
|
||||
| scala | ✓ | ✓ | ✓ | `metals` |
|
||||
| scheme | ✓ | | ✓ | |
|
||||
| scss | ✓ | | | `vscode-css-language-server` |
|
||||
| slint | ✓ | | ✓ | `slint-lsp` |
|
||||
| smali | ✓ | | ✓ | |
|
||||
| smithy | ✓ | | | `cs` |
|
||||
| sml | ✓ | | | |
|
||||
| solidity | ✓ | | | `solc` |
|
||||
@@ -163,6 +171,7 @@
|
||||
| tsx | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| twig | ✓ | | | |
|
||||
| typescript | ✓ | ✓ | ✓ | `typescript-language-server` |
|
||||
| typst | ✓ | | | `typst-lsp` |
|
||||
| ungrammar | ✓ | | | |
|
||||
| unison | ✓ | | | |
|
||||
| uxntal | ✓ | | | |
|
||||
|
@@ -17,7 +17,7 @@
| `:write-buffer-close!`, `:wbc!` | Force write changes to disk creating necessary subdirectories and closes the buffer. Accepts an optional path (:write-buffer-close! some/path.txt) |
| `:new`, `:n` | Create a new scratch buffer. |
| `:format`, `:fmt` | Format the file using the LSP formatter. |
| `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.) |
| `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-16 for number of spaces.) |
| `:line-ending` | Set the document's default line ending. Options: crlf, lf. |
| `:earlier`, `:ear` | Jump back to an earlier point in edit history. Accepts a number of steps or a time span. |
| `:later`, `:lat` | Jump to a later point in edit history. Accepts a number of steps or a time span. |
@@ -85,3 +85,4 @@
| `:reset-diff-change`, `:diffget`, `:diffg` | Reset the diff change at the cursor position. |
| `:clear-register` | Clear given register. If no argument is provided, clear all registers. |
| `:redraw` | Clear and re-render the whole UI |
| `:move` | Move the current buffer and its corresponding file to a different path |

@@ -36,6 +36,7 @@ below.
3. Refer to the
[tree-sitter website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries)
for more information on writing queries.
4. A list of highlight captures can be found [on the themes page](https://docs.helix-editor.com/themes.html#scopes).

> 💡 In Helix, the first matching query takes precedence when evaluating
> queries, which is different from other editors such as Neovim where the last
@@ -51,3 +52,4 @@ below.
grammars.
- If a parser is causing a segfault, or you want to remove it, make sure to
remove the compiled parser located at `runtime/grammars/<name>.so`.
- If you are attempting to add queries and Helix is unable to locate them, ensure that the environment variable `HELIX_RUNTIME` is set to the location of the `runtime` folder you're developing in.

@@ -12,6 +12,15 @@ Note that it matters where these added indents begin. For example,
multiple indent level increases that start on the same line only increase
the total indent level by 1. See [Capture types](#capture-types).

By default, Helix uses the `hybrid` indentation heuristic. This means that
indent queries are not used to compute the expected absolute indentation of a
line but rather the expected difference in indentation between the new and an
already existing line. This difference is then added to the actual indentation
of the already existing line. Since this makes errors in the indent queries
harder to find, it is recommended to disable it when testing via
`:set indent-heuristic tree-sitter`. The rest of this guide assumes that
the `tree-sitter` heuristic is used.
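
The difference-based computation described above can be pictured with a small standalone sketch. It is only an illustration under simplifying assumptions (space-only indentation, a hypothetical `hybrid_indent` helper); it is not Helix's actual `Indentation` API:

```rust
/// Hypothetical helper illustrating the `hybrid` idea: the syntax tree only yields
/// *relative* indent levels, and the result is the reference line's real leading
/// whitespace plus (or minus) that difference.
fn hybrid_indent(
    reference_whitespace: &str, // actual leading whitespace of an existing line
    reference_level: isize,     // level the indent queries compute for that line
    new_line_level: isize,      // level the indent queries compute for the new line
    indent_unit: &str,          // e.g. "    " for four-space indents
) -> String {
    let diff = new_line_level - reference_level;
    let mut indent = reference_whitespace.to_string();
    if diff >= 0 {
        // Indent further: append `diff` indent units to the copied whitespace.
        indent.push_str(&indent_unit.repeat(diff as usize));
    } else {
        // Outdent: drop `-diff` indent units from the end of the copied whitespace.
        let drop = indent_unit.len() * (-diff) as usize;
        let keep = indent.len().saturating_sub(drop);
        indent.truncate(keep);
    }
    indent
}

fn main() {
    // The reference line is indented with four spaces and sits at level 1; the new
    // line is computed at level 2, so one more indent unit is appended.
    assert_eq!(hybrid_indent("    ", 1, 2, "    "), "        ");
    // An outdent relative to the reference line removes one unit instead.
    assert_eq!(hybrid_indent("        ", 2, 1, "    "), "    ");
}
```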
## Indent queries

When Helix is inserting a new line through `o`, `O`, or `<ret>`, to determine

@@ -13,6 +13,7 @@
- [AppImage](#appimage)
- [macOS](#macos)
- [Homebrew Core](#homebrew-core)
- [MacPorts](#macports)
- [Windows](#windows)
- [Winget](#winget)
- [Scoop](#scoop)
@@ -64,10 +65,7 @@ sudo apt install helix

### Fedora/RHEL

Enable the `COPR` repository for Helix:

```sh
sudo dnf copr enable varlad/helix
sudo dnf install helix
```

@@ -133,6 +131,12 @@ chmod +x helix-*.AppImage # change permission for executable mode
brew install helix
```

### MacPorts

```sh
port install helix
```

## Windows

Install on Windows using [Winget](https://learn.microsoft.com/en-us/windows/package-manager/winget/), [Scoop](https://scoop.sh/), [Chocolatey](https://chocolatey.org/)
@@ -200,6 +204,8 @@ RUSTFLAGS="-C target-feature=-crt-static"
This command will create the `hx` executable and construct the tree-sitter
grammars in the local `runtime` folder.

> 💡 If you do not want to fetch or build grammars, set an environment variable `HELIX_DISABLE_AUTO_GRAMMAR_BUILD`

> 💡 Tree-sitter grammars can be fetched and compiled if not pre-packaged. Fetch
> grammars with `hx --grammar fetch` and compile them with
> `hx --grammar build`. This will install them in

@@ -25,7 +25,7 @@

## Normal mode

Normal mode is the default mode when you launch helix. Return to it from other modes by typing `Escape`.
Normal mode is the default mode when you launch helix. You can return to it from other modes by pressing the `Escape` key.

### Movement

@@ -205,7 +205,7 @@ Jumps to various locations.
| ----- | ----------- | ------- |
| `g` | Go to line number `<n>` else start of file | `goto_file_start` |
| `e` | Go to the end of the file | `goto_last_line` |
| `f` | Go to files in the selection | `goto_file` |
| `f` | Go to files in the selections | `goto_file` |
| `h` | Go to the start of the line | `goto_line_start` |
| `l` | Go to the end of the line | `goto_line_end` |
| `s` | Go to first non-whitespace character of the line | `goto_first_nonwhitespace` |
@@ -253,8 +253,8 @@ This layer is similar to Vim keybindings as Kakoune does not support windows.
| `w`, `Ctrl-w` | Switch to next window | `rotate_view` |
| `v`, `Ctrl-v` | Vertical right split | `vsplit` |
| `s`, `Ctrl-s` | Horizontal bottom split | `hsplit` |
| `f` | Go to files in the selection in horizontal splits | `goto_file` |
| `F` | Go to files in the selection in vertical splits | `goto_file` |
| `f` | Go to files in the selections in horizontal splits | `goto_file` |
| `F` | Go to files in the selections in vertical splits | `goto_file` |
| `h`, `Ctrl-h`, `Left` | Move to left split | `jump_view_left` |
| `j`, `Ctrl-j`, `Down` | Move to split below | `jump_view_down` |
| `k`, `Ctrl-k`, `Up` | Move to split above | `jump_view_up` |

@@ -66,8 +66,10 @@ These configuration keys are available:
| `language-servers` | The Language Servers used for this language. See below for more information in the section [Configuring Language Servers for a language](#configuring-language-servers-for-a-language) |
| `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
| `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout |
| `soft-wrap` | [editor.softwrap](./configuration.md#editorsoft-wrap-section)
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` |
| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. |
| `persistent-diagnostic-sources` | An array of LSP diagnostic sources assumed unchanged when the language server resends the same set of diagnostics. Helix can track the position for these diagnostics internally instead. Useful for diagnostics that are recomputed on save.
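
The `formatter` contract in the table above is a plain pipe-through: the tool receives the whole document on stdin and must print the formatted document on stdout. A minimal sketch of driving such a tool, using a hypothetical helper name rather than Helix's actual implementation:

```rust
use std::io::Write;
use std::process::{Command, Stdio};

/// Illustrative only: run an external formatter that reads the document on stdin
/// and writes the formatted result to stdout (the contract the `formatter` key expects).
fn format_via_stdio(program: &str, args: &[&str], source: &str) -> std::io::Result<String> {
    let mut child = Command::new(program)
        .args(args)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;

    // Feed the unformatted document; dropping the handle closes stdin so the tool can finish.
    child
        .stdin
        .take()
        .expect("stdin was requested above")
        .write_all(source.as_bytes())?;

    // Read the formatted document back. (A real implementation would stream stdin and
    // stdout concurrently to avoid blocking on very large files.)
    let output = child.wait_with_output()?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn main() -> std::io::Result<()> {
    // Example usage, assuming `rustfmt` is on PATH (it formats stdin to stdout by default).
    let formatted = format_via_stdio("rustfmt", &[], "fn main(){println!(\"hi\");}")?;
    println!("{formatted}");
    Ok(())
}
```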
### File-type detection and the `file-types` key

@@ -155,6 +155,7 @@ We use a similar set of scopes as

- `type` - Types
- `builtin` - Primitive types provided by the language (`int`, `usize`)
- `parameter` - Generic type parameters (`T`)
- `enum`
- `variant`
- `constructor`
@@ -295,11 +296,14 @@ These scopes are used for theming the editor interface:
| `ui.statusline.insert` | Statusline mode during insert mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.select` | Statusline mode during select mode ([only if `editor.color-modes` is enabled][editor-section]) |
| `ui.statusline.separator` | Separator character in statusline |
| `ui.bufferline` | Style for the buffer line |
| `ui.bufferline.active` | Style for the active buffer in buffer line |
| `ui.bufferline.background` | Style for bufferline background |
| `ui.popup` | Documentation popups (e.g. Space + k) |
| `ui.popup.info` | Prompt for multiple key options |
| `ui.window` | Borderlines separating splits |
| `ui.help` | Description box for commands |
| `ui.text` | Command prompts, popup text, etc. |
| `ui.text` | Default text style, command prompts, popup text, etc. |
| `ui.text.focus` | The currently selected line in the picker |
| `ui.text.inactive` | Same as `ui.text` but when the text is inactive (e.g. suggestions) |
| `ui.text.info` | The key: command text in `ui.popup.info` boxes |

@@ -59,8 +59,8 @@ Some registers have special behavior when read from and written to.
| `#` | Selection indices (first selection is `1`, second is `2`, etc.) | This register is not writable |
| `.` | Contents of the current selections | This register is not writable |
| `%` | Name of the current file | This register is not writable |
| `*` | Reads from the system clipboard | Joins and yanks to the system clipboard |
| `+` | Reads from the primary clipboard | Joins and yanks to the primary clipboard |
| `+` | Reads from the system clipboard | Joins and yanks to the system clipboard |
| `*` | Reads from the primary clipboard | Joins and yanks to the primary clipboard |

When yanking multiple selections to the clipboard registers, the selections
are joined with newlines. Pasting from these registers will paste multiple

@@ -1,16 +1,23 @@
## Checklist

Helix releases are versioned in the Calendar Versioning scheme:
`YY.0M(.MICRO)`, for example, `22.05` for May of 2022. In these instructions
we'll use `<tag>` as a placeholder for the tag being published.
`YY.0M(.MICRO)`, for example, `22.05` for May of 2022, or in a patch release,
`22.05.1`. In these instructions we'll use `<tag>` as a placeholder for the tag
being published.

* Merge the changelog PR
* Add new `<release>` entry in `contrib/Helix.appdata.xml` with release information according to the [AppStream spec](https://www.freedesktop.org/software/appstream/docs/sect-Metadata-Releases.html)
* Tag and push
* `git tag -s -m "<tag>" -a <tag> && git push`
* Make sure to switch to master and pull first
* Edit the `VERSION` file and change the date to the next planned release
* Releases are planned to happen every two months, so `22.05` would change to `22.07`
* Edit the `Cargo.toml` file and change the date in the `version` field to the next planned release
* Due to Cargo having a strict requirement on SemVer with 3 or more version
numbers, a `0` is required in the micro version; however, unless we are
publishing a patch release after a major release, the `.0` is dropped in
the user facing version.
* Releases are planned to happen every two months, so `22.05.0` would change to `22.07.0`
* If we are pushing a patch/bugfix release in the same month as the previous
release, bump the micro version, e.g. `22.07.0` to `22.07.1`
* Wait for the Release CI to finish
* It will automatically turn the git tag into a GitHub release when it uploads artifacts
* Edit the new release

47
flake.lock
generated
47
flake.lock
generated
@@ -2,23 +2,16 @@
|
||||
"nodes": {
|
||||
"crane": {
|
||||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": [
|
||||
"flake-utils"
|
||||
],
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"rust-overlay": [
|
||||
"rust-overlay"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1688772518,
|
||||
"narHash": "sha256-ol7gZxwvgLnxNSZwFTDJJ49xVY5teaSvF7lzlo3YQfM=",
|
||||
"lastModified": 1701025348,
|
||||
"narHash": "sha256-42GHmYH+GF7VjwGSt+fVT1CQuNpGanJbNgVHTAZppUM=",
|
||||
"owner": "ipetkov",
|
||||
"repo": "crane",
|
||||
"rev": "8b08e96c9af8c6e3a2b69af5a7fa168750fcf88e",
|
||||
"rev": "42afaeb1a0325194a7cdb526332d2cb92fddd07b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -27,32 +20,16 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1673956053,
|
||||
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1689068808,
|
||||
"narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
|
||||
"lastModified": 1694529238,
|
||||
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
|
||||
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -63,11 +40,11 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1690272529,
|
||||
"narHash": "sha256-MakzcKXEdv/I4qJUtq/k/eG+rVmyOZLnYNC2w1mB59Y=",
|
||||
"lastModified": 1700794826,
|
||||
"narHash": "sha256-RyJTnTNKhO0yqRpDISk03I/4A67/dp96YRxc86YOPgU=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "ef99fa5c5ed624460217c31ac4271cfb5cb2502c",
|
||||
"rev": "5a09cb4b393d58f9ed0d9ca1555016a8543c2ac8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -95,11 +72,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1690424156,
|
||||
"narHash": "sha256-Bpml+L280tHTQpwpC5/BJbU4HSvEzMvW8IZ4gAXimhE=",
|
||||
"lastModified": 1701137803,
|
||||
"narHash": "sha256-0LcPAdql5IhQSUXJx3Zna0dYTgdIoYO7zUrsKgiBd04=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "f335a0213504c7e6481c359dc1009be9cf34432c",
|
||||
"rev": "9dd940c967502f844eacea52a61e9596268d4f70",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
10
flake.nix
10
flake.nix
@@ -13,8 +13,6 @@
|
||||
};
|
||||
crane = {
|
||||
url = "github:ipetkov/crane";
|
||||
inputs.rust-overlay.follows = "rust-overlay";
|
||||
inputs.flake-utils.follows = "flake-utils";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
};
|
||||
@@ -123,9 +121,10 @@
|
||||
rustToolchain = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml;
|
||||
craneLibMSRV = (crane.mkLib pkgs).overrideToolchain rustToolchain;
|
||||
craneLibStable = (crane.mkLib pkgs).overrideToolchain pkgs.pkgsBuildHost.rust-bin.stable.latest.default;
|
||||
commonArgs =
|
||||
{
|
||||
commonArgs = {
|
||||
inherit stdenv;
|
||||
inherit (craneLibMSRV.crateNameFromCargoToml {cargoToml = ./helix-term/Cargo.toml;}) pname;
|
||||
inherit (craneLibMSRV.crateNameFromCargoToml {cargoToml = ./Cargo.toml;}) version;
|
||||
src = filteredSource;
|
||||
# disable fetching and building of tree-sitter grammars in the helix-term build.rs
|
||||
HELIX_DISABLE_AUTO_GRAMMAR_BUILD = "1";
|
||||
@@ -133,8 +132,7 @@
|
||||
# disable tests
|
||||
doCheck = false;
|
||||
meta.mainProgram = "hx";
|
||||
}
|
||||
// craneLibMSRV.crateNameFromCargoToml {cargoToml = ./helix-term/Cargo.toml;};
|
||||
};
|
||||
cargoArtifacts = craneLibMSRV.buildDepsOnly commonArgs;
|
||||
in {
|
||||
packages = {
|
||||
|
43
grammars.nix
43
grammars.nix
@@ -5,6 +5,7 @@
|
||||
runCommand,
|
||||
yj,
|
||||
includeGrammarIf ? _: true,
|
||||
grammarOverlays ? [],
|
||||
...
|
||||
}: let
|
||||
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
|
||||
@@ -48,22 +49,22 @@
|
||||
then sourceGitHub
|
||||
else sourceGit;
|
||||
in
|
||||
stdenv.mkDerivation rec {
|
||||
stdenv.mkDerivation {
|
||||
# see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix
|
||||
|
||||
pname = "helix-tree-sitter-${grammar.name}";
|
||||
version = grammar.source.rev;
|
||||
|
||||
src =
|
||||
if builtins.hasAttr "subpath" grammar.source
|
||||
then "${source}/${grammar.source.subpath}"
|
||||
else source;
|
||||
src = source;
|
||||
sourceRoot = if builtins.hasAttr "subpath" grammar.source then
|
||||
"source/${grammar.source.subpath}"
|
||||
else
|
||||
"source";
|
||||
|
||||
dontUnpack = true;
|
||||
dontConfigure = true;
|
||||
|
||||
FLAGS = [
|
||||
"-I${src}/src"
|
||||
"-Isrc"
|
||||
"-g"
|
||||
"-O3"
|
||||
"-fPIC"
|
||||
@@ -76,13 +77,13 @@
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
if [[ -e "$src/src/scanner.cc" ]]; then
|
||||
$CXX -c "$src/src/scanner.cc" -o scanner.o $FLAGS
|
||||
elif [[ -e "$src/src/scanner.c" ]]; then
|
||||
$CC -c "$src/src/scanner.c" -o scanner.o $FLAGS
|
||||
if [[ -e src/scanner.cc ]]; then
|
||||
$CXX -c src/scanner.cc -o scanner.o $FLAGS
|
||||
elif [[ -e src/scanner.c ]]; then
|
||||
$CC -c src/scanner.c -o scanner.o $FLAGS
|
||||
fi
|
||||
|
||||
$CC -c "$src/src/parser.c" -o parser.o $FLAGS
|
||||
$CC -c src/parser.c -o parser.o $FLAGS
|
||||
$CXX -shared -o $NAME.so *.o
|
||||
|
||||
ls -al
|
||||
@@ -105,15 +106,17 @@
|
||||
'';
|
||||
};
|
||||
grammarsToBuild = builtins.filter includeGrammarIf gitGrammars;
|
||||
builtGrammars =
|
||||
builtins.map (grammar: {
|
||||
builtGrammars = builtins.map (grammar: {
|
||||
inherit (grammar) name;
|
||||
artifact = buildGrammar grammar;
|
||||
})
|
||||
grammarsToBuild;
|
||||
grammarLinks =
|
||||
builtins.map (grammar: "ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so")
|
||||
builtGrammars;
|
||||
value = buildGrammar grammar;
|
||||
}) grammarsToBuild;
|
||||
extensibleGrammars =
|
||||
lib.makeExtensible (self: builtins.listToAttrs builtGrammars);
|
||||
overlayedGrammars = lib.pipe extensibleGrammars
|
||||
(builtins.map (overlay: grammar: grammar.extend overlay) grammarOverlays);
|
||||
grammarLinks = lib.mapAttrsToList
|
||||
(name: artifact: "ln -s ${artifact}/${name}.so $out/${name}.so")
|
||||
(lib.filterAttrs (n: v: lib.isDerivation v) overlayedGrammars);
|
||||
in
|
||||
runCommand "consolidated-helix-grammars" {} ''
|
||||
mkdir -p $out
|
||||
|
@@ -1,21 +1,22 @@
|
||||
[package]
|
||||
name = "helix-core"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
description = "Helix editor core editing primitives"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
include = ["src/**/*", "README.md"]
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[features]
|
||||
unicode-lines = ["ropey/unicode_lines"]
|
||||
integration = []
|
||||
|
||||
[dependencies]
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
||||
ropey = { version = "1.6.1", default-features = false, features = ["simd"] }
|
||||
smallvec = "1.11"
|
||||
@@ -26,12 +27,12 @@ unicode-general-category = "0.6"
|
||||
# slab = "0.4.2"
|
||||
slotmap = "1.0"
|
||||
tree-sitter.workspace = true
|
||||
once_cell = "1.18"
|
||||
once_cell = "1.19"
|
||||
arc-swap = "1"
|
||||
regex = "1"
|
||||
bitflags = "2.4"
|
||||
ahash = "0.8.5"
|
||||
hashbrown = { version = "0.14.2", features = ["raw"] }
|
||||
ahash = "0.8.6"
|
||||
hashbrown = { version = "0.14.3", features = ["raw"] }
|
||||
dunce = "1.0"
|
||||
|
||||
log = "0.4"
|
||||
|
@@ -39,6 +39,10 @@ pub enum DiagnosticTag {
#[derive(Debug, Clone)]
pub struct Diagnostic {
    pub range: Range,
    // whether this diagnostic ends at the end of(or inside) a word
    pub ends_at_word: bool,
    pub starts_at_word: bool,
    pub zero_width: bool,
    pub line: usize,
    pub message: String,
    pub severity: Option<Severity>,

@@ -4,10 +4,11 @@ use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
|
||||
|
||||
use crate::{
|
||||
chars::{char_is_line_ending, char_is_whitespace},
|
||||
find_first_non_whitespace_char,
|
||||
graphemes::{grapheme_width, tab_width_at},
|
||||
syntax::{LanguageConfiguration, RopeProvider, Syntax},
|
||||
syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
|
||||
tree_sitter::Node,
|
||||
Rope, RopeGraphemes, RopeSlice,
|
||||
Position, Rope, RopeGraphemes, RopeSlice,
|
||||
};
|
||||
|
||||
/// Enum representing indentation style.
|
||||
@@ -21,7 +22,7 @@ pub enum IndentStyle {
|
||||
|
||||
// 16 spaces
|
||||
const INDENTS: &str = " ";
|
||||
const MAX_INDENT: u8 = 16;
|
||||
pub const MAX_INDENT: u8 = 16;
|
||||
|
||||
impl IndentStyle {
|
||||
/// Creates an `IndentStyle` from an indentation string.
|
||||
@@ -196,6 +197,56 @@ pub fn indent_level_for_line(line: RopeSlice, tab_width: usize, indent_width: us
|
||||
len / indent_width
|
||||
}
|
||||
|
||||
/// Create a string of tabs & spaces that has the same visual width as the given RopeSlice (independent of the tab width).
|
||||
fn whitespace_with_same_width(text: RopeSlice) -> String {
|
||||
let mut s = String::new();
|
||||
for grapheme in RopeGraphemes::new(text) {
|
||||
if grapheme == "\t" {
|
||||
s.push('\t');
|
||||
} else {
|
||||
s.extend(std::iter::repeat(' ').take(grapheme_width(&Cow::from(grapheme))));
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
|
||||
fn add_indent_level(
|
||||
mut base_indent: String,
|
||||
added_indent_level: isize,
|
||||
indent_style: &IndentStyle,
|
||||
tab_width: usize,
|
||||
) -> String {
|
||||
if added_indent_level >= 0 {
|
||||
// Adding a non-negative indent is easy, we can simply append the indent string
|
||||
base_indent.push_str(&indent_style.as_str().repeat(added_indent_level as usize));
|
||||
base_indent
|
||||
} else {
|
||||
// In this case, we want to return a prefix of `base_indent`.
|
||||
// Since the width of a tab depends on its offset, we cannot simply iterate over
|
||||
// the chars of `base_indent` in reverse until we have the desired indent reduction,
|
||||
// instead we iterate over them twice in forward direction.
|
||||
let base_indent_rope = RopeSlice::from(base_indent.as_str());
|
||||
#[allow(deprecated)]
|
||||
let base_indent_width =
|
||||
crate::visual_coords_at_pos(base_indent_rope, base_indent_rope.len_chars(), tab_width)
|
||||
.col;
|
||||
let target_indent_width = base_indent_width
|
||||
.saturating_sub((-added_indent_level) as usize * indent_style.indent_width(tab_width));
|
||||
#[allow(deprecated)]
|
||||
let char_end_idx = crate::pos_at_visual_coords(
|
||||
base_indent_rope,
|
||||
Position {
|
||||
row: 0,
|
||||
col: target_indent_width,
|
||||
},
|
||||
tab_width,
|
||||
);
|
||||
let byte_end_idx = base_indent_rope.char_to_byte(char_end_idx);
|
||||
base_indent.truncate(byte_end_idx);
|
||||
base_indent
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes for node and all ancestors whether they are the first node on their line.
|
||||
/// The first entry in the return value represents the root node, the last one the node itself
|
||||
fn get_first_in_line(mut node: Node, new_line_byte_pos: Option<usize>) -> Vec<bool> {
|
||||
@@ -241,21 +292,21 @@ fn get_first_in_line(mut node: Node, new_line_byte_pos: Option<usize>) -> Vec<bo
|
||||
/// - max(0, indent - outdent) tabs, if tabs are used for indentation
|
||||
/// - max(0, indent - outdent)*indent_width spaces, if spaces are used for indentation
|
||||
#[derive(Default, Debug, PartialEq, Eq, Clone)]
|
||||
pub struct Indentation {
|
||||
pub struct Indentation<'a> {
|
||||
indent: usize,
|
||||
indent_always: usize,
|
||||
outdent: usize,
|
||||
outdent_always: usize,
|
||||
/// The alignment, as a string containing only tabs & spaces. Storing this as a string instead of e.g.
|
||||
/// the (visual) width ensures that the alignment is preserved even if the tab width changes.
|
||||
align: Option<String>,
|
||||
align: Option<RopeSlice<'a>>,
|
||||
}
|
||||
|
||||
impl Indentation {
|
||||
impl<'a> Indentation<'a> {
|
||||
/// Add some other [Indentation] to this.
|
||||
/// The added indent should be the total added indent from one line.
|
||||
/// Indent should always be added starting from the bottom (or equivalently, the innermost tree-sitter node).
|
||||
fn add_line(&mut self, added: Indentation) {
|
||||
fn add_line(&mut self, added: Indentation<'a>) {
|
||||
// Align overrides the indent from outer scopes.
|
||||
if self.align.is_some() {
|
||||
return;
|
||||
@@ -271,8 +322,10 @@ impl Indentation {
|
||||
}
|
||||
|
||||
/// Add an indent capture to this indent.
|
||||
/// All the captures that are added in this way should be on the same line.
|
||||
fn add_capture(&mut self, added: IndentCaptureType) {
|
||||
/// Only captures that apply to the same line should be added together in this way (otherwise use `add_line`)
|
||||
/// and the captures should be added starting from the innermost tree-sitter node (currently this only matters
|
||||
/// if multiple `@align` patterns occur on the same line).
|
||||
fn add_capture(&mut self, added: IndentCaptureType<'a>) {
|
||||
match added {
|
||||
IndentCaptureType::Indent => {
|
||||
if self.indent_always == 0 {
|
||||
@@ -295,47 +348,68 @@ impl Indentation {
|
||||
self.outdent = 0;
|
||||
}
|
||||
IndentCaptureType::Align(align) => {
|
||||
if self.align.is_none() {
|
||||
self.align = Some(align);
|
||||
}
|
||||
}
|
||||
}
|
||||
fn into_string(self, indent_style: &IndentStyle) -> String {
|
||||
let indent = self.indent_always + self.indent;
|
||||
let outdent = self.outdent_always + self.outdent;
|
||||
|
||||
let indent_level = if indent >= outdent {
|
||||
indent - outdent
|
||||
}
|
||||
fn net_indent(&self) -> isize {
|
||||
(self.indent + self.indent_always) as isize
|
||||
- ((self.outdent + self.outdent_always) as isize)
|
||||
}
|
||||
/// Convert `self` into a string, taking into account the computed and actual indentation of some other line.
|
||||
fn relative_indent(
|
||||
&self,
|
||||
other_computed_indent: &Self,
|
||||
other_leading_whitespace: RopeSlice,
|
||||
indent_style: &IndentStyle,
|
||||
tab_width: usize,
|
||||
) -> Option<String> {
|
||||
if self.align == other_computed_indent.align {
|
||||
// If self and baseline are either not aligned to anything or both aligned the same way,
|
||||
// we can simply take `other_leading_whitespace` and add some indent / outdent to it (in the second
|
||||
// case, the alignment should already be accounted for in `other_leading_whitespace`).
|
||||
let indent_diff = self.net_indent() - other_computed_indent.net_indent();
|
||||
Some(add_indent_level(
|
||||
String::from(other_leading_whitespace),
|
||||
indent_diff,
|
||||
indent_style,
|
||||
tab_width,
|
||||
))
|
||||
} else {
|
||||
log::warn!("Encountered more outdent than indent nodes while calculating indentation: {} outdent, {} indent", self.outdent, self.indent);
|
||||
0
|
||||
};
|
||||
let mut indent_string = if let Some(align) = self.align {
|
||||
align
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
indent_string.push_str(&indent_style.as_str().repeat(indent_level));
|
||||
indent_string
|
||||
// If the alignment of both lines is different, we cannot compare their indentation in any meaningful way
|
||||
None
|
||||
}
|
||||
}
|
||||
pub fn to_string(&self, indent_style: &IndentStyle, tab_width: usize) -> String {
|
||||
add_indent_level(
|
||||
self.align
|
||||
.map_or_else(String::new, whitespace_with_same_width),
|
||||
self.net_indent(),
|
||||
indent_style,
|
||||
tab_width,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// An indent definition which corresponds to a capture from the indent query
|
||||
#[derive(Debug)]
|
||||
struct IndentCapture {
|
||||
capture_type: IndentCaptureType,
|
||||
struct IndentCapture<'a> {
|
||||
capture_type: IndentCaptureType<'a>,
|
||||
scope: IndentScope,
|
||||
}
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
enum IndentCaptureType {
|
||||
enum IndentCaptureType<'a> {
|
||||
Indent,
|
||||
IndentAlways,
|
||||
Outdent,
|
||||
OutdentAlways,
|
||||
/// Alignment given as a string of whitespace
|
||||
Align(String),
|
||||
Align(RopeSlice<'a>),
|
||||
}
|
||||
|
||||
impl IndentCaptureType {
|
||||
impl<'a> IndentCaptureType<'a> {
|
||||
fn default_scope(&self) -> IndentScope {
|
||||
match self {
|
||||
IndentCaptureType::Indent | IndentCaptureType::IndentAlways => IndentScope::Tail,
|
||||
@@ -367,8 +441,8 @@ enum ExtendCapture {
|
||||
/// each node (identified by its ID) the relevant captures (already filtered
|
||||
/// by predicates).
|
||||
#[derive(Debug)]
|
||||
struct IndentQueryResult {
|
||||
indent_captures: HashMap<usize, Vec<IndentCapture>>,
|
||||
struct IndentQueryResult<'a> {
|
||||
indent_captures: HashMap<usize, Vec<IndentCapture<'a>>>,
|
||||
extend_captures: HashMap<usize, Vec<ExtendCapture>>,
|
||||
}
|
||||
|
||||
@@ -389,14 +463,14 @@ fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
|
||||
node_line
|
||||
}
|
||||
|
||||
fn query_indents(
|
||||
fn query_indents<'a>(
|
||||
query: &Query,
|
||||
syntax: &Syntax,
|
||||
cursor: &mut QueryCursor,
|
||||
text: RopeSlice,
|
||||
text: RopeSlice<'a>,
|
||||
range: std::ops::Range<usize>,
|
||||
new_line_byte_pos: Option<usize>,
|
||||
) -> IndentQueryResult {
|
||||
) -> IndentQueryResult<'a> {
|
||||
let mut indent_captures: HashMap<usize, Vec<IndentCapture>> = HashMap::new();
|
||||
let mut extend_captures: HashMap<usize, Vec<ExtendCapture>> = HashMap::new();
|
||||
cursor.set_byte_range(range);
|
||||
@@ -484,7 +558,7 @@ fn query_indents(
|
||||
"outdent" => IndentCaptureType::Outdent,
|
||||
"outdent.always" => IndentCaptureType::OutdentAlways,
|
||||
// The alignment will be updated to the correct value at the end, when the anchor is known.
|
||||
"align" => IndentCaptureType::Align(String::from("")),
|
||||
"align" => IndentCaptureType::Align(RopeSlice::from("")),
|
||||
"anchor" => {
|
||||
if anchor.is_some() {
|
||||
log::error!("Invalid indent query: Encountered more than one @anchor in the same match.")
|
||||
@@ -556,23 +630,11 @@ fn query_indents(
|
||||
}
|
||||
Some(anchor) => anchor,
|
||||
};
|
||||
// Create a string of tabs & spaces that should have the same width
|
||||
// as the string that precedes the anchor (independent of the tab width).
|
||||
let mut align = String::new();
|
||||
for grapheme in RopeGraphemes::new(
|
||||
capture.capture_type = IndentCaptureType::Align(
|
||||
text.line(anchor.start_position().row)
|
||||
.byte_slice(0..anchor.start_position().column),
|
||||
) {
|
||||
if grapheme == "\t" {
|
||||
align.push('\t');
|
||||
} else {
|
||||
align.extend(
|
||||
std::iter::repeat(' ').take(grapheme_width(&Cow::from(grapheme))),
|
||||
);
|
||||
}
|
||||
}
|
||||
capture.capture_type = IndentCaptureType::Align(align);
|
||||
}
|
||||
indent_captures
|
||||
.entry(node_id)
|
||||
.or_insert_with(|| Vec::with_capacity(1))
|
||||
@@ -657,56 +719,20 @@ fn extend_nodes<'a>(
|
||||
}
|
||||
}
|
||||
|
||||
/// Use the syntax tree to determine the indentation for a given position.
|
||||
/// This can be used in 2 ways:
|
||||
///
|
||||
/// - To get the correct indentation for an existing line (new_line=false), not necessarily equal to the current indentation.
|
||||
/// - In this case, pos should be inside the first tree-sitter node on that line.
|
||||
/// In most cases, this can just be the first non-whitespace on that line.
|
||||
/// - To get the indentation for a new line (new_line=true). This behaves like the first usecase if the part of the current line
|
||||
/// after pos were moved to a new line.
|
||||
///
|
||||
/// The indentation is determined by traversing all the tree-sitter nodes containing the position.
|
||||
/// Each of these nodes produces some [Indentation] for:
|
||||
///
|
||||
/// - The line of the (beginning of the) node. This is defined by the scope `all` if this is the first node on its line.
|
||||
/// - The line after the node. This is defined by:
|
||||
/// - The scope `tail`.
|
||||
/// - The scope `all` if this node is not the first node on its line.
|
||||
/// Intuitively, `all` applies to everything contained in this node while `tail` applies to everything except for the first line of the node.
|
||||
/// The indents from different nodes for the same line are then combined.
|
||||
/// The result [Indentation] is simply the sum of the [Indentation] for all lines.
|
||||
///
|
||||
/// Specifying which line exactly an [Indentation] applies to is important because indents on the same line combine differently than indents on different lines:
|
||||
/// ```ignore
|
||||
/// some_function(|| {
|
||||
/// // Both the function parameters as well as the contained block should be indented.
|
||||
/// // Because they are on the same line, this only yields one indent level
|
||||
/// });
|
||||
/// ```
|
||||
///
|
||||
/// ```ignore
|
||||
/// some_function(
|
||||
/// param1,
|
||||
/// || {
|
||||
/// // Here we get 2 indent levels because the 'parameters' and the 'block' node begin on different lines
|
||||
/// },
|
||||
/// );
|
||||
/// ```
|
||||
/// Prepare an indent query by computing:
|
||||
/// - The node from which to start the query (this is non-trivial due to `@extend` captures)
|
||||
/// - The indent captures for all relevant nodes.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn treesitter_indent_for_pos(
|
||||
fn init_indent_query<'a, 'b>(
|
||||
query: &Query,
|
||||
syntax: &Syntax,
|
||||
indent_style: &IndentStyle,
|
||||
syntax: &'a Syntax,
|
||||
text: RopeSlice<'b>,
|
||||
tab_width: usize,
|
||||
indent_width: usize,
|
||||
text: RopeSlice,
|
||||
line: usize,
|
||||
pos: usize,
|
||||
new_line: bool,
|
||||
) -> Option<String> {
|
||||
let byte_pos = text.char_to_byte(pos);
|
||||
let new_line_byte_pos = new_line.then_some(byte_pos);
|
||||
byte_pos: usize,
|
||||
new_line_byte_pos: Option<usize>,
|
||||
) -> Option<(Node<'a>, HashMap<usize, Vec<IndentCapture<'b>>>)> {
|
||||
// The innermost tree-sitter node which is considered for the indent
|
||||
// computation. It may change if some predeceding node is extended
|
||||
let mut node = syntax
|
||||
@@ -750,7 +776,6 @@ pub fn treesitter_indent_for_pos(
|
||||
(query_result, deepest_preceding)
|
||||
})
|
||||
};
|
||||
let mut indent_captures = query_result.indent_captures;
|
||||
let extend_captures = query_result.extend_captures;
|
||||
|
||||
// Check for extend captures, potentially changing the node that the indent calculation starts with
|
||||
@@ -765,6 +790,68 @@ pub fn treesitter_indent_for_pos(
|
||||
indent_width,
|
||||
);
|
||||
}
|
||||
Some((node, query_result.indent_captures))
|
||||
}
|
||||
|
||||
/// Use the syntax tree to determine the indentation for a given position.
/// This can be used in 2 ways:
///
/// - To get the correct indentation for an existing line (new_line=false), not necessarily equal to the current indentation.
///   - In this case, pos should be inside the first tree-sitter node on that line.
///     In most cases, this can just be the first non-whitespace on that line.
/// - To get the indentation for a new line (new_line=true). This behaves like the first use case if the part of the current line
///   after pos were moved to a new line.
///
/// The indentation is determined by traversing all the tree-sitter nodes containing the position.
/// Each of these nodes produces some [Indentation] for:
///
/// - The line of the (beginning of the) node. This is defined by the scope `all` if this is the first node on its line.
/// - The line after the node. This is defined by:
///   - The scope `tail`.
///   - The scope `all` if this node is not the first node on its line.
/// Intuitively, `all` applies to everything contained in this node while `tail` applies to everything except for the first line of the node.
/// The indents from different nodes for the same line are then combined.
/// The resulting [Indentation] is simply the sum of the [Indentation] for all lines.
///
/// Specifying which line exactly an [Indentation] applies to is important because indents on the same line combine differently than indents on different lines:
/// ```ignore
/// some_function(|| {
///     // Both the function parameters as well as the contained block should be indented.
///     // Because they are on the same line, this only yields one indent level
/// });
/// ```
///
/// ```ignore
/// some_function(
///     param1,
///     || {
///         // Here we get 2 indent levels because the 'parameters' and the 'block' node begin on different lines
///     },
/// );
/// ```
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn treesitter_indent_for_pos<'a>(
|
||||
query: &Query,
|
||||
syntax: &Syntax,
|
||||
tab_width: usize,
|
||||
indent_width: usize,
|
||||
text: RopeSlice<'a>,
|
||||
line: usize,
|
||||
pos: usize,
|
||||
new_line: bool,
|
||||
) -> Option<Indentation<'a>> {
|
||||
let byte_pos = text.char_to_byte(pos);
|
||||
let new_line_byte_pos = new_line.then_some(byte_pos);
|
||||
let (mut node, mut indent_captures) = init_indent_query(
|
||||
query,
|
||||
syntax,
|
||||
text,
|
||||
tab_width,
|
||||
indent_width,
|
||||
line,
|
||||
byte_pos,
|
||||
new_line_byte_pos,
|
||||
)?;
|
||||
let mut first_in_line = get_first_in_line(node, new_line.then_some(byte_pos));
|
||||
|
||||
let mut result = Indentation::default();
|
||||
@@ -832,7 +919,7 @@ pub fn treesitter_indent_for_pos(
|
||||
break;
|
||||
}
|
||||
}
|
||||
Some(result.into_string(indent_style))
|
||||
Some(result)
|
||||
}
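With this refactor the function returns an `Indentation<'a>` rather than a rendered string. A minimal sketch of the new call shape, mirroring the updated `test_treesitter_indent` later in this diff (the variables `indent_query`, `syntax`, `text`, `line`, `pos`, `indent_style` and `tab_width` are assumed to be in scope):

```rust
// Sketch only: mirrors the call in the updated test further down in this diff.
let suggested_indent = treesitter_indent_for_pos(
    indent_query,
    &syntax,
    tab_width,
    indent_style.indent_width(tab_width),
    text,
    line,  // the line whose indentation we want
    pos,   // char position inside the first node on that line
    false, // new_line = false: indentation for an existing line
)
// callers now render the Indentation themselves:
.map(|indent| indent.to_string(&indent_style, tab_width));
```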
|
||||
|
||||
/// Returns the indentation for a new line.
|
||||
@@ -841,6 +928,7 @@ pub fn treesitter_indent_for_pos(
|
||||
pub fn indent_for_newline(
|
||||
language_config: Option<&LanguageConfiguration>,
|
||||
syntax: Option<&Syntax>,
|
||||
indent_heuristic: &IndentationHeuristic,
|
||||
indent_style: &IndentStyle,
|
||||
tab_width: usize,
|
||||
text: RopeSlice,
|
||||
@@ -849,14 +937,18 @@ pub fn indent_for_newline(
|
||||
current_line: usize,
|
||||
) -> String {
|
||||
let indent_width = indent_style.indent_width(tab_width);
|
||||
if let (Some(query), Some(syntax)) = (
|
||||
if let (
|
||||
IndentationHeuristic::TreeSitter | IndentationHeuristic::Hybrid,
|
||||
Some(query),
|
||||
Some(syntax),
|
||||
) = (
|
||||
indent_heuristic,
|
||||
language_config.and_then(|config| config.indent_query()),
|
||||
syntax,
|
||||
) {
|
||||
if let Some(indent) = treesitter_indent_for_pos(
|
||||
query,
|
||||
syntax,
|
||||
indent_style,
|
||||
tab_width,
|
||||
indent_width,
|
||||
text,
|
||||
@@ -864,9 +956,57 @@ pub fn indent_for_newline(
|
||||
line_before_end_pos,
|
||||
true,
|
||||
) {
|
||||
if *indent_heuristic == IndentationHeuristic::Hybrid {
|
||||
// We want to compute the indentation not only based on the
|
||||
// syntax tree but also on the actual indentation of a previous
|
||||
// line. This makes indentation computation more resilient to
|
||||
// incomplete queries, incomplete source code & differing indentation
|
||||
// styles for the same language.
|
||||
// However, using the indent of a previous line as a baseline may not
|
||||
// make sense, e.g. if it has a different alignment than the new line.
|
||||
// In order to prevent edge cases with long running times, we only try
|
||||
// a constant number of (non-empty) lines.
|
||||
const MAX_ATTEMPTS: usize = 4;
|
||||
let mut num_attempts = 0;
|
||||
for line_idx in (0..=line_before).rev() {
|
||||
let line = text.line(line_idx);
|
||||
let first_non_whitespace_char = match find_first_non_whitespace_char(line) {
|
||||
Some(i) => i,
|
||||
None => {
|
||||
continue;
|
||||
}
|
||||
};
|
||||
if let Some(indent) = (|| {
|
||||
let computed_indent = treesitter_indent_for_pos(
|
||||
query,
|
||||
syntax,
|
||||
tab_width,
|
||||
indent_width,
|
||||
text,
|
||||
line_idx,
|
||||
text.line_to_char(line_idx) + first_non_whitespace_char,
|
||||
false,
|
||||
)?;
|
||||
let leading_whitespace = line.slice(0..first_non_whitespace_char);
|
||||
indent.relative_indent(
|
||||
&computed_indent,
|
||||
leading_whitespace,
|
||||
indent_style,
|
||||
tab_width,
|
||||
)
|
||||
})() {
|
||||
return indent;
|
||||
}
|
||||
num_attempts += 1;
|
||||
if num_attempts == MAX_ATTEMPTS {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return indent.to_string(indent_style, tab_width);
|
||||
};
|
||||
}
|
||||
// Fallback in case we either don't have indent queries or they failed for some reason
|
||||
let indent_level = indent_level_for_line(text.line(current_line), tab_width, indent_width);
|
||||
indent_style.as_str().repeat(indent_level)
|
||||
}
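The `Hybrid` branch above applies only the syntax-derived difference on top of a reference line's real indentation. A rough width-based sketch of that arithmetic (the helper below is hypothetical and ignores alignment, which the real `relative_indent` also has to handle):

```rust
// Hypothetical simplification: widths in columns instead of whitespace strings.
fn hybrid_indent_width(expected_new: usize, expected_ref: usize, actual_ref: usize) -> usize {
    // keep the reference line's actual indentation and add only the
    // difference that the syntax tree expects between the two lines
    (actual_ref + expected_new).saturating_sub(expected_ref)
}

// If the tree expects the reference line at 4 columns but it really sits at 2,
// a new line expected at 8 columns ends up at 6.
assert_eq!(hybrid_indent_width(8, 4, 2), 6);
```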
|
||||
@@ -958,10 +1098,13 @@ mod test {
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let add_capture = |mut indent: Indentation, capture| {
|
||||
fn add_capture<'a>(
|
||||
mut indent: Indentation<'a>,
|
||||
capture: IndentCaptureType<'a>,
|
||||
) -> Indentation<'a> {
|
||||
indent.add_capture(capture);
|
||||
indent
|
||||
};
|
||||
}
|
||||
|
||||
// adding an indent to no indent makes an indent
|
||||
assert_eq!(
|
||||
@@ -1056,4 +1199,74 @@ mod test {
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_indent() {
|
||||
let indent_style = IndentStyle::Spaces(4);
|
||||
let tab_width: usize = 4;
|
||||
let no_align = [
|
||||
Indentation::default(),
|
||||
Indentation {
|
||||
indent: 1,
|
||||
..Default::default()
|
||||
},
|
||||
Indentation {
|
||||
indent: 5,
|
||||
outdent: 1,
|
||||
..Default::default()
|
||||
},
|
||||
];
|
||||
let align = no_align.clone().map(|indent| Indentation {
|
||||
align: Some(RopeSlice::from("12345")),
|
||||
..indent
|
||||
});
|
||||
let different_align = Indentation {
|
||||
align: Some(RopeSlice::from("123456")),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
// Check that relative and absolute indentation computation are the same when the line we compare to is
|
||||
// indented as we expect.
|
||||
let check_consistency = |indent: &Indentation, other: &Indentation| {
|
||||
assert_eq!(
|
||||
indent.relative_indent(
|
||||
other,
|
||||
RopeSlice::from(other.to_string(&indent_style, tab_width).as_str()),
|
||||
&indent_style,
|
||||
tab_width
|
||||
),
|
||||
Some(indent.to_string(&indent_style, tab_width))
|
||||
);
|
||||
};
|
||||
for a in &no_align {
|
||||
for b in &no_align {
|
||||
check_consistency(a, b);
|
||||
}
|
||||
}
|
||||
for a in &align {
|
||||
for b in &align {
|
||||
check_consistency(a, b);
|
||||
}
|
||||
}
|
||||
|
||||
// Relative indent computation makes no sense if the alignment differs
|
||||
assert_eq!(
|
||||
align[0].relative_indent(
|
||||
&no_align[0],
|
||||
RopeSlice::from(" "),
|
||||
&indent_style,
|
||||
tab_width
|
||||
),
|
||||
None
|
||||
);
|
||||
assert_eq!(
|
||||
align[0].relative_indent(
|
||||
&different_align,
|
||||
RopeSlice::from(" "),
|
||||
&indent_style,
|
||||
tab_width
|
||||
),
|
||||
None
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@@ -11,7 +11,7 @@ use ahash::RandomState;
|
||||
use arc_swap::{ArcSwap, Guard};
|
||||
use bitflags::bitflags;
|
||||
use hashbrown::raw::RawTable;
|
||||
use slotmap::{DefaultKey as LayerId, HopSlotMap};
|
||||
use slotmap::{DefaultKey as LayerId, DefaultKey as LanguageId, HopSlotMap};
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
@@ -92,8 +92,10 @@ impl Default for Configuration {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||
pub struct LanguageConfiguration {
|
||||
#[serde(skip)]
|
||||
language_id: LanguageId,
|
||||
#[serde(rename = "name")]
|
||||
pub language_id: String, // c-sharp, rust, tsx
|
||||
pub language_name: String, // c-sharp, rust, tsx
|
||||
#[serde(rename = "language-id")]
|
||||
// see the table under https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem
|
||||
pub language_server_language_id: Option<String>, // csharp, rust, typescriptreact, for the language-server
|
||||
@@ -101,6 +103,7 @@ pub struct LanguageConfiguration {
|
||||
pub file_types: Vec<FileType>, // filename extension or ends_with? <Gemfile, rb, etc>
|
||||
#[serde(default)]
|
||||
pub shebangs: Vec<String>, // interpreter(s) associated with language
|
||||
#[serde(default)]
|
||||
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
|
||||
pub comment_token: Option<String>,
|
||||
pub text_width: Option<usize>,
|
||||
@@ -154,6 +157,8 @@ pub struct LanguageConfiguration {
|
||||
/// Hardcoded LSP root directories relative to the workspace root, like `examples` or `tools/fuzz`.
|
||||
/// Falls back to the current working directory if none are configured.
|
||||
pub workspace_lsp_roots: Option<Vec<PathBuf>>,
|
||||
#[serde(default)]
|
||||
pub persistent_diagnostic_sources: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
@@ -211,10 +216,7 @@ impl<'de> Deserialize<'de> for FileType {
|
||||
{
|
||||
match map.next_entry::<String, String>()? {
|
||||
Some((key, suffix)) if key == "suffix" => Ok(FileType::Suffix({
|
||||
// FIXME: use `suffix.replace('/', std::path::MAIN_SEPARATOR_STR)`
|
||||
// if MSRV is updated to 1.68
|
||||
let mut separator = [0; 1];
|
||||
suffix.replace('/', std::path::MAIN_SEPARATOR.encode_utf8(&mut separator))
|
||||
suffix.replace('/', std::path::MAIN_SEPARATOR_STR)
|
||||
})),
|
||||
Some((key, _value)) => Err(serde::de::Error::custom(format!(
|
||||
"unknown key in `file-types` list: {}",
|
||||
@@ -444,6 +446,22 @@ pub struct IndentationConfiguration {
|
||||
pub unit: String,
|
||||
}
|
||||
|
||||
/// How the indentation for a newly inserted line should be determined.
/// If the selected heuristic is not available (e.g. because the current
/// language has no tree-sitter indent queries), a simpler one will be used.
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum IndentationHeuristic {
    /// Just copy the indentation of the line that the cursor is currently on.
    Simple,
    /// Use tree-sitter indent queries to compute the expected absolute indentation level of the new line.
    TreeSitter,
    /// Use tree-sitter indent queries to compute the expected difference in indentation between the new line
    /// and the line before. Add this to the actual indentation level of the line before.
    #[default]
    Hybrid,
}
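Because of the kebab-case rename, the three variants round-trip through config as "simple", "tree-sitter" and "hybrid", with `Hybrid` as the default; a tiny sanity check of the derives above:

```rust
// `#[default]` on Hybrid plus `derive(Default)` make this the out-of-the-box behaviour.
assert_eq!(IndentationHeuristic::default(), IndentationHeuristic::Hybrid);
```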
|
||||
|
||||
/// Configuration for auto pairs
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields, untagged)]
|
||||
@@ -620,32 +638,33 @@ pub fn read_query(language: &str, filename: &str) -> String {
|
||||
|
||||
impl LanguageConfiguration {
|
||||
fn initialize_highlight(&self, scopes: &[String]) -> Option<Arc<HighlightConfiguration>> {
|
||||
let highlights_query = read_query(&self.language_id, "highlights.scm");
|
||||
let highlights_query = read_query(&self.language_name, "highlights.scm");
|
||||
// always highlight syntax errors
|
||||
// highlights_query += "\n(ERROR) @error";
|
||||
|
||||
let injections_query = read_query(&self.language_id, "injections.scm");
|
||||
let locals_query = read_query(&self.language_id, "locals.scm");
|
||||
let injections_query = read_query(&self.language_name, "injections.scm");
|
||||
let locals_query = read_query(&self.language_name, "locals.scm");
|
||||
|
||||
if highlights_query.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let language = get_language(self.grammar.as_deref().unwrap_or(&self.language_id))
|
||||
let language = get_language(self.grammar.as_deref().unwrap_or(&self.language_name))
|
||||
.map_err(|err| {
|
||||
log::error!(
|
||||
"Failed to load tree-sitter parser for language {:?}: {}",
|
||||
self.language_id,
|
||||
self.language_name,
|
||||
err
|
||||
)
|
||||
})
|
||||
.ok()?;
|
||||
let config = HighlightConfiguration::new(
|
||||
self.language_id,
|
||||
language,
|
||||
&highlights_query,
|
||||
&injections_query,
|
||||
&locals_query,
|
||||
)
|
||||
.map_err(|err| log::error!("Could not parse queries for language {:?}. Are your grammars out of sync? Try running 'hx --grammar fetch' and 'hx --grammar build'. This query could not be parsed: {:?}", self.language_id, err))
|
||||
.map_err(|err| log::error!("Could not parse queries for language {:?}. Are your grammars out of sync? Try running 'hx --grammar fetch' and 'hx --grammar build'. This query could not be parsed: {:?}", self.language_name, err))
|
||||
.ok()?;
|
||||
|
||||
config.configure(scopes);
|
||||
@@ -689,7 +708,7 @@ impl LanguageConfiguration {
|
||||
}
|
||||
|
||||
fn load_query(&self, kind: &str) -> Option<Query> {
|
||||
let query_text = read_query(&self.language_id, kind);
|
||||
let query_text = read_query(&self.language_name, kind);
|
||||
if query_text.is_empty() {
|
||||
return None;
|
||||
}
|
||||
@@ -699,7 +718,7 @@ impl LanguageConfiguration {
|
||||
log::error!(
|
||||
"Failed to parse {} queries for {}: {}",
|
||||
kind,
|
||||
self.language_id,
|
||||
self.language_name,
|
||||
e
|
||||
)
|
||||
})
|
||||
@@ -741,10 +760,10 @@ pub struct SoftWrap {
|
||||
#[derive(Debug)]
|
||||
pub struct Loader {
|
||||
// highlight_names ?
|
||||
language_configs: Vec<Arc<LanguageConfiguration>>,
|
||||
language_config_ids_by_extension: HashMap<String, usize>, // Vec<usize>
|
||||
language_config_ids_by_suffix: HashMap<String, usize>,
|
||||
language_config_ids_by_shebang: HashMap<String, usize>,
|
||||
language_configs: HopSlotMap<LanguageId, Arc<LanguageConfiguration>>,
|
||||
language_config_ids_by_extension: HashMap<String, LanguageId>, // Vec<LanguageId>
|
||||
language_config_ids_by_suffix: HashMap<String, LanguageId>,
|
||||
language_config_ids_by_shebang: HashMap<String, LanguageId>,
|
||||
|
||||
language_server_configs: HashMap<String, LanguageServerConfiguration>,
|
||||
|
||||
@@ -754,7 +773,7 @@ pub struct Loader {
|
||||
impl Loader {
|
||||
pub fn new(config: Configuration) -> Self {
|
||||
let mut loader = Self {
|
||||
language_configs: Vec::new(),
|
||||
language_configs: HopSlotMap::new(),
|
||||
language_server_configs: config.language_server,
|
||||
language_config_ids_by_extension: HashMap::new(),
|
||||
language_config_ids_by_suffix: HashMap::new(),
|
||||
@@ -762,9 +781,12 @@ impl Loader {
|
||||
scopes: ArcSwap::from_pointee(Vec::new()),
|
||||
};
|
||||
|
||||
for config in config.language {
|
||||
// get the next id
|
||||
let language_id = loader.language_configs.len();
|
||||
for mut config in config.language {
|
||||
let language_id = loader.language_configs.insert_with_key(|key| {
|
||||
config.language_id = key;
|
||||
Arc::new(config)
|
||||
});
|
||||
let config = &loader.language_configs[language_id];
|
||||
|
||||
for file_type in &config.file_types {
|
||||
// entry().or_insert(Vec::new).push(language_id);
|
||||
@@ -782,8 +804,6 @@ impl Loader {
|
||||
.language_config_ids_by_shebang
|
||||
.insert(shebang.clone(), language_id);
|
||||
}
|
||||
|
||||
loader.language_configs.push(Arc::new(config));
|
||||
}
|
||||
|
||||
loader
|
||||
@@ -834,15 +854,18 @@ impl Loader {
|
||||
|
||||
pub fn language_config_for_scope(&self, scope: &str) -> Option<Arc<LanguageConfiguration>> {
|
||||
self.language_configs
|
||||
.iter()
|
||||
.values()
|
||||
.find(|config| config.scope == scope)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
pub fn language_config_for_language_id(&self, id: &str) -> Option<Arc<LanguageConfiguration>> {
|
||||
pub fn language_config_for_language_name(
|
||||
&self,
|
||||
name: &str,
|
||||
) -> Option<Arc<LanguageConfiguration>> {
|
||||
self.language_configs
|
||||
.iter()
|
||||
.find(|config| config.language_id == id)
|
||||
.values()
|
||||
.find(|config| config.language_name == name)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
@@ -851,19 +874,19 @@ impl Loader {
|
||||
pub fn language_config_for_name(&self, name: &str) -> Option<Arc<LanguageConfiguration>> {
|
||||
let mut best_match_length = 0;
|
||||
let mut best_match_position = None;
|
||||
for (i, configuration) in self.language_configs.iter().enumerate() {
|
||||
for (id, configuration) in self.language_configs.iter() {
|
||||
if let Some(injection_regex) = &configuration.injection_regex {
|
||||
if let Some(mat) = injection_regex.find(name) {
|
||||
let length = mat.end() - mat.start();
|
||||
if length > best_match_length {
|
||||
best_match_position = Some(i);
|
||||
best_match_position = Some(id);
|
||||
best_match_length = length;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
best_match_position.map(|i| self.language_configs[i].clone())
|
||||
best_match_position.map(|id| self.language_configs[id].clone())
|
||||
}
|
||||
|
||||
pub fn language_configuration_for_injection_string(
|
||||
@@ -874,13 +897,13 @@ impl Loader {
|
||||
InjectionLanguageMarker::Name(string) => self.language_config_for_name(string),
|
||||
InjectionLanguageMarker::Filename(file) => self.language_config_for_file_name(file),
|
||||
InjectionLanguageMarker::Shebang(shebang) => {
|
||||
self.language_config_for_language_id(shebang)
|
||||
self.language_config_for_language_name(shebang)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn language_configs(&self) -> impl Iterator<Item = &Arc<LanguageConfiguration>> {
|
||||
self.language_configs.iter()
|
||||
self.language_configs.values()
|
||||
}
|
||||
|
||||
pub fn language_server_configs(&self) -> &HashMap<String, LanguageServerConfiguration> {
|
||||
@@ -893,7 +916,7 @@ impl Loader {
|
||||
// Reconfigure existing grammars
|
||||
for config in self
|
||||
.language_configs
|
||||
.iter()
|
||||
.values()
|
||||
.filter(|cfg| cfg.is_highlight_initialized())
|
||||
{
|
||||
config.reconfigure(&self.scopes());
|
||||
@@ -1322,6 +1345,13 @@ impl Syntax {
|
||||
result
|
||||
}
|
||||
|
||||
/// Gets the [LanguageConfiguration] for a given injection layer.
|
||||
pub fn layer_config(&self, layer_id: LayerId) -> &Arc<LanguageConfiguration> {
|
||||
let language_id = self.layers[layer_id].config.language_id;
|
||||
|
||||
&self.loader.language_configs[language_id]
|
||||
}
|
||||
|
||||
// Commenting
|
||||
// comment_strings_for_pos
|
||||
// is_commented
|
||||
@@ -1560,6 +1590,7 @@ pub enum HighlightEvent {
|
||||
/// This struct is immutable and can be shared between threads.
|
||||
#[derive(Debug)]
|
||||
pub struct HighlightConfiguration {
|
||||
language_id: LanguageId,
|
||||
pub language: Grammar,
|
||||
pub query: Query,
|
||||
injections_query: Query,
|
||||
@@ -1614,7 +1645,7 @@ impl<'a> Iterator for ChunksBytes<'a> {
|
||||
}
|
||||
|
||||
pub struct RopeProvider<'a>(pub RopeSlice<'a>);
|
||||
impl<'a> TextProvider<'a> for RopeProvider<'a> {
|
||||
impl<'a> TextProvider<&'a [u8]> for RopeProvider<'a> {
|
||||
type I = ChunksBytes<'a>;
|
||||
|
||||
fn text(&mut self, node: Node) -> Self::I {
|
||||
@@ -1628,7 +1659,7 @@ impl<'a> TextProvider<'a> for RopeProvider<'a> {
|
||||
struct HighlightIterLayer<'a> {
|
||||
_tree: Option<Tree>,
|
||||
cursor: QueryCursor,
|
||||
captures: RefCell<iter::Peekable<QueryCaptures<'a, 'a, RopeProvider<'a>>>>,
|
||||
captures: RefCell<iter::Peekable<QueryCaptures<'a, 'a, RopeProvider<'a>, &'a [u8]>>>,
|
||||
config: &'a HighlightConfiguration,
|
||||
highlight_end_stack: Vec<usize>,
|
||||
scope_stack: Vec<LocalScope<'a>>,
|
||||
@@ -1657,6 +1688,7 @@ impl HighlightConfiguration {
|
||||
///
|
||||
/// Returns a `HighlightConfiguration` that can then be used with the `highlight` method.
|
||||
pub fn new(
|
||||
language_id: LanguageId,
|
||||
language: Grammar,
|
||||
highlights_query: &str,
|
||||
injection_query: &str,
|
||||
@@ -1733,6 +1765,7 @@ impl HighlightConfiguration {
|
||||
|
||||
let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]);
|
||||
Ok(Self {
|
||||
language_id,
|
||||
language,
|
||||
query,
|
||||
injections_query,
|
||||
@@ -1779,7 +1812,6 @@ impl HighlightConfiguration {
|
||||
let mut best_index = None;
|
||||
let mut best_match_len = 0;
|
||||
for (i, recognized_name) in recognized_names.iter().enumerate() {
|
||||
let recognized_name = recognized_name;
|
||||
let mut len = 0;
|
||||
let mut matches = true;
|
||||
for (i, part) in recognized_name.split('.').enumerate() {
|
||||
@@ -2531,7 +2563,8 @@ mod test {
|
||||
let textobject = TextObjectQuery { query };
|
||||
let mut cursor = QueryCursor::new();
|
||||
|
||||
let config = HighlightConfiguration::new(language, "", "", "").unwrap();
|
||||
let config =
|
||||
HighlightConfiguration::new(LanguageId::default(), language, "", "", "").unwrap();
|
||||
let syntax = Syntax::new(source.slice(..), Arc::new(config), Arc::new(loader)).unwrap();
|
||||
|
||||
let root = syntax.tree().root_node();
|
||||
@@ -2590,6 +2623,7 @@ mod test {
|
||||
|
||||
let language = get_language("rust").unwrap();
|
||||
let config = HighlightConfiguration::new(
|
||||
LanguageId::default(),
|
||||
language,
|
||||
&std::fs::read_to_string("../runtime/grammars/sources/rust/queries/highlights.scm")
|
||||
.unwrap(),
|
||||
@@ -2695,7 +2729,8 @@ mod test {
|
||||
});
|
||||
let language = get_language(language_name).unwrap();
|
||||
|
||||
let config = HighlightConfiguration::new(language, "", "", "").unwrap();
|
||||
let config =
|
||||
HighlightConfiguration::new(LanguageId::default(), language, "", "", "").unwrap();
|
||||
let syntax = Syntax::new(source.slice(..), Arc::new(config), Arc::new(loader)).unwrap();
|
||||
|
||||
let root = syntax
|
||||
|
@@ -1,7 +1,7 @@
|
||||
use ropey::RopeSlice;
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::{Range, Rope, Selection, Tendril};
|
||||
use crate::{chars::char_is_word, Range, Rope, Selection, Tendril};
|
||||
use std::{borrow::Cow, iter::once};
|
||||
|
||||
/// (from, to, replacement)
|
||||
@@ -23,6 +23,30 @@ pub enum Operation {
|
||||
pub enum Assoc {
    Before,
    After,
    /// Acts like `After` if a word character is inserted
    /// after the position, otherwise acts like `Before`
    AfterWord,
    /// Acts like `Before` if a word character is inserted
    /// before the position, otherwise acts like `After`
    BeforeWord,
}

impl Assoc {
    /// Whether to stick to gaps
    fn stay_at_gaps(self) -> bool {
        !matches!(self, Self::BeforeWord | Self::AfterWord)
    }

    fn insert_offset(self, s: &str) -> usize {
        let chars = s.chars().count();
        match self {
            Assoc::After => chars,
            Assoc::AfterWord => s.chars().take_while(|&c| char_is_word(c)).count(),
            // return position before inserted text
            Assoc::Before => 0,
            Assoc::BeforeWord => chars - s.chars().rev().take_while(|&c| char_is_word(c)).count(),
        }
    }
}
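A quick illustration of what the word-aware offsets evaluate to (`char_is_word` counts alphanumerics and `_`; these helpers are private, so checks like this would live in a unit test inside the module):

```rust
// Inserted text "ab ": a position tracked with AfterWord sticks after "ab",
// i.e. before the trailing space, instead of after the whole insertion.
assert_eq!(Assoc::AfterWord.insert_offset("ab "), 2);
// Inserted text " ab": BeforeWord skips the leading space but stays before "ab".
assert_eq!(Assoc::BeforeWord.insert_offset(" ab"), 1);
// The non-word-aware variants keep their old behaviour.
assert_eq!(Assoc::After.insert_offset("ab "), 3);
assert_eq!(Assoc::Before.insert_offset("ab "), 0);
```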
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
@@ -415,8 +439,6 @@ impl ChangeSet {
|
||||
map!(|pos, _| (old_end > pos).then_some(new_pos), i);
|
||||
}
|
||||
Insert(s) => {
|
||||
let ins = s.chars().count();
|
||||
|
||||
// a subsequent delete means a replace, consume it
|
||||
if let Some((_, Delete(len))) = iter.peek() {
|
||||
iter.next();
|
||||
@@ -424,13 +446,13 @@ impl ChangeSet {
|
||||
old_end = old_pos + len;
|
||||
// in range of replaced text
|
||||
map!(
|
||||
|pos, assoc| (old_end > pos).then(|| {
|
||||
|pos, assoc: Assoc| (old_end > pos).then(|| {
|
||||
// at point or tracking before
|
||||
if pos == old_pos || assoc == Assoc::Before {
|
||||
if pos == old_pos && assoc.stay_at_gaps() {
|
||||
new_pos
|
||||
} else {
|
||||
// place to end of insert
|
||||
new_pos + ins
|
||||
new_pos + assoc.insert_offset(s)
|
||||
}
|
||||
}),
|
||||
i
|
||||
@@ -438,20 +460,15 @@ impl ChangeSet {
|
||||
} else {
|
||||
// at insert point
|
||||
map!(
|
||||
|pos, assoc| (old_pos == pos).then(|| {
|
||||
|pos, assoc: Assoc| (old_pos == pos).then(|| {
|
||||
// return position before inserted text
|
||||
if assoc == Assoc::Before {
|
||||
new_pos
|
||||
} else {
|
||||
// after text
|
||||
new_pos + ins
|
||||
}
|
||||
new_pos + assoc.insert_offset(s)
|
||||
}),
|
||||
i
|
||||
);
|
||||
}
|
||||
|
||||
new_pos += ins;
|
||||
new_pos += s.chars().count();
|
||||
}
|
||||
}
|
||||
old_pos = old_end;
|
||||
@@ -884,6 +901,48 @@ mod test {
|
||||
let mut positions = [4, 2];
|
||||
cs.update_positions(positions.iter_mut().map(|pos| (pos, Assoc::After)));
|
||||
assert_eq!(positions, [4, 2]);
|
||||
// stays at word boundary
|
||||
let cs = ChangeSet {
|
||||
changes: vec![
|
||||
Retain(2), // <space><space>
|
||||
Insert(" ab".into()),
|
||||
Retain(2), // cd
|
||||
Insert("de ".into()),
|
||||
],
|
||||
len: 4,
|
||||
len_after: 10,
|
||||
};
|
||||
assert_eq!(cs.map_pos(2, Assoc::BeforeWord), 3);
|
||||
assert_eq!(cs.map_pos(4, Assoc::AfterWord), 9);
|
||||
let cs = ChangeSet {
|
||||
changes: vec![
|
||||
Retain(1), // <space>
|
||||
Insert(" b".into()),
|
||||
Delete(1), // c
|
||||
Retain(1), // d
|
||||
Insert("e ".into()),
|
||||
Delete(1), // <space>
|
||||
],
|
||||
len: 5,
|
||||
len_after: 7,
|
||||
};
|
||||
assert_eq!(cs.map_pos(1, Assoc::BeforeWord), 2);
|
||||
assert_eq!(cs.map_pos(3, Assoc::AfterWord), 5);
|
||||
let cs = ChangeSet {
|
||||
changes: vec![
|
||||
Retain(1), // <space>
|
||||
Insert("a".into()),
|
||||
Delete(2), // <space>b
|
||||
Retain(1), // d
|
||||
Insert("e".into()),
|
||||
Delete(1), // f
|
||||
Retain(1), // <space>
|
||||
],
|
||||
len: 5,
|
||||
len_after: 7,
|
||||
};
|
||||
assert_eq!(cs.map_pos(2, Assoc::BeforeWord), 1);
|
||||
assert_eq!(cs.map_pos(4, Assoc::AfterWord), 4);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@@ -210,7 +210,6 @@ fn test_treesitter_indent(
|
||||
let suggested_indent = treesitter_indent_for_pos(
|
||||
indent_query,
|
||||
&syntax,
|
||||
&indent_style,
|
||||
tab_width,
|
||||
indent_style.indent_width(tab_width),
|
||||
text,
|
||||
@@ -218,7 +217,8 @@ fn test_treesitter_indent(
|
||||
text.line_to_char(i) + pos,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
.unwrap()
|
||||
.to_string(&indent_style, tab_width);
|
||||
assert!(
|
||||
line.get_slice(..pos).map_or(false, |s| s == suggested_indent),
|
||||
"Wrong indentation for file {:?} on line {}:\n\"{}\" (original line)\n\"{}\" (suggested indentation)\n",
|
||||
|
@@ -1,25 +1,27 @@
|
||||
[package]
|
||||
name = "helix-dap"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2018"
|
||||
license = "MPL-2.0"
|
||||
description = "DAP client implementation for Helix project"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
|
||||
anyhow = "1.0"
|
||||
log = "0.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "net", "sync"] }
|
||||
which = "4.4"
|
||||
which = "5.0.0"
|
||||
|
||||
[dev-dependencies]
|
||||
fern = "0.6"
|
||||
|
@@ -9,7 +9,6 @@ use helix_core::syntax::DebuggerQuirks;
|
||||
use serde_json::Value;
|
||||
|
||||
use anyhow::anyhow;
|
||||
pub use log::{error, info};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
future::Future,
|
||||
|
@@ -1,12 +1,13 @@
|
||||
[package]
|
||||
name = "helix-event"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
|
@@ -1,13 +1,14 @@
|
||||
[package]
|
||||
name = "helix-loader"
|
||||
version = "0.6.0"
|
||||
description = "A post-modern text editor."
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
description = "Build bootstrapping for Helix crates"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[[bin]]
|
||||
name = "hx-loader"
|
||||
@@ -19,16 +20,16 @@ serde = { version = "1.0", features = ["derive"] }
|
||||
toml = "0.7"
|
||||
etcetera = "0.8"
|
||||
tree-sitter.workspace = true
|
||||
once_cell = "1.18"
|
||||
once_cell = "1.19"
|
||||
log = "0.4"
|
||||
which = "4.4"
|
||||
which = "5.0.0"
|
||||
|
||||
# TODO: these two should be on !wasm32 only
|
||||
|
||||
# cloning/compiling tree-sitter grammars
|
||||
cc = { version = "1" }
|
||||
threadpool = { version = "1.0" }
|
||||
tempfile = "3.8.0"
|
||||
tempfile = "3.9.0"
|
||||
dunce = "1.0.4"
|
||||
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||
|
@@ -2,7 +2,17 @@ use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
const VERSION: &str = include_str!("../VERSION");
|
||||
const MAJOR: &str = env!("CARGO_PKG_VERSION_MAJOR");
|
||||
const MINOR: &str = env!("CARGO_PKG_VERSION_MINOR");
|
||||
const PATCH: &str = env!("CARGO_PKG_VERSION_PATCH");
|
||||
|
||||
fn get_calver() -> String {
|
||||
if PATCH == "0" {
|
||||
format!("{MAJOR}.{MINOR}")
|
||||
} else {
|
||||
format!("{MAJOR}.{MINOR}.{PATCH}")
|
||||
}
|
||||
}
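The helper simply drops a trailing `.0` patch component from the Cargo package version, giving a calendar-style version string; a self-contained sketch with made-up inputs:

```rust
// Same rule as get_calver(), but with explicit arguments instead of the
// compile-time CARGO_PKG_VERSION_* values.
fn calver(major: &str, minor: &str, patch: &str) -> String {
    if patch == "0" {
        format!("{major}.{minor}")
    } else {
        format!("{major}.{minor}.{patch}")
    }
}

assert_eq!(calver("23", "10", "0"), "23.10");
assert_eq!(calver("23", "10", "2"), "23.10.2");
```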
|
||||
|
||||
fn main() {
|
||||
let git_hash = Command::new("git")
|
||||
@@ -12,9 +22,10 @@ fn main() {
|
||||
.filter(|output| output.status.success())
|
||||
.and_then(|x| String::from_utf8(x.stdout).ok());
|
||||
|
||||
let calver = get_calver();
|
||||
let version: Cow<_> = match &git_hash {
|
||||
Some(git_hash) => format!("{} ({})", VERSION, &git_hash[..8]).into(),
|
||||
None => VERSION.into(),
|
||||
Some(git_hash) => format!("{} ({})", calver, &git_hash[..8]).into(),
|
||||
None => calver.into(),
|
||||
};
|
||||
|
||||
println!(
|
||||
@@ -22,7 +33,6 @@ fn main() {
|
||||
std::env::var("TARGET").unwrap()
|
||||
);
|
||||
|
||||
println!("cargo:rerun-if-changed=../VERSION");
|
||||
println!("cargo:rustc-env=VERSION_AND_GIT_HASH={}", version);
|
||||
|
||||
if git_hash.is_none() {
|
||||
|
@@ -1,31 +1,32 @@
|
||||
[package]
|
||||
name = "helix-lsp"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
description = "LSP client implementation for Helix project"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
helix-parsec = { version = "0.6", path = "../helix-parsec" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
helix-parsec = { path = "../helix-parsec" }
|
||||
|
||||
anyhow = "1.0"
|
||||
futures-executor = "0.3"
|
||||
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
|
||||
globset = "0.4.13"
|
||||
globset = "0.4.14"
|
||||
log = "0.4"
|
||||
lsp-types = { version = "0.94" }
|
||||
lsp-types = { version = "0.95" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1.33", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio = { version = "1.35", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
|
||||
tokio-stream = "0.1.14"
|
||||
which = "4.4"
|
||||
which = "5.0.0"
|
||||
parking_lot = "0.12.1"
|
||||
|
@@ -401,12 +401,22 @@ impl Client {
|
||||
&self,
|
||||
params: R::Params,
|
||||
) -> impl Future<Output = Result<Value>>
|
||||
where
|
||||
R::Params: serde::Serialize,
|
||||
{
|
||||
self.call_with_timeout::<R>(params, self.req_timeout)
|
||||
}
|
||||
|
||||
fn call_with_timeout<R: lsp::request::Request>(
|
||||
&self,
|
||||
params: R::Params,
|
||||
timeout_secs: u64,
|
||||
) -> impl Future<Output = Result<Value>>
|
||||
where
|
||||
R::Params: serde::Serialize,
|
||||
{
|
||||
let server_tx = self.server_tx.clone();
|
||||
let id = self.next_request_id();
|
||||
let timeout_secs = self.req_timeout;
|
||||
|
||||
async move {
|
||||
use std::time::Duration;
|
||||
@@ -548,6 +558,11 @@ impl Client {
|
||||
dynamic_registration: Some(true),
|
||||
relative_pattern_support: Some(false),
|
||||
}),
|
||||
file_operations: Some(lsp::WorkspaceFileOperationsClientCapabilities {
|
||||
will_rename: Some(true),
|
||||
did_rename: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
text_document: Some(lsp::TextDocumentClientCapabilities {
|
||||
@@ -652,6 +667,7 @@ impl Client {
|
||||
version: Some(String::from(VERSION_AND_GIT_HASH)),
|
||||
}),
|
||||
locale: None, // TODO
|
||||
work_done_progress_params: lsp::WorkDoneProgressParams::default(),
|
||||
};
|
||||
|
||||
self.request::<lsp::request::Initialize>(params).await
|
||||
@@ -700,6 +716,65 @@ impl Client {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn prepare_file_rename(
|
||||
&self,
|
||||
old_uri: &lsp::Url,
|
||||
new_uri: &lsp::Url,
|
||||
) -> Option<impl Future<Output = Result<lsp::WorkspaceEdit>>> {
|
||||
let capabilities = self.capabilities.get().unwrap();
|
||||
|
||||
// Return early if the server does not support the willRename feature
|
||||
match &capabilities.workspace {
|
||||
Some(workspace) => match &workspace.file_operations {
|
||||
Some(op) => {
|
||||
op.will_rename.as_ref()?;
|
||||
}
|
||||
_ => return None,
|
||||
},
|
||||
_ => return None,
|
||||
}
|
||||
|
||||
let files = vec![lsp::FileRename {
|
||||
old_uri: old_uri.to_string(),
|
||||
new_uri: new_uri.to_string(),
|
||||
}];
|
||||
let request = self.call_with_timeout::<lsp::request::WillRenameFiles>(
|
||||
lsp::RenameFilesParams { files },
|
||||
5,
|
||||
);
|
||||
|
||||
Some(async move {
|
||||
let json = request.await?;
|
||||
let response: Option<lsp::WorkspaceEdit> = serde_json::from_value(json)?;
|
||||
Ok(response.unwrap_or_default())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn did_file_rename(
|
||||
&self,
|
||||
old_uri: &lsp::Url,
|
||||
new_uri: &lsp::Url,
|
||||
) -> Option<impl Future<Output = std::result::Result<(), Error>>> {
|
||||
let capabilities = self.capabilities.get().unwrap();
|
||||
|
||||
// Return early if the server does not support the DidRename feature
|
||||
match &capabilities.workspace {
|
||||
Some(workspace) => match &workspace.file_operations {
|
||||
Some(op) => {
|
||||
op.did_rename.as_ref()?;
|
||||
}
|
||||
_ => return None,
|
||||
},
|
||||
_ => return None,
|
||||
}
|
||||
|
||||
let files = vec![lsp::FileRename {
|
||||
old_uri: old_uri.to_string(),
|
||||
new_uri: new_uri.to_string(),
|
||||
}];
|
||||
Some(self.notify::<lsp::notification::DidRenameFiles>(lsp::RenameFilesParams { files }))
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------------------------
|
||||
// Text document
|
||||
// -------------------------------------------------------------------------------------------
|
||||
@@ -895,20 +970,19 @@ impl Client {
|
||||
) -> Option<impl Future<Output = Result<()>>> {
|
||||
let capabilities = self.capabilities.get().unwrap();
|
||||
|
||||
let include_text = match &capabilities.text_document_sync {
|
||||
Some(lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions {
|
||||
save: Some(options),
|
||||
let include_text = match &capabilities.text_document_sync.as_ref()? {
|
||||
lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions {
|
||||
save: options,
|
||||
..
|
||||
})) => match options {
|
||||
}) => match options.as_ref()? {
|
||||
lsp::TextDocumentSyncSaveOptions::Supported(true) => false,
|
||||
lsp::TextDocumentSyncSaveOptions::SaveOptions(lsp_types::SaveOptions {
|
||||
include_text,
|
||||
}) => include_text.unwrap_or(false),
|
||||
// Supported(false)
|
||||
_ => return None,
|
||||
lsp::TextDocumentSyncSaveOptions::Supported(false) => return None,
|
||||
},
|
||||
// unsupported
|
||||
_ => return None,
|
||||
// see: https://github.com/microsoft/language-server-protocol/issues/288
|
||||
lsp::TextDocumentSyncCapability::Kind(..) => false,
|
||||
};
|
||||
|
||||
Some(self.notify::<lsp::notification::DidSaveTextDocument>(
|
||||
|
@@ -915,10 +915,17 @@ fn start_client(
|
||||
}
|
||||
|
||||
// next up, notify<initialized>
|
||||
_client
|
||||
let notification_result = _client
|
||||
.notify::<lsp::notification::Initialized>(lsp::InitializedParams {})
|
||||
.await
|
||||
.unwrap();
|
||||
.await;
|
||||
|
||||
if let Err(e) = notification_result {
|
||||
log::error!(
|
||||
"failed to notify language server of its initialization: {}",
|
||||
e
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
initialize_notify.notify_one();
|
||||
});
|
||||
|
@@ -1,13 +1,14 @@
|
||||
[package]
|
||||
name = "helix-parsec"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
description = "Parser combinators for Helix"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
include = ["src/**/*", "README.md"]
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[dependencies]
|
||||
|
@@ -1,16 +1,16 @@
|
||||
[package]
|
||||
name = "helix-term"
|
||||
version = "0.6.0"
|
||||
description = "A post-modern text editor."
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
categories = ["editor", "command-line-utilities"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
include = ["src/**/*", "README.md"]
|
||||
default-run = "hx"
|
||||
rust-version = "1.65"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[features]
|
||||
default = ["git"]
|
||||
@@ -23,18 +23,18 @@ name = "hx"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-event = { version = "0.6", path = "../helix-event" }
|
||||
helix-view = { version = "0.6", path = "../helix-view" }
|
||||
helix-lsp = { version = "0.6", path = "../helix-lsp" }
|
||||
helix-dap = { version = "0.6", path = "../helix-dap" }
|
||||
helix-vcs = { version = "0.6", path = "../helix-vcs" }
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-event = { path = "../helix-event" }
|
||||
helix-view = { path = "../helix-view" }
|
||||
helix-lsp = { path = "../helix-lsp" }
|
||||
helix-dap = { path = "../helix-dap" }
|
||||
helix-vcs = { path = "../helix-vcs" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
||||
anyhow = "1"
|
||||
once_cell = "1.18"
|
||||
once_cell = "1.19"
|
||||
|
||||
which = "4.4"
|
||||
which = "5.0.0"
|
||||
|
||||
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
|
||||
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
|
||||
@@ -57,6 +57,10 @@ pulldown-cmark = { version = "0.9", default-features = false }
|
||||
# file type detection
|
||||
content_inspector = "0.2.4"
|
||||
|
||||
# opening URLs
|
||||
open = "5.0.1"
|
||||
url = "2.5.0"
|
||||
|
||||
# config
|
||||
toml = "0.7"
|
||||
|
||||
@@ -64,20 +68,20 @@ serde_json = "1.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
# ripgrep for global search
|
||||
grep-regex = "0.1.11"
|
||||
grep-searcher = "0.1.11"
|
||||
grep-regex = "0.1.12"
|
||||
grep-searcher = "0.1.13"
|
||||
|
||||
[target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
|
||||
signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
|
||||
libc = "0.2.149"
|
||||
libc = "0.2.152"
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
crossterm = { version = "0.27", features = ["event-stream", "use-dev-tty"] }
|
||||
|
||||
[build-dependencies]
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
|
||||
[dev-dependencies]
|
||||
smallvec = "1.11"
|
||||
indoc = "2.0.4"
|
||||
tempfile = "3.8.0"
|
||||
tempfile = "3.9.0"
|
||||
|
@@ -1,13 +1,8 @@
|
||||
use arc_swap::{access::Map, ArcSwap};
|
||||
use futures_util::Stream;
|
||||
use helix_core::{
|
||||
diagnostic::{DiagnosticTag, NumberOrString},
|
||||
path::get_relative_path,
|
||||
pos_at_coords, syntax, Selection,
|
||||
};
|
||||
use helix_core::{path::get_relative_path, pos_at_coords, syntax, Selection};
|
||||
use helix_lsp::{
|
||||
lsp::{self, notification::Notification},
|
||||
util::lsp_pos_to_pos,
|
||||
LspProgressMap,
|
||||
};
|
||||
use helix_view::{
|
||||
@@ -162,14 +157,19 @@ impl Application {
|
||||
// Unset path to prevent accidentally saving to the original tutor file.
|
||||
doc_mut!(editor).set_path(None);
|
||||
} else if !args.files.is_empty() {
|
||||
if args.open_cwd {
|
||||
// NOTE: The working directory is already set to args.files[0] in main()
|
||||
editor.new_file(Action::VerticalSplit);
|
||||
let picker = ui::file_picker(".".into(), &config.load().editor);
|
||||
let mut files_it = args.files.into_iter().peekable();
|
||||
|
||||
// If the first file is a directory, skip it and open a picker
|
||||
if let Some((first, _)) = files_it.next_if(|(p, _)| p.is_dir()) {
|
||||
let picker = ui::file_picker(first, &config.load().editor);
|
||||
compositor.push(Box::new(overlaid(picker)));
|
||||
} else {
|
||||
let nr_of_files = args.files.len();
|
||||
for (i, (file, pos)) in args.files.into_iter().enumerate() {
|
||||
}
|
||||
|
||||
// If there are any more files specified, open them
|
||||
if files_it.peek().is_some() {
|
||||
let mut nr_of_files = 0;
|
||||
for (file, pos) in files_it {
|
||||
nr_of_files += 1;
|
||||
if file.is_dir() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"expected a path to file, found a directory. (to open a directory pass it as first argument)"
|
||||
@@ -181,7 +181,7 @@ impl Application {
|
||||
// option. If neither of those two arguments are passed
|
||||
// in, just load the files normally.
|
||||
let action = match args.split {
|
||||
_ if i == 0 => Action::VerticalSplit,
|
||||
_ if nr_of_files == 1 => Action::VerticalSplit,
|
||||
Some(Layout::Vertical) => Action::VerticalSplit,
|
||||
Some(Layout::Horizontal) => Action::HorizontalSplit,
|
||||
None => Action::Load,
|
||||
@@ -208,6 +208,8 @@ impl Application {
|
||||
// does not affect views without pos since it is at the top
|
||||
let (view, doc) = current!(editor);
|
||||
align_view(doc, view, Align::Center);
|
||||
} else {
|
||||
editor.new_file(Action::VerticalSplit);
|
||||
}
|
||||
} else if stdin().is_tty() || cfg!(feature = "integration") {
|
||||
editor.new_file(Action::VerticalSplit);
|
||||
@@ -384,6 +386,12 @@ impl Application {
|
||||
self.editor.syn_loader = self.syn_loader.clone();
|
||||
for document in self.editor.documents.values_mut() {
|
||||
document.detect_language(self.syn_loader.clone());
|
||||
let diagnostics = Editor::doc_diagnostics(
|
||||
&self.editor.language_servers,
|
||||
&self.editor.diagnostics,
|
||||
document,
|
||||
);
|
||||
document.replace_diagnostics(diagnostics, &[], None);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -559,6 +567,14 @@ impl Application {
|
||||
let id = doc.id();
|
||||
doc.detect_language(loader);
|
||||
self.editor.refresh_language_servers(id);
|
||||
// and again a borrow checker workaround...
|
||||
let doc = doc_mut!(self.editor, &doc_save_event.doc_id);
|
||||
let diagnostics = Editor::doc_diagnostics(
|
||||
&self.editor.language_servers,
|
||||
&self.editor.diagnostics,
|
||||
doc,
|
||||
);
|
||||
doc.replace_diagnostics(diagnostics, &[], None);
|
||||
}
|
||||
|
||||
// TODO: fix being overwritten by lsp
|
||||
@@ -710,7 +726,7 @@ impl Application {
|
||||
));
|
||||
}
|
||||
}
|
||||
Notification::PublishDiagnostics(params) => {
|
||||
Notification::PublishDiagnostics(mut params) => {
|
||||
let path = match params.uri.to_file_path() {
|
||||
Ok(path) => path,
|
||||
Err(_) => {
|
||||
@@ -723,144 +739,97 @@ impl Application {
|
||||
log::error!("Discarding publishDiagnostic notification sent by an uninitialized server: {}", language_server.name());
|
||||
return;
|
||||
}
|
||||
let offset_encoding = language_server.offset_encoding();
|
||||
let doc = self.editor.document_by_path_mut(&path).filter(|doc| {
|
||||
// have to inline the function because of borrow checking...
|
||||
let doc = self.editor.documents.values_mut()
|
||||
.find(|doc| doc.path().map(|p| p == &path).unwrap_or(false))
|
||||
.filter(|doc| {
|
||||
if let Some(version) = params.version {
|
||||
if version != doc.version() {
|
||||
log::info!("Version ({version}) is out of date for {path:?} (expected ({}), dropping PublishDiagnostic notification", doc.version());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
});
|
||||
|
||||
if let Some(doc) = doc {
|
||||
let lang_conf = doc.language_config();
|
||||
let text = doc.text();
|
||||
let mut unchanged_diag_sources = Vec::new();
|
||||
if let Some(doc) = &doc {
|
||||
let lang_conf = doc.language.clone();
|
||||
|
||||
let diagnostics = params
|
||||
.diagnostics
|
||||
.iter()
|
||||
.filter_map(|diagnostic| {
|
||||
use helix_core::diagnostic::{Diagnostic, Range, Severity::*};
|
||||
use lsp::DiagnosticSeverity;
|
||||
|
||||
// TODO: convert inside server
|
||||
let start = if let Some(start) = lsp_pos_to_pos(
|
||||
text,
|
||||
diagnostic.range.start,
|
||||
offset_encoding,
|
||||
) {
|
||||
start
|
||||
} else {
|
||||
log::warn!("lsp position out of bounds - {:?}", diagnostic);
|
||||
return None;
|
||||
};
|
||||
|
||||
let end = if let Some(end) =
|
||||
lsp_pos_to_pos(text, diagnostic.range.end, offset_encoding)
|
||||
if let Some(lang_conf) = &lang_conf {
|
||||
if let Some(old_diagnostics) =
|
||||
self.editor.diagnostics.get(¶ms.uri)
|
||||
{
|
||||
end
|
||||
} else {
|
||||
log::warn!("lsp position out of bounds - {:?}", diagnostic);
|
||||
return None;
|
||||
};
|
||||
|
||||
let severity =
|
||||
diagnostic.severity.map(|severity| match severity {
|
||||
DiagnosticSeverity::ERROR => Error,
|
||||
DiagnosticSeverity::WARNING => Warning,
|
||||
DiagnosticSeverity::INFORMATION => Info,
|
||||
DiagnosticSeverity::HINT => Hint,
|
||||
severity => unreachable!(
|
||||
"unrecognized diagnostic severity: {:?}",
|
||||
severity
|
||||
),
|
||||
});
|
||||
|
||||
if let Some(lang_conf) = lang_conf {
|
||||
if let Some(severity) = severity {
|
||||
if severity < lang_conf.diagnostic_severity {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let code = match diagnostic.code.clone() {
|
||||
Some(x) => match x {
|
||||
lsp::NumberOrString::Number(x) => {
|
||||
Some(NumberOrString::Number(x))
|
||||
}
|
||||
lsp::NumberOrString::String(x) => {
|
||||
Some(NumberOrString::String(x))
|
||||
}
|
||||
},
|
||||
None => None,
|
||||
};
|
||||
|
||||
let tags = if let Some(tags) = &diagnostic.tags {
|
||||
let new_tags = tags
|
||||
.iter()
|
||||
.filter_map(|tag| match *tag {
|
||||
lsp::DiagnosticTag::DEPRECATED => {
|
||||
Some(DiagnosticTag::Deprecated)
|
||||
}
|
||||
lsp::DiagnosticTag::UNNECESSARY => {
|
||||
Some(DiagnosticTag::Unnecessary)
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
new_tags
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
Some(Diagnostic {
|
||||
range: Range { start, end },
|
||||
line: diagnostic.range.start.line as usize,
|
||||
message: diagnostic.message.clone(),
|
||||
severity,
|
||||
code,
|
||||
tags,
|
||||
source: diagnostic.source.clone(),
|
||||
data: diagnostic.data.clone(),
|
||||
language_server_id: server_id,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
doc.replace_diagnostics(diagnostics, server_id);
|
||||
}
|
||||
|
||||
let mut diagnostics = params
|
||||
if !lang_conf.persistent_diagnostic_sources.is_empty() {
|
||||
// Sort diagnostics first by severity and then by line numbers.
|
||||
// Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order
|
||||
params
|
||||
.diagnostics
|
||||
.into_iter()
|
||||
.map(|d| (d, server_id))
|
||||
.collect();
|
||||
.sort_unstable_by_key(|d| (d.severity, d.range.start));
|
||||
}
|
||||
for source in &lang_conf.persistent_diagnostic_sources {
|
||||
let new_diagnostics = params
|
||||
.diagnostics
|
||||
.iter()
|
||||
.filter(|d| d.source.as_ref() == Some(source));
|
||||
let old_diagnostics = old_diagnostics
|
||||
.iter()
|
||||
.filter(|(d, d_server)| {
|
||||
*d_server == server_id
|
||||
&& d.source.as_ref() == Some(source)
|
||||
})
|
||||
.map(|(d, _)| d);
|
||||
if new_diagnostics.eq(old_diagnostics) {
|
||||
unchanged_diag_sources.push(source.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let diagnostics = params.diagnostics.into_iter().map(|d| (d, server_id));
|
||||
|
||||
// Insert the original lsp::Diagnostics here because we may have no open document
|
||||
// for the diagnostic message and so we can't calculate the exact position.
|
||||
// When using them later in the diagnostics picker, we calculate them on-demand.
|
||||
match self.editor.diagnostics.entry(params.uri) {
|
||||
let diagnostics = match self.editor.diagnostics.entry(params.uri) {
|
||||
Entry::Occupied(o) => {
|
||||
let current_diagnostics = o.into_mut();
|
||||
// there may be entries from other language servers, which is why we can't overwrite the whole entry
|
||||
current_diagnostics.retain(|(_, lsp_id)| *lsp_id != server_id);
|
||||
current_diagnostics.append(&mut diagnostics);
|
||||
current_diagnostics.extend(diagnostics);
|
||||
current_diagnostics
|
||||
// Sort diagnostics first by severity and then by line numbers.
|
||||
}
|
||||
Entry::Vacant(v) => v.insert(diagnostics.collect()),
|
||||
};
|
||||
|
||||
// Sort diagnostics first by severity and then by line numbers.
|
||||
// Note: The `lsp::DiagnosticSeverity` enum is already defined in decreasing order
|
||||
current_diagnostics
|
||||
.sort_unstable_by_key(|(d, _)| (d.severity, d.range.start));
|
||||
}
|
||||
Entry::Vacant(v) => {
|
||||
diagnostics
|
||||
.sort_unstable_by_key(|(d, _)| (d.severity, d.range.start));
|
||||
v.insert(diagnostics);
|
||||
}
|
||||
diagnostics.sort_unstable_by_key(|(d, server_id)| {
|
||||
(d.severity, d.range.start, *server_id)
|
||||
});
|
||||
|
||||
if let Some(doc) = doc {
|
||||
let diagnostic_of_language_server_and_not_in_unchanged_sources =
|
||||
|diagnostic: &lsp::Diagnostic, ls_id| {
|
||||
ls_id == server_id
|
||||
&& diagnostic.source.as_ref().map_or(true, |source| {
|
||||
!unchanged_diag_sources.contains(source)
|
||||
})
|
||||
};
|
||||
let diagnostics = Editor::doc_diagnostics_with_filter(
|
||||
&self.editor.language_servers,
|
||||
&self.editor.diagnostics,
|
||||
doc,
|
||||
diagnostic_of_language_server_and_not_in_unchanged_sources,
|
||||
);
|
||||
doc.replace_diagnostics(
|
||||
diagnostics,
|
||||
&unchanged_diag_sources,
|
||||
Some(server_id),
|
||||
);
|
||||
}
|
||||
}
|
||||
Notification::ShowMessage(params) => {
|
||||
log::warn!("unhandled window/showMessage: {:?}", params);
|
||||
@@ -968,7 +937,7 @@ impl Application {
|
||||
|
||||
// Clear any diagnostics for documents with this server open.
|
||||
for doc in self.editor.documents_mut() {
|
||||
doc.clear_diagnostics(server_id);
|
||||
doc.clear_diagnostics(Some(server_id));
|
||||
}
|
||||
|
||||
// Remove the language server from the registry.
|
||||
|
@@ -17,7 +17,6 @@ pub struct Args {
|
||||
pub log_file: Option<PathBuf>,
|
||||
pub config_file: Option<PathBuf>,
|
||||
pub files: Vec<(PathBuf, Position)>,
|
||||
pub open_cwd: bool,
|
||||
pub working_directory: Option<PathBuf>,
|
||||
}
|
||||
|
||||
|
@@ -60,8 +60,13 @@ use crate::{
|
||||
|
||||
use crate::job::{self, Jobs};
|
||||
use futures_util::{stream::FuturesUnordered, TryStreamExt};
|
||||
use std::{collections::HashMap, fmt, future::Future};
|
||||
use std::{collections::HashSet, num::NonZeroUsize};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt,
|
||||
future::Future,
|
||||
io::Read,
|
||||
num::NonZeroUsize,
|
||||
};
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
@@ -70,6 +75,7 @@ use std::{
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::de::{self, Deserialize, Deserializer};
|
||||
use url::Url;
|
||||
|
||||
use grep_regex::RegexMatcherBuilder;
|
||||
use grep_searcher::{sinks, BinaryDetection, SearcherBuilder};
|
||||
@@ -331,9 +337,9 @@ impl MappableCommand {
|
||||
goto_implementation, "Goto implementation",
|
||||
goto_file_start, "Goto line number <n> else file start",
|
||||
goto_file_end, "Goto file end",
|
||||
goto_file, "Goto files in selection",
|
||||
goto_file_hsplit, "Goto files in selection (hsplit)",
|
||||
goto_file_vsplit, "Goto files in selection (vsplit)",
|
||||
goto_file, "Goto files/URLs in selections",
|
||||
goto_file_hsplit, "Goto files in selections (hsplit)",
|
||||
goto_file_vsplit, "Goto files in selections (vsplit)",
|
||||
goto_reference, "Goto references",
|
||||
goto_window_top, "Goto window top",
|
||||
goto_window_center, "Goto window center",
|
||||
@@ -984,6 +990,7 @@ fn align_selections(cx: &mut Context) {
|
||||
|
||||
let transaction = Transaction::change(doc.text(), changes.into_iter());
|
||||
doc.apply(&transaction, view.id);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn goto_window(cx: &mut Context, align: Align) {
|
||||
@@ -1189,9 +1196,17 @@ fn goto_file_impl(cx: &mut Context, action: Action) {
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
|
||||
for sel in paths {
|
||||
let p = sel.trim();
|
||||
if !p.is_empty() {
|
||||
if p.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Ok(url) = Url::parse(p) {
|
||||
return open_url(cx, url, action);
|
||||
}
|
||||
|
||||
let path = &rel_path.join(p);
|
||||
if path.is_dir() {
|
||||
let picker = ui::file_picker(path.into(), &cx.editor.config());
|
||||
@@ -1200,7 +1215,59 @@ fn goto_file_impl(cx: &mut Context, action: Action) {
|
||||
cx.editor.set_error(format!("Open file failed: {:?}", e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Opens the given URL. If the URL points to a valid textual file it is opened in Helix.
// Otherwise, the file is opened using an external program.
|
||||
fn open_url(cx: &mut Context, url: Url, action: Action) {
|
||||
let doc = doc!(cx.editor);
|
||||
let rel_path = doc
|
||||
.relative_path()
|
||||
.map(|path| path.parent().unwrap().to_path_buf())
|
||||
.unwrap_or_default();
|
||||
|
||||
if url.scheme() != "file" {
|
||||
return open_external_url(cx, url);
|
||||
}
|
||||
|
||||
let content_type = std::fs::File::open(url.path()).and_then(|file| {
|
||||
// Read up to 1kb to detect the content type
|
||||
let mut read_buffer = Vec::new();
|
||||
let n = file.take(1024).read_to_end(&mut read_buffer)?;
|
||||
Ok(content_inspector::inspect(&read_buffer[..n]))
|
||||
});
|
||||
|
||||
// we attempt to open binary files - files that can't be opened in helix - using an external
// program as well, e.g. pdf files or images
|
||||
match content_type {
|
||||
Ok(content_inspector::ContentType::BINARY) => open_external_url(cx, url),
|
||||
Ok(_) | Err(_) => {
|
||||
let path = &rel_path.join(url.path());
|
||||
if path.is_dir() {
|
||||
let picker = ui::file_picker(path.into(), &cx.editor.config());
|
||||
cx.push_layer(Box::new(overlaid(picker)));
|
||||
} else if let Err(e) = cx.editor.open(path, action) {
|
||||
cx.editor.set_error(format!("Open file failed: {:?}", e));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Opens URL in external program.
|
||||
fn open_external_url(cx: &mut Context, url: Url) {
|
||||
let commands = open::commands(url.as_str());
|
||||
cx.jobs.callback(async {
|
||||
for cmd in commands {
|
||||
let mut command = tokio::process::Command::new(cmd.get_program());
|
||||
command.args(cmd.get_args());
|
||||
if command.output().await.is_ok() {
|
||||
return Ok(job::Callback::Editor(Box::new(|_| {})));
|
||||
}
|
||||
}
|
||||
Ok(job::Callback::Editor(Box::new(move |editor| {
|
||||
editor.set_error("Opening URL in external program failed")
|
||||
})))
|
||||
});
|
||||
}
|
||||
|
||||
fn extend_word_impl<F>(cx: &mut Context, extend_fn: F)
|
||||
@@ -1251,7 +1318,7 @@ fn extend_next_long_word_end(cx: &mut Context) {
|
||||
extend_word_impl(cx, movement::move_next_long_word_end)
|
||||
}
|
||||
|
||||
/// Separate branch to find_char designed only for <ret> char.
|
||||
/// Separate branch to find_char designed only for `<ret>` char.
|
||||
//
|
||||
// This is necessary because the one document can have different line endings inside. And we
|
||||
// cannot predict what character to find when <ret> is pressed. On the current line it can be `lf`
|
||||
@@ -1527,6 +1594,7 @@ where
|
||||
});
|
||||
|
||||
doc.apply(&transaction, view.id);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn switch_case(cx: &mut Context) {
|
||||
@@ -2985,6 +3053,7 @@ fn insert_with_indent(cx: &mut Context, cursor_fallback: IndentFallbackPos) {
|
||||
let indent = indent::indent_for_newline(
|
||||
language_config,
|
||||
syntax,
|
||||
&doc.config.load().indent_heuristic,
|
||||
&doc.indent_style,
|
||||
tab_width,
|
||||
text,
|
||||
@@ -3113,6 +3182,7 @@ fn open(cx: &mut Context, open: Open) {
|
||||
let indent = indent::indent_for_newline(
|
||||
doc.language_config(),
|
||||
doc.syntax(),
|
||||
&doc.config.load().indent_heuristic,
|
||||
&doc.indent_style,
|
||||
doc.tab_width(),
|
||||
text,
|
||||
@@ -3280,7 +3350,7 @@ fn exit_select_mode(cx: &mut Context) {
|
||||
|
||||
fn goto_first_diag(cx: &mut Context) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let selection = match doc.shown_diagnostics().next() {
|
||||
let selection = match doc.diagnostics().first() {
|
||||
Some(diag) => Selection::single(diag.range.start, diag.range.end),
|
||||
None => return,
|
||||
};
|
||||
@@ -3289,7 +3359,7 @@ fn goto_first_diag(cx: &mut Context) {
|
||||
|
||||
fn goto_last_diag(cx: &mut Context) {
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let selection = match doc.shown_diagnostics().last() {
|
||||
let selection = match doc.diagnostics().last() {
|
||||
Some(diag) => Selection::single(diag.range.start, diag.range.end),
|
||||
None => return,
|
||||
};
|
||||
@@ -3305,9 +3375,10 @@ fn goto_next_diag(cx: &mut Context) {
|
||||
.cursor(doc.text().slice(..));
|
||||
|
||||
let diag = doc
|
||||
.shown_diagnostics()
|
||||
.diagnostics()
|
||||
.iter()
|
||||
.find(|diag| diag.range.start > cursor_pos)
|
||||
.or_else(|| doc.shown_diagnostics().next());
|
||||
.or_else(|| doc.diagnostics().first());
|
||||
|
||||
let selection = match diag {
|
||||
Some(diag) => Selection::single(diag.range.start, diag.range.end),
|
||||
@@ -3325,10 +3396,11 @@ fn goto_prev_diag(cx: &mut Context) {
|
||||
.cursor(doc.text().slice(..));
|
||||
|
||||
let diag = doc
|
||||
.shown_diagnostics()
|
||||
.diagnostics()
|
||||
.iter()
|
||||
.rev()
|
||||
.find(|diag| diag.range.start < cursor_pos)
|
||||
.or_else(|| doc.shown_diagnostics().last());
|
||||
.or_else(|| doc.diagnostics().last());
|
||||
|
||||
let selection = match diag {
|
||||
// NOTE: the selection is reversed because we're jumping to the
|
||||
@@ -3652,6 +3724,7 @@ pub mod insert {
|
||||
let indent = indent::indent_for_newline(
|
||||
doc.language_config(),
|
||||
doc.syntax(),
|
||||
&doc.config.load().indent_heuristic,
|
||||
&doc.indent_style,
|
||||
doc.tab_width(),
|
||||
text,
|
||||
@@ -3897,12 +3970,12 @@ fn yank(cx: &mut Context) {
|
||||
}
|
||||
|
||||
fn yank_to_clipboard(cx: &mut Context) {
|
||||
yank_impl(cx.editor, '*');
|
||||
yank_impl(cx.editor, '+');
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn yank_to_primary_clipboard(cx: &mut Context) {
|
||||
yank_impl(cx.editor, '+');
|
||||
yank_impl(cx.editor, '*');
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
@@ -3959,13 +4032,13 @@ fn yank_joined(cx: &mut Context) {
|
||||
|
||||
fn yank_joined_to_clipboard(cx: &mut Context) {
|
||||
let line_ending = doc!(cx.editor).line_ending;
|
||||
yank_joined_impl(cx.editor, line_ending.as_str(), '*');
|
||||
yank_joined_impl(cx.editor, line_ending.as_str(), '+');
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn yank_joined_to_primary_clipboard(cx: &mut Context) {
|
||||
let line_ending = doc!(cx.editor).line_ending;
|
||||
yank_joined_impl(cx.editor, line_ending.as_str(), '+');
|
||||
yank_joined_impl(cx.editor, line_ending.as_str(), '*');
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
@@ -3982,12 +4055,12 @@ fn yank_primary_selection_impl(editor: &mut Editor, register: char) {
|
||||
}
|
||||
|
||||
fn yank_main_selection_to_clipboard(cx: &mut Context) {
|
||||
yank_primary_selection_impl(cx.editor, '*');
|
||||
yank_primary_selection_impl(cx.editor, '+');
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn yank_main_selection_to_primary_clipboard(cx: &mut Context) {
|
||||
yank_primary_selection_impl(cx.editor, '+');
|
||||
yank_primary_selection_impl(cx.editor, '*');
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
@@ -4085,22 +4158,27 @@ pub(crate) fn paste_bracketed_value(cx: &mut Context, contents: String) {
|
||||
};
|
||||
let (view, doc) = current!(cx.editor);
|
||||
paste_impl(&[contents], doc, view, paste, count, cx.editor.mode);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn paste_clipboard_after(cx: &mut Context) {
|
||||
paste(cx.editor, '*', Paste::After, cx.count());
|
||||
paste(cx.editor, '+', Paste::After, cx.count());
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn paste_clipboard_before(cx: &mut Context) {
|
||||
paste(cx.editor, '*', Paste::Before, cx.count());
|
||||
paste(cx.editor, '+', Paste::Before, cx.count());
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn paste_primary_clipboard_after(cx: &mut Context) {
|
||||
paste(cx.editor, '+', Paste::After, cx.count());
|
||||
paste(cx.editor, '*', Paste::After, cx.count());
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn paste_primary_clipboard_before(cx: &mut Context) {
|
||||
paste(cx.editor, '+', Paste::Before, cx.count());
|
||||
paste(cx.editor, '*', Paste::Before, cx.count());
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn replace_with_yanked(cx: &mut Context) {
|
||||
@@ -4109,9 +4187,13 @@ fn replace_with_yanked(cx: &mut Context) {
|
||||
}
|
||||
|
||||
fn replace_with_yanked_impl(editor: &mut Editor, register: char, count: usize) {
|
||||
let Some(values) = editor.registers
|
||||
let Some(values) = editor
|
||||
.registers
|
||||
.read(register, editor)
|
||||
.filter(|values| values.len() > 0) else { return };
|
||||
.filter(|values| values.len() > 0)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let values: Vec<_> = values.map(|value| value.to_string()).collect();
|
||||
|
||||
let (view, doc) = current!(editor);
|
||||
@@ -4138,15 +4220,19 @@ fn replace_with_yanked_impl(editor: &mut Editor, register: char, count: usize) {
|
||||
}
|
||||
|
||||
fn replace_selections_with_clipboard(cx: &mut Context) {
|
||||
replace_with_yanked_impl(cx.editor, '*', cx.count());
|
||||
replace_with_yanked_impl(cx.editor, '+', cx.count());
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn replace_selections_with_primary_clipboard(cx: &mut Context) {
|
||||
replace_with_yanked_impl(cx.editor, '+', cx.count());
|
||||
replace_with_yanked_impl(cx.editor, '*', cx.count());
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn paste(editor: &mut Editor, register: char, pos: Paste, count: usize) {
|
||||
let Some(values) = editor.registers.read(register, editor) else { return };
|
||||
let Some(values) = editor.registers.read(register, editor) else {
|
||||
return;
|
||||
};
|
||||
let values: Vec<_> = values.map(|value| value.to_string()).collect();
|
||||
|
||||
let (view, doc) = current!(editor);
|
||||
@@ -4160,6 +4246,7 @@ fn paste_after(cx: &mut Context) {
|
||||
Paste::After,
|
||||
cx.count(),
|
||||
);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn paste_before(cx: &mut Context) {
|
||||
@@ -4169,6 +4256,7 @@ fn paste_before(cx: &mut Context) {
|
||||
Paste::Before,
|
||||
cx.count(),
|
||||
);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn get_lines(doc: &Document, view_id: ViewId) -> Vec<usize> {
|
||||
@@ -4207,6 +4295,7 @@ fn indent(cx: &mut Context) {
|
||||
}),
|
||||
);
|
||||
doc.apply(&transaction, view.id);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn unindent(cx: &mut Context) {
|
||||
@@ -4246,6 +4335,7 @@ fn unindent(cx: &mut Context) {
|
||||
let transaction = Transaction::change(doc.text(), changes.into_iter());
|
||||
|
||||
doc.apply(&transaction, view.id);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
|
||||
fn format_selections(cx: &mut Context) {
|
||||
@@ -4312,10 +4402,9 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
|
||||
use movement::skip_while;
|
||||
let (view, doc) = current!(cx.editor);
|
||||
let text = doc.text();
|
||||
let slice = doc.text().slice(..);
|
||||
let slice = text.slice(..);
|
||||
|
||||
let mut changes = Vec::new();
|
||||
let fragment = Tendril::from(" ");
|
||||
|
||||
for selection in doc.selection(view.id) {
|
||||
let (start, mut end) = selection.line_range(slice);
|
||||
@@ -4331,9 +4420,13 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
|
||||
let mut end = text.line_to_char(line + 1);
|
||||
end = skip_while(slice, end, |ch| matches!(ch, ' ' | '\t')).unwrap_or(end);
|
||||
|
||||
// need to skip from start, not end
|
||||
let change = (start, end, Some(fragment.clone()));
|
||||
changes.push(change);
|
||||
let separator = if end == line_end_char_index(&slice, line + 1) {
|
||||
// the joining line contains only space-characters => don't include a whitespace when joining
|
||||
None
|
||||
} else {
|
||||
Some(Tendril::from(" "))
|
||||
};
|
||||
changes.push((start, end, separator));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4345,9 +4438,6 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
|
||||
changes.sort_unstable_by_key(|(from, _to, _text)| *from);
|
||||
changes.dedup();
|
||||
|
||||
// TODO: joining multiple empty lines should be replaced by a single space.
|
||||
// need to merge change ranges that touch
|
||||
|
||||
// select inserted spaces
|
||||
let transaction = if select_space {
|
||||
let ranges: SmallVec<_> = changes
|
||||
@@ -4359,9 +4449,9 @@ fn join_selections_impl(cx: &mut Context, select_space: bool) {
|
||||
})
|
||||
.collect();
|
||||
let selection = Selection::new(ranges, 0);
|
||||
Transaction::change(doc.text(), changes.into_iter()).with_selection(selection)
|
||||
Transaction::change(text, changes.into_iter()).with_selection(selection)
|
||||
} else {
|
||||
Transaction::change(doc.text(), changes.into_iter())
|
||||
Transaction::change(text, changes.into_iter())
|
||||
};
|
||||
|
||||
doc.apply(&transaction, view.id);
|
||||
@@ -5527,12 +5617,18 @@ fn shell(cx: &mut compositor::Context, cmd: &str, behavior: &ShellBehavior) {
|
||||
};
|
||||
|
||||
// These `usize`s cannot underflow because selection ranges cannot overlap.
|
||||
// Once the MSRV is 1.66.0 (mixed_integer_ops is stabilized), we can use checked
|
||||
// arithmetic to assert this.
|
||||
let anchor = (to as isize + offset - deleted_len as isize) as usize;
|
||||
let anchor = to
|
||||
.checked_add_signed(offset)
|
||||
.expect("Selection ranges cannot overlap")
|
||||
.checked_sub(deleted_len)
|
||||
.expect("Selection ranges cannot overlap");
|
||||
let new_range = Range::new(anchor, anchor + output_len).with_direction(range.direction());
|
||||
ranges.push(new_range);
|
||||
offset = offset + output_len as isize - deleted_len as isize;
|
||||
offset = offset
|
||||
.checked_add_unsigned(output_len)
|
||||
.expect("Selection ranges cannot overlap")
|
||||
.checked_sub_unsigned(deleted_len)
|
||||
.expect("Selection ranges cannot overlap");
|
||||
|
||||
changes.push((from, to, Some(output)));
|
||||
}
|
||||
@@ -5671,6 +5767,7 @@ fn increment_impl(cx: &mut Context, increment_direction: IncrementDirection) {
|
||||
let transaction = Transaction::change(doc.text(), changes.into_iter());
|
||||
let transaction = transaction.with_selection(new_selection);
|
||||
doc.apply(&transaction, view.id);
|
||||
exit_select_mode(cx);
|
||||
}
|
||||
}
|
|
@@ -8,7 +8,7 @@ use dap::{StackFrame, Thread, ThreadStates};
|
||||
use helix_core::syntax::{DebugArgumentValue, DebugConfigCompletion, DebugTemplate};
|
||||
use helix_dap::{self as dap, Client};
|
||||
use helix_lsp::block_on;
|
||||
use helix_view::editor::Breakpoint;
|
||||
use helix_view::{editor::Breakpoint, graphics::Margin};
|
||||
|
||||
use serde_json::{to_value, Value};
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
@@ -581,7 +581,12 @@ pub fn dap_variables(cx: &mut Context) {
|
||||
}
|
||||
|
||||
let contents = Text::from(tui::text::Text::from(variables));
|
||||
let popup = Popup::new("dap-variables", contents);
|
||||
let margin = if cx.editor.popup_border() {
|
||||
Margin::all(1)
|
||||
} else {
|
||||
Margin::none()
|
||||
};
|
||||
let popup = Popup::new("dap-variables", contents).margin(margin);
|
||||
cx.replace_or_push_layer("dap-variables", popup);
|
||||
}
|
||||
|
@@ -23,6 +23,7 @@ use helix_core::{
|
||||
use helix_view::{
|
||||
document::{DocumentInlayHints, DocumentInlayHintsId, Mode},
|
||||
editor::Action,
|
||||
graphics::Margin,
|
||||
theme::Style,
|
||||
Document, View,
|
||||
};
|
||||
@@ -49,7 +50,7 @@ use std::{
|
||||
/// If there is no configured language server that supports the feature, this displays a status message.
|
||||
/// Using this macro in a context where the editor automatically queries the LSP
|
||||
/// (instead of when the user explicitly does so via a keybind like `gd`)
|
||||
/// will spam the "No configured language server supports <feature>" status message confusingly.
|
||||
/// will spam the "No configured language server supports \<feature>" status message confusingly.
|
||||
#[macro_export]
|
||||
macro_rules! language_server_with_feature {
|
||||
($editor:expr, $doc:expr, $feature:expr) => {{
|
||||
@@ -744,7 +745,16 @@ pub fn code_action(cx: &mut Context) {
|
||||
});
|
||||
picker.move_down(); // pre-select the first item
|
||||
|
||||
let popup = Popup::new("code-action", picker).with_scrollbar(false);
|
||||
let margin = if editor.menu_border() {
|
||||
Margin::vertical(1)
|
||||
} else {
|
||||
Margin::none()
|
||||
};
|
||||
|
||||
let popup = Popup::new("code-action", picker)
|
||||
.with_scrollbar(false)
|
||||
.margin(margin);
|
||||
|
||||
compositor.replace_or_push("code-action", popup);
|
||||
};
|
||||
|
||||
@@ -886,7 +896,6 @@ pub fn apply_workspace_edit(
|
||||
}
|
||||
};
|
||||
|
||||
let current_view_id = view!(editor).id;
|
||||
let doc_id = match editor.open(&path, Action::Load) {
|
||||
Ok(doc_id) => doc_id,
|
||||
Err(err) => {
|
||||
@@ -897,7 +906,7 @@ pub fn apply_workspace_edit(
|
||||
}
|
||||
};
|
||||
|
||||
let doc = doc_mut!(editor, &doc_id);
|
||||
let doc = doc!(editor, &doc_id);
|
||||
if let Some(version) = version {
|
||||
if version != doc.version() {
|
||||
let err = format!("outdated workspace edit for {path:?}");
|
||||
@@ -908,18 +917,8 @@ pub fn apply_workspace_edit(
|
||||
}
|
||||
|
||||
// Need to determine a view for apply/append_changes_to_history
|
||||
let selections = doc.selections();
|
||||
let view_id = if selections.contains_key(&current_view_id) {
|
||||
// use current if possible
|
||||
current_view_id
|
||||
} else {
|
||||
// Hack: we take the first available view_id
|
||||
selections
|
||||
.keys()
|
||||
.next()
|
||||
.copied()
|
||||
.expect("No view_id available")
|
||||
};
|
||||
let view_id = editor.get_synced_view_id(doc_id);
|
||||
let doc = doc_mut!(editor, &doc_id);
|
||||
|
||||
let transaction = helix_lsp::util::generate_transaction_from_edits(
|
||||
doc.text(),
|
||||
@@ -1411,6 +1410,16 @@ pub fn rename_symbol(cx: &mut Context) {
|
||||
|
||||
let (view, doc) = current_ref!(cx.editor);
|
||||
|
||||
if doc
|
||||
.language_servers_with_feature(LanguageServerFeature::RenameSymbol)
|
||||
.next()
|
||||
.is_none()
|
||||
{
|
||||
cx.editor
|
||||
.set_error("No configured language server supports symbol renaming");
|
||||
return;
|
||||
}
|
||||
|
||||
let language_server_with_prepare_rename_support = doc
|
||||
.language_servers_with_feature(LanguageServerFeature::RenameSymbol)
|
||||
.find(|ls| {
|
||||
@@ -6,7 +6,9 @@ use crate::job::Job;
|
||||
use super::*;
|
||||
|
||||
use helix_core::fuzzy::fuzzy_match;
|
||||
use helix_core::{encoding, line_ending, shellwords::Shellwords};
|
||||
use helix_core::indent::MAX_INDENT;
|
||||
use helix_core::{encoding, line_ending, path::get_canonicalized_path, shellwords::Shellwords};
|
||||
use helix_lsp::{OffsetEncoding, Url};
|
||||
use helix_view::document::DEFAULT_LANGUAGE_NAME;
|
||||
use helix_view::editor::{Action, CloseError, ConfigEvent};
|
||||
use serde_json::Value;
|
||||
@@ -475,8 +477,7 @@ fn set_indent_style(
|
||||
cx.editor.set_status(match style {
|
||||
Tabs => "tabs".to_owned(),
|
||||
Spaces(1) => "1 space".to_owned(),
|
||||
Spaces(n) if (2..=8).contains(&n) => format!("{} spaces", n),
|
||||
_ => unreachable!(), // Shouldn't happen.
|
||||
Spaces(n) => format!("{} spaces", n),
|
||||
});
|
||||
return Ok(());
|
||||
}
|
||||
@@ -488,7 +489,7 @@ fn set_indent_style(
|
||||
Some(arg) => arg
|
||||
.parse::<u8>()
|
||||
.ok()
|
||||
.filter(|n| (1..=8).contains(n))
|
||||
.filter(|n| (1..=MAX_INDENT).contains(n))
|
||||
.map(Spaces),
|
||||
_ => None,
|
||||
};
|
||||
@@ -573,7 +574,6 @@ fn set_line_ending(
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn earlier(
|
||||
cx: &mut compositor::Context,
|
||||
args: &[Cow<str>],
|
||||
@@ -674,13 +674,15 @@ pub fn write_all_impl(
|
||||
let mut errors: Vec<&'static str> = Vec::new();
|
||||
let config = cx.editor.config();
|
||||
let jobs = &mut cx.jobs;
|
||||
let current_view = view!(cx.editor);
|
||||
|
||||
let saves: Vec<_> = cx
|
||||
.editor
|
||||
.documents
|
||||
.values_mut()
|
||||
.filter_map(|doc| {
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.filter_map(|id| {
|
||||
let doc = doc!(cx.editor, &id);
|
||||
if !doc.is_modified() {
|
||||
return None;
|
||||
}
|
||||
@@ -691,22 +693,9 @@ pub fn write_all_impl(
|
||||
return None;
|
||||
}
|
||||
|
||||
// Look for a view to apply the formatting change to. If the document
|
||||
// is in the current view, just use that. Otherwise, since we don't
|
||||
// have any other metric available for better selection, just pick
|
||||
// the first view arbitrarily so that we still commit the document
|
||||
// state for undos. If somehow we have a document that has not been
|
||||
// initialized with any view, initialize it with the current view.
|
||||
let target_view = if doc.selections().contains_key(&current_view.id) {
|
||||
current_view.id
|
||||
} else if let Some(view) = doc.selections().keys().next() {
|
||||
*view
|
||||
} else {
|
||||
doc.ensure_view_init(current_view.id);
|
||||
current_view.id
|
||||
};
|
||||
|
||||
Some((doc.id(), target_view))
|
||||
// Look for a view to apply the formatting change to.
|
||||
let target_view = cx.editor.get_synced_view_id(doc.id());
|
||||
Some((id, target_view))
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -921,7 +910,7 @@ fn yank_main_selection_to_clipboard(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
yank_primary_selection_impl(cx.editor, '*');
|
||||
yank_primary_selection_impl(cx.editor, '+');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -956,7 +945,7 @@ fn yank_joined_to_clipboard(
|
||||
let doc = doc!(cx.editor);
|
||||
let default_sep = Cow::Borrowed(doc.line_ending.as_str());
|
||||
let separator = args.first().unwrap_or(&default_sep);
|
||||
yank_joined_impl(cx.editor, separator, '*');
|
||||
yank_joined_impl(cx.editor, separator, '+');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -969,7 +958,7 @@ fn yank_main_selection_to_primary_clipboard(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
yank_primary_selection_impl(cx.editor, '+');
|
||||
yank_primary_selection_impl(cx.editor, '*');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -985,7 +974,7 @@ fn yank_joined_to_primary_clipboard(
|
||||
let doc = doc!(cx.editor);
|
||||
let default_sep = Cow::Borrowed(doc.line_ending.as_str());
|
||||
let separator = args.first().unwrap_or(&default_sep);
|
||||
yank_joined_impl(cx.editor, separator, '+');
|
||||
yank_joined_impl(cx.editor, separator, '*');
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -998,7 +987,7 @@ fn paste_clipboard_after(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
paste(cx.editor, '*', Paste::After, 1);
|
||||
paste(cx.editor, '+', Paste::After, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1011,7 +1000,7 @@ fn paste_clipboard_before(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
paste(cx.editor, '*', Paste::Before, 1);
|
||||
paste(cx.editor, '+', Paste::Before, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1024,7 +1013,7 @@ fn paste_primary_clipboard_after(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
paste(cx.editor, '+', Paste::After, 1);
|
||||
paste(cx.editor, '*', Paste::After, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1037,7 +1026,7 @@ fn paste_primary_clipboard_before(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
paste(cx.editor, '+', Paste::Before, 1);
|
||||
paste(cx.editor, '*', Paste::Before, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1050,7 +1039,7 @@ fn replace_selections_with_clipboard(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
replace_with_yanked_impl(cx.editor, '*', 1);
|
||||
replace_with_yanked_impl(cx.editor, '+', 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1063,7 +1052,7 @@ fn replace_selections_with_primary_clipboard(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
replace_with_yanked_impl(cx.editor, '+', 1);
|
||||
replace_with_yanked_impl(cx.editor, '*', 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1502,7 +1491,7 @@ fn lsp_stop(
|
||||
|
||||
for doc in cx.editor.documents_mut() {
|
||||
if let Some(client) = doc.remove_language_server_by_name(ls_name) {
|
||||
doc.clear_diagnostics(client.id());
|
||||
doc.clear_diagnostics(Some(client.id()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2008,6 +1997,10 @@ fn language(
|
||||
|
||||
let id = doc.id();
|
||||
cx.editor.refresh_language_servers(id);
|
||||
let doc = doc_mut!(cx.editor);
|
||||
let diagnostics =
|
||||
Editor::doc_diagnostics(&cx.editor.language_servers, &cx.editor.diagnostics, doc);
|
||||
doc.replace_diagnostics(diagnostics, &[], None);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -2408,6 +2401,80 @@ fn redraw(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn move_buffer(
|
||||
cx: &mut compositor::Context,
|
||||
args: &[Cow<str>],
|
||||
event: PromptEvent,
|
||||
) -> anyhow::Result<()> {
|
||||
if event != PromptEvent::Validate {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ensure!(args.len() == 1, format!(":move takes one argument"));
|
||||
let doc = doc!(cx.editor);
|
||||
|
||||
let new_path = get_canonicalized_path(&PathBuf::from(args.first().unwrap().to_string()));
|
||||
let old_path = doc
|
||||
.path()
|
||||
.ok_or_else(|| anyhow!("Scratch buffer cannot be moved. Use :write instead"))?
|
||||
.clone();
|
||||
let old_path_as_url = doc.url().unwrap();
|
||||
let new_path_as_url = Url::from_file_path(&new_path).unwrap();
|
||||
|
||||
let edits: Vec<(
|
||||
helix_lsp::Result<helix_lsp::lsp::WorkspaceEdit>,
|
||||
OffsetEncoding,
|
||||
String,
|
||||
)> = doc
|
||||
.language_servers()
|
||||
.map(|lsp| {
|
||||
(
|
||||
lsp.prepare_file_rename(&old_path_as_url, &new_path_as_url),
|
||||
lsp.offset_encoding(),
|
||||
lsp.name().to_owned(),
|
||||
)
|
||||
})
|
||||
.filter(|(f, _, _)| f.is_some())
|
||||
.map(|(f, encoding, name)| (helix_lsp::block_on(f.unwrap()), encoding, name))
|
||||
.collect();
|
||||
|
||||
for (lsp_reply, encoding, name) in edits {
|
||||
match lsp_reply {
|
||||
Ok(edit) => {
|
||||
if let Err(e) = apply_workspace_edit(cx.editor, encoding, &edit) {
|
||||
log::error!(
|
||||
":move command failed to apply edits from lsp {}: {:?}",
|
||||
name,
|
||||
e
|
||||
);
|
||||
};
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("LSP {} failed to treat willRename request: {:?}", name, e);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let doc = doc_mut!(cx.editor);
|
||||
|
||||
doc.set_path(Some(new_path.as_path()));
|
||||
if let Err(e) = std::fs::rename(&old_path, &new_path) {
|
||||
doc.set_path(Some(old_path.as_path()));
|
||||
bail!("Could not move file: {}", e);
|
||||
};
|
||||
|
||||
doc.language_servers().for_each(|lsp| {
|
||||
lsp.did_file_rename(&old_path_as_url, &new_path_as_url);
|
||||
});
|
||||
|
||||
cx.editor
|
||||
.language_servers
|
||||
.file_event_handler
|
||||
.file_changed(new_path);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
TypableCommand {
|
||||
name: "quit",
|
||||
@@ -2531,7 +2598,7 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
TypableCommand {
|
||||
name: "indent-style",
|
||||
aliases: &[],
|
||||
doc: "Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.)",
|
||||
doc: "Set the indentation style for editing. ('t' for tabs or 1-16 for number of spaces.)",
|
||||
fun: set_indent_style,
|
||||
signature: CommandSignature::none(),
|
||||
},
|
||||
@@ -3008,6 +3075,13 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
|
||||
fun: redraw,
|
||||
signature: CommandSignature::none(),
|
||||
},
|
||||
TypableCommand {
|
||||
name: "move",
|
||||
aliases: &[],
|
||||
doc: "Move the current buffer and its corresponding file to a different path",
|
||||
fun: move_buffer,
|
||||
signature: CommandSignature::positional(&[completers::filename]),
|
||||
},
|
||||
];
|
||||
|
||||
pub static TYPABLE_COMMAND_MAP: Lazy<HashMap<&'static str, &'static TypableCommand>> =
|
@@ -145,7 +145,7 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
}
|
||||
};
|
||||
|
||||
let mut headings = vec!["Language", "LSP", "DAP"];
|
||||
let mut headings = vec!["Language", "LSP", "DAP", "Formatter"];
|
||||
|
||||
for feat in TsFeature::all() {
|
||||
headings.push(feat.short_title())
|
||||
@@ -179,7 +179,7 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
|
||||
syn_loader_conf
|
||||
.language
|
||||
.sort_unstable_by_key(|l| l.language_id.clone());
|
||||
.sort_unstable_by_key(|l| l.language_name.clone());
|
||||
|
||||
let check_binary = |cmd: Option<&str>| match cmd {
|
||||
Some(cmd) => match which::which(cmd) {
|
||||
@@ -190,7 +190,7 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
};
|
||||
|
||||
for lang in &syn_loader_conf.language {
|
||||
column(&lang.language_id, Color::Reset);
|
||||
column(&lang.language_name, Color::Reset);
|
||||
|
||||
let mut cmds = lang.language_servers.iter().filter_map(|ls| {
|
||||
syn_loader_conf
|
||||
@@ -203,8 +203,14 @@ pub fn languages_all() -> std::io::Result<()> {
|
||||
let dap = lang.debugger.as_ref().map(|dap| dap.command.as_str());
|
||||
check_binary(dap);
|
||||
|
||||
let formatter = lang
|
||||
.formatter
|
||||
.as_ref()
|
||||
.map(|formatter| formatter.command.as_str());
|
||||
check_binary(formatter);
|
||||
|
||||
for ts_feat in TsFeature::all() {
|
||||
match load_runtime_file(&lang.language_id, ts_feat.runtime_filename()).is_ok() {
|
||||
match load_runtime_file(&lang.language_name, ts_feat.runtime_filename()).is_ok() {
|
||||
true => column("✓", Color::Green),
|
||||
false => column("✘", Color::Red),
|
||||
}
|
||||
@@ -248,7 +254,7 @@ pub fn language(lang_str: String) -> std::io::Result<()> {
|
||||
let lang = match syn_loader_conf
|
||||
.language
|
||||
.iter()
|
||||
.find(|l| l.language_id == lang_str)
|
||||
.find(|l| l.language_name == lang_str)
|
||||
{
|
||||
Some(l) => l,
|
||||
None => {
|
||||
@@ -257,8 +263,11 @@ pub fn language(lang_str: String) -> std::io::Result<()> {
|
||||
let suggestions: Vec<&str> = syn_loader_conf
|
||||
.language
|
||||
.iter()
|
||||
.filter(|l| l.language_id.starts_with(lang_str.chars().next().unwrap()))
|
||||
.map(|l| l.language_id.as_str())
|
||||
.filter(|l| {
|
||||
l.language_name
|
||||
.starts_with(lang_str.chars().next().unwrap())
|
||||
})
|
||||
.map(|l| l.language_name.as_str())
|
||||
.collect();
|
||||
if !suggestions.is_empty() {
|
||||
let suggestions = suggestions.join(", ");
|
||||
@@ -285,6 +294,13 @@ pub fn language(lang_str: String) -> std::io::Result<()> {
|
||||
lang.debugger.as_ref().map(|dap| dap.command.to_string()),
|
||||
)?;
|
||||
|
||||
probe_protocol(
|
||||
"formatter",
|
||||
lang.formatter
|
||||
.as_ref()
|
||||
.map(|formatter| formatter.command.to_string()),
|
||||
)?;
|
||||
|
||||
for ts_feat in TsFeature::all() {
|
||||
probe_treesitter_feature(&lang_str, *ts_feat)?
|
||||
}
|
|
@@ -13,7 +13,6 @@ pub mod ui;
|
||||
use std::path::Path;
|
||||
|
||||
use ignore::DirEntry;
|
||||
pub use keymap::macros::*;
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn true_color() -> bool {
|
@@ -116,16 +116,18 @@ FLAGS:
|
||||
|
||||
setup_logging(args.verbosity).context("failed to initialize logging")?;
|
||||
|
||||
// Before setting the working directory, resolve all the paths in args.files
|
||||
for (path, _) in args.files.iter_mut() {
|
||||
*path = helix_core::path::get_canonicalized_path(path);
|
||||
}
|
||||
|
||||
// NOTE: Set the working directory early so the correct configuration is loaded. Be aware that
|
||||
// Application::new() depends on this logic so it must be updated if this changes.
|
||||
if let Some(path) = &args.working_directory {
|
||||
helix_loader::set_current_working_dir(path)?;
|
||||
}
|
||||
|
||||
// If the first file is a directory, it will be the working directory and a file picker will be opened
|
||||
if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) {
|
||||
} else if let Some((path, _)) = args.files.first().filter(|p| p.0.is_dir()) {
|
||||
// If the first file is a directory, it will be the working directory unless -w was specified
|
||||
helix_loader::set_current_working_dir(path)?;
|
||||
args.open_cwd = true; // Signal Application that we want to open the picker on "."
|
||||
}
|
||||
|
||||
let config = match Config::load_default() {
|
@@ -2,6 +2,7 @@ use crate::compositor::{Component, Context, Event, EventResult};
|
||||
use helix_view::{
|
||||
document::SavePoint,
|
||||
editor::CompleteAction,
|
||||
graphics::Margin,
|
||||
theme::{Modifier, Style},
|
||||
ViewId,
|
||||
};
|
||||
@@ -326,9 +327,18 @@ impl Completion {
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
let margin = if editor.menu_border() {
|
||||
Margin::vertical(1)
|
||||
} else {
|
||||
Margin::none()
|
||||
};
|
||||
|
||||
let popup = Popup::new(Self::ID, menu)
|
||||
.with_scrollbar(false)
|
||||
.ignore_escape_key(true);
|
||||
.ignore_escape_key(true)
|
||||
.margin(margin);
|
||||
|
||||
let mut completion = Self {
|
||||
popup,
|
||||
start_offset,
|
||||
@@ -569,6 +579,12 @@ impl Component for Completion {
|
||||
// clear area
|
||||
let background = cx.editor.theme.get("ui.popup");
|
||||
surface.clear_with(doc_area, background);
|
||||
|
||||
if cx.editor.popup_border() {
|
||||
use tui::widgets::{Block, Borders, Widget};
|
||||
Widget::render(Block::default().borders(Borders::ALL), doc_area, surface);
|
||||
}
|
||||
|
||||
markdown_doc.render(doc_area, surface, cx);
|
||||
}
|
||||
}
|
@@ -97,7 +97,8 @@ pub fn render_document(
|
||||
doc: &Document,
|
||||
offset: ViewPosition,
|
||||
doc_annotations: &TextAnnotations,
|
||||
highlight_iter: impl Iterator<Item = HighlightEvent>,
|
||||
syntax_highlight_iter: impl Iterator<Item = HighlightEvent>,
|
||||
overlay_highlight_iter: impl Iterator<Item = HighlightEvent>,
|
||||
theme: &Theme,
|
||||
line_decoration: &mut [Box<dyn LineDecoration + '_>],
|
||||
translated_positions: &mut [TranslatedPosition],
|
||||
@@ -109,7 +110,8 @@ pub fn render_document(
|
||||
offset,
|
||||
&doc.text_format(viewport.width, Some(theme)),
|
||||
doc_annotations,
|
||||
highlight_iter,
|
||||
syntax_highlight_iter,
|
||||
overlay_highlight_iter,
|
||||
theme,
|
||||
line_decoration,
|
||||
translated_positions,
|
||||
@@ -157,7 +159,8 @@ pub fn render_text<'t>(
|
||||
offset: ViewPosition,
|
||||
text_fmt: &TextFormat,
|
||||
text_annotations: &TextAnnotations,
|
||||
highlight_iter: impl Iterator<Item = HighlightEvent>,
|
||||
syntax_highlight_iter: impl Iterator<Item = HighlightEvent>,
|
||||
overlay_highlight_iter: impl Iterator<Item = HighlightEvent>,
|
||||
theme: &Theme,
|
||||
line_decorations: &mut [Box<dyn LineDecoration + '_>],
|
||||
translated_positions: &mut [TranslatedPosition],
|
||||
@@ -178,10 +181,16 @@ pub fn render_text<'t>(
|
||||
|
||||
let (mut formatter, mut first_visible_char_idx) =
|
||||
DocumentFormatter::new_at_prev_checkpoint(text, text_fmt, text_annotations, offset.anchor);
|
||||
let mut styles = StyleIter {
|
||||
let mut syntax_styles = StyleIter {
|
||||
text_style: renderer.text_style,
|
||||
active_highlights: Vec::with_capacity(64),
|
||||
highlight_iter,
|
||||
highlight_iter: syntax_highlight_iter,
|
||||
theme,
|
||||
};
|
||||
let mut overlay_styles = StyleIter {
|
||||
text_style: Style::default(),
|
||||
active_highlights: Vec::with_capacity(64),
|
||||
highlight_iter: overlay_highlight_iter,
|
||||
theme,
|
||||
};
|
||||
|
||||
@@ -193,7 +202,10 @@ pub fn render_text<'t>(
|
||||
};
|
||||
let mut is_in_indent_area = true;
|
||||
let mut last_line_indent_level = 0;
|
||||
let mut style_span = styles
|
||||
let mut syntax_style_span = syntax_styles
|
||||
.next()
|
||||
.unwrap_or_else(|| (Style::default(), usize::MAX));
|
||||
let mut overlay_style_span = overlay_styles
|
||||
.next()
|
||||
.unwrap_or_else(|| (Style::default(), usize::MAX));
|
||||
|
||||
@@ -221,9 +233,16 @@ pub fn render_text<'t>(
|
||||
|
||||
// skip any graphemes on visual lines before the block start
|
||||
if pos.row < row_off {
|
||||
if char_pos >= style_span.1 {
|
||||
style_span = if let Some(style_span) = styles.next() {
|
||||
style_span
|
||||
if char_pos >= syntax_style_span.1 {
|
||||
syntax_style_span = if let Some(syntax_style_span) = syntax_styles.next() {
|
||||
syntax_style_span
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if char_pos >= overlay_style_span.1 {
|
||||
overlay_style_span = if let Some(overlay_style_span) = overlay_styles.next() {
|
||||
overlay_style_span
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
@@ -260,8 +279,15 @@ pub fn render_text<'t>(
|
||||
}
|
||||
|
||||
// acquire the correct grapheme style
|
||||
if char_pos >= style_span.1 {
|
||||
style_span = styles.next().unwrap_or((Style::default(), usize::MAX));
|
||||
if char_pos >= syntax_style_span.1 {
|
||||
syntax_style_span = syntax_styles
|
||||
.next()
|
||||
.unwrap_or((Style::default(), usize::MAX));
|
||||
}
|
||||
if char_pos >= overlay_style_span.1 {
|
||||
overlay_style_span = overlay_styles
|
||||
.next()
|
||||
.unwrap_or((Style::default(), usize::MAX));
|
||||
}
|
||||
char_pos += grapheme.doc_chars();
|
||||
|
||||
@@ -275,22 +301,25 @@ pub fn render_text<'t>(
|
||||
pos,
|
||||
);
|
||||
|
||||
let grapheme_style = if let GraphemeSource::VirtualText { highlight } = grapheme.source {
|
||||
let style = renderer.text_style;
|
||||
let (syntax_style, overlay_style) =
|
||||
if let GraphemeSource::VirtualText { highlight } = grapheme.source {
|
||||
let mut style = renderer.text_style;
|
||||
if let Some(highlight) = highlight {
|
||||
style.patch(theme.highlight(highlight.0))
|
||||
} else {
|
||||
style
|
||||
style = style.patch(theme.highlight(highlight.0))
|
||||
}
|
||||
(style, Style::default())
|
||||
} else {
|
||||
style_span.0
|
||||
(syntax_style_span.0, overlay_style_span.0)
|
||||
};
|
||||
|
||||
let virt = grapheme.is_virtual();
|
||||
let is_virtual = grapheme.is_virtual();
|
||||
renderer.draw_grapheme(
|
||||
grapheme.grapheme,
|
||||
grapheme_style,
|
||||
virt,
|
||||
GraphemeStyle {
|
||||
syntax_style,
|
||||
overlay_style,
|
||||
},
|
||||
is_virtual,
|
||||
&mut last_line_indent_level,
|
||||
&mut is_in_indent_area,
|
||||
pos,
|
||||
@@ -322,6 +351,11 @@ pub struct TextRenderer<'a> {
|
||||
pub viewport: Rect,
|
||||
}
|
||||
|
||||
pub struct GraphemeStyle {
|
||||
syntax_style: Style,
|
||||
overlay_style: Style,
|
||||
}
|
||||
|
||||
impl<'a> TextRenderer<'a> {
|
||||
pub fn new(
|
||||
surface: &'a mut Surface,
|
||||
@@ -395,7 +429,7 @@ impl<'a> TextRenderer<'a> {
|
||||
pub fn draw_grapheme(
|
||||
&mut self,
|
||||
grapheme: Grapheme,
|
||||
mut style: Style,
|
||||
grapheme_style: GraphemeStyle,
|
||||
is_virtual: bool,
|
||||
last_indent_level: &mut usize,
|
||||
is_in_indent_area: &mut bool,
|
||||
@@ -405,9 +439,11 @@ impl<'a> TextRenderer<'a> {
|
||||
let is_whitespace = grapheme.is_whitespace();
|
||||
|
||||
// TODO is it correct to apply the whitespace style to all unicode white spaces?
|
||||
let mut style = grapheme_style.syntax_style;
|
||||
if is_whitespace {
|
||||
style = style.patch(self.whitespace_style);
|
||||
}
|
||||
style = style.patch(grapheme_style.overlay_style);
|
||||
|
||||
let width = grapheme.width();
|
||||
let space = if is_virtual { " " } else { &self.space };
|
@@ -124,16 +124,20 @@ impl EditorView {
|
||||
line_decorations.push(Box::new(line_decoration));
|
||||
}
|
||||
|
||||
let mut highlights =
|
||||
let syntax_highlights =
|
||||
Self::doc_syntax_highlights(doc, view.offset.anchor, inner.height, theme);
|
||||
let overlay_highlights = Self::overlay_syntax_highlights(
|
||||
|
||||
let mut overlay_highlights =
|
||||
Self::empty_highlight_iter(doc, view.offset.anchor, inner.height);
|
||||
let overlay_syntax_highlights = Self::overlay_syntax_highlights(
|
||||
doc,
|
||||
view.offset.anchor,
|
||||
inner.height,
|
||||
&text_annotations,
|
||||
);
|
||||
if !overlay_highlights.is_empty() {
|
||||
highlights = Box::new(syntax::merge(highlights, overlay_highlights));
|
||||
if !overlay_syntax_highlights.is_empty() {
|
||||
overlay_highlights =
|
||||
Box::new(syntax::merge(overlay_highlights, overlay_syntax_highlights));
|
||||
}
|
||||
|
||||
for diagnostic in Self::doc_diagnostics_highlights(doc, theme) {
|
||||
@@ -142,29 +146,28 @@ impl EditorView {
|
||||
if diagnostic.is_empty() {
|
||||
continue;
|
||||
}
|
||||
highlights = Box::new(syntax::merge(highlights, diagnostic));
|
||||
overlay_highlights = Box::new(syntax::merge(overlay_highlights, diagnostic));
|
||||
}
|
||||
|
||||
let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused {
|
||||
if is_focused {
|
||||
let highlights = syntax::merge(
|
||||
highlights,
|
||||
overlay_highlights,
|
||||
Self::doc_selection_highlights(
|
||||
editor.mode(),
|
||||
doc,
|
||||
view,
|
||||
theme,
|
||||
&config.cursor_shape,
|
||||
self.terminal_focused,
|
||||
),
|
||||
);
|
||||
let focused_view_elements = Self::highlight_focused_view_elements(view, doc, theme);
|
||||
if focused_view_elements.is_empty() {
|
||||
Box::new(highlights)
|
||||
overlay_highlights = Box::new(highlights)
|
||||
} else {
|
||||
Box::new(syntax::merge(highlights, focused_view_elements))
|
||||
overlay_highlights = Box::new(syntax::merge(highlights, focused_view_elements))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Box::new(highlights)
|
||||
};
|
||||
|
||||
let gutter_overflow = view.gutter_offset(doc) == 0;
|
||||
if !gutter_overflow {
|
||||
@@ -197,7 +200,8 @@ impl EditorView {
|
||||
doc,
|
||||
view.offset,
|
||||
&text_annotations,
|
||||
highlights,
|
||||
syntax_highlights,
|
||||
overlay_highlights,
|
||||
theme,
|
||||
&mut line_decorations,
|
||||
&mut translated_positions,
|
||||
@@ -257,16 +261,11 @@ impl EditorView {
|
||||
.for_each(|area| surface.set_style(area, ruler_theme))
|
||||
}
|
||||
|
||||
pub fn overlay_syntax_highlights(
|
||||
doc: &Document,
|
||||
anchor: usize,
|
||||
fn viewport_byte_range(
|
||||
text: helix_core::RopeSlice,
|
||||
row: usize,
|
||||
height: u16,
|
||||
text_annotations: &TextAnnotations,
|
||||
) -> Vec<(usize, std::ops::Range<usize>)> {
|
||||
let text = doc.text().slice(..);
|
||||
let row = text.char_to_line(anchor.min(text.len_chars()));
|
||||
|
||||
let range = {
|
||||
) -> std::ops::Range<usize> {
|
||||
// Calculate viewport byte ranges:
|
||||
// Saturating subs to make it inclusive zero indexing.
|
||||
let last_line = text.len_lines().saturating_sub(1);
|
||||
@@ -275,9 +274,26 @@ impl EditorView {
|
||||
let end = text.line_to_byte(last_visible_line + 1);
|
||||
|
||||
start..end
|
||||
};
|
||||
}
|
||||
|
||||
text_annotations.collect_overlay_highlights(range)
|
||||
pub fn empty_highlight_iter(
|
||||
doc: &Document,
|
||||
anchor: usize,
|
||||
height: u16,
|
||||
) -> Box<dyn Iterator<Item = HighlightEvent>> {
|
||||
let text = doc.text().slice(..);
|
||||
let row = text.char_to_line(anchor.min(text.len_chars()));
|
||||
|
||||
// Calculate viewport byte ranges:
|
||||
// Saturating subs to make it inclusive zero indexing.
|
||||
let range = Self::viewport_byte_range(text, row, height);
|
||||
Box::new(
|
||||
[HighlightEvent::Source {
|
||||
start: text.byte_to_char(range.start),
|
||||
end: text.byte_to_char(range.end),
|
||||
}]
|
||||
.into_iter(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Get syntax highlights for a document in a view represented by the first line
|
||||
@@ -292,16 +308,7 @@ impl EditorView {
|
||||
let text = doc.text().slice(..);
|
||||
let row = text.char_to_line(anchor.min(text.len_chars()));
|
||||
|
||||
let range = {
|
||||
// Calculate viewport byte ranges:
|
||||
// Saturating subs to make it inclusive zero indexing.
|
||||
let last_line = text.len_lines().saturating_sub(1);
|
||||
let last_visible_line = (row + height as usize).saturating_sub(1).min(last_line);
|
||||
let start = text.line_to_byte(row.min(last_line));
|
||||
let end = text.line_to_byte(last_visible_line + 1);
|
||||
|
||||
start..end
|
||||
};
|
||||
let range = Self::viewport_byte_range(text, row, height);
|
||||
|
||||
match doc.syntax() {
|
||||
Some(syntax) => {
|
||||
@@ -334,6 +341,20 @@ impl EditorView {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn overlay_syntax_highlights(
|
||||
doc: &Document,
|
||||
anchor: usize,
|
||||
height: u16,
|
||||
text_annotations: &TextAnnotations,
|
||||
) -> Vec<(usize, std::ops::Range<usize>)> {
|
||||
let text = doc.text().slice(..);
|
||||
let row = text.char_to_line(anchor.min(text.len_chars()));
|
||||
|
||||
let range = Self::viewport_byte_range(text, row, height);
|
||||
|
||||
text_annotations.collect_overlay_highlights(range)
|
||||
}
|
||||
|
||||
/// Get highlight spans for document diagnostics
|
||||
pub fn doc_diagnostics_highlights(
|
||||
doc: &Document,
|
||||
@@ -365,7 +386,7 @@ impl EditorView {
|
||||
let mut warning_vec = Vec::new();
|
||||
let mut error_vec = Vec::new();
|
||||
|
||||
for diagnostic in doc.shown_diagnostics() {
|
||||
for diagnostic in doc.diagnostics() {
|
||||
// Separate diagnostics into different Vecs by severity.
|
||||
let (vec, scope) = match diagnostic.severity {
|
||||
Some(Severity::Info) => (&mut info_vec, info),
|
||||
@@ -400,6 +421,7 @@ impl EditorView {
|
||||
view: &View,
|
||||
theme: &Theme,
|
||||
cursor_shape_config: &CursorShapeConfig,
|
||||
is_terminal_focused: bool,
|
||||
) -> Vec<(usize, std::ops::Range<usize>)> {
|
||||
let text = doc.text().slice(..);
|
||||
let selection = doc.selection(view.id);
|
||||
@@ -447,7 +469,7 @@ impl EditorView {
|
||||
|
||||
// Special-case: cursor at end of the rope.
|
||||
if range.head == range.anchor && range.head == text.len_chars() {
|
||||
if !selection_is_primary || cursor_is_block {
|
||||
if !selection_is_primary || (cursor_is_block && is_terminal_focused) {
|
||||
// Bar and underline cursors are drawn by the terminal
|
||||
// BUG: If the editor area loses focus while having a bar or
|
||||
// underline cursor (eg. when a regex prompt has focus) then
|
||||
@@ -470,13 +492,17 @@ impl EditorView {
|
||||
cursor_start
|
||||
};
|
||||
spans.push((selection_scope, range.anchor..selection_end));
|
||||
if !selection_is_primary || cursor_is_block {
|
||||
// add block cursors
|
||||
// skip primary cursor if terminal is unfocused - crossterm cursor is used in that case
|
||||
if !selection_is_primary || (cursor_is_block && is_terminal_focused) {
|
||||
spans.push((cursor_scope, cursor_start..range.head));
|
||||
}
|
||||
} else {
|
||||
// Reverse case.
|
||||
let cursor_end = next_grapheme_boundary(text, range.head);
|
||||
if !selection_is_primary || cursor_is_block {
|
||||
// add block cursors
|
||||
// skip primary cursor if terminal is unfocused - crossterm cursor is used in that case
|
||||
if !selection_is_primary || (cursor_is_block && is_terminal_focused) {
|
||||
spans.push((cursor_scope, range.head..cursor_end));
|
||||
}
|
||||
// non block cursors look like they exclude the cursor
|
||||
@@ -658,7 +684,7 @@ impl EditorView {
|
||||
.primary()
|
||||
.cursor(doc.text().slice(..));
|
||||
|
||||
let diagnostics = doc.shown_diagnostics().filter(|diagnostic| {
|
||||
let diagnostics = doc.diagnostics().iter().filter(|diagnostic| {
|
||||
diagnostic.range.start <= cursor && diagnostic.range.end >= cursor
|
||||
});
|
||||
|
||||
@@ -1276,8 +1302,6 @@ impl Component for EditorView {
|
||||
cx.editor.status_msg = None;
|
||||
|
||||
let mode = cx.editor.mode();
|
||||
let (view, _) = current!(cx.editor);
|
||||
let focus = view.id;
|
||||
|
||||
if let Some(on_next_key) = self.on_next_key.take() {
|
||||
// if there's a command waiting input, do that first
|
||||
@@ -1359,12 +1383,9 @@ impl Component for EditorView {
|
||||
return EventResult::Ignored(None);
|
||||
}
|
||||
|
||||
// if the focused view still exists and wasn't closed
|
||||
if cx.editor.tree.contains(focus) {
|
||||
let config = cx.editor.config();
|
||||
let mode = cx.editor.mode();
|
||||
let view = view_mut!(cx.editor, focus);
|
||||
let doc = doc_mut!(cx.editor, &view.doc);
|
||||
let (view, doc) = current!(cx.editor);
|
||||
|
||||
view.ensure_cursor_in_view(doc, config.scrolloff);
|
||||
|
||||
@@ -1373,7 +1394,6 @@ impl Component for EditorView {
|
||||
if mode != Mode::Insert {
|
||||
doc.append_changes_to_history(view);
|
||||
}
|
||||
}
|
||||
|
||||
EventResult::Consumed(callback)
|
||||
}
|
||||
@@ -1500,8 +1520,15 @@ impl Component for EditorView {
|
||||
|
||||
fn cursor(&self, _area: Rect, editor: &Editor) -> (Option<Position>, CursorKind) {
|
||||
match editor.cursor() {
|
||||
// All block cursors are drawn manually
|
||||
(pos, CursorKind::Block) => (pos, CursorKind::Hidden),
|
||||
// all block cursors are drawn manually
|
||||
(pos, CursorKind::Block) => {
|
||||
if self.terminal_focused {
|
||||
(pos, CursorKind::Hidden)
|
||||
} else {
|
||||
// use crossterm cursor when terminal loses focus
|
||||
(pos, CursorKind::Underline)
|
||||
}
|
||||
}
|
||||
cursor => cursor,
|
||||
}
|
||||
}
|
@@ -92,7 +92,9 @@ impl Component for SignatureHelp {
|
||||
Some(doc) => Markdown::new(doc.clone(), Arc::clone(&self.config_loader)),
|
||||
};
|
||||
let sig_doc = sig_doc.parse(Some(&cx.editor.theme));
|
||||
let sig_doc_area = area.clip_top(sig_text_area.height + 2);
|
||||
let sig_doc_area = area
|
||||
.clip_top(sig_text_area.height + 2)
|
||||
.clip_bottom(u16::from(cx.editor.popup_border()));
|
||||
let sig_doc_para = Paragraph::new(sig_doc)
|
||||
.wrap(Wrap { trim: false })
|
||||
.scroll((cx.scroll.unwrap_or_default() as u16, 0));
|
@@ -7,11 +7,18 @@ use crate::{
|
||||
use helix_core::fuzzy::MATCHER;
|
||||
use nucleo::pattern::{Atom, AtomKind, CaseMatching};
|
||||
use nucleo::{Config, Utf32Str};
|
||||
use tui::{buffer::Buffer as Surface, widgets::Table};
|
||||
use tui::{
|
||||
buffer::Buffer as Surface,
|
||||
widgets::{Block, Borders, Table, Widget},
|
||||
};
|
||||
|
||||
pub use tui::widgets::{Cell, Row};
|
||||
|
||||
use helix_view::{editor::SmartTabConfig, graphics::Rect, Editor};
|
||||
use helix_view::{
|
||||
editor::SmartTabConfig,
|
||||
graphics::{Margin, Rect},
|
||||
Editor,
|
||||
};
|
||||
use tui::layout::Constraint;
|
||||
|
||||
pub trait Item: Sync + Send + 'static {
|
||||
@@ -322,6 +329,15 @@ impl<T: Item + 'static> Component for Menu<T> {
|
||||
let selected = theme.get("ui.menu.selected");
|
||||
surface.clear_with(area, style);
|
||||
|
||||
let render_borders = cx.editor.menu_border();
|
||||
|
||||
let area = if render_borders {
|
||||
Widget::render(Block::default().borders(Borders::ALL), area, surface);
|
||||
area.inner(&Margin::vertical(1))
|
||||
} else {
|
||||
area
|
||||
};
|
||||
|
||||
let scroll = self.scroll;
|
||||
|
||||
let options: Vec<_> = self
|
||||
@@ -362,6 +378,9 @@ impl<T: Item + 'static> Component for Menu<T> {
|
||||
false,
|
||||
);
|
||||
|
||||
let render_borders = cx.editor.menu_border();
|
||||
|
||||
if !render_borders {
|
||||
if let Some(cursor) = self.cursor {
|
||||
let offset_from_top = cursor - scroll;
|
||||
let left = &mut surface[(area.left(), area.y + offset_from_top as u16)];
|
||||
@@ -372,6 +391,7 @@ impl<T: Item + 'static> Component for Menu<T> {
|
||||
)];
|
||||
right.set_style(selected);
|
||||
}
|
||||
}
|
||||
|
||||
let fits = len <= win_height;
|
||||
|
||||
@@ -385,12 +405,13 @@ impl<T: Item + 'static> Component for Menu<T> {
|
||||
for i in 0..win_height {
|
||||
cell = &mut surface[(area.right() - 1, area.top() + i as u16)];
|
||||
|
||||
cell.set_symbol("▐"); // right half block
|
||||
let half_block = if render_borders { "▌" } else { "▐" };
|
||||
|
||||
if scroll_line <= i && i < scroll_line + scroll_height {
|
||||
// Draw scroll thumb
|
||||
cell.set_symbol(half_block);
|
||||
cell.set_fg(scroll_style.fg.unwrap_or(helix_view::theme::Color::Reset));
|
||||
} else {
|
||||
} else if !render_borders {
|
||||
// Draw scroll track
|
||||
cell.set_fg(scroll_style.bg.unwrap_or(helix_view::theme::Color::Reset));
|
||||
}
|
@@ -339,7 +339,7 @@ pub mod completers {
|
||||
let language_ids = editor
|
||||
.syn_loader
|
||||
.language_configs()
|
||||
.map(|config| &config.language_id)
|
||||
.map(|config| &config.language_name)
|
||||
.chain(std::iter::once(&text));
|
||||
|
||||
fuzzy_match(input, language_ids, false)
|
@@ -480,8 +480,7 @@ impl<T: Item + 'static> Picker<T> {
|
||||
.find::<Overlay<DynamicPicker<T>>>()
|
||||
.map(|overlay| &mut overlay.content.file_picker),
|
||||
};
|
||||
let Some(picker) = picker
|
||||
else {
|
||||
let Some(picker) = picker else {
|
||||
log::info!("picker closed before syntax highlighting finished");
|
||||
return;
|
||||
};
|
||||
@@ -489,7 +488,15 @@ impl<T: Item + 'static> Picker<T> {
|
||||
let doc = match current_file {
|
||||
PathOrId::Id(doc_id) => doc_mut!(editor, &doc_id),
|
||||
PathOrId::Path(path) => match picker.preview_cache.get_mut(&path) {
|
||||
Some(CachedPreview::Document(ref mut doc)) => doc,
|
||||
Some(CachedPreview::Document(ref mut doc)) => {
|
||||
let diagnostics = Editor::doc_diagnostics(
|
||||
&editor.language_servers,
|
||||
&editor.diagnostics,
|
||||
doc,
|
||||
);
|
||||
doc.replace_diagnostics(diagnostics, &[], None);
|
||||
doc
|
||||
}
|
||||
_ => return,
|
||||
},
|
||||
};
|
||||
@@ -736,17 +743,20 @@ impl<T: Item + 'static> Picker<T> {
|
||||
}
|
||||
}
|
||||
|
||||
let mut highlights = EditorView::doc_syntax_highlights(
|
||||
let syntax_highlights = EditorView::doc_syntax_highlights(
|
||||
doc,
|
||||
offset.anchor,
|
||||
area.height,
|
||||
&cx.editor.theme,
|
||||
);
|
||||
|
||||
let mut overlay_highlights =
|
||||
EditorView::empty_highlight_iter(doc, offset.anchor, area.height);
|
||||
for spans in EditorView::doc_diagnostics_highlights(doc, &cx.editor.theme) {
|
||||
if spans.is_empty() {
|
||||
continue;
|
||||
}
|
||||
highlights = Box::new(helix_core::syntax::merge(highlights, spans));
|
||||
overlay_highlights = Box::new(helix_core::syntax::merge(overlay_highlights, spans));
|
||||
}
|
||||
let mut decorations: Vec<Box<dyn LineDecoration>> = Vec::new();
|
||||
|
||||
@@ -777,7 +787,8 @@ impl<T: Item + 'static> Picker<T> {
|
||||
offset,
|
||||
// TODO: compute text annotations asynchronously here (like inlay hints)
|
||||
&TextAnnotations::default(),
|
||||
highlights,
|
||||
syntax_highlights,
|
||||
overlay_highlights,
|
||||
&cx.editor.theme,
|
||||
&mut decorations,
|
||||
&mut [],
|
||||
@@ -795,7 +806,8 @@ impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
|
||||
// | | | |
|
||||
// +---------+ +---------+
|
||||
|
||||
let render_preview = self.show_preview && area.width > MIN_AREA_WIDTH_FOR_PREVIEW;
|
||||
let render_preview =
|
||||
self.show_preview && self.file_fn.is_some() && area.width > MIN_AREA_WIDTH_FOR_PREVIEW;
|
||||
|
||||
let picker_width = if render_preview {
|
||||
area.width / 2
|
@@ -3,7 +3,10 @@ use crate::{
|
||||
compositor::{Callback, Component, Context, Event, EventResult},
|
||||
ctrl, key,
|
||||
};
|
||||
use tui::buffer::Buffer as Surface;
|
||||
use tui::{
|
||||
buffer::Buffer as Surface,
|
||||
widgets::{Block, Borders, Widget},
|
||||
};
|
||||
|
||||
use helix_core::Position;
|
||||
use helix_view::{
|
||||
@@ -252,13 +255,29 @@ impl<T: Component> Component for Popup<T> {
|
||||
let background = cx.editor.theme.get("ui.popup");
|
||||
surface.clear_with(area, background);
|
||||
|
||||
let inner = area.inner(&self.margin);
|
||||
let render_borders = cx.editor.popup_border();
|
||||
|
||||
let inner = if self
|
||||
.contents
|
||||
.type_name()
|
||||
.starts_with("helix_term::ui::menu::Menu")
|
||||
{
|
||||
area
|
||||
} else {
|
||||
area.inner(&self.margin)
|
||||
};
|
||||
|
||||
let border = usize::from(render_borders);
|
||||
if render_borders {
|
||||
Widget::render(Block::default().borders(Borders::ALL), area, surface);
|
||||
}
|
||||
|
||||
self.contents.render(inner, surface, cx);
|
||||
|
||||
// render scrollbar if contents do not fit
|
||||
if self.has_scrollbar {
|
||||
let win_height = inner.height as usize;
|
||||
let len = self.child_size.1 as usize;
|
||||
let win_height = (inner.height as usize).saturating_sub(2 * border);
|
||||
let len = (self.child_size.1 as usize).saturating_sub(2 * border);
|
||||
let fits = len <= win_height;
|
||||
let scroll = self.scroll;
|
||||
let scroll_style = cx.editor.theme.get("ui.menu.scroll");
|
||||
@@ -274,14 +293,15 @@ impl<T: Component> Component for Popup<T> {
|
||||
|
||||
let mut cell;
|
||||
for i in 0..win_height {
|
||||
cell = &mut surface[(inner.right() - 1, inner.top() + i as u16)];
|
||||
cell = &mut surface[(inner.right() - 1, inner.top() + (border + i) as u16)];
|
||||
|
||||
cell.set_symbol("▐"); // right half block
|
||||
let half_block = if render_borders { "▌" } else { "▐" };
|
||||
|
||||
if scroll_line <= i && i < scroll_line + scroll_height {
|
||||
// Draw scroll thumb
|
||||
cell.set_symbol(half_block);
|
||||
cell.set_fg(scroll_style.fg.unwrap_or(helix_view::theme::Color::Reset));
|
||||
} else {
|
||||
} else if !render_borders {
|
||||
// Draw scroll track
|
||||
cell.set_fg(scroll_style.bg.unwrap_or(helix_view::theme::Color::Reset));
|
||||
}
|
@@ -227,7 +227,8 @@ where
|
||||
{
|
||||
let (warnings, errors) = context
|
||||
.doc
|
||||
.shown_diagnostics()
|
||||
.diagnostics()
|
||||
.iter()
|
||||
.fold((0, 0), |mut counts, diag| {
|
||||
use helix_core::diagnostic::Severity;
|
||||
match diag.severity {
|
@@ -480,3 +480,49 @@ fn bar() {#(\n|)#\
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_join_selections() -> anyhow::Result<()> {
|
||||
// normal join
|
||||
test((
|
||||
platform_line(indoc! {"\
|
||||
#[a|]#bc
|
||||
def
|
||||
"}),
|
||||
"J",
|
||||
platform_line(indoc! {"\
|
||||
#[a|]#bc def
|
||||
"}),
|
||||
))
|
||||
.await?;
|
||||
|
||||
// join with empty line
|
||||
test((
|
||||
platform_line(indoc! {"\
|
||||
#[a|]#bc
|
||||
|
||||
def
|
||||
"}),
|
||||
"JJ",
|
||||
platform_line(indoc! {"\
|
||||
#[a|]#bc def
|
||||
"}),
|
||||
))
|
||||
.await?;
|
||||
|
||||
// join with additional space in non-empty line
|
||||
test((
|
||||
platform_line(indoc! {"\
|
||||
#[a|]#bc
|
||||
|
||||
def
|
||||
"}),
|
||||
"JJ",
|
||||
platform_line(indoc! {"\
|
||||
#[a|]#bc def
|
||||
"}),
|
||||
))
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@@ -1,28 +1,28 @@
[package]
name = "helix-tui"
version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
description = """
A library to build rich terminal user interfaces or dashboards
"""
edition = "2021"
license = "MPL-2.0"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
description = """A library to build rich terminal user interfaces or dashboards"""
include = ["src/**/*", "README.md"]
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
rust-version.workspace = true
categories.workspace = true
repository.workspace = true
homepage.workspace = true

[features]
default = ["crossterm"]

[dependencies]
helix-view = { path = "../helix-view", features = ["term"] }
helix-core = { path = "../helix-core" }

bitflags = "2.4"
cassowary = "0.3"
unicode-segmentation = "1.10"
crossterm = { version = "0.27", optional = true }
termini = "1.0"
serde = { version = "1", "optional" = true, features = ["derive"]}
once_cell = "1.18"
once_cell = "1.19"
log = "~0.4"
helix-view = { version = "0.6", path = "../helix-view", features = ["term"] }
helix-core = { version = "0.6", path = "../helix-core" }
@@ -32,10 +32,21 @@ fn vte_version() -> Option<usize> {
}

/// Describes terminal capabilities like extended underline, truecolor, etc.
#[derive(Copy, Clone, Debug, Default)]
#[derive(Clone, Debug)]
struct Capabilities {
    /// Support for undercurled, underdashed, etc.
    has_extended_underlines: bool,
    /// Support for resetting the cursor style back to normal.
    reset_cursor_command: String,
}

impl Default for Capabilities {
    fn default() -> Self {
        Self {
            has_extended_underlines: false,
            reset_cursor_command: "\x1B[0 q".to_string(),
        }
    }
}

impl Capabilities {
@@ -54,6 +65,10 @@ impl Capabilities {
                || t.extended_cap("Su").is_some()
                || vte_version() >= Some(5102)
                || matches!(term_program().as_deref(), Some("WezTerm")),
            reset_cursor_command: t
                .utf8_string_cap(termini::StringCapability::CursorNormal)
                .unwrap_or("\x1B[0 q")
                .to_string(),
        },
    }
}
@@ -154,7 +169,8 @@ where

    fn restore(&mut self, config: Config) -> io::Result<()> {
        // reset cursor shape
        write!(self.buffer, "\x1B[0 q")?;
        self.buffer
            .write_all(self.capabilities.reset_cursor_command.as_bytes())?;
        if config.enable_mouse_capture {
            execute!(self.buffer, DisableMouseCapture)?;
        }
@@ -213,7 +213,7 @@ impl Buffer {
            && y < self.area.bottom()
    }

    /// Returns the index in the Vec<Cell> for the given global (x, y) coordinates.
    /// Returns the index in the `Vec<Cell>` for the given global (x, y) coordinates.
    ///
    /// Global coordinates are offset by the Buffer's area offset (`x`/`y`).
    ///
@@ -242,7 +242,7 @@ impl Buffer {
        ((y - self.area.y) as usize) * (self.area.width as usize) + ((x - self.area.x) as usize)
    }

    /// Returns the index in the Vec<Cell> for the given global (x, y) coordinates,
    /// Returns the index in the `Vec<Cell>` for the given global (x, y) coordinates,
    /// or `None` if the coordinates are outside the buffer's area.
    fn index_of_opt(&self, x: u16, y: u16) -> Option<usize> {
        if self.in_bounds(x, y) {
@@ -1,24 +1,25 @@
[package]
name = "helix-vcs"
version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
rust-version.workspace = true
categories.workspace = true
repository.workspace = true
homepage.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
helix-core = { version = "0.6", path = "../helix-core" }
helix-event = { version = "0.6", path = "../helix-event" }
helix-core = { path = "../helix-core" }
helix-event = { path = "../helix-event" }

tokio = { version = "1", features = ["rt", "rt-multi-thread", "time", "sync", "parking_lot", "macros"] }
parking_lot = "0.12"
arc-swap = { version = "1.6.0" }

gix = { version = "0.55.0", default-features = false , optional = true }
gix = { version = "0.57.1", default-features = false , optional = true }
imara-diff = "0.1.5"
anyhow = "1"
@@ -28,4 +29,4 @@ log = "0.4"
git = ["gix"]

[dev-dependencies]
tempfile = "3.8"
tempfile = "3.9"
@@ -3,7 +3,7 @@ use arc_swap::ArcSwap;
use std::path::Path;
use std::sync::Arc;

use gix::objs::tree::EntryMode;
use gix::objs::tree::EntryKind;
use gix::sec::trust::DefaultForLevel;
use gix::{Commit, ObjectId, Repository, ThreadSafeRepository};

@@ -128,12 +128,12 @@ fn find_file_in_commit(repo: &Repository, commit: &Commit, file: &Path) -> Resul
    let tree_entry = tree
        .lookup_entry_by_path(rel_path, &mut Vec::new())?
        .context("file is untracked")?;
    match tree_entry.mode() {
    match tree_entry.mode().kind() {
        // not a file, everything is new, do not show diff
        mode @ (EntryMode::Tree | EntryMode::Commit | EntryMode::Link) => {
        mode @ (EntryKind::Tree | EntryKind::Commit | EntryKind::Link) => {
            bail!("entry at {} is not a file but a {mode:?}", file.display())
        }
        // found a file
        EntryMode::Blob | EntryMode::BlobExecutable => Ok(tree_entry.object_id()),
        EntryKind::Blob | EntryKind::BlobExecutable => Ok(tree_entry.object_id()),
    }
}
@@ -1,32 +1,34 @@
|
||||
[package]
|
||||
name = "helix-view"
|
||||
version = "0.6.0"
|
||||
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
|
||||
edition = "2021"
|
||||
license = "MPL-2.0"
|
||||
description = "UI abstractions for use in backends"
|
||||
categories = ["editor"]
|
||||
repository = "https://github.com/helix-editor/helix"
|
||||
homepage = "https://helix-editor.com"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
categories.workspace = true
|
||||
repository.workspace = true
|
||||
homepage.workspace = true
|
||||
|
||||
[features]
|
||||
default = []
|
||||
term = ["crossterm"]
|
||||
|
||||
[dependencies]
|
||||
helix-core = { path = "../helix-core" }
|
||||
helix-event = { path = "../helix-event" }
|
||||
helix-loader = { path = "../helix-loader" }
|
||||
helix-lsp = { path = "../helix-lsp" }
|
||||
helix-dap = { path = "../helix-dap" }
|
||||
helix-vcs = { path = "../helix-vcs" }
|
||||
|
||||
bitflags = "2.4"
|
||||
anyhow = "1"
|
||||
helix-core = { version = "0.6", path = "../helix-core" }
|
||||
helix-event = { version = "0.6", path = "../helix-event" }
|
||||
helix-loader = { version = "0.6", path = "../helix-loader" }
|
||||
helix-lsp = { version = "0.6", path = "../helix-lsp" }
|
||||
helix-dap = { version = "0.6", path = "../helix-dap" }
|
||||
crossterm = { version = "0.27", optional = true }
|
||||
helix-vcs = { version = "0.6", path = "../helix-vcs" }
|
||||
|
||||
# Conversion traits
|
||||
once_cell = "1.18"
|
||||
url = "2"
|
||||
once_cell = "1.19"
|
||||
url = "2.5.0"
|
||||
|
||||
arc-swap = { version = "1.6.0" }
|
||||
|
||||
@@ -43,12 +45,12 @@ serde_json = "1.0"
|
||||
toml = "0.7"
|
||||
log = "~0.4"
|
||||
|
||||
which = "4.4"
|
||||
which = "5.0.0"
|
||||
parking_lot = "0.12.1"
|
||||
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
clipboard-win = { version = "4.5", features = ["std"] }
|
||||
clipboard-win = { version = "5.0", features = ["std"] }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
libc = "0.2"
|
||||
|
@@ -4,10 +4,12 @@ use arc_swap::ArcSwap;
|
||||
use futures_util::future::BoxFuture;
|
||||
use futures_util::FutureExt;
|
||||
use helix_core::auto_pairs::AutoPairs;
|
||||
use helix_core::chars::char_is_word;
|
||||
use helix_core::doc_formatter::TextFormat;
|
||||
use helix_core::encoding::Encoding;
|
||||
use helix_core::syntax::{Highlight, LanguageServerFeature};
|
||||
use helix_core::text_annotations::{InlineAnnotation, TextAnnotations};
|
||||
use helix_lsp::util::lsp_pos_to_pos;
|
||||
use helix_vcs::{DiffHandle, DiffProviderRegistry};
|
||||
|
||||
use ::parking_lot::Mutex;
|
||||
@@ -1075,14 +1077,6 @@ impl Document {
|
||||
};
|
||||
}
|
||||
|
||||
/// Set the programming language for the file if you know the name (scope) but don't have the
|
||||
/// [`syntax::LanguageConfiguration`] for it.
|
||||
pub fn set_language2(&mut self, scope: &str, config_loader: Arc<syntax::Loader>) {
|
||||
let language_config = config_loader.language_config_for_scope(scope);
|
||||
|
||||
self.set_language(language_config, Some(config_loader));
|
||||
}
|
||||
|
||||
/// Set the programming language for the file if you know the language but don't have the
|
||||
/// [`syntax::LanguageConfiguration`] for it.
|
||||
pub fn set_language_by_language_id(
|
||||
@@ -1091,7 +1085,7 @@ impl Document {
|
||||
config_loader: Arc<syntax::Loader>,
|
||||
) -> anyhow::Result<()> {
|
||||
let language_config = config_loader
|
||||
.language_config_for_language_id(language_id)
|
||||
.language_config_for_language_name(language_id)
|
||||
.ok_or_else(|| anyhow!("invalid language id: {}", language_id))?;
|
||||
self.set_language(Some(language_config), Some(config_loader));
|
||||
Ok(())
|
||||
@@ -1213,23 +1207,45 @@ impl Document {
|
||||
|
||||
let changes = transaction.changes();
|
||||
|
||||
changes.update_positions(
|
||||
self.diagnostics
|
||||
.iter_mut()
|
||||
.map(|diagnostic| (&mut diagnostic.range.start, Assoc::After)),
|
||||
);
|
||||
changes.update_positions(
|
||||
self.diagnostics
|
||||
.iter_mut()
|
||||
.map(|diagnostic| (&mut diagnostic.range.end, Assoc::After)),
|
||||
);
|
||||
// map state.diagnostics over changes::map_pos too
|
||||
for diagnostic in &mut self.diagnostics {
|
||||
diagnostic.line = self.text.char_to_line(diagnostic.range.start);
|
||||
// map diagnostics over changes too
|
||||
changes.update_positions(self.diagnostics.iter_mut().map(|diagnostic| {
|
||||
let assoc = if diagnostic.starts_at_word {
|
||||
Assoc::BeforeWord
|
||||
} else {
|
||||
Assoc::After
|
||||
};
|
||||
(&mut diagnostic.range.start, assoc)
|
||||
}));
|
||||
changes.update_positions(self.diagnostics.iter_mut().filter_map(|diagnostic| {
|
||||
if diagnostic.zero_width {
|
||||
// for zero width diagnostics treat the diagnostic as a point
|
||||
// rather than a range
|
||||
return None;
|
||||
}
|
||||
let assoc = if diagnostic.ends_at_word {
|
||||
Assoc::AfterWord
|
||||
} else {
|
||||
Assoc::Before
|
||||
};
|
||||
Some((&mut diagnostic.range.end, assoc))
|
||||
}));
|
||||
self.diagnostics.retain_mut(|diagnostic| {
|
||||
if diagnostic.zero_width {
|
||||
diagnostic.range.end = diagnostic.range.start
|
||||
} else if diagnostic.range.start >= diagnostic.range.end {
|
||||
return false;
|
||||
}
|
||||
diagnostic.line = self.text.char_to_line(diagnostic.range.start);
|
||||
true
|
||||
});
|
||||
|
||||
self.diagnostics
|
||||
.sort_unstable_by_key(|diagnostic| diagnostic.range);
|
||||
self.diagnostics.sort_unstable_by_key(|diagnostic| {
|
||||
(
|
||||
diagnostic.range,
|
||||
diagnostic.severity,
|
||||
diagnostic.language_server_id,
|
||||
)
|
||||
});
|
||||
|
||||
// Update the inlay hint annotations' positions, helping ensure they are displayed in the proper place
|
||||
let apply_inlay_hint_changes = |annotations: &mut Rc<[InlineAnnotation]>| {
|
||||
@@ -1514,7 +1530,7 @@ impl Document {
|
||||
pub fn language_name(&self) -> Option<&str> {
|
||||
self.language
|
||||
.as_ref()
|
||||
.map(|language| language.language_id.as_str())
|
||||
.map(|language| language.language_name.as_str())
|
||||
}
|
||||
|
||||
/// Language ID for the document. Either the `language-id`,
|
||||
@@ -1692,32 +1708,134 @@ impl Document {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn lsp_diagnostic_to_diagnostic(
|
||||
text: &Rope,
|
||||
language_config: Option<&LanguageConfiguration>,
|
||||
diagnostic: &helix_lsp::lsp::Diagnostic,
|
||||
language_server_id: usize,
|
||||
offset_encoding: helix_lsp::OffsetEncoding,
|
||||
) -> Option<Diagnostic> {
|
||||
use helix_core::diagnostic::{Range, Severity::*};
|
||||
|
||||
// TODO: convert inside server
|
||||
let start =
|
||||
if let Some(start) = lsp_pos_to_pos(text, diagnostic.range.start, offset_encoding) {
|
||||
start
|
||||
} else {
|
||||
log::warn!("lsp position out of bounds - {:?}", diagnostic);
|
||||
return None;
|
||||
};
|
||||
|
||||
let end = if let Some(end) = lsp_pos_to_pos(text, diagnostic.range.end, offset_encoding) {
|
||||
end
|
||||
} else {
|
||||
log::warn!("lsp position out of bounds - {:?}", diagnostic);
|
||||
return None;
|
||||
};
|
||||
|
||||
let severity = diagnostic.severity.map(|severity| match severity {
|
||||
lsp::DiagnosticSeverity::ERROR => Error,
|
||||
lsp::DiagnosticSeverity::WARNING => Warning,
|
||||
lsp::DiagnosticSeverity::INFORMATION => Info,
|
||||
lsp::DiagnosticSeverity::HINT => Hint,
|
||||
severity => unreachable!("unrecognized diagnostic severity: {:?}", severity),
|
||||
});
|
||||
|
||||
if let Some(lang_conf) = language_config {
|
||||
if let Some(severity) = severity {
|
||||
if severity < lang_conf.diagnostic_severity {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
use helix_core::diagnostic::{DiagnosticTag, NumberOrString};
|
||||
|
||||
let code = match diagnostic.code.clone() {
|
||||
Some(x) => match x {
|
||||
lsp::NumberOrString::Number(x) => Some(NumberOrString::Number(x)),
|
||||
lsp::NumberOrString::String(x) => Some(NumberOrString::String(x)),
|
||||
},
|
||||
None => None,
|
||||
};
|
||||
|
||||
let tags = if let Some(tags) = &diagnostic.tags {
|
||||
let new_tags = tags
|
||||
.iter()
|
||||
.filter_map(|tag| match *tag {
|
||||
lsp::DiagnosticTag::DEPRECATED => Some(DiagnosticTag::Deprecated),
|
||||
lsp::DiagnosticTag::UNNECESSARY => Some(DiagnosticTag::Unnecessary),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
new_tags
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let ends_at_word =
|
||||
start != end && end != 0 && text.get_char(end - 1).map_or(false, char_is_word);
|
||||
let starts_at_word = start != end && text.get_char(start).map_or(false, char_is_word);
|
||||
|
||||
Some(Diagnostic {
|
||||
range: Range { start, end },
|
||||
ends_at_word,
|
||||
starts_at_word,
|
||||
zero_width: start == end,
|
||||
line: diagnostic.range.start.line as usize,
|
||||
message: diagnostic.message.clone(),
|
||||
severity,
|
||||
code,
|
||||
tags,
|
||||
source: diagnostic.source.clone(),
|
||||
data: diagnostic.data.clone(),
|
||||
language_server_id,
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn diagnostics(&self) -> &[Diagnostic] {
|
||||
&self.diagnostics
|
||||
}
|
||||
|
||||
pub fn shown_diagnostics(&self) -> impl Iterator<Item = &Diagnostic> + DoubleEndedIterator {
|
||||
self.diagnostics.iter().filter(|d| {
|
||||
self.language_servers_with_feature(LanguageServerFeature::Diagnostics)
|
||||
.any(|ls| ls.id() == d.language_server_id)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn replace_diagnostics(
|
||||
&mut self,
|
||||
mut diagnostics: Vec<Diagnostic>,
|
||||
language_server_id: usize,
|
||||
diagnostics: impl IntoIterator<Item = Diagnostic>,
|
||||
unchanged_sources: &[String],
|
||||
language_server_id: Option<usize>,
|
||||
) {
|
||||
if unchanged_sources.is_empty() {
|
||||
self.clear_diagnostics(language_server_id);
|
||||
self.diagnostics.append(&mut diagnostics);
|
||||
self.diagnostics
|
||||
.sort_unstable_by_key(|diagnostic| diagnostic.range);
|
||||
} else {
|
||||
self.diagnostics.retain(|d| {
|
||||
if language_server_id.map_or(false, |id| id != d.language_server_id) {
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn clear_diagnostics(&mut self, language_server_id: usize) {
|
||||
self.diagnostics
|
||||
.retain(|d| d.language_server_id != language_server_id);
|
||||
if let Some(source) = &d.source {
|
||||
unchanged_sources.contains(source)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
}
|
||||
self.diagnostics.extend(diagnostics);
|
||||
self.diagnostics.sort_unstable_by_key(|diagnostic| {
|
||||
(
|
||||
diagnostic.range,
|
||||
diagnostic.severity,
|
||||
diagnostic.language_server_id,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
/// clears diagnostics for a given language server id if set, otherwise all diagnostics are cleared
|
||||
pub fn clear_diagnostics(&mut self, language_server_id: Option<usize>) {
|
||||
if let Some(id) = language_server_id {
|
||||
self.diagnostics.retain(|d| d.language_server_id != id);
|
||||
} else {
|
||||
self.diagnostics.clear();
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the document's auto pairs. If the document has a recognized
|
||||
|
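The replace_diagnostics change above makes the method take any iterator of diagnostics plus a list of unchanged sources and an optional language server id. A minimal sketch of that retention rule over a simplified stand-in type (names here are illustrative, not the helix-view API): diagnostics from unchanged sources or from other servers are kept, the rest are replaced by the freshly received set.

// Simplified stand-in for helix_core::Diagnostic; illustrative only.
#[derive(Debug, Clone)]
struct Diag {
    source: Option<String>,
    language_server_id: usize,
}

// Sketch of the replace_diagnostics retention rule: keep diagnostics that
// belong to a different server, or whose source is listed as unchanged;
// then append the freshly received set.
fn replace_diagnostics(
    existing: &mut Vec<Diag>,
    new: impl IntoIterator<Item = Diag>,
    unchanged_sources: &[String],
    language_server_id: Option<usize>,
) {
    if unchanged_sources.is_empty() {
        // No unchanged sources: clear everything for this server (or all, if None).
        existing.retain(|d| language_server_id.map_or(false, |id| id != d.language_server_id));
    } else {
        existing.retain(|d| {
            if language_server_id.map_or(false, |id| id != d.language_server_id) {
                return true;
            }
            d.source
                .as_ref()
                .map_or(false, |s| unchanged_sources.contains(s))
        });
    }
    existing.extend(new);
}

fn main() {
    let mut diags = vec![
        Diag { source: Some("rustc".into()), language_server_id: 0 },
        Diag { source: Some("clippy".into()), language_server_id: 0 },
        Diag { source: Some("other-ls".into()), language_server_id: 1 },
    ];
    // Server 0 re-publishes, but "clippy" results are marked unchanged.
    replace_diagnostics(
        &mut diags,
        [Diag { source: Some("rustc".into()), language_server_id: 0 }],
        &["clippy".to_string()],
        Some(0),
    );
    // Server 1's diagnostic and the unchanged "clippy" one survive.
    assert_eq!(diags.len(), 3);
}
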
@@ -42,10 +42,9 @@ use anyhow::{anyhow, bail, Error};
|
||||
pub use helix_core::diagnostic::Severity;
|
||||
use helix_core::{
|
||||
auto_pairs::AutoPairs,
|
||||
syntax::{self, AutoPairConfig, SoftWrap},
|
||||
Change, LineEnding, NATIVE_LINE_ENDING,
|
||||
syntax::{self, AutoPairConfig, IndentationHeuristic, LanguageServerFeature, SoftWrap},
|
||||
Change, LineEnding, Position, Selection, NATIVE_LINE_ENDING,
|
||||
};
|
||||
use helix_core::{Position, Selection};
|
||||
use helix_dap as dap;
|
||||
use helix_lsp::lsp;
|
||||
|
||||
@@ -291,6 +290,11 @@ pub struct Config {
|
||||
pub insert_final_newline: bool,
|
||||
/// Enables smart tab
|
||||
pub smart_tab: Option<SmartTabConfig>,
|
||||
/// Draw border around popups.
|
||||
pub popup_border: PopupBorderConfig,
|
||||
/// Which indent heuristic to use when a new line is inserted
|
||||
#[serde(default)]
|
||||
pub indent_heuristic: IndentationHeuristic,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq, PartialOrd, Ord)]
|
||||
@@ -755,12 +759,13 @@ impl Default for IndentGuidesConfig {
|
||||
}
|
||||
|
||||
/// Line ending configuration.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum LineEndingConfig {
|
||||
/// The platform's native line ending.
|
||||
///
|
||||
/// `crlf` on Windows, otherwise `lf`.
|
||||
#[default]
|
||||
Native,
|
||||
/// Line feed.
|
||||
LF,
|
||||
@@ -777,12 +782,6 @@ pub enum LineEndingConfig {
|
||||
Nel,
|
||||
}
|
||||
|
||||
impl Default for LineEndingConfig {
|
||||
fn default() -> Self {
|
||||
LineEndingConfig::Native
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LineEndingConfig> for LineEnding {
|
||||
fn from(line_ending: LineEndingConfig) -> Self {
|
||||
match line_ending {
|
||||
@@ -799,6 +798,15 @@ impl From<LineEndingConfig> for LineEnding {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum PopupBorderConfig {
|
||||
None,
|
||||
All,
|
||||
Popup,
|
||||
Menu,
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
@@ -846,6 +854,8 @@ impl Default for Config {
|
||||
default_line_ending: LineEndingConfig::default(),
|
||||
insert_final_newline: true,
|
||||
smart_tab: Some(SmartTabConfig::default()),
|
||||
popup_border: PopupBorderConfig::None,
|
||||
indent_heuristic: IndentationHeuristic::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1065,6 +1075,16 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn popup_border(&self) -> bool {
|
||||
self.config().popup_border == PopupBorderConfig::All
|
||||
|| self.config().popup_border == PopupBorderConfig::Popup
|
||||
}
|
||||
|
||||
pub fn menu_border(&self) -> bool {
|
||||
self.config().popup_border == PopupBorderConfig::All
|
||||
|| self.config().popup_border == PopupBorderConfig::Menu
|
||||
}
|
||||
|
||||
pub fn apply_motion<F: Fn(&mut Self) + 'static>(&mut self, motion: F) {
|
||||
motion(self);
|
||||
self.last_motion = Some(Box::new(motion));
|
||||
@@ -1457,6 +1477,10 @@ impl Editor {
|
||||
self.config.clone(),
|
||||
)?;
|
||||
|
||||
let diagnostics =
|
||||
Editor::doc_diagnostics(&self.language_servers, &self.diagnostics, &doc);
|
||||
doc.replace_diagnostics(diagnostics, &[], None);
|
||||
|
||||
if let Some(diff_base) = self.diff_providers.get_diff_base(&path) {
|
||||
doc.set_diff_base(diff_base);
|
||||
}
|
||||
@@ -1686,6 +1710,60 @@ impl Editor {
|
||||
.find(|doc| doc.path().map(|p| p == path.as_ref()).unwrap_or(false))
|
||||
}
|
||||
|
||||
/// Returns all supported diagnostics for the document
|
||||
pub fn doc_diagnostics<'a>(
|
||||
language_servers: &'a helix_lsp::Registry,
|
||||
diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
|
||||
document: &Document,
|
||||
) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
|
||||
Editor::doc_diagnostics_with_filter(language_servers, diagnostics, document, |_, _| true)
|
||||
}
|
||||
|
||||
/// Returns all supported diagnostics for the document
|
||||
/// filtered by `filter` which is invocated with the raw `lsp::Diagnostic` and the language server id it came from
|
||||
pub fn doc_diagnostics_with_filter<'a>(
|
||||
language_servers: &'a helix_lsp::Registry,
|
||||
diagnostics: &'a BTreeMap<lsp::Url, Vec<(lsp::Diagnostic, usize)>>,
|
||||
|
||||
document: &Document,
|
||||
filter: impl Fn(&lsp::Diagnostic, usize) -> bool + 'a,
|
||||
) -> impl Iterator<Item = helix_core::Diagnostic> + 'a {
|
||||
let text = document.text().clone();
|
||||
let language_config = document.language.clone();
|
||||
document
|
||||
.path()
|
||||
.and_then(|path| url::Url::from_file_path(path).ok()) // TODO log error?
|
||||
.and_then(|uri| diagnostics.get(&uri))
|
||||
.map(|diags| {
|
||||
diags.iter().filter_map(move |(diagnostic, lsp_id)| {
|
||||
let ls = language_servers.get_by_id(*lsp_id)?;
|
||||
language_config
|
||||
.as_ref()
|
||||
.and_then(|c| {
|
||||
c.language_servers.iter().find(|features| {
|
||||
features.name == ls.name()
|
||||
&& features.has_feature(LanguageServerFeature::Diagnostics)
|
||||
})
|
||||
})
|
||||
.and_then(|_| {
|
||||
if filter(diagnostic, *lsp_id) {
|
||||
Document::lsp_diagnostic_to_diagnostic(
|
||||
&text,
|
||||
language_config.as_deref(),
|
||||
diagnostic,
|
||||
*lsp_id,
|
||||
ls.offset_encoding(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
.into_iter()
|
||||
.flatten()
|
||||
}
|
||||
|
||||
/// Gets the primary cursor position in screen coordinates,
|
||||
/// or `None` if the primary cursor is not visible on screen.
|
||||
pub fn cursor(&self) -> (Option<Position>, CursorKind) {
|
||||
@@ -1832,6 +1910,30 @@ impl Editor {
|
||||
.as_ref()
|
||||
.and_then(|debugger| debugger.current_stack_frame())
|
||||
}
|
||||
|
||||
/// Returns the id of a view that this doc contains a selection for,
|
||||
/// making sure it is synced with the current changes
|
||||
/// if possible or there are no selections returns current_view
|
||||
/// otherwise uses an arbitrary view
|
||||
pub fn get_synced_view_id(&mut self, id: DocumentId) -> ViewId {
|
||||
let current_view = view_mut!(self);
|
||||
let doc = self.documents.get_mut(&id).unwrap();
|
||||
if doc.selections().contains_key(¤t_view.id) {
|
||||
// only need to sync current view if this is not the current doc
|
||||
if current_view.doc != id {
|
||||
current_view.sync_changes(doc);
|
||||
}
|
||||
current_view.id
|
||||
} else if let Some(view_id) = doc.selections().keys().next() {
|
||||
let view_id = *view_id;
|
||||
let view = self.tree.get_mut(view_id);
|
||||
view.sync_changes(doc);
|
||||
view_id
|
||||
} else {
|
||||
doc.ensure_view_init(current_view.id);
|
||||
current_view.id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn try_restore_indent(doc: &mut Document, view: &mut View) {
|
||||
@@ -9,8 +9,7 @@ use crate::{
};

fn count_digits(n: usize) -> usize {
    // TODO: use checked_log10 when MSRV reaches 1.67
    std::iter::successors(Some(n), |&n| (n >= 10).then_some(n / 10)).count()
    (usize::checked_ilog10(n).unwrap_or(0) + 1) as usize
}

pub type GutterFn<'doc> = Box<dyn FnMut(usize, bool, bool, &mut String) -> Option<Style> + 'doc>;
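The count_digits change above swaps the iterator-based digit count for usize::checked_ilog10 (stable since Rust 1.67). A standalone sketch checking that the two formulations agree, including for n == 0:

// Standalone sketch: both digit-count formulations agree for all inputs,
// including n == 0, where checked_ilog10 returns None and we fall back to 0.
fn count_digits_iter(n: usize) -> usize {
    std::iter::successors(Some(n), |&n| (n >= 10).then_some(n / 10)).count()
}

fn count_digits_ilog(n: usize) -> usize {
    (usize::checked_ilog10(n).unwrap_or(0) + 1) as usize
}

fn main() {
    for n in [0usize, 9, 10, 99, 100, 12345, usize::MAX] {
        assert_eq!(count_digits_iter(n), count_digits_ilog(n));
    }
}
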
@@ -369,9 +369,7 @@ impl Editor {
{
    Ok(process) => process,
    Err(err) => {
        // TODO replace the pretty print {:?} with a regular format {}
        // when the MSRV is raised to 1.60.0
        self.set_error(format!("Error starting external terminal: {:?}", err));
        self.set_error(format!("Error starting external terminal: {}", err));
        return true;
    }
};
@@ -325,7 +325,7 @@ impl std::str::FromStr for KeyEvent {
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut tokens: Vec<_> = s.split('-').collect();
|
||||
let code = match tokens.pop().ok_or_else(|| anyhow!("Missing key code"))? {
|
||||
let mut code = match tokens.pop().ok_or_else(|| anyhow!("Missing key code"))? {
|
||||
keys::BACKSPACE => KeyCode::Backspace,
|
||||
keys::ENTER => KeyCode::Enter,
|
||||
keys::LEFT => KeyCode::Left,
|
||||
@@ -405,6 +405,18 @@ impl std::str::FromStr for KeyEvent {
|
||||
modifiers.insert(flag);
|
||||
}
|
||||
|
||||
// Normalize character keys so that characters like C-S-r and C-R
|
||||
// are represented by equal KeyEvents.
|
||||
match code {
|
||||
KeyCode::Char(ch)
|
||||
if ch.is_ascii_lowercase() && modifiers.contains(KeyModifiers::SHIFT) =>
|
||||
{
|
||||
code = KeyCode::Char(ch.to_ascii_uppercase());
|
||||
modifiers.remove(KeyModifiers::SHIFT);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
Ok(KeyEvent { code, modifiers })
|
||||
}
|
||||
}
|
||||
@@ -684,6 +696,19 @@ mod test {
|
||||
modifiers: KeyModifiers::ALT | KeyModifiers::CONTROL
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
str::parse::<KeyEvent>("C-S-r").unwrap(),
|
||||
str::parse::<KeyEvent>("C-R").unwrap(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
str::parse::<KeyEvent>("S-w").unwrap(),
|
||||
KeyEvent {
|
||||
code: KeyCode::Char('W'),
|
||||
modifiers: KeyModifiers::NONE
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
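The KeyEvent parsing change above folds Shift plus a lowercase character into the uppercase character so that bindings like C-S-r and C-R compare equal. A standalone sketch of that normalization rule over a simplified key type (not the helix-view KeyEvent itself):

// Standalone sketch of the normalization rule: a shifted lowercase character
// key is folded into its uppercase form so "C-S-r" and "C-R" compare equal.
#[derive(Debug, PartialEq, Eq)]
struct Key {
    ch: char,
    ctrl: bool,
    shift: bool,
}

fn normalize(mut key: Key) -> Key {
    if key.ch.is_ascii_lowercase() && key.shift {
        key.ch = key.ch.to_ascii_uppercase();
        key.shift = false;
    }
    key
}

fn main() {
    let c_s_r = normalize(Key { ch: 'r', ctrl: true, shift: true });
    let c_upper_r = normalize(Key { ch: 'R', ctrl: true, shift: false });
    assert_eq!(c_s_r, c_upper_r);
}
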
@@ -75,8 +75,8 @@ impl Registers {
|
||||
self.clipboard_provider.as_ref(),
|
||||
self.inner.get(&name),
|
||||
match name {
|
||||
'*' => ClipboardType::Clipboard,
|
||||
'+' => ClipboardType::Selection,
|
||||
'+' => ClipboardType::Clipboard,
|
||||
'*' => ClipboardType::Selection,
|
||||
_ => unreachable!(),
|
||||
},
|
||||
)),
|
||||
@@ -95,8 +95,8 @@ impl Registers {
|
||||
self.clipboard_provider.set_contents(
|
||||
values.join(NATIVE_LINE_ENDING.as_str()),
|
||||
match name {
|
||||
'*' => ClipboardType::Clipboard,
|
||||
'+' => ClipboardType::Selection,
|
||||
'+' => ClipboardType::Clipboard,
|
||||
'*' => ClipboardType::Selection,
|
||||
_ => unreachable!(),
|
||||
},
|
||||
)?;
|
||||
@@ -118,8 +118,8 @@ impl Registers {
|
||||
'#' | '.' | '%' => Err(anyhow::anyhow!("Register {name} does not support pushing")),
|
||||
'*' | '+' => {
|
||||
let clipboard_type = match name {
|
||||
'*' => ClipboardType::Clipboard,
|
||||
'+' => ClipboardType::Selection,
|
||||
'+' => ClipboardType::Clipboard,
|
||||
'*' => ClipboardType::Selection,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let contents = self.clipboard_provider.get_contents(clipboard_type)?;
|
||||
@@ -172,8 +172,8 @@ impl Registers {
|
||||
('#', "<selection indices>"),
|
||||
('.', "<selection contents>"),
|
||||
('%', "<document path>"),
|
||||
('*', "<system clipboard>"),
|
||||
('+', "<primary clipboard>"),
|
||||
('+', "<system clipboard>"),
|
||||
('*', "<primary clipboard>"),
|
||||
]
|
||||
.iter()
|
||||
.copied(),
|
||||
@@ -190,8 +190,8 @@ impl Registers {
|
||||
match name {
|
||||
'*' | '+' => {
|
||||
self.clear_clipboard(match name {
|
||||
'*' => ClipboardType::Clipboard,
|
||||
'+' => ClipboardType::Selection,
|
||||
'+' => ClipboardType::Clipboard,
|
||||
'*' => ClipboardType::Selection,
|
||||
_ => unreachable!(),
|
||||
});
|
||||
self.inner.remove(&name);
|
||||
|
347 languages.toml (diff suppressed because it is too large)
2 logo.svg
@@ -1 +1 @@
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;" viewBox="663.38 37.57 575.35 903.75"> <g transform="matrix(1,0,0,1,-31352.7,-1817.25)"> <g transform="matrix(1,0,0,1,31062.7,-20.8972)"> <g transform="matrix(1,0,0,1,-130.173,0.00185558)"> <path d="M1083.58,1875.72L1635.06,2194.12C1649.8,2202.63 1658.88,2218.37 1658.88,2235.39C1658.88,2264.98 1658.88,2311.74 1658.88,2341.33C1658.88,2349.84 1656.61,2358.03 1652.5,2365.16C1652.5,2365.16 1214.7,2112.4 1107.2,2050.33C1092.58,2041.89 1083.58,2026.29 1083.58,2009.41C1083.58,1963.5 1083.58,1875.72 1083.58,1875.72Z" style="fill:#706bc8;"></path> </g> <g transform="matrix(1,0,0,1,-130.173,0.00185558)"> <path d="M1635.26,2604.84C1649.88,2613.28 1658.88,2628.87 1658.88,2645.75C1658.88,2691.67 1658.88,2779.44 1658.88,2779.44L1107.41,2461.05C1092.66,2452.53 1083.58,2436.8 1083.58,2419.78C1083.58,2390.19 1083.58,2343.42 1083.58,2313.84C1083.58,2305.32 1085.85,2297.13 1089.96,2290.01C1089.96,2290.01 1527.76,2542.77 1635.26,2604.84Z" style="fill:#55c5e4;"></path> </g> <g transform="matrix(1,0,0,1,216.062,984.098)"> <path d="M790.407,1432.56C785.214,1435.55 780.717,1439.9 777.509,1445.46C767.862,1462.16 773.473,1483.76 790.004,1493.59L789.998,1493.59L761.173,1476.95C746.427,1468.44 737.344,1452.71 737.344,1435.68C737.344,1406.09 737.344,1359.33 737.344,1329.74C737.344,1312.71 746.427,1296.98 761.173,1288.47L1259.59,1000.74L1259.83,1000.6C1264.92,997.617 1269.33,993.314 1272.48,987.844C1282.13,971.136 1276.52,949.544 1259.99,939.707L1260,939.707L1288.82,956.349C1303.57,964.862 1312.65,980.595 1312.65,997.622C1312.65,1027.21 1312.65,1073.97 1312.65,1103.56C1312.65,1120.59 1303.57,1136.32 1288.82,1144.83L1259.19,1161.94L1259.59,1161.68L790.407,1432.56Z" style="fill:#84ddea;"></path> </g> <g transform="matrix(1,0,0,1,216.062,984.098)"> <path d="M790.407,1686.24C785.214,1689.23 780.717,1693.58 777.509,1699.13C767.862,1715.84 773.473,1737.43 790.004,1747.27L789.998,1747.27L761.173,1730.63C746.427,1722.12 737.344,1706.38 737.344,1689.36C737.344,1659.77 737.344,1613.01 737.344,1583.42C737.344,1566.39 746.427,1550.66 761.173,1542.15L1259.59,1254.42L1259.83,1254.28C1264.92,1251.29 1269.33,1246.99 1272.48,1241.52C1282.13,1224.81 1276.52,1203.22 1259.99,1193.38L1260,1193.38L1288.82,1210.03C1303.57,1218.54 1312.65,1234.27 1312.65,1251.3C1312.65,1280.89 1312.65,1327.65 1312.65,1357.24C1312.65,1374.26 1303.57,1390 1288.82,1398.51L1259.19,1415.61L1259.59,1415.36L790.407,1686.24Z" style="fill:#997bc8;"></path></g></g></g> </svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2" viewBox="663.38 37.57 575.35 903.75"><path d="m1083.58 1875.72 551.48 318.4a47.66 47.66 0 0 1 23.82 41.27v105.94c0 8.51-2.27 16.7-6.38 23.83 0 0-437.8-252.76-545.3-314.83a47.245 47.245 0 0 1-23.62-40.92z" style="fill:#706bc8" transform="translate(-420.173 -1838.145)"/><path d="M1635.26 2604.84a47.228 47.228 0 0 1 23.62 40.91v133.69l-551.47-318.39a47.66 47.66 0 0 1-23.83-41.27v-105.94c0-8.52 2.27-16.71 6.38-23.83 0 0 437.8 252.76 545.3 314.83" style="fill:#55c5e4" transform="translate(-420.173 -1838.145)"/><path d="M790.407 1432.56a35.033 35.033 0 0 0-12.898 12.9c-9.647 16.7-4.036 38.3 12.495 48.13h-.006l-28.825-16.64a47.644 47.644 0 0 1-23.829-41.27v-105.94c0-17.03 9.083-32.76 23.829-41.27l498.417-287.73.24-.14a34.962 34.962 0 0 0 12.65-12.756c9.65-16.708 4.04-38.3-12.49-48.137h.01l28.82 16.642a47.648 47.648 0 0 1 23.83 41.273v105.938c0 17.03-9.08 32.76-23.83 41.27l-29.63 17.11.4-.26z" style="fill:#84ddea" transform="translate(-73.938 -854.05)"/><path d="M790.407 1686.24a35.08 35.08 0 0 0-12.898 12.89c-9.647 16.71-4.036 38.3 12.495 48.14h-.006l-28.825-16.64a47.656 47.656 0 0 1-23.829-41.27v-105.94c0-17.03 9.083-32.76 23.829-41.27l498.417-287.73.24-.14c5.09-2.99 9.5-7.29 12.65-12.76 9.65-16.71 4.04-38.3-12.49-48.14h.01l28.82 16.65a47.636 47.636 0 0 1 23.83 41.27v105.94c0 17.02-9.08 32.76-23.83 41.27l-29.63 17.1.4-.25z" style="fill:#997bc8" transform="translate(-73.938 -854.05)"/></svg>
|
Before: 2.8 KiB, After: 1.5 KiB
116 logo_dark.svg
@@ -1,115 +1 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.1"
|
||||
xml:space="preserve"
|
||||
style="clip-rule:evenodd;fill-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2"
|
||||
viewBox="663.38 37.57 2087.006 903.71997"
|
||||
id="svg22"
|
||||
sodipodi:docname="logo_dark.svg"
|
||||
width="2087.0059"
|
||||
height="903.71997"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"><defs
|
||||
id="defs26"><rect
|
||||
x="713.02588"
|
||||
y="-304.32538"
|
||||
width="3615.2336"
|
||||
height="1864.7544"
|
||||
id="rect14663" /><rect
|
||||
x="972.073"
|
||||
y="151.15895"
|
||||
width="2140.9646"
|
||||
height="684.86273"
|
||||
id="rect447" /><rect
|
||||
x="897.0401"
|
||||
y="217.45384"
|
||||
width="837.72321"
|
||||
height="631.59924"
|
||||
id="rect435" /><rect
|
||||
x="825.67834"
|
||||
y="157.61452"
|
||||
width="1496.2448"
|
||||
height="861.45544"
|
||||
id="rect429" /><rect
|
||||
x="798.3819"
|
||||
y="-42.157242"
|
||||
width="2236.0837"
|
||||
height="945.90723"
|
||||
id="rect315" /><rect
|
||||
x="661.30237"
|
||||
y="48.087799"
|
||||
width="769.15619"
|
||||
height="828.46844"
|
||||
id="rect309" /></defs><sodipodi:namedview
|
||||
id="namedview24"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.28409405"
|
||||
inkscape:cx="1904.2989"
|
||||
inkscape:cy="633.59299"
|
||||
inkscape:window-width="1908"
|
||||
inkscape:window-height="2075"
|
||||
inkscape:window-x="26"
|
||||
inkscape:window-y="23"
|
||||
inkscape:window-maximized="0"
|
||||
inkscape:current-layer="svg22" /> <g
|
||||
transform="translate(-31352.726,-1817.2547)"
|
||||
id="g20"> <g
|
||||
transform="translate(31062.7,-20.8972)"
|
||||
id="g18"> <g
|
||||
transform="translate(-130.173,0.00185558)"
|
||||
id="g4"> <path
|
||||
d="m 1083.58,1875.72 551.48,318.4 c 14.74,8.51 23.82,24.25 23.82,41.27 0,29.59 0,76.35 0,105.94 0,8.51 -2.27,16.7 -6.38,23.83 0,0 -437.8,-252.76 -545.3,-314.83 -14.62,-8.44 -23.62,-24.04 -23.62,-40.92 0,-45.91 0,-133.69 0,-133.69 z"
|
||||
style="fill:#706bc8"
|
||||
id="path2" /> </g> <g
|
||||
transform="translate(-130.173,0.00185558)"
|
||||
id="g8"> <path
|
||||
d="m 1635.26,2604.84 c 14.62,8.44 23.62,24.03 23.62,40.91 0,45.92 0,133.69 0,133.69 l -551.47,-318.39 c -14.75,-8.52 -23.83,-24.25 -23.83,-41.27 0,-29.59 0,-76.36 0,-105.94 0,-8.52 2.27,-16.71 6.38,-23.83 0,0 437.8,252.76 545.3,314.83 z"
|
||||
style="fill:#55c5e4"
|
||||
id="path6" /> </g> <g
|
||||
transform="translate(216.062,984.098)"
|
||||
id="g12"> <path
|
||||
d="m 790.407,1432.56 c -5.193,2.99 -9.69,7.34 -12.898,12.9 -9.647,16.7 -4.036,38.3 12.495,48.13 h -0.006 l -28.825,-16.64 c -14.746,-8.51 -23.829,-24.24 -23.829,-41.27 0,-29.59 0,-76.35 0,-105.94 0,-17.03 9.083,-32.76 23.829,-41.27 l 498.417,-287.73 0.24,-0.14 c 5.09,-2.983 9.5,-7.286 12.65,-12.756 9.65,-16.708 4.04,-38.3 -12.49,-48.137 h 0.01 l 28.82,16.642 c 14.75,8.513 23.83,24.246 23.83,41.273 0,29.588 0,76.348 0,105.938 0,17.03 -9.08,32.76 -23.83,41.27 l -29.63,17.11 0.4,-0.26 z"
|
||||
style="fill:#84ddea"
|
||||
id="path10" /> </g> <g
|
||||
transform="translate(216.062,984.098)"
|
||||
id="g16"> <path
|
||||
d="m 790.407,1686.24 c -5.193,2.99 -9.69,7.34 -12.898,12.89 -9.647,16.71 -4.036,38.3 12.495,48.14 h -0.006 l -28.825,-16.64 c -14.746,-8.51 -23.829,-24.25 -23.829,-41.27 0,-29.59 0,-76.35 0,-105.94 0,-17.03 9.083,-32.76 23.829,-41.27 l 498.417,-287.73 0.24,-0.14 c 5.09,-2.99 9.5,-7.29 12.65,-12.76 9.65,-16.71 4.04,-38.3 -12.49,-48.14 h 0.01 l 28.82,16.65 c 14.75,8.51 23.83,24.24 23.83,41.27 0,29.59 0,76.35 0,105.94 0,17.02 -9.08,32.76 -23.83,41.27 l -29.63,17.1 0.4,-0.25 z"
|
||||
style="fill:#997bc8"
|
||||
id="path14" /></g></g></g> <text
|
||||
xml:space="preserve"
|
||||
transform="translate(663.354,37.565425)"
|
||||
id="text307"
|
||||
style="font-size:4px;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';white-space:pre;shape-inside:url(#rect309);display:inline;fill:#006400;stroke:#006400;stroke-width:2.66667" /><g
|
||||
aria-label="Helix"
|
||||
transform="matrix(1.3113898,0,0,1.3113898,142.0244,48.21073)"
|
||||
id="text445"
|
||||
style="font-size:4px;-inkscape-font-specification:'sans-serif, Normal';white-space:pre;shape-inside:url(#rect447);display:inline;fill:#f0f6fc;stroke:#f0f6fc;stroke-width:2.66687;stroke-opacity:1;fill-opacity:1"><path
|
||||
d="m 1242.0723,515.10828 h -60.4 v -123.2 h -113.2 v 123.2 h -60.4 v -285.6 h 60.4 v 112 h 113.2 v -112 h 60.4 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700;stroke:#f0f6fc;stroke-opacity:1;fill:#f0f6fc;fill-opacity:1"
|
||||
id="path14794" /><path
|
||||
d="m 1399.272,292.70828 q 30.4,0 52,11.6 22,11.6 34,33.6 12,22 12,54 v 28.8 h -140.8 q 0.8,25.2 14.8,39.6 14.4,14.4 39.6,14.4 21.2,0 38.4,-4 17.2,-4.4 35.6,-13.2 v 46 q -16,8 -34,11.6 -17.6,4 -42.8,4 -32.8,0 -58,-12 -25.2,-12.4 -39.6,-37.2 -14.4,-24.8 -14.4,-62.4 0,-38.4 12.8,-63.6 13.2,-25.6 36.4,-38.4 23.2,-12.8 54,-12.8 z m 0.4,42.4 q -17.2,0 -28.8,11.2 -11.2,11.2 -13.2,34.8 h 83.6 q 0,-13.2 -4.8,-23.6 -4.4,-10.4 -13.6,-16.4 -9.2,-6 -23.2,-6 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700;stroke:#f0f6fc;stroke-opacity:1;fill:#f0f6fc;fill-opacity:1"
|
||||
id="path14796" /><path
|
||||
d="m 1605.2719,515.10828 h -59.6 v -304 h 59.6 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700;stroke:#f0f6fc;stroke-opacity:1;fill:#f0f6fc;fill-opacity:1"
|
||||
id="path14798" /><path
|
||||
d="m 1727.272,296.70828 v 218.4 h -59.6 v -218.4 z m -29.6,-85.6 q 13.2,0 22.8,6.4 9.6,6 9.6,22.8 0,16.4 -9.6,22.8 -9.6,6.4 -22.8,6.4 -13.6,0 -23.2,-6.4 -9.2,-6.4 -9.2,-22.8 0,-16.8 9.2,-22.8 9.6,-6.4 23.2,-6.4 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700;stroke:#f0f6fc;stroke-opacity:1;fill:#f0f6fc;fill-opacity:1"
|
||||
id="path14800" /><path
|
||||
d="m 1834.4721,403.50828 -70.4,-106.8 h 67.6 l 42.4,69.6 42.8,-69.6 h 67.6 l -71.2,106.8 74.4,111.6 h -67.6 l -46,-74.8 -46,74.8 h -67.6 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700;stroke:#f0f6fc;stroke-opacity:1;fill:#f0f6fc;fill-opacity:1"
|
||||
id="path14802" /></g><text
|
||||
xml:space="preserve"
|
||||
transform="translate(663.38,37.570044)"
|
||||
id="text14661"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:997.723px;font-family:sans-serif;-inkscape-font-specification:'sans-serif, @wght=700';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;font-variation-settings:'wght' 700;white-space:pre;shape-inside:url(#rect14663);display:inline;fill:#2a292f;fill-opacity:1;stroke:#2a292f;stroke-width:6.652;stroke-dasharray:none;stroke-opacity:1" /></svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" width="2087.006" height="903.72" style="clip-rule:evenodd;fill-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2" viewBox="663.38 37.57 2087.006 903.72"><defs><path id="c" d="M713.026-304.325H4328.26v1864.754H713.026z"/><path id="b" d="M972.073 151.159h2140.965v684.863H972.073z"/><path id="a" d="M661.302 48.088h769.156v828.468H661.302z"/></defs><path d="m1083.58 1875.72 551.48 318.4a47.66 47.66 0 0 1 23.82 41.27v105.94c0 8.51-2.27 16.7-6.38 23.83 0 0-437.8-252.76-545.3-314.83a47.245 47.245 0 0 1-23.62-40.92v-133.69z" style="fill:#706bc8" transform="translate(-420.199 -1838.15)"/><path d="M1635.26 2604.84a47.228 47.228 0 0 1 23.62 40.91v133.69l-551.47-318.39a47.66 47.66 0 0 1-23.83-41.27v-105.94c0-8.52 2.27-16.71 6.38-23.83 0 0 437.8 252.76 545.3 314.83" style="fill:#55c5e4" transform="translate(-420.199 -1838.15)"/><path d="M790.407 1432.56a35.033 35.033 0 0 0-12.898 12.9c-9.647 16.7-4.036 38.3 12.495 48.13h-.006l-28.825-16.64a47.644 47.644 0 0 1-23.829-41.27v-105.94c0-17.03 9.083-32.76 23.829-41.27l498.417-287.73.24-.14a34.962 34.962 0 0 0 12.65-12.756c9.65-16.708 4.04-38.3-12.49-48.137h.01l28.82 16.642a47.648 47.648 0 0 1 23.83 41.273v105.938c0 17.03-9.08 32.76-23.83 41.27l-29.63 17.11.4-.26z" style="fill:#84ddea" transform="translate(-73.964 -854.054)"/><path d="M790.407 1686.24a35.08 35.08 0 0 0-12.898 12.89c-9.647 16.71-4.036 38.3 12.495 48.14h-.006l-28.825-16.64a47.656 47.656 0 0 1-23.829-41.27v-105.94c0-17.03 9.083-32.76 23.829-41.27l498.417-287.73.24-.14c5.09-2.99 9.5-7.29 12.65-12.76 9.65-16.71 4.04-38.3-12.49-48.14h.01l28.82 16.65a47.636 47.636 0 0 1 23.83 41.27v105.94c0 17.02-9.08 32.76-23.83 41.27l-29.63 17.1.4-.25z" style="fill:#997bc8" transform="translate(-73.964 -854.054)"/><g aria-label="Helix" style="font-size:4px;-inkscape-font-specification:"sans-serif, Normal";white-space:pre;shape-inside:url(#b);display:inline;fill:#f0f6fc;stroke:#f0f6fc;stroke-width:2.66687;stroke-opacity:1;fill-opacity:1" transform="matrix(1.31139 0 0 1.31139 142.024 48.21)"><path d="M1242.072 515.108h-60.4v-123.2h-113.2v123.2h-60.4v-285.6h60.4v112h113.2v-112h60.4zM1399.272 292.708q30.4 0 52 11.6 22 11.6 34 33.6t12 54v28.8h-140.8q.8 25.2 14.8 39.6 14.4 14.4 39.6 14.4 21.2 0 38.4-4 17.2-4.4 35.6-13.2v46q-16 8-34 11.6-17.6 4-42.8 4-32.8 0-58-12-25.2-12.4-39.6-37.2-14.4-24.8-14.4-62.4 0-38.4 12.8-63.6 13.2-25.6 36.4-38.4 23.2-12.8 54-12.8zm.4 42.4q-17.2 0-28.8 11.2-11.2 11.2-13.2 34.8h83.6q0-13.2-4.8-23.6-4.4-10.4-13.6-16.4-9.2-6-23.2-6zM1605.272 515.108h-59.6v-304h59.6zM1727.272 296.708v218.4h-59.6v-218.4zm-29.6-85.6q13.2 0 22.8 6.4 9.6 6 9.6 22.8 0 16.4-9.6 22.8-9.6 6.4-22.8 6.4-13.6 0-23.2-6.4-9.2-6.4-9.2-22.8 0-16.8 9.2-22.8 9.6-6.4 23.2-6.4zM1834.472 403.508l-70.4-106.8h67.6l42.4 69.6 42.8-69.6h67.6l-71.2 106.8 74.4 111.6h-67.6l-46-74.8-46 74.8h-67.6z" style="font-size:400px;-inkscape-font-specification:"sans-serif, @wght=700";font-variation-settings:"wght"700;stroke:#f0f6fc;stroke-opacity:1;fill:#f0f6fc;fill-opacity:1"/></g></svg>
|
Before: 7.1 KiB, After: 3.0 KiB
116 logo_light.svg
@@ -1,115 +1 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.1"
|
||||
xml:space="preserve"
|
||||
style="clip-rule:evenodd;fill-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2"
|
||||
viewBox="663.38 37.57 2087.006 903.71997"
|
||||
id="svg22"
|
||||
sodipodi:docname="logo_light.svg"
|
||||
width="2087.0059"
|
||||
height="903.71997"
|
||||
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"><defs
|
||||
id="defs26"><rect
|
||||
x="713.02588"
|
||||
y="-304.32538"
|
||||
width="3615.2336"
|
||||
height="1864.7544"
|
||||
id="rect14663" /><rect
|
||||
x="972.073"
|
||||
y="151.15895"
|
||||
width="2140.9646"
|
||||
height="684.86273"
|
||||
id="rect447" /><rect
|
||||
x="897.0401"
|
||||
y="217.45384"
|
||||
width="837.72321"
|
||||
height="631.59924"
|
||||
id="rect435" /><rect
|
||||
x="825.67834"
|
||||
y="157.61452"
|
||||
width="1496.2448"
|
||||
height="861.45544"
|
||||
id="rect429" /><rect
|
||||
x="798.3819"
|
||||
y="-42.157242"
|
||||
width="2236.0837"
|
||||
height="945.90723"
|
||||
id="rect315" /><rect
|
||||
x="661.30237"
|
||||
y="48.087799"
|
||||
width="769.15619"
|
||||
height="828.46844"
|
||||
id="rect309" /></defs><sodipodi:namedview
|
||||
id="namedview24"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#000000"
|
||||
borderopacity="0.25"
|
||||
inkscape:showpageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:deskcolor="#d1d1d1"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.28409405"
|
||||
inkscape:cx="1904.2989"
|
||||
inkscape:cy="633.59299"
|
||||
inkscape:window-width="1908"
|
||||
inkscape:window-height="2075"
|
||||
inkscape:window-x="26"
|
||||
inkscape:window-y="23"
|
||||
inkscape:window-maximized="0"
|
||||
inkscape:current-layer="svg22" /> <g
|
||||
transform="translate(-31352.726,-1817.2547)"
|
||||
id="g20"> <g
|
||||
transform="translate(31062.7,-20.8972)"
|
||||
id="g18"> <g
|
||||
transform="translate(-130.173,0.00185558)"
|
||||
id="g4"> <path
|
||||
d="m 1083.58,1875.72 551.48,318.4 c 14.74,8.51 23.82,24.25 23.82,41.27 0,29.59 0,76.35 0,105.94 0,8.51 -2.27,16.7 -6.38,23.83 0,0 -437.8,-252.76 -545.3,-314.83 -14.62,-8.44 -23.62,-24.04 -23.62,-40.92 0,-45.91 0,-133.69 0,-133.69 z"
|
||||
style="fill:#706bc8"
|
||||
id="path2" /> </g> <g
|
||||
transform="translate(-130.173,0.00185558)"
|
||||
id="g8"> <path
|
||||
d="m 1635.26,2604.84 c 14.62,8.44 23.62,24.03 23.62,40.91 0,45.92 0,133.69 0,133.69 l -551.47,-318.39 c -14.75,-8.52 -23.83,-24.25 -23.83,-41.27 0,-29.59 0,-76.36 0,-105.94 0,-8.52 2.27,-16.71 6.38,-23.83 0,0 437.8,252.76 545.3,314.83 z"
|
||||
style="fill:#55c5e4"
|
||||
id="path6" /> </g> <g
|
||||
transform="translate(216.062,984.098)"
|
||||
id="g12"> <path
|
||||
d="m 790.407,1432.56 c -5.193,2.99 -9.69,7.34 -12.898,12.9 -9.647,16.7 -4.036,38.3 12.495,48.13 h -0.006 l -28.825,-16.64 c -14.746,-8.51 -23.829,-24.24 -23.829,-41.27 0,-29.59 0,-76.35 0,-105.94 0,-17.03 9.083,-32.76 23.829,-41.27 l 498.417,-287.73 0.24,-0.14 c 5.09,-2.983 9.5,-7.286 12.65,-12.756 9.65,-16.708 4.04,-38.3 -12.49,-48.137 h 0.01 l 28.82,16.642 c 14.75,8.513 23.83,24.246 23.83,41.273 0,29.588 0,76.348 0,105.938 0,17.03 -9.08,32.76 -23.83,41.27 l -29.63,17.11 0.4,-0.26 z"
|
||||
style="fill:#84ddea"
|
||||
id="path10" /> </g> <g
|
||||
transform="translate(216.062,984.098)"
|
||||
id="g16"> <path
|
||||
d="m 790.407,1686.24 c -5.193,2.99 -9.69,7.34 -12.898,12.89 -9.647,16.71 -4.036,38.3 12.495,48.14 h -0.006 l -28.825,-16.64 c -14.746,-8.51 -23.829,-24.25 -23.829,-41.27 0,-29.59 0,-76.35 0,-105.94 0,-17.03 9.083,-32.76 23.829,-41.27 l 498.417,-287.73 0.24,-0.14 c 5.09,-2.99 9.5,-7.29 12.65,-12.76 9.65,-16.71 4.04,-38.3 -12.49,-48.14 h 0.01 l 28.82,16.65 c 14.75,8.51 23.83,24.24 23.83,41.27 0,29.59 0,76.35 0,105.94 0,17.02 -9.08,32.76 -23.83,41.27 l -29.63,17.1 0.4,-0.25 z"
|
||||
style="fill:#997bc8"
|
||||
id="path14" /></g></g></g> <text
|
||||
xml:space="preserve"
|
||||
transform="translate(663.354,37.565425)"
|
||||
id="text307"
|
||||
style="font-size:4px;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';white-space:pre;shape-inside:url(#rect309);display:inline;fill:#006400;stroke:#006400;stroke-width:2.66667" /><g
|
||||
aria-label="Helix"
|
||||
transform="matrix(1.3113898,0,0,1.3113898,142.0244,48.21073)"
|
||||
id="text445"
|
||||
style="font-size:4px;-inkscape-font-specification:'sans-serif, Normal';white-space:pre;shape-inside:url(#rect447);display:inline;fill:#2a292f;stroke:#2a292f;stroke-width:2.66687"><path
|
||||
d="m 1242.0723,515.10828 h -60.4 v -123.2 h -113.2 v 123.2 h -60.4 v -285.6 h 60.4 v 112 h 113.2 v -112 h 60.4 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700"
|
||||
id="path14794" /><path
|
||||
d="m 1399.272,292.70828 q 30.4,0 52,11.6 22,11.6 34,33.6 12,22 12,54 v 28.8 h -140.8 q 0.8,25.2 14.8,39.6 14.4,14.4 39.6,14.4 21.2,0 38.4,-4 17.2,-4.4 35.6,-13.2 v 46 q -16,8 -34,11.6 -17.6,4 -42.8,4 -32.8,0 -58,-12 -25.2,-12.4 -39.6,-37.2 -14.4,-24.8 -14.4,-62.4 0,-38.4 12.8,-63.6 13.2,-25.6 36.4,-38.4 23.2,-12.8 54,-12.8 z m 0.4,42.4 q -17.2,0 -28.8,11.2 -11.2,11.2 -13.2,34.8 h 83.6 q 0,-13.2 -4.8,-23.6 -4.4,-10.4 -13.6,-16.4 -9.2,-6 -23.2,-6 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700"
|
||||
id="path14796" /><path
|
||||
d="m 1605.2719,515.10828 h -59.6 v -304 h 59.6 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700"
|
||||
id="path14798" /><path
|
||||
d="m 1727.272,296.70828 v 218.4 h -59.6 v -218.4 z m -29.6,-85.6 q 13.2,0 22.8,6.4 9.6,6 9.6,22.8 0,16.4 -9.6,22.8 -9.6,6.4 -22.8,6.4 -13.6,0 -23.2,-6.4 -9.2,-6.4 -9.2,-22.8 0,-16.8 9.2,-22.8 9.6,-6.4 23.2,-6.4 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700"
|
||||
id="path14800" /><path
|
||||
d="m 1834.4721,403.50828 -70.4,-106.8 h 67.6 l 42.4,69.6 42.8,-69.6 h 67.6 l -71.2,106.8 74.4,111.6 h -67.6 l -46,-74.8 -46,74.8 h -67.6 z"
|
||||
style="font-size:400px;-inkscape-font-specification:'sans-serif, @wght=700';font-variation-settings:'wght' 700"
|
||||
id="path14802" /></g><text
|
||||
xml:space="preserve"
|
||||
transform="translate(663.38,37.570044)"
|
||||
id="text14661"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:997.723px;font-family:sans-serif;-inkscape-font-specification:'sans-serif, @wght=700';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-east-asian:normal;font-variation-settings:'wght' 700;white-space:pre;shape-inside:url(#rect14663);display:inline;fill:#2a292f;fill-opacity:1;stroke:#2a292f;stroke-width:6.652;stroke-dasharray:none;stroke-opacity:1" /></svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" width="2087.006" height="903.72" style="clip-rule:evenodd;fill-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2" viewBox="663.38 37.57 2087.006 903.72"><defs><path id="c" d="M713.026-304.325H4328.26v1864.754H713.026z"/><path id="b" d="M972.073 151.159h2140.965v684.863H972.073z"/><path id="a" d="M661.302 48.088h769.156v828.468H661.302z"/></defs><path d="m1083.58 1875.72 551.48 318.4a47.66 47.66 0 0 1 23.82 41.27v105.94c0 8.51-2.27 16.7-6.38 23.83 0 0-437.8-252.76-545.3-314.83a47.245 47.245 0 0 1-23.62-40.92v-133.69z" style="fill:#706bc8" transform="translate(-420.199 -1838.15)"/><path d="M1635.26 2604.84a47.228 47.228 0 0 1 23.62 40.91v133.69l-551.47-318.39a47.66 47.66 0 0 1-23.83-41.27v-105.94c0-8.52 2.27-16.71 6.38-23.83 0 0 437.8 252.76 545.3 314.83" style="fill:#55c5e4" transform="translate(-420.199 -1838.15)"/><path d="M790.407 1432.56a35.033 35.033 0 0 0-12.898 12.9c-9.647 16.7-4.036 38.3 12.495 48.13h-.006l-28.825-16.64a47.644 47.644 0 0 1-23.829-41.27v-105.94c0-17.03 9.083-32.76 23.829-41.27l498.417-287.73.24-.14a34.962 34.962 0 0 0 12.65-12.756c9.65-16.708 4.04-38.3-12.49-48.137h.01l28.82 16.642a47.648 47.648 0 0 1 23.83 41.273v105.938c0 17.03-9.08 32.76-23.83 41.27l-29.63 17.11.4-.26z" style="fill:#84ddea" transform="translate(-73.964 -854.054)"/><path d="M790.407 1686.24a35.08 35.08 0 0 0-12.898 12.89c-9.647 16.71-4.036 38.3 12.495 48.14h-.006l-28.825-16.64a47.656 47.656 0 0 1-23.829-41.27v-105.94c0-17.03 9.083-32.76 23.829-41.27l498.417-287.73.24-.14c5.09-2.99 9.5-7.29 12.65-12.76 9.65-16.71 4.04-38.3-12.49-48.14h.01l28.82 16.65a47.636 47.636 0 0 1 23.83 41.27v105.94c0 17.02-9.08 32.76-23.83 41.27l-29.63 17.1.4-.25z" style="fill:#997bc8" transform="translate(-73.964 -854.054)"/><g aria-label="Helix" style="font-size:4px;-inkscape-font-specification:"sans-serif, Normal";white-space:pre;shape-inside:url(#b);display:inline;fill:#2a292f;stroke:#2a292f;stroke-width:2.66687" transform="matrix(1.31139 0 0 1.31139 142.024 48.21)"><path d="M1242.072 515.108h-60.4v-123.2h-113.2v123.2h-60.4v-285.6h60.4v112h113.2v-112h60.4zM1399.272 292.708q30.4 0 52 11.6 22 11.6 34 33.6t12 54v28.8h-140.8q.8 25.2 14.8 39.6 14.4 14.4 39.6 14.4 21.2 0 38.4-4 17.2-4.4 35.6-13.2v46q-16 8-34 11.6-17.6 4-42.8 4-32.8 0-58-12-25.2-12.4-39.6-37.2-14.4-24.8-14.4-62.4 0-38.4 12.8-63.6 13.2-25.6 36.4-38.4 23.2-12.8 54-12.8zm.4 42.4q-17.2 0-28.8 11.2-11.2 11.2-13.2 34.8h83.6q0-13.2-4.8-23.6-4.4-10.4-13.6-16.4-9.2-6-23.2-6zM1605.272 515.108h-59.6v-304h59.6zM1727.272 296.708v218.4h-59.6v-218.4zm-29.6-85.6q13.2 0 22.8 6.4 9.6 6 9.6 22.8 0 16.4-9.6 22.8-9.6 6.4-22.8 6.4-13.6 0-23.2-6.4-9.2-6.4-9.2-22.8 0-16.8 9.2-22.8 9.6-6.4 23.2-6.4zM1834.472 403.508l-70.4-106.8h67.6l42.4 69.6 42.8-69.6h67.6l-71.2 106.8 74.4 111.6h-67.6l-46-74.8-46 74.8h-67.6z" style="font-size:400px;-inkscape-font-specification:"sans-serif, @wght=700";font-variation-settings:"wght"700"/></g></svg>
|
Before: 6.8 KiB, After: 2.9 KiB
@@ -39,11 +39,11 @@
  (identifier) @variable.parameter))

; (p?: t)
; (p?: t = 1) // Invalid but still posible to hihglight.
; (p?: t = 1) // Invalid but still possible to highlight.
(optional_parameter
  (identifier) @variable.parameter)

; (...p?: t) // Invalid but still posible to hihglight.
; (...p?: t) // Invalid but still possible to highlight.
(optional_parameter
  (rest_pattern
    (identifier) @variable.parameter))
@@ -59,7 +59,7 @@
  (pair_pattern
    value: (identifier) @variable.parameter)))

; ([ p ]?: t[]) // Invalid but still posible to hihglight.
; ([ p ]?: t[]) // Invalid but still possible to highlight.
(optional_parameter
  (array_pattern
    (identifier) @variable.parameter))
@@ -107,6 +107,8 @@
; Types
; -----

(type_parameter
  name: (type_identifier) @type.parameter)
(type_identifier) @type
(predefined_type) @type.builtin

@@ -1,6 +1,18 @@
; Scopes
;-------

[
  (type_alias_declaration)
  (class_declaration)
  (interface_declaration)
] @local.scope

; Definitions
;------------

(type_parameter
  name: (type_identifier) @local.definition)

; Javascript and Typescript Treesitter grammars deviate when defining the
; tree structure for parameters, so we need to address them in each specific
; language instead of ecma.
@@ -14,3 +26,8 @@
; (i?: t = 1) // Invalid but still posible to hihglight.
(optional_parameter
  (identifier) @local.definition)

; References
;-----------

(type_identifier) @local.reference
124 runtime/queries/agda/highlights.scm (new file)
@@ -0,0 +1,124 @@
|
||||
;; Punctuation
|
||||
[ "." ";" ":"] @punctuation.delimiter
|
||||
[ "(" ")" "{" "}" ] @punctuation.bracket
|
||||
|
||||
;; Constants
|
||||
(integer) @constant.numeric.integer
|
||||
; (float) @constant.numeric.float
|
||||
(literal) @string
|
||||
|
||||
;; Pragmas and comments
|
||||
(comment) @comment
|
||||
(pragma) @attribute
|
||||
(macro) @function.macro
|
||||
|
||||
;; Imports
|
||||
(module_name) @namespace
|
||||
(import_directive (id) @namespace)
|
||||
[(module) (import) (open)] @keyword.control.import
|
||||
|
||||
;; Types
|
||||
(typed_binding (expr) @type)
|
||||
(record (expr) @type)
|
||||
(data (expr) @type)
|
||||
(signature (expr) @type)
|
||||
(function (rhs (expr) @type))
|
||||
; todo: these are too general. ideally, any nested (atom)
|
||||
; https://github.com/tree-sitter/tree-sitter/issues/880
|
||||
|
||||
;; Variables
|
||||
(untyped_binding (atom) @variable)
|
||||
(typed_binding (atom) @variable)
|
||||
(field_name) @variable.other.member
|
||||
|
||||
;; Functions
|
||||
(function_name) @function
|
||||
;(function (lhs
|
||||
; . (atom) @function
|
||||
; (atom) @variable.parameter))
|
||||
; todo: currently fails to parse, upstream tree-sitter bug
|
||||
|
||||
;; Data
|
||||
[(data_name) (record_name)] @constructor
|
||||
((atom) @constant.builtin.boolean
|
||||
(#any-of? @constant.builtin.boolean "true" "false" "True" "False"))
|
||||
|
||||
"Set" @type.builtin
|
||||
|
||||
; postulate
|
||||
; type_signature
|
||||
; pattern
|
||||
; id
|
||||
; bid
|
||||
; typed_binding
|
||||
; primitive
|
||||
; private
|
||||
; record_signature
|
||||
; record_assignments
|
||||
; field_assignment
|
||||
; module_assignment
|
||||
; renaming
|
||||
; import_directive
|
||||
; lambda
|
||||
; let
|
||||
; instance
|
||||
; generalize
|
||||
; record
|
||||
; fields
|
||||
; syntax
|
||||
; hole_name
|
||||
; data_signature
|
||||
|
||||
;; Keywords
|
||||
[
|
||||
"where"
|
||||
"data"
|
||||
"rewrite"
|
||||
"postulate"
|
||||
"public"
|
||||
"private"
|
||||
"tactic"
|
||||
"Prop"
|
||||
"quote"
|
||||
"renaming"
|
||||
"in"
|
||||
"hiding"
|
||||
"constructor"
|
||||
"abstract"
|
||||
"let"
|
||||
"field"
|
||||
"mutual"
|
||||
"infix"
|
||||
"infixl"
|
||||
"infixr"
|
||||
"record"
|
||||
"overlap"
|
||||
"instance"
|
||||
"do"
|
||||
] @keyword
|
||||
|
||||
[
|
||||
"="
|
||||
] @operator
|
||||
|
||||
; = | -> : ? \ .. ... λ ∀ →
|
||||
; (_LAMBDA) (_FORALL) (_ARROW)
|
||||
; "coinductive"
|
||||
; "eta-equality"
|
||||
; "field"
|
||||
; "inductive"
|
||||
; "interleaved"
|
||||
; "macro"
|
||||
; "no-eta-equality"
|
||||
; "pattern"
|
||||
; "primitive"
|
||||
; "quoteTerm"
|
||||
; "rewrite"
|
||||
; "syntax"
|
||||
; "unquote"
|
||||
; "unquoteDecl"
|
||||
; "unquoteDef"
|
||||
; "using"
|
||||
; "variable"
|
||||
; "with"
|
||||
|
@@ -36,3 +36,16 @@
|
||||
(parameter_list
|
||||
. (parameter_declaration) @anchor
|
||||
(#set! "scope" "tail")) @align
|
||||
(argument_list
|
||||
. (_) @anchor
|
||||
(#set! "scope" "tail")) @align
|
||||
; These are a bit opinionated since some people just indent binary/ternary expressions spanning multiple lines.
|
||||
; Since they are only triggered when a newline is inserted into an already complete binary/ternary expression,
|
||||
; this should happen rarely, so it is not a big deal either way.
|
||||
; Additionally, adding these queries has the advantage of preventing such continuation lines from being used
|
||||
; as the baseline when the `hybrid` indent heuristic is used (which is desirable since their indentation is so inconsistent).
|
||||
(binary_expression
|
||||
(#set! "scope" "tail")) @anchor @align
|
||||
(conditional_expression
|
||||
"?" @anchor
|
||||
(#set! "scope" "tail")) @align
|
||||
|
@@ -7,31 +7,31 @@
|
||||
|
||||
; Hint level tags
|
||||
((tag (name) @hint)
|
||||
(#match? @hint "^(HINT|MARK)$"))
|
||||
(#match? @hint "^(HINT|MARK|PASSED|STUB|MOCK)$"))
|
||||
|
||||
("text" @hint
|
||||
(#match? @hint "^(HINT|MARK)$"))
|
||||
(#match? @hint "^(HINT|MARK|PASSED|STUB|MOCK)$"))
|
||||
|
||||
; Info level tags
|
||||
((tag (name) @info)
|
||||
(#match? @info "^(INFO|NOTE|TODO)$"))
|
||||
(#match? @info "^(INFO|NOTE|TODO|PERF|OPTIMIZE|PERFORMANCE|QUESTION|ASK)$"))
|
||||
|
||||
("text" @info
|
||||
(#match? @info "^(INFO|NOTE|TODO)$"))
|
||||
(#match? @info "^(INFO|NOTE|TODO|PERF|OPTIMIZE|PERFORMANCE|QUESTION|ASK)$"))
|
||||
|
||||
; Warning level tags
|
||||
((tag (name) @warning)
|
||||
(#match? @warning "^(HACK|WARN|WARNING)$"))
|
||||
(#match? @warning "^(HACK|WARN|WARNING|TEST|TEMP)$"))
|
||||
|
||||
("text" @warning
|
||||
(#match? @warning "^(HACK|WARN|WARNING)$"))
|
||||
(#match? @warning "^(HACK|WARN|WARNING|TEST|TEMP)$"))
|
||||
|
||||
; Error level tags
|
||||
((tag (name) @error)
|
||||
(#match? @error "^(BUG|FIXME|ISSUE|XXX)$"))
|
||||
(#match? @error "^(BUG|FIXME|ISSUE|XXX|FIX|SAFETY|FIXIT|FAILED|DEBUG)$"))
|
||||
|
||||
("text" @error
|
||||
(#match? @error "^(BUG|FIXME|ISSUE|XXX)$"))
|
||||
(#match? @error "^(BUG|FIXME|ISSUE|XXX|FIX|SAFETY|FIXIT|FAILED|DEBUG)$"))
|
||||
|
||||
(tag
|
||||
(name) @ui.text
|
||||
@@ -44,3 +44,5 @@
|
||||
; User mention (@user)
|
||||
("text" @tag
|
||||
(#match? @tag "^[@][a-zA-Z0-9_-]+$"))
|
||||
|
||||
(uri) @markup.link.url
|
||||
|
1
runtime/queries/common-lisp/indents.scm
Normal file
@@ -0,0 +1 @@
|
||||
; inherits: scheme
|
44
runtime/queries/dbml/highlights.scm
Normal file
@@ -0,0 +1,44 @@
|
||||
|
||||
; comments highlighting
|
||||
(comment) @comment
|
||||
|
||||
; keyword highlighting
|
||||
(keyword_def) @keyword
|
||||
(keyword_enum) @keyword
|
||||
(keyword_ref) @keyword
|
||||
|
||||
; identify blocks and definitions
|
||||
(definition) @function
|
||||
|
||||
; for identifiers
|
||||
(identifier) @variable
|
||||
(type) @keyword
|
||||
|
||||
; Highlight special types for database/data types
|
||||
("Project" ) @type
|
||||
("Table" ) @type
|
||||
("TableGroup" ) @type
|
||||
("database_type" ) @variable
|
||||
|
||||
; string and number constants
|
||||
("'''") @constant.character.escape
|
||||
(string) @string
|
||||
(number) @constant.numeric
|
||||
|
||||
; brackets
|
||||
[
|
||||
"("
|
||||
")"
|
||||
"{"
|
||||
"}"
|
||||
"["
|
||||
"]"
|
||||
] @punctuation.bracket
|
||||
|
||||
; brackets
|
||||
[
|
||||
":"
|
||||
"."
|
||||
","
|
||||
] @punctuation.delimiter
|
||||
|
@@ -21,6 +21,8 @@
|
||||
|
||||
; Functions
|
||||
(unqualified_import (identifier) @function)
|
||||
(unqualified_import "type" (type_identifier) @type)
|
||||
(unqualified_import (type_identifier) @constructor)
|
||||
(function
|
||||
name: (identifier) @function)
|
||||
(external_function
|
||||
@@ -43,6 +45,13 @@
|
||||
(tuple_access
|
||||
index: (integer) @variable.other.member)
|
||||
|
||||
; Attributes
|
||||
(attribute
|
||||
"@" @attribute
|
||||
name: (identifier) @attribute)
|
||||
|
||||
(attribute_value (identifier) @constant)
|
||||
|
||||
; Type names
|
||||
(remote_type_identifier) @type
|
||||
(type_identifier) @type
|
||||
@@ -60,10 +69,6 @@
|
||||
(identifier) @variable
|
||||
(discard) @comment.unused
|
||||
|
||||
; Operators
|
||||
(binary_expression
|
||||
operator: _ @operator)
|
||||
|
||||
; Keywords
|
||||
[
|
||||
(visibility_modifier) ; "pub"
|
||||
@@ -72,6 +77,7 @@
|
||||
"assert"
|
||||
"case"
|
||||
"const"
|
||||
; DEPRECATED: 'external' was removed in v0.30.
|
||||
"external"
|
||||
"fn"
|
||||
"if"
|
||||
@@ -79,11 +85,16 @@
|
||||
"let"
|
||||
"panic"
|
||||
"todo"
|
||||
"try"
|
||||
"type"
|
||||
"use"
|
||||
] @keyword
|
||||
|
||||
; Operators
|
||||
(binary_expression
|
||||
operator: _ @operator)
|
||||
(boolean_negation "!" @operator)
|
||||
(integer_negation "-" @operator)
|
||||
|
||||
; Punctuation
|
||||
[
|
||||
"("
|
||||
|
42
runtime/queries/gn/highlights.scm
Normal file
@@ -0,0 +1,42 @@
|
||||
; Copyright (C) 2021 Will Cassella (github@willcassella.com)
|
||||
;
|
||||
; Licensed under the Apache License, Version 2.0 (the "License");
|
||||
; you may not use this file except in compliance with the License.
|
||||
; You may obtain a copy of the License at
|
||||
;
|
||||
; http://www.apache.org/licenses/LICENSE-2.0
|
||||
;
|
||||
; Unless required by applicable law or agreed to in writing, software
|
||||
; distributed under the License is distributed on an "AS IS" BASIS,
|
||||
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
; See the License for the specific language governing permissions and
|
||||
; limitations under the License.
|
||||
|
||||
(identifier) @variable.builtin
|
||||
|
||||
(scope_access field: (_) @variable.other.member)
|
||||
|
||||
(call target: (_) @function)
|
||||
|
||||
[ "if" "else" ] @keyword.control.conditional
|
||||
|
||||
[
|
||||
(assign_op)
|
||||
(arithmetic_binary_op)
|
||||
(comparison_binary_op)
|
||||
(equivalence_binary_op)
|
||||
(logical_and_binary_op)
|
||||
(logical_or_binary_op)
|
||||
(negation_unary_op)
|
||||
] @operator
|
||||
|
||||
[ "(" ")" "[" "]" "{" "}" ] @punctuation.bracket
|
||||
[ "." "," ] @punctuation.delimiter
|
||||
|
||||
(string) @string
|
||||
(string_escape) @constant.character.escape
|
||||
(string_expansion [ "$" "${" "}" ] @constant.character.escape)
|
||||
[ (integer) (hex) ] @constant.numeric
|
||||
(boolean) @constant.builtin.boolean
|
||||
|
||||
(comment) @comment
|
15
runtime/queries/gn/injections.scm
Normal file
@@ -0,0 +1,15 @@
|
||||
; Copyright (C) 2021 Will Cassella (github@willcassella.com)
|
||||
;
|
||||
; Licensed under the Apache License, Version 2.0 (the "License");
|
||||
; you may not use this file except in compliance with the License.
|
||||
; You may obtain a copy of the License at
|
||||
;
|
||||
; http://www.apache.org/licenses/LICENSE-2.0
|
||||
;
|
||||
; Unless required by applicable law or agreed to in writing, software
|
||||
; distributed under the License is distributed on an "AS IS" BASIS,
|
||||
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
; See the License for the specific language governing permissions and
|
||||
; limitations under the License.
|
||||
|
||||
((comment) @injection.content (#set! injection.language "comment"))
|
16
runtime/queries/gn/locals.scm
Normal file
@@ -0,0 +1,16 @@
|
||||
; Copyright (C) 2021 Will Cassella (github@willcassella.com)
|
||||
;
|
||||
; Licensed under the Apache License, Version 2.0 (the "License");
|
||||
; you may not use this file except in compliance with the License.
|
||||
; You may obtain a copy of the License at
|
||||
;
|
||||
; http://www.apache.org/licenses/LICENSE-2.0
|
||||
;
|
||||
; Unless required by applicable law or agreed to in writing, software
|
||||
; distributed under the License is distributed on an "AS IS" BASIS,
|
||||
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
; See the License for the specific language governing permissions and
|
||||
; limitations under the License.
|
||||
|
||||
(source_file) @local.scope
|
||||
(assignment target: (identifier) @local.definition)
|
@@ -11,6 +11,18 @@
|
||||
function: (selector_expression
|
||||
field: (field_identifier) @function.method))
|
||||
|
||||
|
||||
; Types
|
||||
|
||||
(type_parameter_list
|
||||
(parameter_declaration
|
||||
name: (identifier) @type.parameter))
|
||||
|
||||
((type_identifier) @type.builtin
|
||||
(match? @type.builtin "^(any|bool|byte|comparable|complex128|complex64|error|float32|float64|int|int16|int32|int64|int8|rune|string|uint|uint16|uint32|uint64|uint8|uintptr)$"))
|
||||
|
||||
(type_identifier) @type
|
||||
|
||||
; Function definitions
|
||||
|
||||
(function_declaration
|
||||
@@ -30,10 +42,6 @@
|
||||
(parameter_declaration (identifier) @variable.parameter)
|
||||
(variadic_parameter_declaration (identifier) @variable.parameter)
|
||||
|
||||
((type_identifier) @type.builtin
|
||||
(match? @type.builtin "^(any|bool|byte|comparable|complex128|complex64|error|float32|float64|int|int16|int32|int64|int8|rune|string|uint|uint16|uint32|uint64|uint8|uintptr)$"))
|
||||
|
||||
(type_identifier) @type
|
||||
(type_spec
|
||||
name: (type_identifier) @constructor)
|
||||
(field_identifier) @variable.other.member
|
||||
|
@@ -1,9 +1,17 @@
|
||||
; Scopes
|
||||
|
||||
(block) @local.scope
|
||||
[
|
||||
(function_declaration)
|
||||
(type_declaration)
|
||||
(block)
|
||||
] @local.scope
|
||||
|
||||
; Definitions
|
||||
|
||||
(type_parameter_list
|
||||
(parameter_declaration
|
||||
name: (identifier) @local.definition))
|
||||
|
||||
(parameter_declaration (identifier) @local.definition)
|
||||
(variadic_parameter_declaration (identifier) @local.definition)
|
||||
|
||||
@@ -27,4 +35,4 @@
|
||||
|
||||
(identifier) @local.reference
|
||||
(field_identifier) @local.reference
|
||||
|
||||
(type_identifier) @local.reference
|
||||
|
@@ -124,7 +124,7 @@
|
||||
;; Types
|
||||
|
||||
(type) @type
|
||||
(type_variable) @type
|
||||
(type_variable) @type.parameter
|
||||
|
||||
(constructor) @constructor
|
||||
|
||||
|
31
runtime/queries/hocon/highlights.scm
Normal file
@@ -0,0 +1,31 @@
|
||||
(comment) @comment
|
||||
|
||||
(null) @constant.builtin
|
||||
[(true) (false)] @constant.builtin.boolean
|
||||
(number) @constant.numeric
|
||||
(string) @string
|
||||
(multiline_string) @string
|
||||
(string (escape_sequence) @constant.character.escape)
|
||||
(unquoted_string) @string
|
||||
|
||||
(value [":" "=" "+=" ] @operator)
|
||||
|
||||
(substitution (_) @string)
|
||||
(substitution ["${" "${?" "}"] @punctuation.special)
|
||||
|
||||
[
|
||||
"url"
|
||||
"file"
|
||||
"classpath"
|
||||
"required"
|
||||
] @function.builtin
|
||||
|
||||
(include) @keyword.directive
|
||||
|
||||
[ "(" ")" "[" "]" "{" "}" ] @punctuation.bracket
|
||||
|
||||
(unit) @keyword
|
||||
(path (_) @keyword)
|
||||
(unquoted_path "." @punctuation.delimiter)
|
||||
[ "," ] @punctuation.delimiter
|
||||
|
10
runtime/queries/hocon/indents.scm
Normal file
@@ -0,0 +1,10 @@
|
||||
[
|
||||
(object)
|
||||
(array)
|
||||
] @indent
|
||||
|
||||
[
|
||||
"]"
|
||||
"}"
|
||||
] @outdent
|
||||
|
1
runtime/queries/janet/highlights.scm
Normal file
@@ -0,0 +1 @@
|
||||
; inherits: clojure
|
@@ -1,12 +1,12 @@
|
||||
(comment) @comment
|
||||
(single_line_comment) @comment
|
||||
(multi_line_comment) @comment
|
||||
|
||||
(node
|
||||
name: (identifier) @function)
|
||||
(prop (identifier) @attribute)
|
||||
(type) @type
|
||||
(identifier) @variable)
|
||||
|
||||
(bare_identifier) @variable.other.member
|
||||
(prop (identifier) @attribute)
|
||||
|
||||
(type (_) @type) @punctuation.bracket
|
||||
|
||||
(keyword) @keyword
|
||||
|
||||
|
3
runtime/queries/kdl/indents.scm
Normal file
@@ -0,0 +1,3 @@
|
||||
(node_children) @indent
|
||||
|
||||
"}" @outdent
|
27
runtime/queries/kdl/textobjects.scm
Normal file
@@ -0,0 +1,27 @@
|
||||
(type (_) @test.inside) @test.around
|
||||
|
||||
(node
|
||||
children: (node_children)? @class.inside) @class.around
|
||||
|
||||
(node
|
||||
children: (node_children)? @function.inside) @function.around
|
||||
|
||||
(node (identifier) @function.movement)
|
||||
|
||||
[
|
||||
(single_line_comment)
|
||||
(multi_line_comment)
|
||||
] @comment.inside
|
||||
|
||||
[
|
||||
(single_line_comment)+
|
||||
(multi_line_comment)+
|
||||
] @comment.around
|
||||
|
||||
[
|
||||
(prop)
|
||||
(value)
|
||||
] @parameter.inside
|
||||
|
||||
(value (type) ? (_) @parameter.inside @parameter.movement . ) @parameter.around
|
||||
|
@@ -244,7 +244,10 @@
|
||||
. (identifier)) @namespace
|
||||
|
||||
((type_identifier) @type.builtin
|
||||
(#match? @function.builtin "^(Byte|Short|Int|Long|UByte|UShort|UInt|ULong|Float|Double|Boolean|Char|String|Array|ByteArray|ShortArray|IntArray|LongArray|UByteArray|UShortArray|UIntArray|ULongArray|FloatArray|DoubleArray|BooleanArray|CharArray|Map|Set|List|EmptyMap|EmptySet|EmptyList|MutableMap|MutableSet|MutableList)$"))
|
||||
(#match? @type.builtin "^(Byte|Short|Int|Long|UByte|UShort|UInt|ULong|Float|Double|Boolean|Char|String|Array|ByteArray|ShortArray|IntArray|LongArray|UByteArray|UShortArray|UIntArray|ULongArray|FloatArray|DoubleArray|BooleanArray|CharArray|Map|Set|List|EmptyMap|EmptySet|EmptyList|MutableMap|MutableSet|MutableList)$"))
|
||||
|
||||
(type_parameter
|
||||
(type_identifier) @type.parameter)
|
||||
|
||||
(type_identifier) @type
|
||||
|
||||
|
15
runtime/queries/kotlin/locals.scm
Normal file
@@ -0,0 +1,15 @@
|
||||
; Scopes
|
||||
|
||||
[
|
||||
(class_declaration)
|
||||
(function_declaration)
|
||||
] @local.scope
|
||||
|
||||
; Definitions
|
||||
|
||||
(type_parameter
|
||||
(type_identifier) @local.definition)
|
||||
|
||||
; References
|
||||
|
||||
(type_identifier) @local.reference
|
10
runtime/queries/log/highlights.scm
Normal file
@@ -0,0 +1,10 @@
|
||||
(trace) @comment
|
||||
(debug) @hint
|
||||
(info) @info
|
||||
(warn) @warning
|
||||
(error) @error
|
||||
(year_month_day) @keyword
|
||||
(time) @constant
|
||||
(string_literal) @string
|
||||
(number) @constant.numeric
|
||||
(constant) @constant.builtin
|
19
runtime/queries/lpf/highlights.scm
Normal file
@@ -0,0 +1,19 @@
|
||||
[
|
||||
"SYSCONFIG"
|
||||
"BLOCK"
|
||||
"LOCATE"
|
||||
"COMP"
|
||||
"FREQUENCY"
|
||||
"PORT"
|
||||
"IOBUF"
|
||||
] @keyword
|
||||
|
||||
["SITE"] @keyword.storage
|
||||
|
||||
["="] @operator
|
||||
|
||||
((number) @constant.numeric)
|
||||
|
||||
((string) @string)
|
||||
((line_comment) @comment)
|
||||
|
@@ -8,6 +8,8 @@
|
||||
|
||||
[(class_name) (class_type_name) (type_constructor)] @type
|
||||
|
||||
(type_variable) @type.parameter
|
||||
|
||||
[(constructor_name) (tag)] @constructor
|
||||
|
||||
; Functions
|
||||
|
Some files were not shown because too many files have changed in this diff.