Mirror of https://github.com/helix-editor/helix.git (synced 2025-10-06 16:33:17 +02:00)

Compare commits: 4 commits, 25.07.1...string-lsp
| Author | SHA1 | Date |
| --- | --- | --- |
| | 61491af15e | |
| | a36806e326 | |
| | b84c9a893c | |
| | 652e316925 | |
Cargo.lock (generated)

@@ -1237,6 +1237,7 @@ dependencies = [
 "nucleo",
 "once_cell",
 "parking_lot",
 "percent-encoding",
 "quickcheck",
 "regex",
 "regex-cursor",
@@ -1252,7 +1253,6 @@ dependencies = [
 "unicode-general-category",
 "unicode-segmentation",
 "unicode-width",
 "url",
]

[[package]]
@@ -1332,10 +1332,10 @@ name = "helix-lsp-types"
version = "0.95.1"
dependencies = [
 "bitflags",
 "percent-encoding",
 "serde",
 "serde_json",
 "serde_repr",
 "url",
]

[[package]]
@@ -1468,7 +1468,6 @@ dependencies = [
 "tokio",
 "tokio-stream",
 "toml",
 "url",
]

[[package]]
@@ -2622,7 +2621,6 @@ dependencies = [
 "form_urlencoded",
 "idna",
 "percent-encoding",
 "serde",
]

[[package]]

@@ -42,6 +42,7 @@ tree-sitter = { version = "0.22" }
nucleo = "0.5.0"
slotmap = "1.0.7"
thiserror = "2.0"
percent-encoding = "2.3"

[workspace.package]
version = "24.7.0"

@@ -40,7 +40,7 @@ bitflags = "2.6"
ahash = "0.8.11"
hashbrown = { version = "0.14.5", features = ["raw"] }
dunce = "1.0"
url = "2.5.4"
percent-encoding.workspace = true

log = "0.4"
anyhow = "1.0"

@@ -1,6 +1,7 @@
use std::{
    fmt,
    path::{Path, PathBuf},
    str::FromStr,
    sync::Arc,
};

@@ -16,14 +17,6 @@ pub enum Uri {
}

impl Uri {
    // This clippy allow mirrors url::Url::from_file_path
    #[allow(clippy::result_unit_err)]
    pub fn to_url(&self) -> Result<url::Url, ()> {
        match self {
            Uri::File(path) => url::Url::from_file_path(path),
        }
    }

    pub fn as_path(&self) -> Option<&Path> {
        match self {
            Self::File(path) => Some(path),
@@ -45,81 +38,96 @@ impl fmt::Display for Uri {
    }
}

#[derive(Debug)]
pub struct UrlConversionError {
    source: url::Url,
    kind: UrlConversionErrorKind,
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct UriParseError {
    source: String,
    kind: UriParseErrorKind,
}

#[derive(Debug)]
pub enum UrlConversionErrorKind {
    UnsupportedScheme,
    UnableToConvert,
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum UriParseErrorKind {
    UnsupportedScheme(String),
    MalformedUri,
}

impl fmt::Display for UrlConversionError {
impl fmt::Display for UriParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.kind {
            UrlConversionErrorKind::UnsupportedScheme => {
        match &self.kind {
            UriParseErrorKind::UnsupportedScheme(scheme) => {
                write!(f, "unsupported scheme '{scheme}' in URI {}", self.source)
            }
            UriParseErrorKind::MalformedUri => {
                write!(
                    f,
                    "unsupported scheme '{}' in URL {}",
                    self.source.scheme(),
                    "unable to convert malformed URI to file path: {}",
                    self.source
                )
            }
            UrlConversionErrorKind::UnableToConvert => {
                write!(f, "unable to convert URL to file path: {}", self.source)
            }
        }
    }
}

impl std::error::Error for UrlConversionError {}
impl std::error::Error for UriParseError {}

fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
    if url.scheme() == "file" {
        url.to_file_path()
            .map(|path| Uri::File(helix_stdx::path::normalize(path).into()))
            .map_err(|_| UrlConversionErrorKind::UnableToConvert)
    } else {
        Err(UrlConversionErrorKind::UnsupportedScheme)
impl FromStr for Uri {
    type Err = UriParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use std::ffi::OsStr;
        #[cfg(any(unix, target_os = "redox"))]
        use std::os::unix::prelude::OsStrExt;
        #[cfg(target_os = "wasi")]
        use std::os::wasi::prelude::OsStrExt;

        let Some((scheme, rest)) = s.split_once("://") else {
            return Err(Self::Err {
                source: s.to_string(),
                kind: UriParseErrorKind::MalformedUri,
            });
        };

        if scheme != "file" {
            return Err(Self::Err {
                source: s.to_string(),
                kind: UriParseErrorKind::UnsupportedScheme(scheme.to_string()),
            });
        }

        // Assert there is no query or fragment in the URI.
        if s.find(['?', '#']).is_some() {
            return Err(Self::Err {
                source: s.to_string(),
                kind: UriParseErrorKind::MalformedUri,
            });
        }

        let mut bytes = Vec::new();
        bytes.extend(percent_encoding::percent_decode(rest.as_bytes()));
        Ok(PathBuf::from(OsStr::from_bytes(&bytes)).into())
    }
}

impl TryFrom<url::Url> for Uri {
    type Error = UrlConversionError;
impl TryFrom<&str> for Uri {
    type Error = UriParseError;

    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
        convert_url_to_uri(&url).map_err(|kind| Self::Error { source: url, kind })
    }
}

impl TryFrom<&url::Url> for Uri {
    type Error = UrlConversionError;

    fn try_from(url: &url::Url) -> Result<Self, Self::Error> {
        convert_url_to_uri(url).map_err(|kind| Self::Error {
            source: url.clone(),
            kind,
        })
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        s.parse()
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use url::Url;

    #[test]
    fn unknown_scheme() {
        let url = Url::parse("csharp:/metadata/foo/bar/Baz.cs").unwrap();
        assert!(matches!(
            Uri::try_from(url),
            Err(UrlConversionError {
                kind: UrlConversionErrorKind::UnsupportedScheme,
                ..
        let uri = "csharp://metadata/foo/barBaz.cs";
        assert_eq!(
            uri.parse::<Uri>(),
            Err(UriParseError {
                source: uri.to_string(),
                kind: UriParseErrorKind::UnsupportedScheme("csharp".to_string()),
            })
        ));
        );
    }
}

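For orientation, a minimal usage sketch of the new string-based parser introduced above (not part of the diff). It assumes the `helix-core` crate from this branch on a Unix-like target and only exercises the `FromStr` and `as_path` APIs shown in the hunk.

```rust
use std::path::Path;

use helix_core::Uri;

fn main() {
    // Percent-encoded bytes in the path are decoded before the PathBuf is built.
    let uri: Uri = "file:///home/user/my%20project/main.rs".parse().unwrap();
    assert_eq!(uri.as_path(), Some(Path::new("/home/user/my project/main.rs")));

    // Non-file schemes are rejected; the scheme name is carried in the error kind.
    assert!("https://example.com/index.html".parse::<Uri>().is_err());

    // Input without a "://" separator is reported as a malformed URI.
    assert!("not a uri".parse::<Uri>().is_err());
}
```
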
@@ -25,7 +25,7 @@ bitflags = "2.6.0"
serde = { version = "1.0.216", features = ["derive"] }
serde_json = "1.0.133"
serde_repr = "0.1"
url = {version = "2.5.4", features = ["serde"]}
percent-encoding.workspace = true

[features]
default = []

@@ -1,3 +1,5 @@
# Helix's `lsp-types`

This is a fork of the [`lsp-types`](https://crates.io/crates/lsp-types) crate ([`gluon-lang/lsp-types`](https://github.com/gluon-lang/lsp-types)) taken at version v0.95.1 (commit [3e6daee](https://github.com/gluon-lang/lsp-types/commit/3e6daee771d14db4094a554b8d03e29c310dfcbe)). This fork focuses usability improvements that make the types easier to work with for the Helix codebase. For example the URL type - the `uri` crate at this version of `lsp-types` - will be replaced with a wrapper around a string.
This is a fork of the [`lsp-types`](https://crates.io/crates/lsp-types) crate ([`gluon-lang/lsp-types`](https://github.com/gluon-lang/lsp-types)) taken at version v0.95.1 (commit [3e6daee](https://github.com/gluon-lang/lsp-types/commit/3e6daee771d14db4094a554b8d03e29c310dfcbe)). This fork focuses on usability improvements that make the types easier to work with for the Helix codebase.

The URL type has been replaced with a newtype wrapper of a `String`. The `lsp-types` crate at the forked version used [`url::Url`](https://docs.rs/url/2.5.0/url/struct.Url.html) which provides conveniences for using URLs according to [the WHATWG URL spec](https://url.spec.whatwg.org). Helix supports a subset of valid URLs, namely the `file://` scheme, so a wrapper around a normal `String` is sufficient. Plus the LSP spec requires URLs to be in [RFC3986](https://tools.ietf.org/html/rfc3986) format instead.

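To make the intended usage concrete, here is a small sketch of the wrapper described above (not part of the diff). It relies only on the API added in the `lib.rs` hunk that follows (`from_file_path`, `from_directory_path`, `as_str`, `into_string`), reached through helix-lsp's `lsp` re-export, and assumes a Unix-style path.

```rust
use helix_lsp::lsp::Url;

fn main() {
    // A file path becomes a file:// URL; reserved bytes in each component are
    // percent-encoded according to the RESERVED set defined in the hunk below.
    let url = Url::from_file_path("/home/user/projects/helix");
    assert_eq!(url.as_str(), "file:///home/user/projects/helix");

    // The directory form guarantees a trailing slash, which is what
    // workspace-folder URIs are expected to look like.
    let dir = Url::from_directory_path("/home/user/projects/helix");
    assert_eq!(dir.into_string(), "file:///home/user/projects/helix/");
}
```
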
@@ -3,27 +3,151 @@
Language Server Protocol types for Rust.

Based on: <https://microsoft.github.io/language-server-protocol/specification>

This library uses the URL crate for parsing URIs. Note that there is
some confusion on the meaning of URLs vs URIs:
<http://stackoverflow.com/a/28865728/393898>. According to that
information, on the classical sense of "URLs", "URLs" are a subset of
URIs, But on the modern/new meaning of URLs, they are the same as
URIs. The important take-away aspect is that the URL crate should be
able to parse any URI, such as `urn:isbn:0451450523`.


*/
#![allow(non_upper_case_globals)]
#![forbid(unsafe_code)]

use bitflags::bitflags;

use std::{collections::HashMap, fmt::Debug};
use std::{collections::HashMap, fmt::Debug, path::Path};

use serde::{de, de::Error as Error_, Deserialize, Serialize};
use serde_json::Value;
pub use url::Url;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct Url(String);

// <https://datatracker.ietf.org/doc/html/rfc3986#section-2.2>, also see
// <https://github.com/microsoft/vscode-uri/blob/6dec22d7dcc6c63c30343d3a8d56050d0078cb6a/src/uri.ts#L454-L477>
const RESERVED: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
    // GEN_DELIMS
    .add(b':')
    .add(b'/')
    .add(b'?')
    .add(b'#')
    .add(b'[')
    .add(b']')
    .add(b'@')
    // SUB_DELIMS
    .add(b'!')
    .add(b'$')
    .add(b'&')
    .add(b'\'')
    .add(b'(')
    .add(b')')
    .add(b'*')
    .add(b'+')
    .add(b',')
    .add(b';')
    .add(b'=');

impl Url {
    #[cfg(any(unix, target_os = "redox", target_os = "wasi"))]
    pub fn from_file_path<P: AsRef<Path>>(path: P) -> Self {
        #[cfg(any(unix, target_os = "redox"))]
        use std::os::unix::prelude::OsStrExt;
        #[cfg(target_os = "wasi")]
        use std::os::wasi::prelude::OsStrExt;

        let mut serialization = String::from("file://");
        // skip the root component
        for component in path.as_ref().components().skip(1) {
            serialization.push('/');
            serialization.extend(percent_encoding::percent_encode(
                component.as_os_str().as_bytes(),
                RESERVED,
            ));
        }
        if &serialization == "file://" {
            // An URL's path must not be empty.
            serialization.push('/');
        }
        Self(serialization)
    }

    #[cfg(windows)]
    pub fn from_file_path<P: AsRef<Path>>(path: P) -> Self {
        from_file_path_windows(path.as_ref())
    }

    #[cfg_attr(not(windows), allow(dead_code))]
    fn from_file_path_windows(path: &Path) -> Self {
        use std::path::{Component, Prefix};

        fn is_windows_drive_letter(segment: &str) -> bool {
            segment.len() == 2
                && (segment.as_bytes()[0] as char).is_ascii_alphabetic()
                && matches!(segment.as_bytes()[1], b':' | b'|')
        }

        assert!(path.is_absolute());
        let mut serialization = String::from("file://");
        let mut components = path.components();
        let host_start = serialization.len() + 1;

        match components.next() {
            Some(Component::Prefix(ref p)) => match p.kind() {
                Prefix::Disk(letter) | Prefix::VerbatimDisk(letter) => {
                    serialization.push('/');
                    serialization.push(letter as char);
                    serialization.push(':');
                }
                // TODO: Prefix::UNC | Prefix::VerbatimUNC
                _ => todo!("support UNC drives"),
            },
            _ => unreachable!("absolute windows paths must start with a prefix"),
        }

        let mut path_only_has_prefix = true;
        for component in components {
            if component == Component::RootDir {
                continue;
            }

            path_only_has_prefix = false;

            serialization.push('/');
            serialization.extend(percent_encoding::percent_encode(
                component.as_os_str().as_encoded_bytes(),
                RESERVED,
            ));
        }

        if serialization.len() > host_start
            && is_windows_drive_letter(&serialization[host_start..])
            && path_only_has_prefix
        {
            serialization.push('/');
        }

        Self(serialization)
    }

    pub fn from_directory_path<P: AsRef<Path>>(path: P) -> Self {
        let Self(mut serialization) = Self::from_file_path(path);
        if !serialization.ends_with('/') {
            serialization.push('/');
        }
        Self(serialization)
    }

    /// Returns the serialized representation of the URL as a `&str`
    pub fn as_str(&self) -> &str {
        &self.0
    }

    /// Consumes the URL, converting into a `String`.
    /// Note that the string is the serialized representation of the URL.
    pub fn into_string(self) -> String {
        self.0
    }
}

impl From<&str> for Url {
    fn from(value: &str) -> Self {
        Self(value.to_string())
    }
}

// Large enough to contain any enumeration name defined in this crate
type PascalCaseBuf = [u8; 32];
@@ -2843,14 +2967,14 @@ mod tests {
        test_serialization(
            &WorkspaceEdit {
                changes: Some(
                    vec![(Url::parse("file://test").unwrap(), vec![])]
                    vec![(Url::from("file://test"), vec![])]
                        .into_iter()
                        .collect(),
                ),
                document_changes: None,
                ..Default::default()
            },
            r#"{"changes":{"file://test/":[]}}"#,
            r#"{"changes":{"file://test":[]}}"#,
        );
    }

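As a standalone illustration of the percent-encoding round trip the code above relies on, the sketch below uses the real `percent-encoding` crate with a trimmed-down variant of the `RESERVED` set (controls plus a few gen-delims only); it is an illustrative example, not part of the diff.

```rust
use percent_encoding::{percent_decode_str, percent_encode, AsciiSet, CONTROLS};

// Reduced stand-in for the RESERVED set above: controls plus a few gen-delims.
const GEN_DELIMS: &AsciiSet = &CONTROLS.add(b':').add(b'?').add(b'#').add(b'[').add(b']');

fn main() {
    let segment = "notes#draft?.md";

    // Encoding replaces each reserved byte with its %XX form (as in Url::from_file_path).
    let encoded = percent_encode(segment.as_bytes(), GEN_DELIMS).to_string();
    assert_eq!(encoded, "notes%23draft%3F.md");

    // Decoding restores the original bytes (as in helix-core's Uri::from_str).
    let decoded = percent_decode_str(&encoded).decode_utf8().unwrap();
    assert_eq!(decoded, segment);
}
```
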
@@ -32,14 +32,17 @@ use tokio::{
    },
};

fn workspace_for_uri(uri: lsp::Url) -> WorkspaceFolder {
fn workspace_for_path(path: &Path) -> WorkspaceFolder {
    let name = path
        .iter()
        .last()
        .expect("workspace paths should be non-empty")
        .to_string_lossy()
        .to_string();

    lsp::WorkspaceFolder {
        name: uri
            .path_segments()
            .and_then(|segments| segments.last())
            .map(|basename| basename.to_string())
            .unwrap_or_default(),
        uri,
        name,
        uri: lsp::Url::from_directory_path(path),
    }
}

@@ -55,7 +58,7 @@ pub struct Client {
    config: Option<Value>,
    root_path: std::path::PathBuf,
    root_uri: Option<lsp::Url>,
    workspace_folders: Mutex<Vec<lsp::WorkspaceFolder>>,
    workspace_folders: Mutex<Vec<PathBuf>>,
    initialize_notify: Arc<Notify>,
    /// workspace folders added while the server is still initializing
    req_timeout: u64,
@@ -80,16 +83,13 @@ impl Client {
            &workspace,
            workspace_is_cwd,
        );
        let root_uri = root
            .as_ref()
            .and_then(|root| lsp::Url::from_file_path(root).ok());

        if self.root_path == root.unwrap_or(workspace)
            || root_uri.as_ref().map_or(false, |root_uri| {
        if &self.root_path == root.as_ref().unwrap_or(&workspace)
            || root.as_ref().is_some_and(|root| {
                self.workspace_folders
                    .lock()
                    .iter()
                    .any(|workspace| &workspace.uri == root_uri)
                    .any(|workspace| workspace == root)
            })
        {
            // workspace URI is already registered so we can use this client
@@ -113,15 +113,16 @@ impl Client {
            // wait and see if anyone ever runs into it.
            tokio::spawn(async move {
                client.initialize_notify.notified().await;
                if let Some(workspace_folders_caps) = client
                if let Some((workspace_folders_caps, root)) = client
                    .capabilities()
                    .workspace
                    .as_ref()
                    .and_then(|cap| cap.workspace_folders.as_ref())
                    .filter(|cap| cap.supported.unwrap_or(false))
                    .zip(root)
                {
                    client.add_workspace_folder(
                        root_uri,
                        root,
                        workspace_folders_caps.change_notifications.as_ref(),
                    );
                }
@@ -129,16 +130,14 @@ impl Client {
            return true;
        };

        if let Some(workspace_folders_caps) = capabilities
        if let Some((workspace_folders_caps, root)) = capabilities
            .workspace
            .as_ref()
            .and_then(|cap| cap.workspace_folders.as_ref())
            .filter(|cap| cap.supported.unwrap_or(false))
            .zip(root)
        {
            self.add_workspace_folder(
                root_uri,
                workspace_folders_caps.change_notifications.as_ref(),
            );
            self.add_workspace_folder(root, workspace_folders_caps.change_notifications.as_ref());
            true
        } else {
            // the server doesn't support multi workspaces, we need a new client
@@ -148,29 +147,19 @@ impl Client {

    fn add_workspace_folder(
        &self,
        root_uri: Option<lsp::Url>,
        root: PathBuf,
        change_notifications: Option<&OneOf<bool, String>>,
    ) {
        // root_uri is None just means that there isn't really any LSP workspace
        // associated with this file. For servers that support multiple workspaces
        // there is just one server so we can always just use that shared instance.
        // No need to add a new workspace root here as there is no logical root for this file
        // let the server deal with this
        let Some(root_uri) = root_uri else {
            return;
        };

        let workspace = workspace_for_path(&root);
        // server supports workspace folders, let's add the new root to the list
        self.workspace_folders
            .lock()
            .push(workspace_for_uri(root_uri.clone()));
        self.workspace_folders.lock().push(root);
        if Some(&OneOf::Left(false)) == change_notifications {
            // server specifically opted out of DidWorkspaceChange notifications
            // let's assume the server will request the workspace folders itself
            // and that we can therefore reuse the client (but are done now)
            return;
        }
        tokio::spawn(self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new()));
        tokio::spawn(self.did_change_workspace(vec![workspace], Vec::new()));
    }

    #[allow(clippy::type_complexity, clippy::too_many_arguments)]
@@ -179,8 +168,8 @@ impl Client {
        args: &[String],
        config: Option<Value>,
        server_environment: HashMap<String, String>,
        root_path: PathBuf,
        root_uri: Option<lsp::Url>,
        root: Option<PathBuf>,
        workspace: PathBuf,
        id: LanguageServerId,
        name: String,
        req_timeout: u64,
@@ -212,10 +201,11 @@ impl Client {
        let (server_rx, server_tx, initialize_notify) =
            Transport::start(reader, writer, stderr, id, name.clone());

        let workspace_folders = root_uri
            .clone()
            .map(|root| vec![workspace_for_uri(root)])
            .unwrap_or_default();
        let workspace_folders = root.clone().into_iter().collect();
        let root_uri = root.clone().map(lsp::Url::from_file_path);
        // `root_uri` and `workspace_folder` can be empty in case there is no workspace
        // `root_url` can not, use `workspace` as a fallback
        let root_path = root.unwrap_or(workspace);

        let client = Self {
            id,
@@ -376,10 +366,12 @@ impl Client {
        self.config.as_ref()
    }

    pub async fn workspace_folders(
        &self,
    ) -> parking_lot::MutexGuard<'_, Vec<lsp::WorkspaceFolder>> {
        self.workspace_folders.lock()
    pub async fn workspace_folders(&self) -> Vec<lsp::WorkspaceFolder> {
        self.workspace_folders
            .lock()
            .iter()
            .map(|path| workspace_for_path(path))
            .collect()
    }

    /// Execute a RPC request on the language server.
@@ -526,7 +518,7 @@ impl Client {
        #[allow(deprecated)]
        let params = lsp::InitializeParams {
            process_id: Some(std::process::id()),
            workspace_folders: Some(self.workspace_folders.lock().clone()),
            workspace_folders: Some(self.workspace_folders().await),
            // root_path is obsolete, but some clients like pyright still use it so we specify both.
            // clients will prefer _uri if possible
            root_path: self.root_path.to_str().map(|path| path.to_owned()),
@@ -748,11 +740,11 @@ impl Client {
            } else {
                Url::from_file_path(path)
            };
            Some(url.ok()?.to_string())
            url.into_string()
        };
        let files = vec![lsp::FileRename {
            old_uri: url_from_path(old_path)?,
            new_uri: url_from_path(new_path)?,
            old_uri: url_from_path(old_path),
            new_uri: url_from_path(new_path),
        }];
        let request = self.call_with_timeout::<lsp::request::WillRenameFiles>(
            &lsp::RenameFilesParams { files },
@@ -782,12 +774,12 @@ impl Client {
            } else {
                Url::from_file_path(path)
            };
            Some(url.ok()?.to_string())
            url.into_string()
        };

        let files = vec![lsp::FileRename {
            old_uri: url_from_path(old_path)?,
            new_uri: url_from_path(new_path)?,
            old_uri: url_from_path(old_path),
            new_uri: url_from_path(new_path),
        }];
        Some(self.notify::<lsp::notification::DidRenameFiles>(lsp::RenameFilesParams { files }))
    }

@@ -106,9 +106,7 @@ impl Handler {
                log::warn!("LSP client was dropped: {id}");
                return false;
            };
            let Ok(uri) = lsp::Url::from_file_path(&path) else {
                return true;
            };
            let uri = lsp::Url::from_file_path(&path);
            log::debug!(
                "Sending didChangeWatchedFiles notification to client '{}'",
                client.name()

@@ -853,12 +853,8 @@ fn start_client(
        workspace_is_cwd,
    );

    // `root_uri` and `workspace_folder` can be empty in case there is no workspace
    // `root_url` can not, use `workspace` as a fallback
    let root_path = root.clone().unwrap_or_else(|| workspace.clone());
    let root_uri = root.and_then(|root| lsp::Url::from_file_path(root).ok());

    if let Some(globset) = &ls_config.required_root_patterns {
        let root_path = root.as_ref().unwrap_or(&workspace);
        if !root_path
            .read_dir()?
            .flatten()
@@ -874,8 +870,8 @@ fn start_client(
        &ls_config.args,
        ls_config.config.clone(),
        ls_config.environment.clone(),
        root_path,
        root_uri,
        root,
        workspace,
        id,
        name,
        ls_config.timeout,

@@ -744,7 +744,7 @@ impl Application {
                }
            }
            Notification::PublishDiagnostics(mut params) => {
                let uri = match helix_core::Uri::try_from(params.uri) {
                let uri = match helix_core::Uri::try_from(params.uri.as_str()) {
                    Ok(uri) => uri,
                    Err(err) => {
                        log::error!("{err}");
@@ -1143,7 +1143,8 @@ impl Application {
            ..
        } = params
        {
            self.jobs.callback(crate::open_external_url_callback(uri));
            self.jobs
                .callback(crate::open_external_url_callback(uri.as_str()));
            return lsp::ShowDocumentResult { success: true };
        };

@@ -1154,7 +1155,7 @@ impl Application {
            ..
        } = params;

        let uri = match helix_core::Uri::try_from(uri) {
        let uri = match helix_core::Uri::try_from(uri.as_str()) {
            Ok(uri) => uri,
            Err(err) => {
                log::error!("{err}");

@@ -1347,7 +1347,9 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
        .unwrap_or_default();

    if url.scheme() != "file" {
        return cx.jobs.callback(crate::open_external_url_callback(url));
        return cx
            .jobs
            .callback(crate::open_external_url_callback(url.as_str()));
    }

    let content_type = std::fs::File::open(url.path()).and_then(|file| {
@@ -1360,9 +1362,9 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
    // we attempt to open binary files - files that can't be open in helix - using external
    // program as well, e.g. pdf files or images
    match content_type {
        Ok(content_inspector::ContentType::BINARY) => {
            cx.jobs.callback(crate::open_external_url_callback(url))
        }
        Ok(content_inspector::ContentType::BINARY) => cx
            .jobs
            .callback(crate::open_external_url_callback(url.as_str())),
        Ok(_) | Err(_) => {
            let path = &rel_path.join(url.path());
            if path.is_dir() {

@@ -69,7 +69,7 @@ struct Location {
}

fn lsp_location_to_location(location: lsp::Location) -> Option<Location> {
    let uri = match location.uri.try_into() {
    let uri = match location.uri.as_str().try_into() {
        Ok(uri) => uri,
        Err(err) => {
            log::warn!("discarding invalid or unsupported URI: {err}");
@@ -456,7 +456,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
            .unwrap_or_default()
            .into_iter()
            .filter_map(|symbol| {
                let uri = match Uri::try_from(&symbol.location.uri) {
                let uri = match Uri::try_from(symbol.location.uri.as_str()) {
                    Ok(uri) => uri,
                    Err(err) => {
                        log::warn!("discarding symbol with invalid URI: {err}");
@@ -510,7 +510,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
                    .to_string()
                    .into()
            } else {
                item.symbol.location.uri.to_string().into()
                item.symbol.location.uri.as_str().into()
            }
        }),
    ];

@@ -18,7 +18,6 @@ use futures_util::Future;
mod handlers;

use ignore::DirEntry;
use url::Url;

#[cfg(windows)]
fn true_color() -> bool {
@@ -70,10 +69,10 @@ fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> b
}

/// Opens URL in external program.
fn open_external_url_callback(
    url: Url,
fn open_external_url_callback<U: AsRef<std::ffi::OsStr>>(
    url: U,
) -> impl Future<Output = Result<job::Callback, anyhow::Error>> + Send + 'static {
    let commands = open::commands(url.as_str());
    let commands = open::commands(url);
    async {
        for cmd in commands {
            let mut command = tokio::process::Command::new(cmd.get_program());

@@ -30,9 +30,7 @@ crossterm = { version = "0.28", optional = true }

tempfile = "3.14"

# Conversion traits
once_cell = "1.20"
url = "2.5.4"

arc-swap = { version = "1.7.1" }

@@ -642,7 +642,6 @@ where
}

use helix_lsp::{lsp, Client, LanguageServerId, LanguageServerName};
use url::Url;

impl Document {
    pub fn from(
@@ -1822,8 +1821,8 @@ impl Document {
    }

    /// File path as a URL.
    pub fn url(&self) -> Option<Url> {
        Url::from_file_path(self.path()?).ok()
    pub fn url(&self) -> Option<lsp::Url> {
        self.path().map(lsp::Url::from_file_path)
    }

    pub fn uri(&self) -> Option<helix_core::Uri> {
@@ -1909,7 +1908,7 @@ impl Document {
    pub fn lsp_diagnostic_to_diagnostic(
        text: &Rope,
        language_config: Option<&LanguageConfiguration>,
        diagnostic: &helix_lsp::lsp::Diagnostic,
        diagnostic: &lsp::Diagnostic,
        language_server_id: LanguageServerId,
        offset_encoding: helix_lsp::OffsetEncoding,
    ) -> Option<Diagnostic> {

@@ -57,7 +57,7 @@ pub struct ApplyEditError {
pub enum ApplyEditErrorKind {
    DocumentChanged,
    FileNotFound,
    InvalidUrl(helix_core::uri::UrlConversionError),
    InvalidUrl(helix_core::uri::UriParseError),
    IoError(std::io::Error),
    // TODO: check edits before applying and propagate failure
    // InvalidEdit,
@@ -69,8 +69,8 @@ impl From<std::io::Error> for ApplyEditErrorKind {
    }
}

impl From<helix_core::uri::UrlConversionError> for ApplyEditErrorKind {
    fn from(err: helix_core::uri::UrlConversionError) -> Self {
impl From<helix_core::uri::UriParseError> for ApplyEditErrorKind {
    fn from(err: helix_core::uri::UriParseError) -> Self {
        ApplyEditErrorKind::InvalidUrl(err)
    }
}
@@ -94,7 +94,7 @@ impl Editor {
        text_edits: Vec<lsp::TextEdit>,
        offset_encoding: OffsetEncoding,
    ) -> Result<(), ApplyEditErrorKind> {
        let uri = match Uri::try_from(url) {
        let uri = match Uri::try_from(url.as_str()) {
            Ok(uri) => uri,
            Err(err) => {
                log::error!("{err}");
@@ -242,7 +242,7 @@ impl Editor {
            // may no longer be valid.
            match op {
                ResourceOp::Create(op) => {
                    let uri = Uri::try_from(&op.uri)?;
                    let uri = Uri::try_from(op.uri.as_str())?;
                    let path = uri.as_path().expect("URIs are valid paths");
                    let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
                        !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
@@ -262,7 +262,7 @@ impl Editor {
                    }
                }
                ResourceOp::Delete(op) => {
                    let uri = Uri::try_from(&op.uri)?;
                    let uri = Uri::try_from(op.uri.as_str())?;
                    let path = uri.as_path().expect("URIs are valid paths");
                    if path.is_dir() {
                        let recursive = op
@@ -284,9 +284,9 @@ impl Editor {
                    }
                }
                ResourceOp::Rename(op) => {
                    let from_uri = Uri::try_from(&op.old_uri)?;
                    let from_uri = Uri::try_from(op.old_uri.as_str())?;
                    let from = from_uri.as_path().expect("URIs are valid paths");
                    let to_uri = Uri::try_from(&op.new_uri)?;
                    let to_uri = Uri::try_from(op.new_uri.as_str())?;
                    let to = to_uri.as_path().expect("URIs are valid paths");
                    let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
                        !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)