Mirror of https://github.com/helix-editor/helix.git (synced 2025-10-06 08:23:27 +02:00)

Compare commits: bump_imara...string-lsp (4 commits)

Commits (SHA1):
61491af15e
a36806e326
b84c9a893c
652e316925
Cargo.lock (generated): 6 changed lines
@@ -1237,6 +1237,7 @@ dependencies = [
  "nucleo",
  "once_cell",
  "parking_lot",
+ "percent-encoding",
  "quickcheck",
  "regex",
  "regex-cursor",
@@ -1252,7 +1253,6 @@ dependencies = [
  "unicode-general-category",
  "unicode-segmentation",
  "unicode-width",
- "url",
 ]

 [[package]]
@@ -1332,10 +1332,10 @@ name = "helix-lsp-types"
 version = "0.95.1"
 dependencies = [
  "bitflags",
+ "percent-encoding",
  "serde",
  "serde_json",
  "serde_repr",
- "url",
 ]

 [[package]]
@@ -1468,7 +1468,6 @@ dependencies = [
  "tokio",
  "tokio-stream",
  "toml",
- "url",
 ]

 [[package]]
@@ -2622,7 +2621,6 @@ dependencies = [
  "form_urlencoded",
  "idna",
  "percent-encoding",
- "serde",
 ]

 [[package]]

@@ -42,6 +42,7 @@ tree-sitter = { version = "0.22" }
 nucleo = "0.5.0"
 slotmap = "1.0.7"
 thiserror = "2.0"
+percent-encoding = "2.3"

 [workspace.package]
 version = "24.7.0"

@@ -40,7 +40,7 @@ bitflags = "2.6"
 ahash = "0.8.11"
 hashbrown = { version = "0.14.5", features = ["raw"] }
 dunce = "1.0"
-url = "2.5.4"
+percent-encoding.workspace = true

 log = "0.4"
 anyhow = "1.0"

@@ -1,6 +1,7 @@
 use std::{
     fmt,
     path::{Path, PathBuf},
+    str::FromStr,
     sync::Arc,
 };

@@ -16,14 +17,6 @@ pub enum Uri {
 }

 impl Uri {
-    // This clippy allow mirrors url::Url::from_file_path
-    #[allow(clippy::result_unit_err)]
-    pub fn to_url(&self) -> Result<url::Url, ()> {
-        match self {
-            Uri::File(path) => url::Url::from_file_path(path),
-        }
-    }
-
     pub fn as_path(&self) -> Option<&Path> {
         match self {
             Self::File(path) => Some(path),
@@ -45,81 +38,96 @@ impl fmt::Display for Uri {
     }
 }

-#[derive(Debug)]
-pub struct UrlConversionError {
-    source: url::Url,
-    kind: UrlConversionErrorKind,
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct UriParseError {
+    source: String,
+    kind: UriParseErrorKind,
 }

-#[derive(Debug)]
-pub enum UrlConversionErrorKind {
-    UnsupportedScheme,
-    UnableToConvert,
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum UriParseErrorKind {
+    UnsupportedScheme(String),
+    MalformedUri,
 }

-impl fmt::Display for UrlConversionError {
+impl fmt::Display for UriParseError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self.kind {
-            UrlConversionErrorKind::UnsupportedScheme => {
+        match &self.kind {
+            UriParseErrorKind::UnsupportedScheme(scheme) => {
+                write!(f, "unsupported scheme '{scheme}' in URI {}", self.source)
+            }
+            UriParseErrorKind::MalformedUri => {
                 write!(
                     f,
-                    "unsupported scheme '{}' in URL {}",
-                    self.source.scheme(),
+                    "unable to convert malformed URI to file path: {}",
                     self.source
                 )
             }
-            UrlConversionErrorKind::UnableToConvert => {
-                write!(f, "unable to convert URL to file path: {}", self.source)
-            }
         }
     }
 }

-impl std::error::Error for UrlConversionError {}
+impl std::error::Error for UriParseError {}

-fn convert_url_to_uri(url: &url::Url) -> Result<Uri, UrlConversionErrorKind> {
-    if url.scheme() == "file" {
-        url.to_file_path()
-            .map(|path| Uri::File(helix_stdx::path::normalize(path).into()))
-            .map_err(|_| UrlConversionErrorKind::UnableToConvert)
-    } else {
-        Err(UrlConversionErrorKind::UnsupportedScheme)
-    }
-}
+impl FromStr for Uri {
+    type Err = UriParseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        use std::ffi::OsStr;
+        #[cfg(any(unix, target_os = "redox"))]
+        use std::os::unix::prelude::OsStrExt;
+        #[cfg(target_os = "wasi")]
+        use std::os::wasi::prelude::OsStrExt;
+
+        let Some((scheme, rest)) = s.split_once("://") else {
+            return Err(Self::Err {
+                source: s.to_string(),
+                kind: UriParseErrorKind::MalformedUri,
+            });
+        };
+
+        if scheme != "file" {
+            return Err(Self::Err {
+                source: s.to_string(),
+                kind: UriParseErrorKind::UnsupportedScheme(scheme.to_string()),
+            });
+        }
+
+        // Assert there is no query or fragment in the URI.
+        if s.find(['?', '#']).is_some() {
+            return Err(Self::Err {
+                source: s.to_string(),
+                kind: UriParseErrorKind::MalformedUri,
+            });
+        }
+
+        let mut bytes = Vec::new();
+        bytes.extend(percent_encoding::percent_decode(rest.as_bytes()));
+        Ok(PathBuf::from(OsStr::from_bytes(&bytes)).into())
+    }
+}

-impl TryFrom<url::Url> for Uri {
-    type Error = UrlConversionError;
+impl TryFrom<&str> for Uri {
+    type Error = UriParseError;

-    fn try_from(url: url::Url) -> Result<Self, Self::Error> {
-        convert_url_to_uri(&url).map_err(|kind| Self::Error { source: url, kind })
-    }
-}
-
-impl TryFrom<&url::Url> for Uri {
-    type Error = UrlConversionError;
-
-    fn try_from(url: &url::Url) -> Result<Self, Self::Error> {
-        convert_url_to_uri(url).map_err(|kind| Self::Error {
-            source: url.clone(),
-            kind,
-        })
+    fn try_from(s: &str) -> Result<Self, Self::Error> {
+        s.parse()
     }
 }

 #[cfg(test)]
 mod test {
     use super::*;
-    use url::Url;

     #[test]
     fn unknown_scheme() {
-        let url = Url::parse("csharp:/metadata/foo/bar/Baz.cs").unwrap();
-        assert!(matches!(
-            Uri::try_from(url),
-            Err(UrlConversionError {
-                kind: UrlConversionErrorKind::UnsupportedScheme,
-                ..
-            })
-        ));
+        let uri = "csharp://metadata/foo/barBaz.cs";
+        assert_eq!(
            uri.parse::<Uri>(),
            Err(UriParseError {
                source: uri.to_string(),
                kind: UriParseErrorKind::UnsupportedScheme("csharp".to_string()),
            })
        );
     }
 }

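For illustration only (this sketch is not part of the diff): a minimal example of how the new `FromStr` parser behaves, written as a test that could sit next to the existing `unknown_scheme` test in the same module. The concrete paths are illustrative.

// Sketch only: exercises the `FromStr` impl for `Uri` shown above.
// Guarded to Unix-like targets because the parser relies on `OsStrExt::from_bytes`.
#[cfg(any(unix, target_os = "redox"))]
#[test]
fn parse_file_uri_sketch() {
    use std::path::Path;

    // A file:// URI is percent-decoded into a plain path.
    let uri: Uri = "file:///tmp/some%20file.rs".parse().unwrap();
    assert_eq!(uri.as_path(), Some(Path::new("/tmp/some file.rs")));

    // Any other scheme is rejected with `UnsupportedScheme`.
    assert_eq!(
        "https://example.com/a.rs".parse::<Uri>(),
        Err(UriParseError {
            source: "https://example.com/a.rs".to_string(),
            kind: UriParseErrorKind::UnsupportedScheme("https".to_string()),
        })
    );

    // A string without "://" (or one carrying a query/fragment) is malformed.
    assert!(matches!(
        "not-a-uri".parse::<Uri>(),
        Err(UriParseError {
            kind: UriParseErrorKind::MalformedUri,
            ..
        })
    ));
}
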
@@ -25,7 +25,7 @@ bitflags = "2.6.0"
 serde = { version = "1.0.216", features = ["derive"] }
 serde_json = "1.0.133"
 serde_repr = "0.1"
-url = {version = "2.5.4", features = ["serde"]}
+percent-encoding.workspace = true

 [features]
 default = []

@@ -1,3 +1,5 @@
 # Helix's `lsp-types`

-This is a fork of the [`lsp-types`](https://crates.io/crates/lsp-types) crate ([`gluon-lang/lsp-types`](https://github.com/gluon-lang/lsp-types)) taken at version v0.95.1 (commit [3e6daee](https://github.com/gluon-lang/lsp-types/commit/3e6daee771d14db4094a554b8d03e29c310dfcbe)). This fork focuses usability improvements that make the types easier to work with for the Helix codebase. For example the URL type - the `uri` crate at this version of `lsp-types` - will be replaced with a wrapper around a string.
+This is a fork of the [`lsp-types`](https://crates.io/crates/lsp-types) crate ([`gluon-lang/lsp-types`](https://github.com/gluon-lang/lsp-types)) taken at version v0.95.1 (commit [3e6daee](https://github.com/gluon-lang/lsp-types/commit/3e6daee771d14db4094a554b8d03e29c310dfcbe)). This fork focuses on usability improvements that make the types easier to work with for the Helix codebase.
+
+The URL type has been replaced with a newtype wrapper of a `String`. The `lsp-types` crate at the forked version used [`url::Url`](https://docs.rs/url/2.5.0/url/struct.Url.html) which provides conveniences for using URLs according to [the WHATWG URL spec](https://url.spec.whatwg.org). Helix supports a subset of valid URLs, namely the `file://` scheme, so a wrapper around a normal `String` is sufficient. Plus the LSP spec requires URLs to be in [RFC3986](https://tools.ietf.org/html/rfc3986) format instead.

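For illustration only (not part of the diff): a rough sketch of how this string-backed `Url` behaves in use, based on the API added in the lib.rs hunk that follows. The file paths are illustrative, and the import path assumes the fork's crate name; inside Helix the type is reached as `helix_lsp::lsp::Url`.

// Sketch only, Unix-style paths; `Url` is the newtype defined in the hunk below.
use helix_lsp_types::Url;

fn url_wrapper_sketch() {
    // `from_file_path` joins percent-encoded path components onto the file:// scheme.
    let url = Url::from_file_path("/home/user/projects/helix/README.md");
    assert_eq!(url.as_str(), "file:///home/user/projects/helix/README.md");

    // The directory form always ends with a trailing slash.
    let dir = Url::from_directory_path("/home/user/projects/helix");
    assert_eq!(dir.as_str(), "file:///home/user/projects/helix/");

    // `From<&str>` wraps the string as-is: no WHATWG parsing or validation happens.
    let owned: String = Url::from("file:///tmp/main.rs").into_string();
    assert_eq!(owned, "file:///tmp/main.rs");
}
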
@@ -3,27 +3,151 @@
 Language Server Protocol types for Rust.

 Based on: <https://microsoft.github.io/language-server-protocol/specification>

-This library uses the URL crate for parsing URIs. Note that there is
-some confusion on the meaning of URLs vs URIs:
-<http://stackoverflow.com/a/28865728/393898>. According to that
-information, on the classical sense of "URLs", "URLs" are a subset of
-URIs, But on the modern/new meaning of URLs, they are the same as
-URIs. The important take-away aspect is that the URL crate should be
-able to parse any URI, such as `urn:isbn:0451450523`.
-
 */
 #![allow(non_upper_case_globals)]
 #![forbid(unsafe_code)]

 use bitflags::bitflags;

-use std::{collections::HashMap, fmt::Debug};
+use std::{collections::HashMap, fmt::Debug, path::Path};

 use serde::{de, de::Error as Error_, Deserialize, Serialize};
 use serde_json::Value;
-pub use url::Url;
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
+pub struct Url(String);
+
+// <https://datatracker.ietf.org/doc/html/rfc3986#section-2.2>, also see
+// <https://github.com/microsoft/vscode-uri/blob/6dec22d7dcc6c63c30343d3a8d56050d0078cb6a/src/uri.ts#L454-L477>
+const RESERVED: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
+    // GEN_DELIMS
+    .add(b':')
+    .add(b'/')
+    .add(b'?')
+    .add(b'#')
+    .add(b'[')
+    .add(b']')
+    .add(b'@')
+    // SUB_DELIMS
+    .add(b'!')
+    .add(b'$')
+    .add(b'&')
+    .add(b'\'')
+    .add(b'(')
+    .add(b')')
+    .add(b'*')
+    .add(b'+')
+    .add(b',')
+    .add(b';')
+    .add(b'=');
+
+impl Url {
+    #[cfg(any(unix, target_os = "redox", target_os = "wasi"))]
+    pub fn from_file_path<P: AsRef<Path>>(path: P) -> Self {
+        #[cfg(any(unix, target_os = "redox"))]
+        use std::os::unix::prelude::OsStrExt;
+        #[cfg(target_os = "wasi")]
+        use std::os::wasi::prelude::OsStrExt;
+
+        let mut serialization = String::from("file://");
+        // skip the root component
+        for component in path.as_ref().components().skip(1) {
+            serialization.push('/');
+            serialization.extend(percent_encoding::percent_encode(
+                component.as_os_str().as_bytes(),
+                RESERVED,
+            ));
+        }
+        if &serialization == "file://" {
+            // An URL's path must not be empty.
+            serialization.push('/');
+        }
+        Self(serialization)
+    }
+
+    #[cfg(windows)]
+    pub fn from_file_path<P: AsRef<Path>>(path: P) -> Self {
+        from_file_path_windows(path.as_ref())
+    }
+
+    #[cfg_attr(not(windows), allow(dead_code))]
+    fn from_file_path_windows(path: &Path) -> Self {
+        use std::path::{Component, Prefix};
+
+        fn is_windows_drive_letter(segment: &str) -> bool {
+            segment.len() == 2
+                && (segment.as_bytes()[0] as char).is_ascii_alphabetic()
+                && matches!(segment.as_bytes()[1], b':' | b'|')
+        }
+
+        assert!(path.is_absolute());
+        let mut serialization = String::from("file://");
+        let mut components = path.components();
+        let host_start = serialization.len() + 1;
+
+        match components.next() {
+            Some(Component::Prefix(ref p)) => match p.kind() {
+                Prefix::Disk(letter) | Prefix::VerbatimDisk(letter) => {
+                    serialization.push('/');
+                    serialization.push(letter as char);
+                    serialization.push(':');
+                }
+                // TODO: Prefix::UNC | Prefix::VerbatimUNC
+                _ => todo!("support UNC drives"),
+            },
+            _ => unreachable!("absolute windows paths must start with a prefix"),
+        }
+
+        let mut path_only_has_prefix = true;
+        for component in components {
+            if component == Component::RootDir {
+                continue;
+            }
+
+            path_only_has_prefix = false;
+
+            serialization.push('/');
+            serialization.extend(percent_encoding::percent_encode(
+                component.as_os_str().as_encoded_bytes(),
+                RESERVED,
+            ));
+        }
+
+        if serialization.len() > host_start
+            && is_windows_drive_letter(&serialization[host_start..])
+            && path_only_has_prefix
+        {
+            serialization.push('/');
+        }
+
+        Self(serialization)
+    }
+
+    pub fn from_directory_path<P: AsRef<Path>>(path: P) -> Self {
+        let Self(mut serialization) = Self::from_file_path(path);
+        if !serialization.ends_with('/') {
+            serialization.push('/');
+        }
+        Self(serialization)
+    }
+
+    /// Returns the serialized representation of the URL as a `&str`
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+
+    /// Consumes the URL, converting into a `String`.
+    /// Note that the string is the serialized representation of the URL.
+    pub fn into_string(self) -> String {
+        self.0
+    }
+}
+
+impl From<&str> for Url {
+    fn from(value: &str) -> Self {
+        Self(value.to_string())
+    }
+}

 // Large enough to contain any enumeration name defined in this crate
 type PascalCaseBuf = [u8; 32];
@@ -2843,14 +2967,14 @@ mod tests {
        test_serialization(
            &WorkspaceEdit {
                changes: Some(
-                    vec![(Url::parse("file://test").unwrap(), vec![])]
+                    vec![(Url::from("file://test"), vec![])]
                        .into_iter()
                        .collect(),
                ),
                document_changes: None,
                ..Default::default()
            },
-            r#"{"changes":{"file://test/":[]}}"#,
+            r#"{"changes":{"file://test":[]}}"#,
        );
    }

@@ -32,14 +32,17 @@ use tokio::{
    },
 };

-fn workspace_for_uri(uri: lsp::Url) -> WorkspaceFolder {
+fn workspace_for_path(path: &Path) -> WorkspaceFolder {
+    let name = path
+        .iter()
+        .last()
+        .expect("workspace paths should be non-empty")
+        .to_string_lossy()
+        .to_string();
+
    lsp::WorkspaceFolder {
-        name: uri
-            .path_segments()
-            .and_then(|segments| segments.last())
-            .map(|basename| basename.to_string())
-            .unwrap_or_default(),
-        uri,
+        name,
+        uri: lsp::Url::from_directory_path(path),
    }
 }

@@ -55,7 +58,7 @@ pub struct Client {
    config: Option<Value>,
    root_path: std::path::PathBuf,
    root_uri: Option<lsp::Url>,
-    workspace_folders: Mutex<Vec<lsp::WorkspaceFolder>>,
+    workspace_folders: Mutex<Vec<PathBuf>>,
    initialize_notify: Arc<Notify>,
    /// workspace folders added while the server is still initializing
    req_timeout: u64,
@@ -80,16 +83,13 @@ impl Client {
            &workspace,
            workspace_is_cwd,
        );
-        let root_uri = root
-            .as_ref()
-            .and_then(|root| lsp::Url::from_file_path(root).ok());

-        if self.root_path == root.unwrap_or(workspace)
-            || root_uri.as_ref().map_or(false, |root_uri| {
+        if &self.root_path == root.as_ref().unwrap_or(&workspace)
+            || root.as_ref().is_some_and(|root| {
                self.workspace_folders
                    .lock()
                    .iter()
-                    .any(|workspace| &workspace.uri == root_uri)
+                    .any(|workspace| workspace == root)
            })
        {
            // workspace URI is already registered so we can use this client
@@ -113,15 +113,16 @@ impl Client {
        // wait and see if anyone ever runs into it.
        tokio::spawn(async move {
            client.initialize_notify.notified().await;
-            if let Some(workspace_folders_caps) = client
+            if let Some((workspace_folders_caps, root)) = client
                .capabilities()
                .workspace
                .as_ref()
                .and_then(|cap| cap.workspace_folders.as_ref())
                .filter(|cap| cap.supported.unwrap_or(false))
+                .zip(root)
            {
                client.add_workspace_folder(
-                    root_uri,
+                    root,
                    workspace_folders_caps.change_notifications.as_ref(),
                );
            }
@@ -129,16 +130,14 @@ impl Client {
            return true;
        };

-        if let Some(workspace_folders_caps) = capabilities
+        if let Some((workspace_folders_caps, root)) = capabilities
            .workspace
            .as_ref()
            .and_then(|cap| cap.workspace_folders.as_ref())
            .filter(|cap| cap.supported.unwrap_or(false))
+            .zip(root)
        {
-            self.add_workspace_folder(
-                root_uri,
-                workspace_folders_caps.change_notifications.as_ref(),
-            );
+            self.add_workspace_folder(root, workspace_folders_caps.change_notifications.as_ref());
            true
        } else {
            // the server doesn't support multi workspaces, we need a new client
@@ -148,29 +147,19 @@ impl Client {

    fn add_workspace_folder(
        &self,
-        root_uri: Option<lsp::Url>,
+        root: PathBuf,
        change_notifications: Option<&OneOf<bool, String>>,
    ) {
-        // root_uri is None just means that there isn't really any LSP workspace
-        // associated with this file. For servers that support multiple workspaces
-        // there is just one server so we can always just use that shared instance.
-        // No need to add a new workspace root here as there is no logical root for this file
-        // let the server deal with this
-        let Some(root_uri) = root_uri else {
-            return;
-        };
-
+        let workspace = workspace_for_path(&root);
        // server supports workspace folders, let's add the new root to the list
-        self.workspace_folders
-            .lock()
-            .push(workspace_for_uri(root_uri.clone()));
+        self.workspace_folders.lock().push(root);
        if Some(&OneOf::Left(false)) == change_notifications {
            // server specifically opted out of DidWorkspaceChange notifications
            // let's assume the server will request the workspace folders itself
            // and that we can therefore reuse the client (but are done now)
            return;
        }
-        tokio::spawn(self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new()));
+        tokio::spawn(self.did_change_workspace(vec![workspace], Vec::new()));
    }

    #[allow(clippy::type_complexity, clippy::too_many_arguments)]
@@ -179,8 +168,8 @@ impl Client {
        args: &[String],
        config: Option<Value>,
        server_environment: HashMap<String, String>,
-        root_path: PathBuf,
-        root_uri: Option<lsp::Url>,
+        root: Option<PathBuf>,
+        workspace: PathBuf,
        id: LanguageServerId,
        name: String,
        req_timeout: u64,
@@ -212,10 +201,11 @@ impl Client {
        let (server_rx, server_tx, initialize_notify) =
            Transport::start(reader, writer, stderr, id, name.clone());

-        let workspace_folders = root_uri
-            .clone()
-            .map(|root| vec![workspace_for_uri(root)])
-            .unwrap_or_default();
+        let workspace_folders = root.clone().into_iter().collect();
+        let root_uri = root.clone().map(lsp::Url::from_file_path);
+        // `root_uri` and `workspace_folder` can be empty in case there is no workspace
+        // `root_url` can not, use `workspace` as a fallback
+        let root_path = root.unwrap_or(workspace);

        let client = Self {
            id,
@@ -376,10 +366,12 @@ impl Client {
        self.config.as_ref()
    }

-    pub async fn workspace_folders(
-        &self,
-    ) -> parking_lot::MutexGuard<'_, Vec<lsp::WorkspaceFolder>> {
-        self.workspace_folders.lock()
+    pub async fn workspace_folders(&self) -> Vec<lsp::WorkspaceFolder> {
+        self.workspace_folders
+            .lock()
+            .iter()
+            .map(|path| workspace_for_path(path))
+            .collect()
    }

    /// Execute a RPC request on the language server.
@@ -526,7 +518,7 @@ impl Client {
        #[allow(deprecated)]
        let params = lsp::InitializeParams {
            process_id: Some(std::process::id()),
-            workspace_folders: Some(self.workspace_folders.lock().clone()),
+            workspace_folders: Some(self.workspace_folders().await),
            // root_path is obsolete, but some clients like pyright still use it so we specify both.
            // clients will prefer _uri if possible
            root_path: self.root_path.to_str().map(|path| path.to_owned()),
@@ -748,11 +740,11 @@ impl Client {
            } else {
                Url::from_file_path(path)
            };
-            Some(url.ok()?.to_string())
+            url.into_string()
        };
        let files = vec![lsp::FileRename {
-            old_uri: url_from_path(old_path)?,
-            new_uri: url_from_path(new_path)?,
+            old_uri: url_from_path(old_path),
+            new_uri: url_from_path(new_path),
        }];
        let request = self.call_with_timeout::<lsp::request::WillRenameFiles>(
            &lsp::RenameFilesParams { files },
@@ -782,12 +774,12 @@ impl Client {
            } else {
                Url::from_file_path(path)
            };
-            Some(url.ok()?.to_string())
+            url.into_string()
        };

        let files = vec![lsp::FileRename {
-            old_uri: url_from_path(old_path)?,
-            new_uri: url_from_path(new_path)?,
+            old_uri: url_from_path(old_path),
+            new_uri: url_from_path(new_path),
        }];
        Some(self.notify::<lsp::notification::DidRenameFiles>(lsp::RenameFilesParams { files }))
    }

@@ -106,9 +106,7 @@ impl Handler {
                log::warn!("LSP client was dropped: {id}");
                return false;
            };
-            let Ok(uri) = lsp::Url::from_file_path(&path) else {
-                return true;
-            };
+            let uri = lsp::Url::from_file_path(&path);
            log::debug!(
                "Sending didChangeWatchedFiles notification to client '{}'",
                client.name()

@@ -853,12 +853,8 @@ fn start_client(
        workspace_is_cwd,
    );

-    // `root_uri` and `workspace_folder` can be empty in case there is no workspace
-    // `root_url` can not, use `workspace` as a fallback
-    let root_path = root.clone().unwrap_or_else(|| workspace.clone());
-    let root_uri = root.and_then(|root| lsp::Url::from_file_path(root).ok());
-
    if let Some(globset) = &ls_config.required_root_patterns {
+        let root_path = root.as_ref().unwrap_or(&workspace);
        if !root_path
            .read_dir()?
            .flatten()
@@ -874,8 +870,8 @@ fn start_client(
        &ls_config.args,
        ls_config.config.clone(),
        ls_config.environment.clone(),
-        root_path,
-        root_uri,
+        root,
+        workspace,
        id,
        name,
        ls_config.timeout,

@@ -744,7 +744,7 @@ impl Application {
                    }
                }
                Notification::PublishDiagnostics(mut params) => {
-                    let uri = match helix_core::Uri::try_from(params.uri) {
+                    let uri = match helix_core::Uri::try_from(params.uri.as_str()) {
                        Ok(uri) => uri,
                        Err(err) => {
                            log::error!("{err}");
@@ -1143,7 +1143,8 @@ impl Application {
                    ..
                } = params
                {
-                    self.jobs.callback(crate::open_external_url_callback(uri));
+                    self.jobs
+                        .callback(crate::open_external_url_callback(uri.as_str()));
                    return lsp::ShowDocumentResult { success: true };
                };

@@ -1154,7 +1155,7 @@ impl Application {
                    ..
                } = params;

-                let uri = match helix_core::Uri::try_from(uri) {
+                let uri = match helix_core::Uri::try_from(uri.as_str()) {
                    Ok(uri) => uri,
                    Err(err) => {
                        log::error!("{err}");

@@ -1347,7 +1347,9 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
        .unwrap_or_default();

    if url.scheme() != "file" {
-        return cx.jobs.callback(crate::open_external_url_callback(url));
+        return cx
+            .jobs
+            .callback(crate::open_external_url_callback(url.as_str()));
    }

    let content_type = std::fs::File::open(url.path()).and_then(|file| {
@@ -1360,9 +1362,9 @@ fn open_url(cx: &mut Context, url: Url, action: Action) {
    // we attempt to open binary files - files that can't be open in helix - using external
    // program as well, e.g. pdf files or images
    match content_type {
-        Ok(content_inspector::ContentType::BINARY) => {
-            cx.jobs.callback(crate::open_external_url_callback(url))
-        }
+        Ok(content_inspector::ContentType::BINARY) => cx
+            .jobs
+            .callback(crate::open_external_url_callback(url.as_str())),
        Ok(_) | Err(_) => {
            let path = &rel_path.join(url.path());
            if path.is_dir() {

@@ -69,7 +69,7 @@ struct Location {
 }

 fn lsp_location_to_location(location: lsp::Location) -> Option<Location> {
-    let uri = match location.uri.try_into() {
+    let uri = match location.uri.as_str().try_into() {
        Ok(uri) => uri,
        Err(err) => {
            log::warn!("discarding invalid or unsupported URI: {err}");
@@ -456,7 +456,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
            .unwrap_or_default()
            .into_iter()
            .filter_map(|symbol| {
-                let uri = match Uri::try_from(&symbol.location.uri) {
+                let uri = match Uri::try_from(symbol.location.uri.as_str()) {
                    Ok(uri) => uri,
                    Err(err) => {
                        log::warn!("discarding symbol with invalid URI: {err}");
@@ -510,7 +510,7 @@ pub fn workspace_symbol_picker(cx: &mut Context) {
                    .to_string()
                    .into()
            } else {
-                item.symbol.location.uri.to_string().into()
+                item.symbol.location.uri.as_str().into()
            }
        }),
    ];

@@ -18,7 +18,6 @@ use futures_util::Future;
 mod handlers;

 use ignore::DirEntry;
-use url::Url;

 #[cfg(windows)]
 fn true_color() -> bool {
@@ -70,10 +69,10 @@ fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> b
 }

 /// Opens URL in external program.
-fn open_external_url_callback(
-    url: Url,
+fn open_external_url_callback<U: AsRef<std::ffi::OsStr>>(
+    url: U,
 ) -> impl Future<Output = Result<job::Callback, anyhow::Error>> + Send + 'static {
-    let commands = open::commands(url.as_str());
+    let commands = open::commands(url);
    async {
        for cmd in commands {
            let mut command = tokio::process::Command::new(cmd.get_program());

@@ -30,9 +30,7 @@ crossterm = { version = "0.28", optional = true }

 tempfile = "3.14"

-# Conversion traits
 once_cell = "1.20"
-url = "2.5.4"

 arc-swap = { version = "1.7.1" }

@@ -642,7 +642,6 @@ where
 }

 use helix_lsp::{lsp, Client, LanguageServerId, LanguageServerName};
-use url::Url;

 impl Document {
    pub fn from(
@@ -1822,8 +1821,8 @@ impl Document {
    }

    /// File path as a URL.
-    pub fn url(&self) -> Option<Url> {
-        Url::from_file_path(self.path()?).ok()
+    pub fn url(&self) -> Option<lsp::Url> {
+        self.path().map(lsp::Url::from_file_path)
    }

    pub fn uri(&self) -> Option<helix_core::Uri> {
@@ -1909,7 +1908,7 @@ impl Document {
    pub fn lsp_diagnostic_to_diagnostic(
        text: &Rope,
        language_config: Option<&LanguageConfiguration>,
-        diagnostic: &helix_lsp::lsp::Diagnostic,
+        diagnostic: &lsp::Diagnostic,
        language_server_id: LanguageServerId,
        offset_encoding: helix_lsp::OffsetEncoding,
    ) -> Option<Diagnostic> {

@@ -57,7 +57,7 @@ pub struct ApplyEditError {
 pub enum ApplyEditErrorKind {
    DocumentChanged,
    FileNotFound,
-    InvalidUrl(helix_core::uri::UrlConversionError),
+    InvalidUrl(helix_core::uri::UriParseError),
    IoError(std::io::Error),
    // TODO: check edits before applying and propagate failure
    // InvalidEdit,
@@ -69,8 +69,8 @@ impl From<std::io::Error> for ApplyEditErrorKind {
    }
 }

-impl From<helix_core::uri::UrlConversionError> for ApplyEditErrorKind {
-    fn from(err: helix_core::uri::UrlConversionError) -> Self {
+impl From<helix_core::uri::UriParseError> for ApplyEditErrorKind {
+    fn from(err: helix_core::uri::UriParseError) -> Self {
        ApplyEditErrorKind::InvalidUrl(err)
    }
 }
@@ -94,7 +94,7 @@ impl Editor {
        text_edits: Vec<lsp::TextEdit>,
        offset_encoding: OffsetEncoding,
    ) -> Result<(), ApplyEditErrorKind> {
-        let uri = match Uri::try_from(url) {
+        let uri = match Uri::try_from(url.as_str()) {
            Ok(uri) => uri,
            Err(err) => {
                log::error!("{err}");
@@ -242,7 +242,7 @@ impl Editor {
        // may no longer be valid.
        match op {
            ResourceOp::Create(op) => {
-                let uri = Uri::try_from(&op.uri)?;
+                let uri = Uri::try_from(op.uri.as_str())?;
                let path = uri.as_path().expect("URIs are valid paths");
                let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
                    !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
@@ -262,7 +262,7 @@ impl Editor {
                }
            }
            ResourceOp::Delete(op) => {
-                let uri = Uri::try_from(&op.uri)?;
+                let uri = Uri::try_from(op.uri.as_str())?;
                let path = uri.as_path().expect("URIs are valid paths");
                if path.is_dir() {
                    let recursive = op
@@ -284,9 +284,9 @@ impl Editor {
                }
            }
            ResourceOp::Rename(op) => {
-                let from_uri = Uri::try_from(&op.old_uri)?;
+                let from_uri = Uri::try_from(op.old_uri.as_str())?;
                let from = from_uri.as_path().expect("URIs are valid paths");
-                let to_uri = Uri::try_from(&op.new_uri)?;
+                let to_uri = Uri::try_from(op.new_uri.as_str())?;
                let to = to_uri.as_path().expect("URIs are valid paths");
                let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
                    !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)