
thanks clippy

Author: Sebastian Thiel
Date:   2025-02-21 09:40:55 +01:00
Parent: 2efce72ada
Commit: 8e96ed37db

131 changed files with 272 additions and 268 deletions


@@ -325,6 +325,7 @@ bin-dir = "gitoxide-max-pure-v{ version }-{ target }/{ bin }{ binary-ext }"
pedantic = { level = "warn", priority = -1 }
#
# Reviewed and allowed lints
needless_continue = "allow"
enum_glob_use = "allow" # x97
missing_errors_doc = "allow" # x1792
missing_panics_doc = "allow" # x447
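
The hunk above extends the workspace lint table: `pedantic` is enabled at `warn` with `priority = -1`, so the named `allow` entries that follow override the group, and this commit adds `needless_continue` to that reviewed-and-allowed list. A hypothetical crate-level equivalent in attribute form, only to illustrate the precedence (not part of the commit):

```rust
// Group baseline first, specific overrides after — mirroring the
// Cargo.toml lint-table semantics of `priority = -1`.
#![warn(clippy::pedantic)]
#![allow(clippy::needless_continue)]
#![allow(clippy::enum_glob_use)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::missing_panics_doc)]

fn main() {}
```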


@@ -180,7 +180,7 @@ where
break;
}
Err(err) => return Err(err.into()),
};
}
}
if let Some(tx) = tx_tree_id {
tx.send(chunk).ok();
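
This is the fix that dominates the commit: a `match` or `if` used as a statement had a stray `;` after its closing brace, and the semicolon is removed — presumably what clippy's (then new) `unnecessary_semicolon` lint flags; the lint name is my assumption. A minimal sketch of the pattern:

```rust
// Before: the closing brace of the statement-position `match` was
// written as `};` — the semicolon is redundant and is dropped.
fn classify(n: i32) -> &'static str {
    if n < 0 {
        return "negative";
    } // previously `};`
    match n {
        0 => "zero",
        _ => "positive",
    }
}

fn main() {
    assert_eq!(classify(0), "zero");
    assert_eq!(classify(-7), "negative");
}
```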


@@ -92,16 +92,16 @@ mod serde_only {
});
if f.link().is_some() {
names.push("link");
};
}
if f.resolve_undo().is_some() {
names.push("resolve-undo (REUC)");
};
}
if f.untracked().is_some() {
names.push("untracked (UNTR)");
};
}
if f.fs_monitor().is_some() {
names.push("fs-monitor (FSMN)");
};
}
if f.had_offset_table() {
names.push("offset-table (IEOT)");
}


@@ -86,7 +86,7 @@ pub use discover::discover;
pub fn env(mut out: impl std::io::Write, format: OutputFormat) -> anyhow::Result<()> {
if format != OutputFormat::Human {
bail!("JSON output isn't supported");
};
}
let width = 15;
writeln!(


@@ -161,7 +161,7 @@ pub fn pack_or_pack_index(
)
})?;
if !object_path.as_ref().map_or(true, |p| p.as_ref().is_dir()) {
if !object_path.as_ref().is_none_or(|p| p.as_ref().is_dir()) {
return Err(anyhow!(
"The object directory at '{}' is inaccessible",
object_path
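
Here `map_or(true, ..)` on an `Option` becomes `is_none_or(..)`, stabilized in Rust 1.82; the rewrite is behavior-preserving and reads as the predicate it is. The clippy lint driving it is, to my understanding, `unnecessary_map_or`. A small equivalence check:

```rust
// Both expressions are true when the option is `None` or the
// predicate holds for its payload.
fn main() {
    let object_path: Option<std::path::PathBuf> = None;
    let old = !object_path.as_ref().map_or(true, |p| p.is_dir());
    let new = !object_path.as_ref().is_none_or(|p| p.is_dir());
    assert_eq!(old, new);
}
```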


@@ -114,7 +114,7 @@ pub fn from_pack(
OutputFormat::Human => drop(human_output(out, res)),
#[cfg(feature = "serde")]
OutputFormat::Json => serde_json::to_writer_pretty(out, &res)?,
};
}
Ok(())
}


@@ -306,6 +306,6 @@ fn receive_pack_blocking(
OutputFormat::Json => {
serde_json::to_writer_pretty(&mut out, &JsonOutcome::from_outcome_and_refs(outcome, refs))?;
}
};
}
Ok(())
}


@@ -177,7 +177,7 @@ where
#[cfg(feature = "serde")]
Some(OutputFormat::Json) => serde_json::to_writer_pretty(out, &multi_index.index_names().iter().zip(res.pack_traverse_statistics).collect::<Vec<_>>())?,
_ => {}
};
}
return Ok(())
},
_ => return Err(anyhow!(
@@ -195,7 +195,7 @@ where
#[cfg(feature = "serde")]
Some(OutputFormat::Json) => serde_json::to_writer_pretty(out, stats)?,
_ => {}
};
}
}
Ok(())
}


@@ -447,7 +447,7 @@ pub fn update(
break;
}
Err(err) => return Err(err.into()),
};
}
}
db.send_last_chunk();
let saw_new_commits = !commits.is_empty();


@@ -213,7 +213,7 @@ pub(crate) mod function {
continue;
}
}
};
}
let is_ignored = matches!(entry.status, gix::dir::entry::Status::Ignored(_));
let entry_path = gix::path::from_bstr(entry.rela_path);


@@ -114,7 +114,7 @@ pub(crate) mod function {
&mut err,
)?;
}
};
}
if let Some(gix::worktree::state::checkout::Outcome { collisions, errors, .. }) = outcome {
if !(collisions.is_empty() && errors.is_empty()) {


@@ -84,7 +84,7 @@ impl Filter {
}
(None, _) => {}
(Some(_), None) => return false,
};
}
true
}
}


@@ -99,7 +99,7 @@ fn write_changes(
writeln!(out, " {:o} -> {:o}", source_entry_mode.0, entry_mode.0)?;
}
}
};
}
}
Ok(())


@@ -264,7 +264,7 @@ pub(crate) mod function {
gix::remote::fetch::refmap::Source::Ref(r) => {
crate::repository::remote::refs::print_ref(&mut out, r)?;
}
};
}
let mode_and_type = update.type_change.map_or_else(
|| format!("{}", update.mode),
|type_change| {


@@ -184,7 +184,7 @@ pub(crate) mod function {
.unwrap_or_default()
},
)
.map_or(true, |m| m.is_excluded());
.is_none_or(|m| m.is_excluded());
let entry_is_submodule = entry.mode.is_submodule();
if entry_is_excluded && (!entry_is_submodule || !recurse_submodules) {


@@ -105,7 +105,7 @@ mod refs_impl {
out,
&map.remote_refs.into_iter().map(JsonRef::from).collect::<Vec<_>>(),
)?,
};
}
Ok(())
}
}


@@ -28,7 +28,7 @@ fn print_sm(sm: Submodule<'_>, dirty_suffix: Option<&str>, out: &mut impl std::i
let mut sm_repo = sm.open()?;
if let Some(repo) = sm_repo.as_mut() {
repo.object_cache_size_if_unset(4 * 1024 * 1024);
};
}
writeln!(
out,
" {is_active} {path} {config} head:{head_id} index:{index_id} ({worktree}) [{url}]",


@@ -237,12 +237,7 @@ impl Outcome {
}
fn reduce_and_check_if_done(&mut self, attr: AttributeId) -> bool {
if self.selected.is_empty()
|| self
.selected
.iter()
.any(|(_name, id)| id.map_or(false, |id| id == attr))
{
if self.selected.is_empty() || self.selected.iter().any(|(_name, id)| *id == Some(attr)) {
*self.remaining.as_mut().expect("initialized") -= 1;
}
self.is_done()
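
The same `map_or` family: `id.map_or(false, |id| id == attr)` is just an equality test against `Some(attr)`, and the direct comparison also lets the multi-line condition collapse onto one line. A sketch of the equivalence, with a plain `u32` standing in for the real `AttributeId`:

```rust
fn main() {
    let attr = 3u32;
    let ids = [None, Some(2), Some(3)];
    // Closure form vs. direct comparison — identical results.
    let old = ids.iter().any(|id| id.map_or(false, |id| id == attr));
    let new = ids.iter().any(|id| *id == Some(attr));
    assert_eq!(old, new);
    assert!(new);
}
```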


@@ -684,7 +684,7 @@ fn collect_parents(
parent_ids.push((id, parent_commit_time));
}
}
};
}
Ok(parent_ids)
}


@@ -85,7 +85,7 @@ impl File {
x => {
return Err(Error::UnsupportedVersion(x));
}
};
}
ofs += 1;
let object_hash = gix_hash::Kind::try_from(data[ofs]).map_err(Error::UnsupportedHashVersion)?;


@@ -122,8 +122,8 @@ impl<'a> Path<'a> {
/// the `home_for_user` function to be provided.
/// The interpolation uses `getpwnam` sys call and is therefore not available on windows.
/// - `%(prefix)/` is expanded to the location where `gitoxide` is installed.
/// This location is not known at compile time and therefore need to be
/// optionally provided by the caller through `git_install_dir`.
/// This location is not known at compile time and therefore need to be
/// optionally provided by the caller through `git_install_dir`.
///
/// Any other, non-empty path value is returned unchanged and error is returned in case of an empty path value or if required input
/// wasn't provided.
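
The doc-comment pairs above (and several like them below) look like duplicated lines, but that is an extraction artifact: leading whitespace was lost. In the actual commit only the indentation of continuation lines inside doc-comment bullet lists changes — over-indented continuations are what clippy's doc lints complain about (`doc_overindented_list_items`, if I have the name right). Illustratively:

```rust
/// Interpolates an installation-relative path.
///
/// * `git_install_dir` is the installation prefix.
///   Continuation lines align directly under the bullet text;
///   deeper indentation is what these hunks remove.
pub fn prefix_path(git_install_dir: &str) -> String {
    format!("{git_install_dir}/etc/gitconfig")
}

fn main() {
    assert_eq!(prefix_path("/usr"), "/usr/etc/gitconfig");
}
```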


@@ -32,7 +32,7 @@ impl File<'static> {
/// which later overwrite portions of the included file, which seems unusual as these would be related to `includes`.
/// We can fix this by 'splitting' the include section if needed so the included sections are put into the right place.
/// - `hasconfig:remote.*.url` will not prevent itself to include files with `[remote "name"]\nurl = x` values, but it also
/// won't match them, i.e. one cannot include something that will cause the condition to match or to always be true.
/// won't match them, i.e. one cannot include something that will cause the condition to match or to always be true.
pub fn resolve_includes(&mut self, options: init::Options<'_>) -> Result<(), Error> {
if options.includes.max_depth == 0 {
return Ok(());


@@ -116,7 +116,7 @@ impl File<'static> {
return Err(Error::Io { source: err, path });
}
}
};
}
meta.path = Some(path);
let config = Self::from_bytes_owned(buf, meta, options)?;


@@ -151,7 +151,7 @@ impl Body<'_> {
value_range.end = i;
} else {
value_range.start = i;
};
}
}
_ => (),
}


@@ -305,7 +305,7 @@ fn from_bytes<'a, 'b>(
events: std::mem::take(&mut events),
});
}
};
}
header = match convert(Event::SectionHeader(next_header)) {
Event::SectionHeader(h) => h,
_ => unreachable!("BUG: convert must not change the event type, just the lifetime"),


@@ -12,7 +12,7 @@ const SH: &str = "/bin/sh";
fn empty() {
let prog = Program::from_custom_definition("");
let git = *GIT;
assert!(matches!(&prog.kind, Kind::ExternalName { name_and_args } if name_and_args == ""));
assert!(matches!(&prog.kind, Kind::ExternalName { name_and_args } if name_and_args.is_empty()));
assert_eq!(
format!("{:?}", prog.to_command(&helper::Action::Store("egal".into()))),
format!(r#""{git}" "credential-" "store""#),


@@ -77,7 +77,7 @@ pub(crate) mod function {
let mut offset_in_seconds = hours * 3600 + minutes * 60;
if sign == Sign::Minus {
offset_in_seconds *= -1;
};
}
let time = Time {
seconds,
offset: offset_in_seconds,


@@ -395,7 +395,7 @@ impl Pipeline {
&& header.size > self.options.large_file_threshold_bytes
{
is_binary = Some(true);
};
}
let data = if is_binary == Some(true) {
Data::Binary { size: header.size }
} else {
@@ -461,27 +461,25 @@ impl Pipeline {
out.clear();
run_cmd(rela_path, cmd, out)?;
}
None => {
match res {
ToWorktreeOutcome::Unchanged(_) => {}
ToWorktreeOutcome::Buffer(src) => {
out.clear();
out.try_reserve(src.len())?;
out.extend_from_slice(src);
}
ToWorktreeOutcome::Process(MaybeDelayed::Immediate(mut stream)) => {
std::io::copy(&mut stream, out).map_err(|err| {
convert_to_diffable::Error::StreamCopy {
rela_path: rela_path.to_owned(),
source: err,
}
})?;
}
ToWorktreeOutcome::Process(MaybeDelayed::Delayed(_)) => {
unreachable!("we prohibit this")
}
};
}
None => match res {
ToWorktreeOutcome::Unchanged(_) => {}
ToWorktreeOutcome::Buffer(src) => {
out.clear();
out.try_reserve(src.len())?;
out.extend_from_slice(src);
}
ToWorktreeOutcome::Process(MaybeDelayed::Immediate(mut stream)) => {
std::io::copy(&mut stream, out).map_err(|err| {
convert_to_diffable::Error::StreamCopy {
rela_path: rela_path.to_owned(),
source: err,
}
})?;
}
ToWorktreeOutcome::Process(MaybeDelayed::Delayed(_)) => {
unreachable!("we prohibit this")
}
},
}
}


@@ -362,7 +362,7 @@ impl Platform {
/// * `mode` is the kind of object (only blobs and links are allowed)
/// * `rela_path` is the relative path as seen from the (work)tree root.
/// * `kind` identifies the side of the diff this resource will be used for.
/// A diff needs both `OldOrSource` *and* `NewOrDestination`.
/// A diff needs both `OldOrSource` *and* `NewOrDestination`.
/// * `objects` provides access to the object database in case the resource can't be read from a worktree.
///
/// Note that it's assumed that either `id + mode (` or `rela_path` can serve as unique identifier for the resource,


@@ -166,9 +166,9 @@ impl<T: Change> Tracker<T> {
/// We may refuse the push if that information isn't needed for what we have to track.
pub fn try_push_change(&mut self, change: T, location: &BStr) -> Option<T> {
let change_kind = change.kind();
if let (None, ChangeKind::Modification { .. }) = (self.rewrites.copies, change_kind) {
if let (None, ChangeKind::Modification) = (self.rewrites.copies, change_kind) {
return Some(change);
};
}
let entry_kind = change.entry_mode().kind();
if entry_kind == EntryKind::Commit {
@@ -179,7 +179,7 @@ impl<T: Change> Tracker<T> {
.filter(|_| matches!(change_kind, ChangeKind::Addition | ChangeKind::Deletion));
if let (None, EntryKind::Tree) = (relation, entry_kind) {
return Some(change);
};
}
let start = self.path_backing.len();
self.path_backing.extend_from_slice(location);
@@ -514,7 +514,7 @@ impl<T: Change> Tracker<T> {
dst_items.push((item.change.id().to_owned(), item));
}
_ => continue,
};
}
}
for ((src_id, src_item), (dst_id, dst_item)) in src_items.into_iter().zip(dst_items) {
@@ -758,7 +758,7 @@ fn find_match<'a, T: Change>(
Operation::SourceOrDestinationIsBinary => {
// TODO: figure out if git does more here
}
};
}
}
}
Ok(None)
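
`ChangeKind::Modification { .. }` matches a unit variant through a struct pattern — legal, but the `{ .. }` adds nothing, so it is dropped (the pathspec test below gets the same treatment for `Error::MissingClosingParenthesis`). This looks like clippy's `unneeded_struct_pattern` lint; the name is my assumption. Sketch:

```rust
enum ChangeKind {
    Addition,
    Deletion,
    Modification,
}

// `matches!(k, ChangeKind::Modification { .. })` compiles too,
// but the plain path pattern is clearer.
fn is_modification(k: &ChangeKind) -> bool {
    matches!(k, ChangeKind::Modification)
}

fn main() {
    assert!(is_modification(&ChangeKind::Modification));
    assert!(!is_modification(&ChangeKind::Addition));
    assert!(!is_modification(&ChangeKind::Deletion));
}
```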


@@ -70,7 +70,7 @@ where
}
Some((None, None, _)) => unreachable!("BUG: it makes no sense to fill the stack with empties"),
None => return Ok(()),
};
}
pop_path = false;
}
(Some(lhs), Some(rhs)) => {
@@ -328,7 +328,7 @@ fn handle_lhs_and_rhs_with_equal_filenames(
.cancelled()
{
return Err(Error::Cancelled);
};
}
let relation = relation_to_propagate.or_else(|| {
*change_id += 1;
@@ -343,7 +343,7 @@ fn handle_lhs_and_rhs_with_equal_filenames(
.cancelled()
{
return Err(Error::Cancelled);
};
}
queue.push_back((None, Some(rhs.oid.to_owned()), to_child(relation)));
}
(true, _) => {
@@ -371,7 +371,7 @@ fn handle_lhs_and_rhs_with_equal_filenames(
.cancelled()
{
return Err(Error::Cancelled);
};
}
queue.push_back((Some(lhs.oid.to_owned()), None, to_child(relation)));
}
(false, false) => {
@@ -390,7 +390,7 @@ fn handle_lhs_and_rhs_with_equal_filenames(
return Err(Error::Cancelled);
}
}
};
}
Ok(())
}


@@ -441,7 +441,7 @@ fn rename_by_similarity_reports_limit_if_encountered() -> crate::Result {
1 => assert_eq!(dst.location, "b"),
2 => assert_eq!(dst.location, "c"),
_ => panic!("too many elements emitted"),
};
}
calls += 1;
Action::Continue
},
@@ -511,7 +511,7 @@ fn rename_by_50_percent_similarity() -> crate::Result {
assert_eq!(dst.location, "c");
}
_ => panic!("too many elements emitted"),
};
}
calls += 1;
Action::Continue
},


@@ -76,7 +76,11 @@ mod path {
None => {
let mut dir = normalize_on_trailing_dot_dot(dir)?;
dir.pop(); // ".git" suffix
let work_dir = dir.as_os_str().is_empty().then(|| PathBuf::from(".")).unwrap_or(dir);
let work_dir = if dir.as_os_str().is_empty() {
PathBuf::from(".")
} else {
dir
};
Path::WorkTree(work_dir)
}
},
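
`cond.then(|| a).unwrap_or(b)` is a ternary in disguise; the commit spells it out as `if`/`else`, which clippy's `obfuscated_if_else` pedantic lint asks for (the gix-ref store hunk further down gets the same rewrite). A compact version of this very case:

```rust
use std::path::PathBuf;

fn main() {
    let dir = PathBuf::new(); // pretend the ".git" suffix was already popped
    // Old: dir.as_os_str().is_empty().then(|| PathBuf::from(".")).unwrap_or(dir)
    let work_dir = if dir.as_os_str().is_empty() {
        PathBuf::from(".")
    } else {
        dir
    };
    assert_eq!(work_dir, PathBuf::from("."));
}
```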


@@ -319,7 +319,7 @@ fn do_not_shorten_absolute_paths() -> crate::Result {
assert!(work_dir.is_absolute());
}
_ => panic!("expected worktree path"),
};
}
Ok(())
}
@@ -345,7 +345,7 @@ mod dot_git_only {
assert_eq!(work_dir, expected.as_ref());
}
_ => panic!("expected worktree path"),
};
}
}
#[test]


@@ -30,7 +30,7 @@ pub mod pipe {
Ok(Err(err)) => return Err(err),
Err(_) => {}
}
};
}
Ok(&self.buf)
}
@@ -77,7 +77,7 @@ pub mod pipe {
/// Returns the _([`write`][Writer], [`read`][Reader])_ ends of a pipe for transferring bytes, analogous to a unix pipe.
///
/// * `in_flight_writes` defines the amount of chunks of bytes to keep in memory until the `write` end will block when writing.
/// If `0`, the `write` end will always block until the `read` end consumes the transferred bytes.
/// If `0`, the `write` end will always block until the `read` end consumes the transferred bytes.
pub fn unidirectional(in_flight_writes: usize) -> (Writer, Reader) {
let (tx, rx) = std::sync::mpsc::sync_channel(in_flight_writes);
(


@@ -17,11 +17,11 @@ where
/// with a given `chunk_size` allowing a maximum `chunks_in_flight`.
///
/// * `chunk_size` describes how many items returned by `iter` will be a single item of this `EagerIter`.
/// This helps to reduce the overhead imposed by transferring many small items.
/// If this number is 1, each item will become a single chunk. 0 is invalid.
/// This helps to reduce the overhead imposed by transferring many small items.
/// If this number is 1, each item will become a single chunk. 0 is invalid.
/// * `chunks_in_flight` describes how many chunks can be kept in memory in case the consumer of the `EagerIter`s items
/// isn't consuming them fast enough. Setting this number to 0 effectively turns off any caching, but blocks `EagerIter`
/// if its items aren't consumed fast enough.
/// isn't consuming them fast enough. Setting this number to 0 effectively turns off any caching, but blocks `EagerIter`
/// if its items aren't consumed fast enough.
pub fn new(iter: I, chunk_size: usize, chunks_in_flight: usize) -> Self {
let (sender, receiver) = std::sync::mpsc::sync_channel(chunks_in_flight);
let size_hint = iter.size_hint();


@@ -66,11 +66,11 @@ pub fn optimize_chunk_size_and_thread_limit(
///
/// * `desired_chunk_size` is the amount of items per chunk you think should be used.
/// * `num_items` is the total amount of items in the iteration, if `Some`.
/// Otherwise this knowledge will not affect the output of this function.
/// Otherwise this knowledge will not affect the output of this function.
/// * `thread_limit` is the amount of threads to use at most, if `Some`.
/// Otherwise this knowledge will not affect the output of this function.
/// Otherwise this knowledge will not affect the output of this function.
/// * `available_threads` is the total amount of threads available, if `Some`.
/// Otherwise the actual amount of available threads is determined by querying the system.
/// Otherwise the actual amount of available threads is determined by querying the system.
///
/// `Note` that this implementation is available only if the `parallel` feature toggle is set.
#[cfg(feature = "parallel")]


@@ -126,7 +126,7 @@ impl Server {
}
}
None => continue,
};
}
}
for cap in &capabilities {


@@ -202,7 +202,7 @@ impl Pipeline {
let (src, dest) = bufs.src_and_dest();
if eol::convert_to_worktree(src, digest, dest, self.options.eol_config)? {
bufs.swap();
};
}
if let Some(encoding) = encoding {
let (src, dest) = bufs.src_and_dest();


@@ -165,7 +165,7 @@ impl<'a> Iterator for Iter<'a> {
}
if self.retries.on_create_directory_failure < 1 {
return self.permanent_failure(dir, NotFound);
};
}
self.cursors.push(dir);
self.cursors.push(match dir.parent() {
None => return self.permanent_failure(dir, InvalidInput),
@@ -177,7 +177,7 @@ impl<'a> Iterator for Iter<'a> {
self.retries.on_interrupt -= 1;
if self.retries.on_interrupt <= 1 {
return self.permanent_failure(dir, Interrupted);
};
}
self.cursors.push(dir);
self.intermediate_failure(dir, err)
}


@@ -13,7 +13,7 @@ pub fn pattern(mut pat: &[u8], may_alter: bool) -> Option<(&[u8], pattern::Mode,
let mut mode = Mode::empty();
if pat.is_empty() {
return None;
};
}
if may_alter {
if pat.first() == Some(&b'!') {
mode |= Mode::NEGATIVE;


@@ -157,7 +157,7 @@ pub(crate) mod function {
t_ch = t.1;
}
None => break,
};
}
}
if t_ch != p_ch {
return NoMatch;
@@ -177,7 +177,7 @@ pub(crate) mod function {
t_ch = t.1;
}
None => break AbortAll,
};
}
};
}
BRACKET_OPEN => {
@@ -187,7 +187,7 @@ pub(crate) mod function {
p_ch = t.1;
}
None => return AbortAll,
};
}
if p_ch == b'^' {
p_ch = NEGATE_CLASS;
@@ -326,7 +326,7 @@ pub(crate) mod function {
}
}
_ => return AbortAll,
};
}
prev_p_ch = 0;
}
}
@@ -336,7 +336,7 @@ pub(crate) mod function {
matched = true;
}
}
};
}
next = p.next();
if let Some((_, BRACKET_CLOSE)) = next {
break;


@@ -88,7 +88,7 @@ fn compare_baseline_with_ours() {
panics += 1;
continue;
}
};
}
}
}


@@ -100,7 +100,7 @@ impl Prefix {
return Err(from_hex::Error::TooLong { hex_len });
} else if hex_len < Self::MIN_HEX_LEN {
return Err(from_hex::Error::TooShort { hex_len });
};
}
let src = if value.len() % 2 == 0 {
let mut out = Vec::from_iter(std::iter::repeat(0).take(value.len() / 2));


@@ -194,7 +194,7 @@ impl State {
let entry_path = e.path(self);
if entry_path == path {
return true;
};
}
if !ignore_case {
return false;
}
@@ -222,7 +222,7 @@ impl State {
let dir_path = dir.path(self);
if dir_path == directory {
return true;
};
}
if !ignore_case {
return false;
}


@@ -12,7 +12,7 @@ bitflags! {
/// The mask to obtain the length of the path associated with this entry, up to 4095 characters without extension.
const PATH_LEN = 0x0fff;
/// The mask to apply to obtain the stage number of an entry, encoding three value: 0 = base, 1 = ours, 2 = theirs.
const STAGE_MASK = 1<<12 | 1<<13;
const STAGE_MASK = (1<<12) | (1<<13);
/// If set, additional bits need to be written to storage.
const EXTENDED = 1<<14;
/// If set, the entry be assumed to match with the version on the working tree, as a way to avoid `lstat()` checks.
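
In Rust `<<` binds tighter than `|`, so `1<<12 | 1<<13` already meant `(1<<12) | (1<<13)`; the parentheses only make the grouping explicit when shift and bit-or mix, which is what clippy's `precedence` lint family asks for (the exact lint name here is my assumption). The same change appears in the `PackId` bit-packing below. Quick check:

```rust
fn main() {
    const STAGE_MASK: u16 = (1 << 12) | (1 << 13);
    assert_eq!(STAGE_MASK, 0x3000);
    // The unparenthesized form groups identically.
    assert_eq!(1 << 12 | 1 << 13, STAGE_MASK);
}
```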


@@ -55,7 +55,7 @@ impl Mode {
Mode::FILE if executable_bit && stat.is_executable() => return Some(Change::ExecutableBit),
Mode::FILE_EXECUTABLE if executable_bit && !stat.is_executable() => return Some(Change::ExecutableBit),
_ => return None,
};
}
let new_mode = if stat.is_dir() {
Mode::COMMIT
} else if executable_bit && stat.is_executable() {


@@ -176,7 +176,7 @@ fn entries<T: std::io::Write>(out: &mut CountBytes<T>, state: &State, header_siz
let eight_null_bytes = [0u8; 8];
out.write_all(&eight_null_bytes[n as usize..])?;
}
};
}
}
Ok(out.count)


@@ -126,7 +126,7 @@ fn parse_bounds(bounds: &Punctuated<TypeParamBound, Token![+]>) -> Option<Conver
return None;
}
if let TypeParamBound::Trait(ref tb) = bounds.first().unwrap() {
if let Some(seg) = tb.path.segments.iter().last() {
if let Some(seg) = tb.path.segments.iter().next_back() {
if let PathArguments::AngleBracketed(ref gen_args) = seg.arguments {
if let GenericArgument::Type(_) = gen_args.args.first().unwrap() {
if seg.ident == "Into" {
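
On a `DoubleEndedIterator`, `.last()` walks the whole iterator from the front, while `.next_back()` returns the final element directly; clippy's `double_ended_iterator_last` lint (name per my understanding) suggests the cheaper call, and the rev-parse code later in this commit gets the same fix. Equivalence sketch:

```rust
fn main() {
    let segments = ["path", "segments", "Into"];
    let via_last = segments.iter().last(); // O(n) for a generic iterator
    let via_back = segments.iter().next_back(); // jumps to the end
    assert_eq!(via_last, via_back);
    assert_eq!(via_back, Some(&"Into"));
}
```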


@@ -46,7 +46,7 @@ impl Snapshot {
Err(insert_pos) => {
self.entries_by_old_email.insert(insert_pos, entry.into());
}
};
}
}
self
}


@@ -293,7 +293,7 @@ impl Pipeline {
ToWorktreeOutcome::Process(MaybeDelayed::Delayed(_)) => {
unreachable!("we prohibit this")
}
};
}
}
let res = self.filter.convert_to_git(


@@ -97,7 +97,7 @@ impl Platform {
storage.data = out;
storage.mode = mode;
}
};
}
Ok(())
}
}


@@ -22,7 +22,7 @@ use std::convert::Infallible;
/// * `labels` are relevant for text-merges and will be shown in conflicts.
/// * `objects` provides access to trees when diffing them.
/// * `write_blob_to_odb(content) -> Result<ObjectId, E>` writes newly merged content into the odb to obtain an id
/// that will be used in merged trees.
/// that will be used in merged trees.
/// * `diff_state` is state used for diffing trees.
/// * `diff_resource_cache` is used for similarity checks.
/// * `blob_merge` is a pre-configured platform to merge any content.
@@ -208,7 +208,7 @@ where
[None, None, None],
)) {
break 'outer;
};
}
editor.remove(to_components(theirs.location()))?;
}
apply_change(&mut editor, theirs, rewritten_location.as_ref().map(|t| &t.0))?;
@@ -274,7 +274,7 @@ where
}
if should_fail_on_conflict(conflict) {
break 'outer;
};
}
} else if matches!(candidate, PossibleConflict::TreeToNonTree { .. }) {
let (mode, id) = theirs.entry_mode_and_id();
let location = theirs.location();
@@ -316,7 +316,7 @@ where
their_changes[theirs_idx].was_written = true;
if should_fail_on_conflict(conflict) {
break 'outer;
};
}
} else if matches!(candidate, PossibleConflict::NonTreeToTree { .. }) {
// We are writing on top of what was a file, a conflict we probably already saw and dealt with.
let location = theirs.location();
@@ -543,7 +543,7 @@ where
],
)) {
break 'outer;
};
}
}
(
Change::Addition {
@@ -795,10 +795,10 @@ where
editor.remove(toc(location))?;
}
_ => unreachable!("parent-match assures this"),
};
}
}
Some(ResolveWith::Ancestor) => {}
};
}
should_fail_on_conflict(Conflict::without_resolution(
ResolutionFailure::OursModifiedTheirsDeleted,
(ours, theirs, side, outer_side),
@@ -807,7 +807,7 @@ where
};
if should_break {
break 'outer;
};
}
}
(
Change::Modification { .. },
@@ -921,7 +921,7 @@ where
],
)) {
break 'outer;
};
}
match tree_conflicts {
None => {
let our_addition = Change::Addition {
@@ -983,7 +983,7 @@ where
],
)) {
break 'outer;
};
}
}
if let Some(addition) = our_addition {
push_deferred((addition, Some(ours_idx)), our_changes);
@@ -1054,7 +1054,7 @@ where
],
)) {
break 'outer;
};
}
let ours_is_rewrite = side.is_swapped();
if tree_conflicts.is_none()
@@ -1139,7 +1139,7 @@ where
[None, index_entry(our_mode, our_id), index_entry(their_mode, their_id)],
)) {
break 'outer;
};
}
}
// Because this constellation can only be found by the lookup tree, there is
@@ -1236,7 +1236,7 @@ where
}
if should_fail_on_conflict(Conflict::unknown((ours, theirs, Original, outer_side))) {
break 'outer;
};
}
}
}
their_changes[theirs_idx].was_written = true;


@@ -581,7 +581,7 @@ pub mod apply_index_entries {
index.entry_index_by_path_and_stage_bounded(path, gix_index::entry::Stage::Unconflicted, len)
{
index.entries_mut()[pos].flags.insert(gix_index::entry::Flags::REMOVE);
};
}
match idx_by_path_stage.entry((stage, path)) {
hash_map::Entry::Occupied(map_entry) => {
// This can happen due to the way the algorithm works.
@@ -604,7 +604,7 @@ pub mod apply_index_entries {
path,
);
}
};
}
}
}


@@ -171,7 +171,7 @@ mod text {
"{}: resolution mismatch",
case.name
);
};
}
assert_str_eq!(
out.as_bstr().to_str_lossy(),
case.expected.to_str_lossy(),


@@ -151,7 +151,7 @@ fn run_baseline() -> crate::Result {
if !tree_path.exists() {
skipped_tree_resolve_cases += 1;
continue;
};
}
let expected_tree_id = gix_hash::ObjectId::from_hex(std::fs::read_to_string(tree_path)?.trim().as_bytes())?;
options.tree_merge.tree_conflicts = Some(tree_resolution);
let resolve_with_ours = tree_resolution == gix_merge::tree::ResolveWith::Ours;


@@ -155,7 +155,7 @@ fn new_inmemory_writes() -> (TreeStore, impl FnMut(&Tree) -> Result<ObjectId, st
Entry::Vacant(e) => {
e.insert(tree.clone());
}
};
}
Ok(id)
}
};


@@ -153,7 +153,7 @@ impl Editor<'_> {
}
}
Err(_) => break,
};
}
}
None
}


@@ -61,7 +61,7 @@ pub fn resolve(objects_directory: PathBuf, current_dir: &std::path::Path) -> Res
}
Err(err) if err.kind() == io::ErrorKind::NotFound => {}
Err(err) => return Err(err.into()),
};
}
if depth != 0 {
out.push(dir);
}


@@ -418,7 +418,7 @@ impl super::Store {
// Safety: can't race as we hold the lock, have to set the generation beforehand to help avoid others to observe the value.
slot.generation.store(generation, Ordering::SeqCst);
*files_mut = None;
};
}
slot.files.store(files);
}


@@ -61,7 +61,7 @@ impl PackId {
midx <= Self::max_packs_in_multi_index(),
"There shouldn't be more than 2^16 packs per multi-index"
);
((self.index as gix_pack::data::Id | 1 << 15) | midx << 16) as gix_pack::data::Id
(self.index as gix_pack::data::Id | (1 << 15)) | (midx << 16) as gix_pack::data::Id
}
}
}


@@ -34,7 +34,7 @@ impl loose::Iter {
}
}
Err(err) => return Some(Err(err)),
};
}
None
}
}


@@ -51,8 +51,8 @@ impl crate::Bundle {
/// * `progress` provides detailed progress information which can be discarded with [`gix_features::progress::Discard`].
/// * `should_interrupt` is checked regularly and when true, the whole operation will stop.
/// * `thin_pack_base_object_lookup` If set, we expect to see a thin-pack with objects that reference their base object by object id which is
/// expected to exist in the object database the bundle is contained within.
/// `options` further configure how the task is performed.
/// expected to exist in the object database the bundle is contained within.
/// `options` further configure how the task is performed.
///
/// # Note
///


@@ -86,7 +86,7 @@ impl<T> Tree<T> {
let pack_offset = get_pack_offset(&data);
if let Some(previous_offset) = previous_cursor_position {
Self::advance_cursor_to_pack_offset(&mut r, pack_offset, previous_offset)?;
};
}
let entry = crate::data::Entry::from_read(&mut r, pack_offset, hash_len).map_err(|err| Error::Io {
source: err,
message: "EOF while parsing header",
@@ -111,7 +111,7 @@ impl<T> Tree<T> {
.expect("in bound distance for deltas");
tree.add_child(base_pack_offset, pack_offset, data)?;
}
};
}
progress.inc();
if idx % 10_000 == 0 && should_interrupt.load(Ordering::SeqCst) {
return Err(Error::Interrupted);


@@ -251,8 +251,8 @@ where
}
/// * `initial_threads` is the threads we may spawn, not accounting for our own thread which is still considered used by the parent
/// system. Since this thread will take a controlling function, we may spawn one more than that. In threaded mode, we will finish
/// all remaining work.
/// system. Since this thread will take a controlling function, we may spawn one more than that. In threaded mode, we will finish
/// all remaining work.
#[allow(clippy::too_many_arguments)]
fn deltas_mt<T, F, MBFN, E, R>(
mut threads_to_create: isize,


@@ -260,7 +260,7 @@ impl File {
&first_entry,
consumed_input.expect("consumed bytes as set by cache"),
));
};
}
// First pass will decompress all delta data and keep it in our output buffer
// [<possibly resolved base object>]<delta-1..delta-n>...


@@ -88,7 +88,7 @@ impl File {
None => return Err(Error::DeltaBaseUnresolved(base_id)),
}
}
};
}
}
}


@@ -59,7 +59,7 @@ fn compress_data(obj: &gix_object::Data<'_>) -> Result<Vec<u8>, input::Error> {
unreachable!("Should never see other errors than zlib, but got {:?}", err,)
}
}
};
}
out.flush().expect("zlib flush should never fail");
Ok(out.into_inner())
}


@@ -107,7 +107,7 @@ where
if entry.is_invalid() {
self.pack_offsets_and_validity.push((0, false));
continue;
};
}
self.pack_offsets_and_validity.push((self.written, true));
let header = entry.to_entry_header(self.entry_version, |index| {
let (base_offset, is_valid_object) = self.pack_offsets_and_validity[index];
@@ -128,7 +128,7 @@ where
self.is_done = true;
self.trailer = Some(gix_hash::ObjectId::from(digest));
}
};
}
Ok(self.written - previous_written)
}
}


@@ -57,7 +57,7 @@ pub mod changes {
}
}
Change::Deletion { .. } => {}
};
}
Action::Continue
}
}


@@ -72,7 +72,7 @@ impl output::Entry {
) -> Option<Result<Self, Error>> {
if entry.version != target_version {
return None;
};
}
let pack_offset_must_be_zero = 0;
let pack_entry = match data::Entry::from_bytes(&entry.data, pack_offset_must_be_zero, count.id.as_slice().len())
@@ -144,7 +144,7 @@ impl output::Entry {
std::io::ErrorKind::Other => return Err(Error::ZlibDeflate(err)),
err => unreachable!("Should never see other errors than zlib, but got {:?}", err,),
}
};
}
out.flush()?;
out.into_inner()
},


@@ -220,7 +220,7 @@ fn digest_statistics(traverse::Outcome { roots, children }: traverse::Outcome<En
Tree => res.num_trees += 1,
Tag => res.num_tags += 1,
Commit => res.num_commits += 1,
};
}
}
let num_nodes = roots.len() + children.len();


@@ -262,7 +262,7 @@ impl index::File {
}
}
Blob => {}
};
}
}
Ok(())
}


@@ -69,10 +69,10 @@ impl crate::index::File {
/// * `kind` is the version of pack index to produce, use [`crate::index::Version::default()`] if in doubt.
/// * `tread_limit` is used for a parallel tree traversal for obtaining object hashes with optimal performance.
/// * `root_progress` is the top-level progress to stay informed about the progress of this potentially long-running
/// computation.
/// computation.
/// * `object_hash` defines what kind of object hash we write into the index file.
/// * `pack_version` is the version of the underlying pack for which `entries` are read. It's used in case none of these objects are provided
/// to compute a pack-hash.
/// to compute a pack-hash.
///
/// # Remarks
///
@@ -164,7 +164,7 @@ impl crate::index::File {
},
)?;
}
};
}
last_seen_trailer = trailer;
num_objects += 1;
objects_progress.inc();


@@ -164,7 +164,7 @@ where
"interrupted by user",
))
}
};
}
}
BandRef::Error(d) => {
let text = TextRef::from(d).0;
@@ -176,9 +176,9 @@ where
"interrupted by user",
))
}
};
}
}
};
}
}
None => {
break match line.as_slice() {


@@ -162,7 +162,7 @@ where
"interrupted by user",
))
}
};
}
}
BandRef::Error(d) => {
let text = TextRef::from(d).0;
@@ -174,9 +174,9 @@ where
"interrupted by user",
))
}
};
}
}
};
}
}
None => {
break match line.as_slice() {


@@ -12,8 +12,8 @@
//! - mingw [is used for the conversion](https://github.com/git/git/blob/main/compat/mingw.h#L579:L579) and it appears they handle surrogates during the conversion, maybe some sort of non-strict UTF-8 converter? Actually it uses [WideCharToMultiByte](https://docs.microsoft.com/en-us/windows/win32/api/stringapiset/nf-stringapiset-widechartomultibyte)
//! under the hood which by now does fail if the UTF-8 would be invalid unicode, i.e. unicode pairs.
//! - `OsString` on windows already stores strings as WTF-8, which supports [surrogate pairs](https://unicodebook.readthedocs.io/unicode_encodings.html),
//! something that UTF-8 isn't allowed do it for security reasons, after all it's UTF-16 specific and exists only to extend
//! the encodable code-points.
//! something that UTF-8 isn't allowed do it for security reasons, after all it's UTF-16 specific and exists only to extend
//! the encodable code-points.
//! - informative reading on [WTF-8](https://simonsapin.github.io/wtf-8/#motivation) which is the encoding used by Rust
//! internally that deals with surrogates and non-wellformed surrogates (those that aren't in pairs).
//! * **unix**


@@ -155,7 +155,7 @@ fn parse_long_keywords(input: &[u8], p: &mut Pattern, cursor: &mut usize) -> Res
keyword: BString::from(keyword),
});
}
};
}
Ok(())
})
}


@@ -137,7 +137,7 @@ fn missing_parentheses() {
let output = gix_pathspec::parse(input.as_bytes(), Default::default());
assert!(output.is_err());
assert!(matches!(output.unwrap_err(), Error::MissingClosingParenthesis { .. }));
assert!(matches!(output.unwrap_err(), Error::MissingClosingParenthesis));
}
#[test]


@@ -120,7 +120,7 @@ impl Response {
io::ErrorKind::UnexpectedEof,
"Could not read message headline",
)));
};
}
match line.trim_end() {
"acknowledgments" => {


@@ -224,7 +224,7 @@ impl Response {
}
Err(_) => return true,
},
};
}
false
}
}


@@ -188,7 +188,7 @@ pub(in crate::handshake::refs) fn parse_v1(
object,
path: path.into(),
}),
};
}
}
}
Ok(())


@@ -66,7 +66,7 @@ impl RemoteProgress<'_> {
}
}
None => progress.set_name(progress_name(progress.name(), text)),
};
}
}
}
}


@@ -188,7 +188,7 @@ impl file::Store {
res.strip_namespace(namespace);
}
return Ok(Some(res));
};
}
}
}
Ok(None)
@@ -225,9 +225,13 @@ impl file::Store {
use crate::Category::*;
let sn = FullNameRef::new_unchecked(sn);
match c {
LinkedPseudoRef { name: worktree_name } => is_reflog
.then(|| (linked_git_dir(worktree_name).into(), sn))
.unwrap_or((commondir.into(), name)),
LinkedPseudoRef { name: worktree_name } => {
if is_reflog {
(linked_git_dir(worktree_name).into(), sn)
} else {
(commondir.into(), name)
}
}
Tag | LocalBranch | RemoteBranch | Note => (commondir.into(), name),
MainRef | MainPseudoRef => (commondir.into(), sn),
LinkedRef { name: worktree_name } => sn


@@ -180,7 +180,7 @@ where
let buf = &mut self.buf[..n];
if let Err(err) = read.read_exact(buf) {
return Some(Err(err.into()));
};
}
let last_byte = *buf.last().expect("we have read non-zero bytes before");
self.last_nl_pos = Some(if last_byte != b'\n' { buf.len() } else { buf.len() - 1 });


@@ -126,7 +126,7 @@ pub mod create_or_update {
}
})?;
options.create(true);
};
}
let file_for_appending = match options.open(&log_path) {
Ok(f) => Some(f),


@@ -112,7 +112,7 @@ fn missing_reflog_creates_it_even_if_similarly_named_empty_dir_exists_and_append
"there is no logs in disabled mode"
);
}
};
}
// create onto existing directory
let full_name_str = "refs/heads/other";


@@ -124,7 +124,7 @@ impl ReferenceExt for Reference {
})?;
}
_ => break oid,
};
}
};
self.peeled = Some(peeled_id);
self.target = Target::Object(peeled_id);


@@ -110,7 +110,7 @@ impl Transaction<'_, '_> {
full_name: change.name(),
});
}
};
}
}
}
Change::Delete { .. } => {}


@@ -159,7 +159,7 @@ impl Transaction<'_, '_> {
let full_name = change.name();
return Err(Error::MustExist { full_name, expected });
}
};
}
fn new_would_change_existing(new: &Target, existing: &Target) -> (bool, bool) {
match (new, existing) {
@@ -386,7 +386,7 @@ impl Transaction<'_, '_> {
other => other,
};
return Err(err);
};
}
// traverse parent chain from leaf/peeled ref and set the leaf previous oid accordingly
// to help with their reflog entries


@@ -66,7 +66,7 @@ where
let edit = edit.borrow_mut();
if !edit.deref {
continue;
};
}
// we can't tell what happened and we are here because it's a non-existing ref or an invalid one.
// In any case, we don't want the following algorithms to try dereffing it and assume they deal with


@@ -96,7 +96,7 @@ fn reference_with_equally_named_empty_or_non_empty_directory_already_in_place_ca
assert_eq!(source.to_string(), "Directory not empty");
}
_ => unreachable!("other errors shouldn't happen here"),
};
}
}
}
Ok(())


@@ -94,7 +94,7 @@ impl<'a> Needle<'a> {
}
None => return Match::None,
_ => {}
};
}
let tail = &name[*asterisk_pos + 1..];
if !item.full_ref_name.ends_with(tail) {
return Match::None;


@@ -264,7 +264,7 @@ pub mod baseline {
lhs = "HEAD".as_bytes();
} else {
tokens.next();
};
}
let rhs = tokens.next().unwrap().trim();
let local = (rhs != b"FETCH_HEAD").then(|| full_tracking_ref(rhs.into()));
if !(lhs.as_bstr() == "HEAD" && local.is_none()) {


@@ -73,7 +73,7 @@ pub trait Navigate {
///
/// * `stage` ranges from 0 to 2, with 0 being the base, 1 being ours, 2 being theirs.
/// * `path` without prefix is relative to the root of the repository, while prefixes like `./` and `../` make it
/// relative to the current working directory.
/// relative to the current working directory.
fn index_lookup(&mut self, path: &BStr, stage: u8) -> Option<()>;
}


@@ -201,7 +201,7 @@ fn long_describe_prefix(name: &BStr) -> Option<(&BStr, delegate::PrefixHint<'_>)
let candidate = iter.by_ref().find_map(|substr| {
if substr.first()? != &b'g' {
return None;
};
}
let rest = substr.get(1..)?;
rest.iter().all(u8::is_ascii_hexdigit).then(|| rest.as_bstr())
})?;
@@ -213,7 +213,7 @@ fn long_describe_prefix(name: &BStr) -> Option<(&BStr, delegate::PrefixHint<'_>)
.and_then(|generation| {
iter.next().map(|token| {
let last_token_len = token.len();
let first_token_ptr = iter.last().map_or(token.as_ptr(), <[_]>::as_ptr);
let first_token_ptr = iter.next_back().map_or(token.as_ptr(), <[_]>::as_ptr);
// SAFETY: both pointers are definitely part of the same object
#[allow(unsafe_code)]
let prior_tokens_len: usize = unsafe { token.as_ptr().offset_from(first_token_ptr) }
@@ -274,7 +274,7 @@ fn parens(input: &[u8]) -> Result<Option<InsideParensRestConsumed<'_>>, Error> {
_ => {
if ignore_next {
skip_list.pop();
};
}
ignore_next = false;
}
}
@@ -307,7 +307,7 @@ fn try_parse<T: FromStr + PartialEq + Default>(input: &BStr) -> Result<Option<T>
n.parse().ok().map(|n| {
if n == T::default() && input[0] == b'-' {
return Err(Error::NegativeZero { input: input.into() });
};
}
Ok(n)
})
})
@@ -337,7 +337,7 @@ where
[b':', b'2', b':', path @ ..] => return consume_all(delegate.index_lookup(path.as_bstr(), 2)),
[b':', path @ ..] => return consume_all(delegate.index_lookup(path.as_bstr(), 0)),
_ => {}
};
}
let mut sep_pos = None;
let mut consecutive_hex_chars = Some(0);


@@ -252,7 +252,7 @@ impl<T> Graph<'_, '_, Commit<T>> {
gix_hashtable::hash_map::Entry::Occupied(mut entry) => {
update_data(&mut entry.get_mut().data);
}
};
}
Ok(self.map.get_mut(&id))
}
@@ -303,7 +303,7 @@ impl<T: Default> Graph<'_, '_, Commit<T>> {
gix_hashtable::hash_map::Entry::Occupied(mut entry) => {
update_commit(entry.get_mut());
}
};
}
Ok(self.map.get_mut(&id))
}
}
@@ -337,7 +337,7 @@ impl<'cache, T> Graph<'_, 'cache, T> {
gix_hashtable::hash_map::Entry::Occupied(mut entry) => {
update_data(entry.get_mut());
}
};
}
commit
}))
}


@@ -36,7 +36,7 @@ impl IsActivePlatform {
) -> Result<bool, gix_config::value::Error> {
if let Some(val) = config.boolean(format!("submodule.{name}.active")).transpose()? {
return Ok(val);
};
}
if let Some(val) = self.search.as_mut().map(|search| {
search
.pattern_matching_relative_path(name, Some(true), attributes)


@@ -102,7 +102,7 @@ impl curl::easy::Handler for Handler {
.ok();
}
}
};
}
true
}
}
@@ -338,7 +338,7 @@ pub fn new() -> (
body.channel.try_send(err).ok();
}
(None, None) => {}
};
}
} else {
let handler = handle.get_mut();
if let Some((action, authenticate)) = proxy_auth_action {


@@ -23,7 +23,7 @@ impl crate::IsSpuriousError for Error {
#[cfg(feature = "http-client-curl")]
if let Some(err) = source.downcast_ref::<crate::client::http::curl::Error>() {
return err.is_spurious();
};
}
#[cfg(feature = "http-client-reqwest")]
if let Some(err) = source.downcast_ref::<crate::client::http::reqwest::remote::Error>() {
return err.is_spurious();

Some files were not shown because too many files have changed in this diff.