Fix clippy::redundant_clone lint violations (#36558)

This removes around 900 unnecessary clones, ranging from cloning a few
ints all the way to large data structures and images.

A lot of these were fixed using `cargo clippy --fix --workspace
--all-targets`; however, that command often breaks other lints and needs
to be run again. This was then followed up with some manual fixing.

I understand this is a large diff, but all the changes are pretty
trivial. Rust is doing some heavy lifting here for us. Once I get it up
to speed with main, I'd appreciate this getting merged sooner rather
than later.

Release Notes:

- N/A
This commit is contained in:
tidely 2025-08-20 13:20:13 +03:00 committed by GitHub
parent cf7c64d77f
commit 7bdc99abc1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
306 changed files with 805 additions and 1102 deletions

View file

@ -168,7 +168,7 @@ impl RemoteBufferStore {
.with_context(|| {
format!("no worktree found for id {}", file.worktree_id)
})?;
buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?)
as Arc<dyn language::File>);
}
Buffer::from_proto(replica_id, capability, state, buffer_file)
@ -591,7 +591,7 @@ impl LocalBufferStore {
else {
return Task::ready(Err(anyhow!("no such worktree")));
};
self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
self.save_local_buffer(buffer, worktree, path.path, true, cx)
}
fn open_buffer(
@ -845,7 +845,7 @@ impl BufferStore {
) -> Task<Result<()>> {
match &mut self.state {
BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
BufferStoreState::Remote(this) => this.save_remote_buffer(buffer, None, cx),
}
}
@ -1138,7 +1138,7 @@ impl BufferStore {
envelope: TypedEnvelope<proto::UpdateBuffer>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
let payload = envelope.payload.clone();
let payload = envelope.payload;
let buffer_id = BufferId::new(payload.buffer_id)?;
let ops = payload
.operations

View file

@ -760,7 +760,7 @@ mod tests {
&store,
vec![
(server_1_id.clone(), ContextServerStatus::Starting),
(server_1_id.clone(), ContextServerStatus::Running),
(server_1_id, ContextServerStatus::Running),
(server_2_id.clone(), ContextServerStatus::Starting),
(server_2_id.clone(), ContextServerStatus::Running),
(server_2_id.clone(), ContextServerStatus::Stopped),

View file

@ -192,7 +192,7 @@ impl BreakpointStore {
}
pub(crate) fn shared(&mut self, project_id: u64, downstream_client: AnyProtoClient) {
self.downstream_client = Some((downstream_client.clone(), project_id));
self.downstream_client = Some((downstream_client, project_id));
}
pub(crate) fn unshared(&mut self, cx: &mut Context<Self>) {
@ -450,9 +450,9 @@ impl BreakpointStore {
});
if let Some(found_bp) = found_bp {
found_bp.message = Some(log_message.clone());
found_bp.message = Some(log_message);
} else {
breakpoint.bp.message = Some(log_message.clone());
breakpoint.bp.message = Some(log_message);
// We did not remove any breakpoint, hence let's toggle one.
breakpoint_set
.breakpoints
@ -482,9 +482,9 @@ impl BreakpointStore {
});
if let Some(found_bp) = found_bp {
found_bp.hit_condition = Some(hit_condition.clone());
found_bp.hit_condition = Some(hit_condition);
} else {
breakpoint.bp.hit_condition = Some(hit_condition.clone());
breakpoint.bp.hit_condition = Some(hit_condition);
// We did not remove any breakpoint, hence let's toggle one.
breakpoint_set
.breakpoints
@ -514,9 +514,9 @@ impl BreakpointStore {
});
if let Some(found_bp) = found_bp {
found_bp.condition = Some(condition.clone());
found_bp.condition = Some(condition);
} else {
breakpoint.bp.condition = Some(condition.clone());
breakpoint.bp.condition = Some(condition);
// We did not remove any breakpoint, hence let's toggle one.
breakpoint_set
.breakpoints
@ -591,7 +591,7 @@ impl BreakpointStore {
cx: &mut Context<Self>,
) {
if let Some(breakpoints) = self.breakpoints.remove(&old_path) {
self.breakpoints.insert(new_path.clone(), breakpoints);
self.breakpoints.insert(new_path, breakpoints);
cx.notify();
}

View file

@ -1454,7 +1454,7 @@ impl DapCommand for EvaluateCommand {
variables_reference: message.variable_reference,
named_variables: message.named_variables,
indexed_variables: message.indexed_variables,
memory_reference: message.memory_reference.clone(),
memory_reference: message.memory_reference,
value_location_reference: None, //TODO
})
}

View file

@ -721,7 +721,7 @@ impl DapStore {
downstream_client: AnyProtoClient,
_: &mut Context<Self>,
) {
self.downstream_client = Some((downstream_client.clone(), project_id));
self.downstream_client = Some((downstream_client, project_id));
}
pub fn unshared(&mut self, cx: &mut Context<Self>) {

View file

@ -1394,7 +1394,7 @@ impl Session {
let breakpoint_store = self.breakpoint_store.clone();
if let Some((local, path)) = self.as_running_mut().and_then(|local| {
let breakpoint = local.tmp_breakpoint.take()?;
let path = breakpoint.path.clone();
let path = breakpoint.path;
Some((local, path))
}) {
local
@ -1710,7 +1710,7 @@ impl Session {
this.threads = result
.into_iter()
.map(|thread| (ThreadId(thread.id), Thread::from(thread.clone())))
.map(|thread| (ThreadId(thread.id), Thread::from(thread)))
.collect();
this.invalidate_command_type::<StackTraceCommand>();
@ -2553,10 +2553,7 @@ impl Session {
mode: Option<String>,
cx: &mut Context<Self>,
) -> Task<Option<dap::DataBreakpointInfoResponse>> {
let command = DataBreakpointInfoCommand {
context: context.clone(),
mode,
};
let command = DataBreakpointInfoCommand { context, mode };
self.request(command, |_, response, _| response.ok(), cx)
}

View file

@ -769,7 +769,7 @@ impl GitStore {
.as_ref()
.and_then(|weak| weak.upgrade())
{
let conflict_set = conflict_set.clone();
let conflict_set = conflict_set;
let buffer_snapshot = buffer.read(cx).text_snapshot();
git_state.update(cx, |state, cx| {
@ -912,7 +912,7 @@ impl GitStore {
return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
};
let content = match &version {
Some(version) => buffer.rope_for_version(version).clone(),
Some(version) => buffer.rope_for_version(version),
None => buffer.as_rope().clone(),
};
let version = version.unwrap_or(buffer.version());
@ -1506,10 +1506,7 @@ impl GitStore {
let mut update = envelope.payload;
let id = RepositoryId::from_proto(update.id);
let client = this
.upstream_client()
.context("no upstream client")?
.clone();
let client = this.upstream_client().context("no upstream client")?;
let mut is_new = false;
let repo = this.repositories.entry(id).or_insert_with(|| {
@ -3418,7 +3415,6 @@ impl Repository {
reset_mode: ResetMode,
_cx: &mut App,
) -> oneshot::Receiver<Result<()>> {
let commit = commit.to_string();
let id = self.id;
self.send_job(None, move |git_repo, _| async move {
@ -3644,7 +3640,7 @@ impl Repository {
let to_stage = self
.cached_status()
.filter(|entry| !entry.status.staging().is_fully_staged())
.map(|entry| entry.repo_path.clone())
.map(|entry| entry.repo_path)
.collect();
self.stage_entries(to_stage, cx)
}
@ -3653,16 +3649,13 @@ impl Repository {
let to_unstage = self
.cached_status()
.filter(|entry| entry.status.staging().has_staged())
.map(|entry| entry.repo_path.clone())
.map(|entry| entry.repo_path)
.collect();
self.unstage_entries(to_unstage, cx)
}
pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
let to_stash = self
.cached_status()
.map(|entry| entry.repo_path.clone())
.collect();
let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
self.stash_entries(to_stash, cx)
}

View file

@ -369,7 +369,7 @@ mod tests {
.unindent();
let buffer_id = BufferId::new(1).unwrap();
let buffer = Buffer::new(0, buffer_id, test_content.to_string());
let buffer = Buffer::new(0, buffer_id, test_content);
let snapshot = buffer.snapshot();
let conflict_snapshot = ConflictSet::parse(&snapshot);
@ -400,7 +400,7 @@ mod tests {
>>>>>>> "#
.unindent();
let buffer_id = BufferId::new(1).unwrap();
let buffer = Buffer::new(0, buffer_id, test_content.to_string());
let buffer = Buffer::new(0, buffer_id, test_content);
let snapshot = buffer.snapshot();
let conflict_snapshot = ConflictSet::parse(&snapshot);

View file

@ -244,7 +244,7 @@ impl ProjectItem for ImageItem {
}
fn project_path(&self, cx: &App) -> Option<ProjectPath> {
Some(self.project_path(cx).clone())
Some(self.project_path(cx))
}
fn is_dirty(&self) -> bool {
@ -375,7 +375,6 @@ impl ImageStore {
let (mut tx, rx) = postage::watch::channel();
entry.insert(rx.clone());
let project_path = project_path.clone();
let load_image = self
.state
.open_image(project_path.path.clone(), worktree, cx);

View file

@ -2739,7 +2739,7 @@ impl GetCodeActions {
Some(lsp::CodeActionProviderCapability::Options(CodeActionOptions {
code_action_kinds: Some(supported_action_kinds),
..
})) => Some(supported_action_kinds.clone()),
})) => Some(supported_action_kinds),
_ => capabilities.code_action_kinds,
}
}
@ -3793,7 +3793,7 @@ impl GetDocumentDiagnostics {
},
uri: lsp::Url::parse(&info.location_url.unwrap()).unwrap(),
},
message: info.message.clone(),
message: info.message,
}
})
.collect::<Vec<_>>();
@ -4491,9 +4491,8 @@ mod tests {
data: Some(json!({"detail": "test detail"})),
};
let proto_diagnostic =
GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic.clone())
.expect("Failed to serialize diagnostic");
let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic)
.expect("Failed to serialize diagnostic");
let start = proto_diagnostic.start.unwrap();
let end = proto_diagnostic.end.unwrap();

View file

@ -917,7 +917,7 @@ impl LocalLspStore {
message: params.message,
actions: vec![],
response_channel: tx,
lsp_name: name.clone(),
lsp_name: name,
};
let _ = this.update(&mut cx, |_, cx| {
@ -2954,7 +2954,7 @@ impl LocalLspStore {
.update(cx, |this, cx| {
let path = buffer_to_edit.read(cx).project_path(cx);
let active_entry = this.active_entry;
let is_active_entry = path.clone().is_some_and(|project_path| {
let is_active_entry = path.is_some_and(|project_path| {
this.worktree_store
.read(cx)
.entry_for_path(&project_path, cx)
@ -5688,10 +5688,7 @@ impl LspStore {
let all_actions_task = self.request_multiple_lsp_locally(
buffer,
Some(range.start),
GetCodeActions {
range: range.clone(),
kinds: kinds.clone(),
},
GetCodeActions { range, kinds },
cx,
);
cx.background_spawn(async move {
@ -7221,7 +7218,7 @@ impl LspStore {
worktree = tree;
path = rel_path;
} else {
worktree = source_worktree.clone();
worktree = source_worktree;
path = relativize_path(&result.worktree_abs_path, &abs_path);
}
@ -10338,7 +10335,7 @@ impl LspStore {
let name = self
.language_server_statuses
.remove(&server_id)
.map(|status| status.name.clone())
.map(|status| status.name)
.or_else(|| {
if let Some(LanguageServerState::Running { adapter, .. }) = server_state.as_ref() {
Some(adapter.name())

View file

@ -58,7 +58,7 @@ pub fn register_notifications(
language_server
.on_notification::<InactiveRegions, _>({
let adapter = adapter.clone();
let adapter = adapter;
let this = lsp_store;
move |params: InactiveRegionsParams, cx| {

View file

@ -34,7 +34,6 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
language_server
.on_notification::<ServerStatus, _>({
let name = name.clone();
move |params, cx| {
let message = params.message;
let log_message = message.as_ref().map(|message| {

View file

@ -2502,7 +2502,7 @@ impl Project {
path: ProjectPath,
cx: &mut Context<Self>,
) -> Task<Result<(Option<ProjectEntryId>, Entity<Buffer>)>> {
let task = self.open_buffer(path.clone(), cx);
let task = self.open_buffer(path, cx);
cx.spawn(async move |_project, cx| {
let buffer = task.await?;
let project_entry_id = buffer.read_with(cx, |buffer, cx| {
@ -3170,7 +3170,7 @@ impl Project {
if let ImageItemEvent::ReloadNeeded = event
&& !self.is_via_collab()
{
self.reload_images([image.clone()].into_iter().collect(), cx)
self.reload_images([image].into_iter().collect(), cx)
.detach_and_log_err(cx);
}
@ -3652,7 +3652,7 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<CodeAction>>> {
let snapshot = buffer.read(cx).snapshot();
let range = range.clone().to_owned().to_point(&snapshot);
let range = range.to_point(&snapshot);
let range_start = snapshot.anchor_before(range.start);
let range_end = if range.start == range.end {
range_start

View file

@ -1818,7 +1818,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
buffer
.snapshot()
.diagnostics_in_range::<_, usize>(0..1, false)
.map(|entry| entry.diagnostic.message.clone())
.map(|entry| entry.diagnostic.message)
.collect::<Vec<_>>(),
["the message".to_string()]
);
@ -1844,7 +1844,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
buffer
.snapshot()
.diagnostics_in_range::<_, usize>(0..1, false)
.map(|entry| entry.diagnostic.message.clone())
.map(|entry| entry.diagnostic.message)
.collect::<Vec<_>>(),
Vec::<String>::new(),
);
@ -3712,7 +3712,7 @@ async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor().clone());
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({
@ -3767,7 +3767,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor().clone());
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({
@ -5897,7 +5897,7 @@ async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor().clone());
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/one/two",
json!({

View file

@ -760,7 +760,7 @@ impl Inventory {
TaskSettingsLocation::Global(path) => {
previously_existing_scenarios = parsed_scenarios
.global_scenarios()
.map(|(_, scenario)| scenario.label.clone())
.map(|(_, scenario)| scenario.label)
.collect::<HashSet<_>>();
parsed_scenarios
.global
@ -770,7 +770,7 @@ impl Inventory {
TaskSettingsLocation::Worktree(location) => {
previously_existing_scenarios = parsed_scenarios
.worktree_scenarios(location.worktree_id)
.map(|(_, scenario)| scenario.label.clone())
.map(|(_, scenario)| scenario.label)
.collect::<HashSet<_>>();
if new_templates.is_empty() {

View file

@ -89,7 +89,7 @@ impl Project {
let ssh_client = ssh_client.read(cx);
if let Some((SshArgs { arguments, envs }, path_style)) = ssh_client.ssh_info() {
return Some(SshDetails {
host: ssh_client.connection_options().host.clone(),
host: ssh_client.connection_options().host,
ssh_command: SshCommand { arguments },
envs,
path_style,

View file

@ -457,7 +457,7 @@ impl WorktreeStore {
})
.collect::<HashMap<_, _>>();
let (client, project_id) = self.upstream_client().clone().context("invalid project")?;
let (client, project_id) = self.upstream_client().context("invalid project")?;
for worktree in worktrees {
if let Some(old_worktree) =