Fix clippy::redundant_clone
lint violations (#36558)
This removes around 900 unnecessary clones, ranging from cloning a few ints all the way to large data structures and images. A lot of these were fixed using `cargo clippy --fix --workspace --all-targets`; however, the automated fix often breaks other lints and needs to be run again. This was then followed up with some manual fixing. I understand this is a large diff, but all the changes are pretty trivial. Rust is doing some heavy lifting here for us. Once I get it up to speed with main, I'd appreciate this getting merged sooner rather than later. Release Notes: - N/A
This commit is contained in:
parent
cf7c64d77f
commit
7bdc99abc1
306 changed files with 805 additions and 1102 deletions
|
@ -35,7 +35,7 @@ fn main() {
|
|||
None,
|
||||
));
|
||||
let client = client::Client::new(clock, http.clone(), cx);
|
||||
Client::set_global(client.clone(), cx);
|
||||
Client::set_global(client, cx);
|
||||
|
||||
let args: Vec<String> = std::env::args().collect();
|
||||
if args.len() < 2 {
|
||||
|
@ -49,7 +49,7 @@ fn main() {
|
|||
let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");
|
||||
|
||||
let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new(
|
||||
http.clone(),
|
||||
http,
|
||||
OpenAiEmbeddingModel::TextEmbedding3Small,
|
||||
open_ai::OPEN_AI_API_URL.to_string(),
|
||||
api_key,
|
||||
|
|
|
@ -88,7 +88,7 @@ impl EmbeddingIndex {
|
|||
|
||||
let worktree = self.worktree.read(cx).snapshot();
|
||||
let worktree_abs_path = worktree.abs_path().clone();
|
||||
let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
|
||||
let scan = self.scan_updated_entries(worktree, updated_entries, cx);
|
||||
let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx);
|
||||
let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx);
|
||||
let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx);
|
||||
|
@ -406,7 +406,7 @@ impl EmbeddingIndex {
|
|||
.context("failed to create read transaction")?;
|
||||
let result = db
|
||||
.iter(&tx)?
|
||||
.map(|entry| Ok(entry?.1.path.clone()))
|
||||
.map(|entry| Ok(entry?.1.path))
|
||||
.collect::<Result<Vec<Arc<Path>>>>();
|
||||
drop(tx);
|
||||
result
|
||||
|
@ -423,8 +423,7 @@ impl EmbeddingIndex {
|
|||
Ok(db
|
||||
.get(&tx, &db_key_for_path(&path))?
|
||||
.context("no such path")?
|
||||
.chunks
|
||||
.clone())
|
||||
.chunks)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -434,7 +434,7 @@ mod tests {
|
|||
.await;
|
||||
|
||||
let range = search_result.range.clone();
|
||||
let content = content[range.clone()].to_owned();
|
||||
let content = content[range].to_owned();
|
||||
|
||||
assert!(content.contains("garbage in, garbage out"));
|
||||
}
|
||||
|
|
|
@ -205,7 +205,7 @@ impl SummaryIndex {
|
|||
let worktree = self.worktree.read(cx).snapshot();
|
||||
let worktree_abs_path = worktree.abs_path().clone();
|
||||
|
||||
backlogged = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
|
||||
backlogged = self.scan_updated_entries(worktree, updated_entries, cx);
|
||||
digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx);
|
||||
needs_summary = self.check_summary_cache(digest.files, cx);
|
||||
summaries = self.summarize_files(needs_summary.files, cx);
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue