One big cleanup pass of clippy lints
Co-authored-by: Mikayla <mikayla@zed.dev>
parent e7540d2833
commit 8ba2f77148
138 changed files with 1328 additions and 1366 deletions
@@ -460,7 +460,7 @@ impl FakeFs {
 }
 }
 Null => {
-self.create_dir(&path).await.unwrap();
+self.create_dir(path).await.unwrap();
 }
 String(contents) => {
 self.insert_file(&path, contents).await;
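The `&path` → `path` change above is the clippy `needless_borrow` pattern: the argument is already a reference, so borrowing it again only adds a `&&` that gets auto-dereferenced. A minimal standalone sketch (hypothetical names, not Zed's code):

```rust
use std::path::Path;

fn create_dir(path: &Path) {
    println!("would create {}", path.display());
}

fn main() {
    let path: &Path = Path::new("/tmp/example");
    // create_dir(&path) would pass a `&&Path`; clippy::needless_borrow
    // suggests passing the existing reference directly.
    create_dir(path);
}
```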
@@ -1027,7 +1027,7 @@ impl LspCommand for GetHover {
 lsp::HoverContents::Array(marked_strings) => {
 let content: Vec<HoverBlock> = marked_strings
 .into_iter()
-.filter_map(|marked_string| HoverBlock::try_new(marked_string))
+.filter_map(HoverBlock::try_new)
 .collect();
 if content.is_empty() {
 None
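Passing `HoverBlock::try_new` directly instead of wrapping it in a closure is the clippy `redundant_closure` fix. A small sketch with placeholder types (not Zed's `HoverBlock`):

```rust
fn try_new(s: &str) -> Option<String> {
    if s.is_empty() {
        None
    } else {
        Some(s.to_uppercase())
    }
}

fn main() {
    let inputs = ["a", "", "b"];
    // `.filter_map(|s| try_new(s))` only forwards its argument, so
    // clippy::redundant_closure suggests passing the function itself.
    let blocks: Vec<String> = inputs.into_iter().filter_map(try_new).collect();
    assert_eq!(blocks, ["A", "B"]);
}
```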
@@ -1049,9 +1049,7 @@ impl LspCommand for GetHover {
 }
 Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(new_language))) => {
 if !current_text.is_empty() {
-let text = std::mem::replace(&mut current_text, String::new())
-.trim()
-.to_string();
+let text = std::mem::take(&mut current_text).trim().to_string();
 contents.push(HoverBlock { text, language });
 }

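Replacing `std::mem::replace(&mut current_text, String::new())` with `std::mem::take(&mut current_text)` is the clippy `mem_replace_with_default` pattern: `take` swaps in the type's default value and reads more directly. A standalone sketch:

```rust
fn drain(current_text: &mut String) -> String {
    // Equivalent to std::mem::replace(current_text, String::new()),
    // but states the intent: take the value, leave the default behind.
    std::mem::take(current_text).trim().to_string()
}

fn main() {
    let mut buf = String::from("  hello  ");
    assert_eq!(drain(&mut buf), "hello");
    assert!(buf.is_empty());
}
```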
@@ -1067,9 +1065,7 @@ impl LspCommand for GetHover {
 | Event::End(Tag::BlockQuote)
 | Event::HardBreak => {
 if !current_text.is_empty() {
-let text = std::mem::replace(&mut current_text, String::new())
-.trim()
-.to_string();
+let text = std::mem::take(&mut current_text).trim().to_string();
 contents.push(HoverBlock { text, language });
 }
 language = None;
@@ -114,10 +114,12 @@ pub struct Project {
 _subscriptions: Vec<gpui::Subscription>,
 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
 shared_buffers: HashMap<PeerId, HashSet<u64>>,
+#[allow(clippy::type_complexity)]
 loading_buffers: HashMap<
 ProjectPath,
 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
 >,
+#[allow(clippy::type_complexity)]
 loading_local_worktrees:
 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
 opened_buffers: HashMap<u64, OpenBuffer>,
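Here the commit silences `clippy::type_complexity` rather than restructuring the field types. The usual alternative the lint nudges toward is a type alias; a sketch with made-up types:

```rust
use std::collections::HashMap;
use std::sync::Arc;

// A type alias keeps a deeply nested field readable without the allow.
type LoadResult = Result<String, Arc<std::io::Error>>;
type LoadingBuffers = HashMap<String, Option<LoadResult>>;

struct Project {
    // Instead of #[allow(clippy::type_complexity)] on the raw nested type.
    loading_buffers: LoadingBuffers,
}

fn main() {
    let project = Project { loading_buffers: HashMap::new() };
    assert!(project.loading_buffers.is_empty());
}
```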
@@ -993,7 +995,7 @@ impl Project {
 .iter()
 .filter_map(|worktree| {
 worktree
-.upgrade(&cx)
+.upgrade(cx)
 .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
 })
 .collect()
@@ -1080,7 +1082,7 @@ impl Project {
 self.worktrees
 .iter()
 .filter_map(|worktree| {
-let worktree = worktree.upgrade(&cx)?.read(cx);
+let worktree = worktree.upgrade(cx)?.read(cx);
 if worktree.is_visible() {
 Some(format!(
 "project-path-online:{}",
@@ -1121,7 +1123,7 @@ impl Project {
 }

 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
-paths.iter().all(|path| self.contains_path(&path, cx))
+paths.iter().all(|path| self.contains_path(path, cx))
 }

 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
@@ -1395,11 +1397,8 @@ impl Project {
 }

 for open_buffer in self.opened_buffers.values_mut() {
-match open_buffer {
-OpenBuffer::Strong(buffer) => {
-*open_buffer = OpenBuffer::Weak(buffer.downgrade());
-}
-_ => {}
+if let OpenBuffer::Strong(buffer) = open_buffer {
+*open_buffer = OpenBuffer::Weak(buffer.downgrade());
 }
 }

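Collapsing a `match` whose only real arm is `OpenBuffer::Strong`, plus a catch-all `_ => {}`, into an `if let` is the clippy `single_match` pattern. A standalone sketch with a stand-in enum:

```rust
enum OpenBuffer {
    Strong(String),
    Weak(usize),
}

fn demote(open_buffer: &mut OpenBuffer) {
    // Before: match open_buffer { OpenBuffer::Strong(b) => { ... } _ => {} }
    if let OpenBuffer::Strong(buffer) = open_buffer {
        let len = buffer.len(); // stand-in for buffer.downgrade()
        *open_buffer = OpenBuffer::Weak(len);
    }
}

fn main() {
    let mut buffer = OpenBuffer::Strong("contents".into());
    demote(&mut buffer);
    assert!(matches!(buffer, OpenBuffer::Weak(8)));
}
```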
@@ -1493,7 +1492,7 @@ impl Project {

 let buffer = cx.add_model(|cx| {
 Buffer::new(self.replica_id(), text, cx)
-.with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
+.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
 });
 self.register_buffer(&buffer, cx)?;
 Ok(buffer)
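`unwrap_or(expr)` evaluates its argument even when the option is `Some`; clippy's `or_fun_call` lint asks for the lazy `unwrap_or_else`. A minimal sketch:

```rust
fn main() {
    let language: Option<String> = Some("Rust".to_string());
    let fallback = || "Plain Text".to_string(); // only runs if needed

    // Before: language.unwrap_or("Plain Text".to_string()) builds the
    // fallback String even though `language` is Some.
    let chosen = language.unwrap_or_else(fallback);
    assert_eq!(chosen, "Rust");
}
```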
@@ -1791,7 +1790,7 @@ impl Project {
 server
 .notify::<lsp::notification::DidCloseTextDocument>(
 lsp::DidCloseTextDocumentParams {
-text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
+text_document: lsp::TextDocumentIdentifier::new(uri),
 },
 )
 .log_err();
@@ -1825,7 +1824,7 @@ impl Project {
 language_server = self
 .language_server_ids
 .get(&(worktree_id, adapter.name.clone()))
-.and_then(|id| self.language_servers.get(&id))
+.and_then(|id| self.language_servers.get(id))
 .and_then(|server_state| {
 if let LanguageServerState::Running { server, .. } = server_state {
 Some(server.clone())
@@ -1838,7 +1837,7 @@ impl Project {

 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
-self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
+self.update_buffer_diagnostics(buffer_handle, diagnostics, None, cx)
 .log_err();
 }
 }
@@ -1853,8 +1852,7 @@ impl Project {
 0,
 initial_snapshot.text(),
 ),
-}
-.clone(),
+},
 )
 .log_err();
 buffer_handle.update(cx, |buffer, cx| {
@@ -1864,7 +1862,7 @@ impl Project {
 .completion_provider
 .as_ref()
 .and_then(|provider| provider.trigger_characters.clone())
-.unwrap_or(Vec::new()),
+.unwrap_or_default(),
 cx,
 )
 });
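When the eagerly built fallback is just the type's default, clippy suggests `unwrap_or_default()` instead of `unwrap_or(Vec::new())`. A small sketch:

```rust
fn main() {
    let trigger_characters: Option<Vec<String>> = None;

    // Before: trigger_characters.unwrap_or(Vec::new())
    let chars: Vec<String> = trigger_characters.unwrap_or_default();
    assert!(chars.is_empty());
}
```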
@@ -1910,7 +1908,7 @@ impl Project {
 let request = self.client.request(proto::UpdateBuffer {
 project_id,
 buffer_id: buffer.read(cx).remote_id(),
-operations: vec![language::proto::serialize_operation(&operation)],
+operations: vec![language::proto::serialize_operation(operation)],
 });
 cx.background().spawn(request).detach_and_log_err(cx);
 } else if let Some(project_id) = self.remote_id() {
@@ -2014,7 +2012,7 @@ impl Project {
 .filter_map(move |((language_server_worktree_id, _), id)| {
 if *language_server_worktree_id == worktree_id {
 if let Some(LanguageServerState::Running { adapter, server }) =
-self.language_servers.get(&id)
+self.language_servers.get(id)
 {
 return Some((adapter, server));
 }
@@ -2151,7 +2149,7 @@ impl Project {
 let this = this.downgrade();
 let adapter = adapter.clone();
 move |mut params, cx| {
-let this = this.clone();
+let this = this;
 let adapter = adapter.clone();
 cx.spawn(|mut cx| async move {
 adapter.process_diagnostics(&mut params).await;
@@ -2371,7 +2369,7 @@ impl Project {
 .and_then(|provider| {
 provider.trigger_characters.clone()
 })
-.unwrap_or(Vec::new()),
+.unwrap_or_default(),
 cx,
 )
 });
@@ -2502,10 +2500,8 @@ impl Project {
 .cloned()
 {
 for orphaned_worktree in orphaned_worktrees {
-this.language_server_ids.insert(
-(orphaned_worktree, server_name.clone()),
-new_server_id.clone(),
-);
+this.language_server_ids
+.insert((orphaned_worktree, server_name.clone()), new_server_id);
 }
 }
 });
@@ -2528,9 +2524,7 @@ impl Project {
 return;
 }
 };
-let progress = match progress.value {
-lsp::ProgressParamsValue::WorkDone(value) => value,
-};
+let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
 let language_server_status =
 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
 status
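A `match` with exactly one arm over a single-variant value can become an irrefutable `let` pattern; this is what clippy's `infallible_destructuring_match` lint (likely the trigger here) suggests. A sketch with a stand-in enum:

```rust
enum ProgressValue {
    WorkDone(u32),
}

fn main() {
    let value = ProgressValue::WorkDone(42);

    // Before:
    // let progress = match value { ProgressValue::WorkDone(v) => v };
    let ProgressValue::WorkDone(progress) = value;
    assert_eq!(progress, 42);
}
```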
@@ -2543,7 +2537,7 @@ impl Project {
 }

 let is_disk_based_diagnostics_progress =
-Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_ref().map(|x| &**x);
+Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_deref();

 match progress {
 lsp::WorkDoneProgress::Begin(report) => {
@@ -2796,7 +2790,7 @@ impl Project {
 } else {
 let group_id = post_inc(&mut self.next_diagnostic_group_id);
 let is_disk_based =
-source.map_or(false, |source| disk_based_sources.contains(&source));
+source.map_or(false, |source| disk_based_sources.contains(source));

 sources_by_group_id.insert(group_id, source);
 primary_diagnostic_group_ids
@@ -3194,7 +3188,7 @@ impl Project {
 if let Some(lsp_edits) = lsp_edits {
 let edits = this
 .update(cx, |this, cx| {
-this.edits_from_lsp(&buffer, lsp_edits, None, cx)
+this.edits_from_lsp(buffer, lsp_edits, None, cx)
 })
 .await?;
 buffer.update(cx, |buffer, cx| {
@@ -3366,7 +3360,7 @@ impl Project {
 if let Some((worktree, rel_path)) =
 this.find_local_worktree(&abs_path, cx)
 {
-worktree_id = (&worktree.read(cx)).id();
+worktree_id = worktree.read(cx).id();
 path = rel_path;
 } else {
 path = relativize_path(&worktree_abs_path, &abs_path);
@@ -3613,7 +3607,7 @@ impl Project {
 .clone();
 (
 snapshot.anchor_before(start)..snapshot.anchor_after(end),
-text.clone(),
+text,
 )
 }
 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
@@ -3791,7 +3785,7 @@ impl Project {

 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
 cx.foreground().spawn(async move {
-if !lang_server.capabilities().code_action_provider.is_some() {
+if lang_server.capabilities().code_action_provider.is_none() {
 return Ok(Default::default());
 }

@@ -4120,6 +4114,7 @@ impl Project {
 )
 }

+#[allow(clippy::type_complexity)]
 pub fn search(
 &self,
 query: SearchQuery,
@@ -4341,7 +4336,7 @@ impl Project {
 ) {
 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
 return cx.spawn(|this, cx| async move {
-if !request.check_capabilities(&language_server.capabilities()) {
+if !request.check_capabilities(language_server.capabilities()) {
 return Ok(Default::default());
 }

@@ -4375,7 +4370,7 @@ impl Project {
 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
 let abs_path = abs_path.as_ref();
 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
-Task::ready(Ok((tree.clone(), relative_path.into())))
+Task::ready(Ok((tree, relative_path)))
 } else {
 let worktree = self.create_local_worktree(abs_path, visible, cx);
 cx.foreground()
@@ -4455,7 +4450,7 @@ impl Project {

 Ok(worktree)
 }
-.map_err(|err| Arc::new(err))
+.map_err(Arc::new)
 })
 .shared()
 })
@@ -4487,9 +4482,9 @@ impl Project {
 }

 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
-cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
+cx.observe(worktree, |_, _, cx| cx.notify()).detach();
 if worktree.read(cx).is_local() {
-cx.subscribe(&worktree, |this, worktree, _, cx| {
+cx.subscribe(worktree, |this, worktree, _, cx| {
 this.update_local_worktree_buffers(worktree, cx);
 })
 .detach();
@@ -4508,7 +4503,7 @@ impl Project {
 }

 self.metadata_changed(true, cx);
-cx.observe_release(&worktree, |this, worktree, cx| {
+cx.observe_release(worktree, |this, worktree, cx| {
 this.remove_worktree(worktree.id(), cx);
 cx.notify();
 })
@@ -4610,9 +4605,9 @@ impl Project {
 }
 }

-pub fn language_servers_running_disk_based_diagnostics<'a>(
-&'a self,
-) -> impl 'a + Iterator<Item = usize> {
+pub fn language_servers_running_disk_based_diagnostics(
+&self,
+) -> impl Iterator<Item = usize> + '_ {
 self.language_server_statuses
 .iter()
 .filter_map(|(id, status)| {
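Dropping the explicit `'a` in favor of the anonymous `'_` lifetime on the returned iterator is clippy's `needless_lifetimes` fix. A standalone sketch:

```rust
struct Statuses {
    ids: Vec<usize>,
}

impl Statuses {
    // Before: fn running<'a>(&'a self) -> impl 'a + Iterator<Item = usize>
    fn running(&self) -> impl Iterator<Item = usize> + '_ {
        self.ids.iter().copied()
    }
}

fn main() {
    let statuses = Statuses { ids: vec![1, 2, 3] };
    assert_eq!(statuses.running().count(), 3);
}
```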
@@ -4762,7 +4757,7 @@ impl Project {
 .remove(&peer_id)
 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
 .replica_id;
-for (_, buffer) in &this.opened_buffers {
+for buffer in this.opened_buffers.values() {
 if let Some(buffer) = buffer.upgrade(cx) {
 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
 }
@@ -5088,7 +5083,7 @@ impl Project {
 let ops = payload
 .operations
 .into_iter()
-.map(|op| language::proto::deserialize_operation(op))
+.map(language::proto::deserialize_operation)
 .collect::<Result<Vec<_>, _>>()?;
 let is_remote = this.is_remote();
 match this.opened_buffers.entry(buffer_id) {
@@ -5125,7 +5120,7 @@ impl Project {
 let worktree = this
 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
 .ok_or_else(|| anyhow!("no such worktree"))?;
-let file = File::from_proto(file, worktree.clone(), cx)?;
+let file = File::from_proto(file, worktree, cx)?;
 let buffer = this
 .opened_buffers
 .get_mut(&buffer_id)
@@ -5791,6 +5786,7 @@ impl Project {
 })
 }

+#[allow(clippy::type_complexity)]
 fn edits_from_lsp(
 &mut self,
 buffer: &ModelHandle<Buffer>,
@@ -5837,7 +5833,7 @@ impl Project {
 new_text.push('\n');
 }
 range.end = next_range.end;
-new_text.push_str(&next_text);
+new_text.push_str(next_text);
 lsp_edits.next();
 }

@@ -5872,7 +5868,7 @@ impl Project {
 ChangeTag::Insert => {
 if moved_since_edit {
 let anchor = snapshot.anchor_after(offset);
-edits.push((anchor.clone()..anchor, value.to_string()));
+edits.push((anchor..anchor, value.to_string()));
 } else {
 edits.last_mut().unwrap().1.push_str(value);
 }
@@ -5882,7 +5878,7 @@ impl Project {
 }
 } else if range.end == range.start {
 let anchor = snapshot.anchor_after(range.start);
-edits.push((anchor.clone()..anchor, new_text));
+edits.push((anchor..anchor, new_text));
 } else {
 let edit_start = snapshot.anchor_after(range.start);
 let edit_end = snapshot.anchor_before(range.end);
@@ -5944,7 +5940,7 @@ impl Project {

 if let Some(server_id) = self.language_server_ids.get(&key) {
 if let Some(LanguageServerState::Running { adapter, server }) =
-self.language_servers.get(&server_id)
+self.language_servers.get(server_id)
 {
 return Some((adapter, server));
 }
@@ -499,7 +499,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
 });

 buffer_a.read_with(cx, |buffer, _| {
-let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
+let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 assert_eq!(
 chunks
 .iter()
@@ -513,7 +513,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
 );
 });
 buffer_b.read_with(cx, |buffer, _| {
-let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
+let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 assert_eq!(
 chunks
 .iter()
@@ -579,7 +579,7 @@ async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
 .await
 .unwrap();
 buffer.read_with(cx, |buffer, _| {
-let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
+let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 assert_eq!(
 chunks
 .iter()
@@ -1262,7 +1262,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
 // At the end of a line, an empty range is extended backward to include
 // the preceding character.
 buffer.read_with(cx, |buffer, _| {
-let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
+let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 assert_eq!(
 chunks
 .iter()
@@ -1511,7 +1511,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestApp
 .into_iter()
 .map(|(range, text)| {
 (
-range.start.to_point(&buffer)..range.end.to_point(&buffer),
+range.start.to_point(buffer)..range.end.to_point(buffer),
 text,
 )
 })
@@ -1614,7 +1614,7 @@ async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
 .into_iter()
 .map(|(range, text)| {
 (
-range.start.to_point(&buffer)..range.end.to_point(&buffer),
+range.start.to_point(buffer)..range.end.to_point(buffer),
 text,
 )
 })
@@ -2139,7 +2139,7 @@ async fn test_rescan_and_remote_updates(
 let tree = project.worktrees(cx).next().unwrap();
 tree.read(cx)
 .entry_for_path(path)
-.expect(&format!("no entry for path {}", path))
+.unwrap_or_else(|| panic!("no entry for path {}", path))
 .id
 })
 };
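`.expect(&format!(...))` builds the message String even when the value is present; clippy's `expect_fun_call` lint suggests `unwrap_or_else` with `panic!` instead. A standalone sketch:

```rust
fn main() {
    let entries = ["a/file1", "a/file2"];
    let path = "a/file2";

    // Before: .expect(&format!("no entry for path {}", path))
    let found = entries
        .iter()
        .find(|entry| **entry == path)
        .unwrap_or_else(|| panic!("no entry for path {}", path));
    assert_eq!(*found, "a/file2");
}
```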
@@ -2149,9 +2149,9 @@ async fn test_rescan_and_remote_updates(
 let buffer4 = buffer_for_path("b/c/file4", cx).await;
 let buffer5 = buffer_for_path("b/c/file5", cx).await;

-let file2_id = id_for_path("a/file2", &cx);
-let file3_id = id_for_path("a/file3", &cx);
-let file4_id = id_for_path("b/c/file4", &cx);
+let file2_id = id_for_path("a/file2", cx);
+let file3_id = id_for_path("a/file3", cx);
+let file4_id = id_for_path("b/c/file4", cx);

 // Create a remote copy of this worktree.
 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
@@ -2183,12 +2183,12 @@ async fn test_rescan_and_remote_updates(
 });

 // Rename and delete files and directories.
-tree.flush_fs_events(&cx).await;
+tree.flush_fs_events(cx).await;
 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
-tree.flush_fs_events(&cx).await;
+tree.flush_fs_events(cx).await;

 let expected_paths = vec![
 "a",
@@ -2209,9 +2209,9 @@ async fn test_rescan_and_remote_updates(
 expected_paths
 );

-assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
-assert_eq!(id_for_path("d/file3", &cx), file3_id);
-assert_eq!(id_for_path("d/file4", &cx), file4_id);
+assert_eq!(id_for_path("a/file2.new", cx), file2_id);
+assert_eq!(id_for_path("d/file3", cx), file3_id);
+assert_eq!(id_for_path("d/file4", cx), file4_id);

 assert_eq!(
 buffer2.read(app).file().unwrap().path().as_ref(),
@@ -2689,7 +2689,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
 message: "error 2 hint 2".to_string(),
 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
 location: lsp::Location {
-uri: buffer_uri.clone(),
+uri: buffer_uri,
 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
 },
 message: "original diagnostic".to_string(),
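Dropping `.clone()` on the final use of a value, as with `buffer_uri` above and several other spots in this commit, is the clippy `redundant_clone` pattern: the last use can take ownership directly. A minimal sketch:

```rust
fn main() {
    let buffer_uri = String::from("file:///a.rs");

    let first_use = buffer_uri.clone(); // an earlier use still needs a clone
    let last_use = buffer_uri; // the final use can move the value

    assert_eq!(first_use, last_use);
}
```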
@@ -53,7 +53,7 @@ impl SearchQuery {
 query = word_query
 }

-let multiline = query.contains("\n") || query.contains("\\n");
+let multiline = query.contains('\n') || query.contains("\\n");
 let regex = RegexBuilder::new(&query)
 .case_insensitive(!case_sensitive)
 .multi_line(multiline)
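Searching for a single character with a `&str` pattern triggers clippy's `single_char_pattern` lint; a `char` literal is clearer and skips the string machinery. A sketch:

```rust
fn main() {
    let query = "foo\nbar";

    // Before: query.contains("\n")
    let multiline = query.contains('\n') || query.contains("\\n");
    assert!(multiline);
}
```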
@@ -57,6 +57,7 @@ lazy_static! {
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
 pub struct WorktreeId(usize);

+#[allow(clippy::large_enum_variant)]
 pub enum Worktree {
 Local(LocalWorktree),
 Remote(RemoteWorktree),
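`clippy::large_enum_variant` fires when one variant is much bigger than the others, since every value of the enum pays for the largest one; the commit allows the lint here, while boxing the big variant is the usual alternative. An illustrative sketch with made-up sizes:

```rust
#[allow(clippy::large_enum_variant)]
enum Tree {
    Local([u8; 1024]), // large variant
    Remote(u8),        // small variant
}

fn main() {
    let small = Tree::Remote(1);
    let big = Tree::Local([0; 1024]);
    // Every Tree is as large as its largest variant; Box<[u8; 1024]>
    // in the Local variant would shrink the whole enum.
    assert!(std::mem::size_of::<Tree>() > 1024);
    drop((small, big));
}
```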
@@ -157,7 +158,7 @@ impl Worktree {
 cx: &mut AsyncAppContext,
 ) -> Result<ModelHandle<Self>> {
 let (tree, scan_states_tx) =
-LocalWorktree::new(client, path, visible, fs.clone(), next_entry_id, cx).await?;
+LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
 tree.update(cx, |tree, cx| {
 let tree = tree.as_local_mut().unwrap();
 let abs_path = tree.abs_path().clone();
@@ -229,7 +230,7 @@ impl Worktree {
 cx.spawn(|mut cx| {
 let this = worktree_handle.downgrade();
 async move {
-while let Some(_) = snapshot_updated_rx.recv().await {
+while (snapshot_updated_rx.recv().await).is_some() {
 if let Some(this) = this.upgrade(&cx) {
 this.update(&mut cx, |this, cx| {
 this.poll_snapshot(cx);
@@ -322,15 +323,15 @@ impl Worktree {
 }
 }

-pub fn diagnostic_summaries<'a>(
-&'a self,
-) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + 'a {
+pub fn diagnostic_summaries(
+&self,
+) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
 match self {
 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
 }
 .iter()
-.map(|(path, summary)| (path.0.clone(), summary.clone()))
+.map(|(path, summary)| (path.0.clone(), *summary))
 }

 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
@@ -342,7 +343,7 @@ impl Worktree {
 }

 impl LocalWorktree {
-async fn new(
+async fn create(
 client: Arc<Client>,
 path: impl Into<Arc<Path>>,
 visible: bool,
@@ -386,7 +387,7 @@ impl LocalWorktree {
 };
 if let Some(metadata) = metadata {
 let entry = Entry::new(
-path.into(),
+path,
 &metadata,
 &snapshot.next_entry_id,
 snapshot.root_char_bag,
@@ -651,7 +652,7 @@ impl LocalWorktree {
 let abs_path = self.absolutize(&entry.path);
 let delete = cx.background().spawn({
 let fs = self.fs.clone();
-let abs_path = abs_path.clone();
+let abs_path = abs_path;
 async move {
 if entry.is_file() {
 fs.remove_file(&abs_path, Default::default()).await
@@ -848,7 +849,7 @@ impl LocalWorktree {
 let rpc = self.client.clone();
 let worktree_id = cx.model_id() as u64;
 let maintain_remote_snapshot = cx.background().spawn({
-let rpc = rpc.clone();
+let rpc = rpc;
 let diagnostic_summaries = self.diagnostic_summaries.clone();
 async move {
 let mut prev_snapshot = match snapshots_rx.recv().await {
@@ -1002,10 +1003,9 @@ impl RemoteWorktree {
 warning_count: summary.warning_count as usize,
 };
 if summary.is_empty() {
-self.diagnostic_summaries.remove(&PathKey(path.clone()));
+self.diagnostic_summaries.remove(&PathKey(path));
 } else {
-self.diagnostic_summaries
-.insert(PathKey(path.clone()), summary);
+self.diagnostic_summaries.insert(PathKey(path), summary);
 }
 }

@@ -1513,7 +1513,7 @@ impl LocalSnapshot {

 let mut ignore_stack = IgnoreStack::none();
 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
-if ignore_stack.is_abs_path_ignored(&parent_abs_path, true) {
+if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
 ignore_stack = IgnoreStack::all();
 break;
 } else if let Some(ignore) = ignore {
@@ -1530,8 +1530,8 @@ impl LocalSnapshot {
 }

 async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
-let contents = fs.load(&abs_path).await?;
-let parent = abs_path.parent().unwrap_or(Path::new("/"));
+let contents = fs.load(abs_path).await?;
+let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
 let mut builder = GitignoreBuilder::new(parent);
 for line in contents.lines() {
 builder.add_line(Some(abs_path.into()), line)?;
@@ -1769,7 +1769,7 @@ impl language::LocalFile for File {
 .send(proto::BufferReloaded {
 project_id,
 buffer_id,
-version: serialize_version(&version),
+version: serialize_version(version),
 mtime: Some(mtime.into()),
 fingerprint,
 line_ending: serialize_line_ending(line_ending) as i32,
@@ -2285,7 +2285,7 @@ impl BackgroundScanner {
 snapshot.scan_id += 1;
 for event in &events {
 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
-snapshot.remove_path(&path);
+snapshot.remove_path(path);
 }
 }

@@ -2528,13 +2528,13 @@ impl WorktreeHandle for ModelHandle<Worktree> {
 fs.create_file(&root_path.join(filename), Default::default())
 .await
 .unwrap();
-tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_some())
+tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
 .await;

 fs.remove_file(&root_path.join(filename), Default::default())
 .await
 .unwrap();
-tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_none())
+tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
 .await;

 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
@@ -2726,7 +2726,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
 let kind = if entry.is_dir {
 EntryKind::Dir
 } else {
-let mut char_bag = root_char_bag.clone();
+let mut char_bag = *root_char_bag;
 char_bag.extend(
 String::from_utf8_lossy(&entry.path)
 .chars()
@@ -2738,7 +2738,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
 Ok(Entry {
 id: ProjectEntryId::from_proto(entry.id),
 kind,
-path: path.clone(),
+path,
 inode: entry.inode,
 mtime: mtime.into(),
 is_symlink: entry.is_symlink,
@@ -2955,7 +2955,7 @@ mod tests {
 .unwrap();
 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
 .await;
-tree.flush_fs_events(&cx).await;
+tree.flush_fs_events(cx).await;
 cx.read(|cx| {
 let tree = tree.read(cx);
 assert!(
@@ -2979,7 +2979,7 @@ mod tests {
 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
-tree.flush_fs_events(&cx).await;
+tree.flush_fs_events(cx).await;
 cx.read(|cx| {
 let tree = tree.read(cx);
 assert!(
@@ -3026,7 +3026,7 @@ mod tests {
 .unwrap();
 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
 .await;
-tree.flush_fs_events(&cx).await;
+tree.flush_fs_events(cx).await;

 tree.update(cx, |tree, cx| {
 tree.as_local().unwrap().write_file(
@@ -3052,8 +3052,8 @@ mod tests {
 tree.read_with(cx, |tree, _| {
 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
-assert_eq!(tracked.is_ignored, false);
-assert_eq!(ignored.is_ignored, true);
+assert!(!tracked.is_ignored);
+assert!(ignored.is_ignored);
 });
 }

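Comparing a boolean against `true`/`false` inside `assert_eq!` is what clippy's `bool_assert_comparison` lint (likely the trigger here) flags; plain `assert!` says the same thing. A sketch:

```rust
fn main() {
    let tracked_is_ignored = false;
    let ignored_is_ignored = true;

    // Before: assert_eq!(tracked_is_ignored, false);
    assert!(!tracked_is_ignored);
    assert!(ignored_is_ignored);
}
```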
@@ -3226,9 +3226,9 @@ mod tests {

 let mut ignore_contents = String::new();
 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
-write!(
+writeln!(
 ignore_contents,
-"{}\n",
+"{}",
 path_to_ignore
 .strip_prefix(&ignore_dir_path)?
 .to_str()
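`write!` with a format string that ends in `\n` is the clippy `write_with_newline` lint; `writeln!` states the intent directly. A standalone sketch:

```rust
use std::fmt::Write as _;

fn main() {
    let mut ignore_contents = String::new();
    for path in ["target", "node_modules"] {
        // Before: write!(ignore_contents, "{}\n", path)
        writeln!(ignore_contents, "{}", path).unwrap();
    }
    assert_eq!(ignore_contents, "target\nnode_modules\n");
}
```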
@@ -3363,7 +3363,7 @@ mod tests {
 .collect::<Vec<_>>();
 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);

-for (ignore_parent_abs_path, _) in &self.ignores_by_parent_abs_path {
+for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
 let ignore_parent_path =
 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
 assert!(self.entry_for_path(&ignore_parent_path).is_some());
@@ -3389,7 +3389,7 @@ mod tests {
 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
 }
 }
-paths.sort_by(|a, b| a.0.cmp(&b.0));
+paths.sort_by(|a, b| a.0.cmp(b.0));
 paths
 }
 }