Fix a bunch of other low-hanging style lints (#36498)

- **Fix a bunch of low-hanging style lints like unnecessary-return**
- **Fix single worktree violation**
- **And the rest**

Release Notes:

- N/A
This commit is contained in:
Piotr Osiewicz 2025-08-19 21:26:17 +02:00 committed by GitHub
parent df9c2aefb1
commit 05fc0c432c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
239 changed files with 854 additions and 1015 deletions

View file

@ -234,7 +234,7 @@ impl RemoteBufferStore {
}
}
}
return Ok(None);
Ok(None)
}
pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
@ -1313,10 +1313,7 @@ impl BufferStore {
let new_path = file.path.clone();
buffer.file_updated(Arc::new(file), cx);
if old_file
.as_ref()
.map_or(true, |old| *old.path() != new_path)
{
if old_file.as_ref().is_none_or(|old| *old.path() != new_path) {
Some(old_file)
} else {
None

View file

@ -102,7 +102,7 @@ fn parse(str: &str, mode: ParseMode) -> Option<Hsla> {
};
}
return None;
None
}
fn parse_component(value: &str, max: f32) -> Option<f32> {

View file

@ -146,7 +146,7 @@ impl DapLocator for CargoLocator {
let is_test = build_config
.args
.first()
.map_or(false, |arg| arg == "test" || arg == "t");
.is_some_and(|arg| arg == "test" || arg == "t");
let executables = output
.lines()

View file

@ -28,9 +28,7 @@ impl DapLocator for PythonLocator {
let valid_program = build_config.command.starts_with("$ZED_")
|| Path::new(&build_config.command)
.file_name()
.map_or(false, |name| {
name.to_str().is_some_and(|path| path.starts_with("python"))
});
.is_some_and(|name| name.to_str().is_some_and(|path| path.starts_with("python")));
if !valid_program || build_config.args.iter().any(|arg| arg == "-c") {
// We cannot debug selections.
return None;

View file

@ -329,7 +329,7 @@ impl Iterator for MemoryIterator {
}
if !self.fetch_next_page() {
self.start += 1;
return Some(MemoryCell(None));
Some(MemoryCell(None))
} else {
self.next()
}

View file

@ -431,7 +431,7 @@ impl RunningMode {
let should_send_exception_breakpoints = capabilities
.exception_breakpoint_filters
.as_ref()
.map_or(false, |filters| !filters.is_empty())
.is_some_and(|filters| !filters.is_empty())
|| !configuration_done_supported;
let supports_exception_filters = capabilities
.supports_exception_filter_options
@ -710,9 +710,7 @@ where
T: LocalDapCommand + PartialEq + Eq + Hash,
{
fn dyn_eq(&self, rhs: &dyn CacheableCommand) -> bool {
(rhs as &dyn Any)
.downcast_ref::<Self>()
.map_or(false, |rhs| self == rhs)
(rhs as &dyn Any).downcast_ref::<Self>() == Some(self)
}
fn dyn_hash(&self, mut hasher: &mut dyn Hasher) {
@ -1085,7 +1083,7 @@ impl Session {
})
.detach();
return tx;
tx
}
pub fn is_started(&self) -> bool {

View file

@ -781,9 +781,7 @@ impl GitStore {
let is_unmerged = self
.repository_and_path_for_buffer_id(buffer_id, cx)
.map_or(false, |(repo, path)| {
repo.read(cx).snapshot.has_conflict(&path)
});
.is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
let git_store = cx.weak_entity();
let buffer_git_state = self
.diffs
@ -2501,14 +2499,14 @@ impl BufferGitState {
pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
if *self.recalculating_tx.borrow() {
let mut rx = self.recalculating_tx.subscribe();
return Some(async move {
Some(async move {
loop {
let is_recalculating = rx.recv().await;
if is_recalculating != Some(true) {
break;
}
}
});
})
} else {
None
}
@ -2879,7 +2877,7 @@ impl RepositorySnapshot {
self.merge.conflicted_paths.contains(repo_path);
let has_conflict_currently = self
.status_for_path(repo_path)
.map_or(false, |entry| entry.status.is_conflicted());
.is_some_and(|entry| entry.status.is_conflicted());
had_conflict_on_last_merge_head_change || has_conflict_currently
}
@ -3531,7 +3529,7 @@ impl Repository {
&& buffer
.read(cx)
.file()
.map_or(false, |file| file.disk_state().exists())
.is_some_and(|file| file.disk_state().exists())
{
save_futures.push(buffer_store.save_buffer(buffer, cx));
}
@ -3597,7 +3595,7 @@ impl Repository {
&& buffer
.read(cx)
.file()
.map_or(false, |file| file.disk_state().exists())
.is_some_and(|file| file.disk_state().exists())
{
save_futures.push(buffer_store.save_buffer(buffer, cx));
}

View file

@ -3447,9 +3447,7 @@ impl LspCommand for GetCodeLens {
.server_capabilities
.code_lens_provider
.as_ref()
.map_or(false, |code_lens_options| {
code_lens_options.resolve_provider.unwrap_or(false)
})
.is_some_and(|code_lens_options| code_lens_options.resolve_provider.unwrap_or(false))
}
fn to_lsp(

View file

@ -1038,7 +1038,7 @@ impl LocalLspStore {
if let Some(LanguageServerState::Running { server, .. }) =
self.language_servers.get(&state.id)
{
return Some(server);
Some(server)
} else {
None
}
@ -1879,7 +1879,7 @@ impl LocalLspStore {
) -> Result<Vec<(Range<Anchor>, Arc<str>)>> {
let capabilities = &language_server.capabilities();
let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
if range_formatting_provider.map_or(false, |provider| provider == &OneOf::Left(false)) {
if range_formatting_provider == Some(&OneOf::Left(false)) {
anyhow::bail!(
"{} language server does not support range formatting",
language_server.name()
@ -2642,7 +2642,7 @@ impl LocalLspStore {
this.request_lsp(buffer.clone(), server, request, cx)
})?
.await?;
return Ok(actions);
Ok(actions)
}
pub async fn execute_code_actions_on_server(
@ -2718,7 +2718,7 @@ impl LocalLspStore {
}
}
}
return Ok(());
Ok(())
}
pub async fn deserialize_text_edits(
@ -2957,11 +2957,11 @@ impl LocalLspStore {
.update(cx, |this, cx| {
let path = buffer_to_edit.read(cx).project_path(cx);
let active_entry = this.active_entry;
let is_active_entry = path.clone().map_or(false, |project_path| {
let is_active_entry = path.clone().is_some_and(|project_path| {
this.worktree_store
.read(cx)
.entry_for_path(&project_path, cx)
.map_or(false, |entry| Some(entry.id) == active_entry)
.is_some_and(|entry| Some(entry.id) == active_entry)
});
let local = this.as_local_mut().unwrap();
@ -4038,7 +4038,7 @@ impl LspStore {
servers.push((json_adapter, json_server, json_delegate));
}
return Some(servers);
Some(servers)
})
.ok()
.flatten();
@ -4050,7 +4050,7 @@ impl LspStore {
let Ok(Some((fs, _))) = this.read_with(cx, |this, _| {
let local = this.as_local()?;
let toolchain_store = local.toolchain_store().clone();
return Some((local.fs.clone(), toolchain_store));
Some((local.fs.clone(), toolchain_store))
}) else {
return;
};
@ -4312,9 +4312,10 @@ impl LspStore {
local_store.unregister_buffer_from_language_servers(buffer_entity, &file_url, cx);
}
buffer_entity.update(cx, |buffer, cx| {
if buffer.language().map_or(true, |old_language| {
!Arc::ptr_eq(old_language, &new_language)
}) {
if buffer
.language()
.is_none_or(|old_language| !Arc::ptr_eq(old_language, &new_language))
{
buffer.set_language(Some(new_language.clone()), cx);
}
});
@ -4514,7 +4515,7 @@ impl LspStore {
if !request.check_capabilities(language_server.adapter_server_capabilities()) {
return Task::ready(Ok(Default::default()));
}
return cx.spawn(async move |this, cx| {
cx.spawn(async move |this, cx| {
let lsp_request = language_server.request::<R::LspRequest>(lsp_params);
let id = lsp_request.id();
@ -4573,7 +4574,7 @@ impl LspStore {
)
.await;
response
});
})
}
fn on_settings_changed(&mut self, cx: &mut Context<Self>) {
@ -7297,7 +7298,7 @@ impl LspStore {
include_ignored
|| worktree
.entry_for_path(path.as_ref())
.map_or(false, |entry| !entry.is_ignored)
.is_some_and(|entry| !entry.is_ignored)
})
.flat_map(move |(path, summaries)| {
summaries.iter().map(move |(server_id, summary)| {
@ -9341,9 +9342,7 @@ impl LspStore {
let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
.as_ref()
.map_or(false, |disk_based_token| {
token.starts_with(disk_based_token)
});
.is_some_and(|disk_based_token| token.starts_with(disk_based_token));
match progress {
lsp::WorkDoneProgress::Begin(report) => {
@ -10676,7 +10675,7 @@ impl LspStore {
let is_supporting = diagnostic
.related_information
.as_ref()
.map_or(false, |infos| {
.is_some_and(|infos| {
infos.iter().any(|info| {
primary_diagnostic_group_ids.contains_key(&(
source,
@ -10689,11 +10688,11 @@ impl LspStore {
let is_unnecessary = diagnostic
.tags
.as_ref()
.map_or(false, |tags| tags.contains(&DiagnosticTag::UNNECESSARY));
.is_some_and(|tags| tags.contains(&DiagnosticTag::UNNECESSARY));
let underline = self
.language_server_adapter_for_id(server_id)
.map_or(true, |adapter| adapter.underline_diagnostic(diagnostic));
.is_none_or(|adapter| adapter.underline_diagnostic(diagnostic));
if is_supporting {
supporting_diagnostics.insert(
@ -10703,7 +10702,7 @@ impl LspStore {
} else {
let group_id = post_inc(&mut self.as_local_mut().unwrap().next_diagnostic_group_id);
let is_disk_based =
source.map_or(false, |source| disk_based_sources.contains(source));
source.is_some_and(|source| disk_based_sources.contains(source));
sources_by_group_id.insert(group_id, source);
primary_diagnostic_group_ids
@ -12409,7 +12408,7 @@ impl TryFrom<&FileOperationFilter> for RenameActionPredicate {
ops.pattern
.options
.as_ref()
.map_or(false, |ops| ops.ignore_case.unwrap_or(false)),
.is_some_and(|ops| ops.ignore_case.unwrap_or(false)),
)
.build()?
.compile_matcher(),
@ -12424,7 +12423,7 @@ struct RenameActionPredicate {
impl RenameActionPredicate {
// Returns true if language server should be notified
fn eval(&self, path: &str, is_dir: bool) -> bool {
self.kind.as_ref().map_or(true, |kind| {
self.kind.as_ref().is_none_or(|kind| {
let expected_kind = if is_dir {
FileOperationPatternKind::Folder
} else {

View file

@ -218,10 +218,8 @@ impl ManifestQueryDelegate {
impl ManifestDelegate for ManifestQueryDelegate {
fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool {
self.worktree.entry_for_path(path).map_or(false, |entry| {
is_dir.map_or(true, |is_required_to_be_dir| {
is_required_to_be_dir == entry.is_dir()
})
self.worktree.entry_for_path(path).is_some_and(|entry| {
is_dir.is_none_or(|is_required_to_be_dir| is_required_to_be_dir == entry.is_dir())
})
}

View file

@ -314,7 +314,7 @@ impl LanguageServerTree {
pub(crate) fn remove_nodes(&mut self, ids: &BTreeSet<LanguageServerId>) {
for (_, servers) in &mut self.instances {
for (_, nodes) in &mut servers.roots {
nodes.retain(|_, (node, _)| node.id.get().map_or(true, |id| !ids.contains(id)));
nodes.retain(|_, (node, _)| node.id.get().is_none_or(|id| !ids.contains(id)));
}
}
}

View file

@ -1897,7 +1897,7 @@ impl Project {
return true;
}
return false;
false
}
pub fn ssh_connection_string(&self, cx: &App) -> Option<SharedString> {
@ -1905,7 +1905,7 @@ impl Project {
return Some(ssh_state.read(cx).connection_string().into());
}
return None;
None
}
pub fn ssh_connection_state(&self, cx: &App) -> Option<remote::ConnectionState> {
@ -4134,7 +4134,7 @@ impl Project {
}
})
} else {
return Task::ready(None);
Task::ready(None)
}
}
@ -5187,7 +5187,7 @@ impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
}
fn prefix(&self) -> Arc<str> {
if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
if self.snapshot.root_entry().is_some_and(|e| e.is_file()) {
self.snapshot.root_name().into()
} else if self.include_root_name {
format!("{}{}", self.snapshot.root_name(), std::path::MAIN_SEPARATOR).into()
@ -5397,7 +5397,7 @@ impl Completion {
self.source
// `lsp::CompletionListItemDefaults` has `insert_text_format` field
.lsp_completion(true)
.map_or(false, |lsp_completion| {
.is_some_and(|lsp_completion| {
lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
})
}
@ -5453,9 +5453,10 @@ fn provide_inline_values(
.collect::<String>();
let point = snapshot.offset_to_point(capture_range.end);
while scopes.last().map_or(false, |scope: &Range<_>| {
!scope.contains(&capture_range.start)
}) {
while scopes
.last()
.is_some_and(|scope: &Range<_>| !scope.contains(&capture_range.start))
{
scopes.pop();
}
@ -5465,7 +5466,7 @@ fn provide_inline_values(
let scope = if scopes
.last()
.map_or(true, |scope| !scope.contains(&active_debug_line_offset))
.is_none_or(|scope| !scope.contains(&active_debug_line_offset))
{
VariableScope::Global
} else {

View file

@ -188,9 +188,9 @@ pub struct DiagnosticsSettings {
impl DiagnosticsSettings {
pub fn fetch_cargo_diagnostics(&self) -> bool {
self.cargo.as_ref().map_or(false, |cargo_diagnostics| {
cargo_diagnostics.fetch_cargo_diagnostics
})
self.cargo
.as_ref()
.is_some_and(|cargo_diagnostics| cargo_diagnostics.fetch_cargo_diagnostics)
}
}

View file

@ -2947,9 +2947,10 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
) -> Vec<(String, Option<DiagnosticSeverity>)> {
let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
for chunk in buffer.snapshot().chunks(range, true) {
if chunks.last().map_or(false, |prev_chunk| {
prev_chunk.1 == chunk.diagnostic_severity
}) {
if chunks
.last()
.is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
{
chunks.last_mut().unwrap().0.push_str(chunk.text);
} else {
chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));

View file

@ -99,7 +99,7 @@ impl Project {
}
}
return None;
None
}
pub fn create_terminal(
@ -518,7 +518,7 @@ impl Project {
smol::block_on(fs.metadata(&bin_path))
.ok()
.flatten()
.map_or(false, |meta| meta.is_dir)
.is_some_and(|meta| meta.is_dir)
})
}