Auto-fix clippy::collapsible_if violations (#36428)

Release Notes:

- N/A
This commit is contained in:
Piotr Osiewicz 2025-08-19 15:27:24 +02:00 committed by GitHub
parent 9e8ec72bd5
commit 8f567383e4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
281 changed files with 6628 additions and 7089 deletions

View file

@@ -1158,13 +1158,12 @@ impl Buffer {
base_buffer.edit(edits, None, cx)
});
if let Some(operation) = operation {
if let Some(BufferBranchState {
if let Some(operation) = operation
&& let Some(BufferBranchState {
merged_operations, ..
}) = &mut self.branch_state
{
merged_operations.push(operation);
}
{
merged_operations.push(operation);
}
}
@@ -1185,11 +1184,11 @@ impl Buffer {
};
let mut operation_to_undo = None;
if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
merged_operations.remove(ix);
operation_to_undo = Some(operation.timestamp);
}
if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
&& let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
{
merged_operations.remove(ix);
operation_to_undo = Some(operation.timestamp);
}
self.apply_ops([operation.clone()], cx);
@@ -1424,10 +1423,10 @@ impl Buffer {
.map(|info| info.language.clone())
.collect();
if languages.is_empty() {
if let Some(buffer_language) = self.language() {
languages.push(buffer_language.clone());
}
if languages.is_empty()
&& let Some(buffer_language) = self.language()
{
languages.push(buffer_language.clone());
}
languages
@@ -2589,10 +2588,10 @@ impl Buffer {
line_mode,
cursor_shape,
} => {
if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
if set.lamport_timestamp > lamport_timestamp {
return;
}
if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
&& set.lamport_timestamp > lamport_timestamp
{
return;
}
self.remote_selections.insert(
@@ -3365,8 +3364,8 @@ impl BufferSnapshot {
}
}
if let Some(range) = range {
if smallest_range_and_depth.as_ref().map_or(
if let Some(range) = range
&& smallest_range_and_depth.as_ref().map_or(
true,
|(smallest_range, smallest_range_depth)| {
if layer.depth > *smallest_range_depth {
@@ -3377,13 +3376,13 @@ impl BufferSnapshot {
false
}
},
) {
smallest_range_and_depth = Some((range, layer.depth));
scope = Some(LanguageScope {
language: layer.language.clone(),
override_id: layer.override_id(offset, &self.text),
});
}
)
{
smallest_range_and_depth = Some((range, layer.depth));
scope = Some(LanguageScope {
language: layer.language.clone(),
override_id: layer.override_id(offset, &self.text),
});
}
}
@@ -3499,17 +3498,17 @@ impl BufferSnapshot {
// If there is a candidate node on both sides of the (empty) range, then
// decide between the two by favoring a named node over an anonymous token.
// If both nodes are the same in that regard, favor the right one.
if let Some(right_node) = right_node {
if right_node.is_named() || !left_node.is_named() {
layer_result = right_node;
}
if let Some(right_node) = right_node
&& (right_node.is_named() || !left_node.is_named())
{
layer_result = right_node;
}
}
if let Some(previous_result) = &result {
if previous_result.byte_range().len() < layer_result.byte_range().len() {
continue;
}
if let Some(previous_result) = &result
&& previous_result.byte_range().len() < layer_result.byte_range().len()
{
continue;
}
result = Some(layer_result);
}
@@ -4081,10 +4080,10 @@ impl BufferSnapshot {
let mut result: Option<(Range<usize>, Range<usize>)> = None;
for pair in self.enclosing_bracket_ranges(range.clone()) {
if let Some(range_filter) = range_filter {
if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
continue;
}
if let Some(range_filter) = range_filter
&& !range_filter(pair.open_range.clone(), pair.close_range.clone())
{
continue;
}
let len = pair.close_range.end - pair.open_range.start;
@@ -4474,27 +4473,26 @@ impl BufferSnapshot {
current_word_start_ix = Some(ix);
}
if let Some(query_chars) = &query_chars {
if query_ix < query_len {
if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
query_ix += 1;
}
}
if let Some(query_chars) = &query_chars
&& query_ix < query_len
&& c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
{
query_ix += 1;
}
continue;
} else if let Some(word_start) = current_word_start_ix.take() {
if query_ix == query_len {
let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
let mut word_text = self.text_for_range(word_start..ix).peekable();
let first_char = word_text
.peek()
.and_then(|first_chunk| first_chunk.chars().next());
// Skip empty and "words" starting with digits as a heuristic to reduce useless completions
if !query.skip_digits
|| first_char.map_or(true, |first_char| !first_char.is_digit(10))
{
words.insert(word_text.collect(), word_range);
}
} else if let Some(word_start) = current_word_start_ix.take()
&& query_ix == query_len
{
let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
let mut word_text = self.text_for_range(word_start..ix).peekable();
let first_char = word_text
.peek()
.and_then(|first_chunk| first_chunk.chars().next());
// Skip empty and "words" starting with digits as a heuristic to reduce useless completions
if !query.skip_digits
|| first_char.map_or(true, |first_char| !first_char.is_digit(10))
{
words.insert(word_text.collect(), word_range);
}
}
query_ix = 0;
@@ -4607,17 +4605,17 @@ impl<'a> BufferChunks<'a> {
highlights
.stack
.retain(|(end_offset, _)| *end_offset > range.start);
if let Some(capture) = &highlights.next_capture {
if range.start >= capture.node.start_byte() {
let next_capture_end = capture.node.end_byte();
if range.start < next_capture_end {
highlights.stack.push((
next_capture_end,
highlights.highlight_maps[capture.grammar_index].get(capture.index),
));
}
highlights.next_capture.take();
if let Some(capture) = &highlights.next_capture
&& range.start >= capture.node.start_byte()
{
let next_capture_end = capture.node.end_byte();
if range.start < next_capture_end {
highlights.stack.push((
next_capture_end,
highlights.highlight_maps[capture.grammar_index].get(capture.index),
));
}
highlights.next_capture.take();
}
} else if let Some(snapshot) = self.buffer_snapshot {
let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
@@ -4642,33 +4640,33 @@ impl<'a> BufferChunks<'a> {
}
fn initialize_diagnostic_endpoints(&mut self) {
if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
if let Some(buffer) = self.buffer_snapshot {
let mut diagnostic_endpoints = Vec::new();
for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.start,
is_start: true,
severity: entry.diagnostic.severity,
is_unnecessary: entry.diagnostic.is_unnecessary,
underline: entry.diagnostic.underline,
});
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.end,
is_start: false,
severity: entry.diagnostic.severity,
is_unnecessary: entry.diagnostic.is_unnecessary,
underline: entry.diagnostic.underline,
});
}
diagnostic_endpoints
.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
*diagnostics = diagnostic_endpoints.into_iter().peekable();
self.hint_depth = 0;
self.error_depth = 0;
self.warning_depth = 0;
self.information_depth = 0;
if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
&& let Some(buffer) = self.buffer_snapshot
{
let mut diagnostic_endpoints = Vec::new();
for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.start,
is_start: true,
severity: entry.diagnostic.severity,
is_unnecessary: entry.diagnostic.is_unnecessary,
underline: entry.diagnostic.underline,
});
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.end,
is_start: false,
severity: entry.diagnostic.severity,
is_unnecessary: entry.diagnostic.is_unnecessary,
underline: entry.diagnostic.underline,
});
}
diagnostic_endpoints
.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
*diagnostics = diagnostic_endpoints.into_iter().peekable();
self.hint_depth = 0;
self.error_depth = 0;
self.warning_depth = 0;
self.information_depth = 0;
}
}
@@ -4779,11 +4777,11 @@ impl<'a> Iterator for BufferChunks<'a> {
.min(next_capture_start)
.min(next_diagnostic_endpoint);
let mut highlight_id = None;
if let Some(highlights) = self.highlights.as_ref() {
if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
chunk_end = chunk_end.min(*parent_capture_end);
highlight_id = Some(*parent_highlight_id);
}
if let Some(highlights) = self.highlights.as_ref()
&& let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
{
chunk_end = chunk_end.min(*parent_capture_end);
highlight_id = Some(*parent_highlight_id);
}
let slice =
@@ -4977,11 +4975,12 @@ pub(crate) fn contiguous_ranges(
std::iter::from_fn(move || {
loop {
if let Some(value) = values.next() {
if let Some(range) = &mut current_range {
if value == range.end && range.len() < max_len {
range.end += 1;
continue;
}
if let Some(range) = &mut current_range
&& value == range.end
&& range.len() < max_len
{
range.end += 1;
continue;
}
let prev_range = current_range.clone();
@@ -5049,10 +5048,10 @@ impl CharClassifier {
} else {
scope.word_characters()
};
if let Some(characters) = characters {
if characters.contains(&c) {
return CharKind::Word;
}
if let Some(characters) = characters
&& characters.contains(&c)
{
return CharKind::Word;
}
}

View file

@@ -329,8 +329,8 @@ pub trait LspAdapter: 'static + Send + Sync {
// We only want to cache when we fall back to the global one,
// because we don't want to download and overwrite our global one
// for each worktree we might have open.
if binary_options.allow_path_lookup {
if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), toolchains, cx).await {
if binary_options.allow_path_lookup
&& let Some(binary) = self.check_if_user_installed(delegate.as_ref(), toolchains, cx).await {
log::info!(
"found user-installed language server for {}. path: {:?}, arguments: {:?}",
self.name().0,
@@ -339,7 +339,6 @@
);
return Ok(binary);
}
}
anyhow::ensure!(binary_options.allow_binary_download, "downloading language servers disabled");
@@ -1776,10 +1775,10 @@ impl Language {
BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None)
{
let end_offset = offset + chunk.text.len();
if let Some(highlight_id) = chunk.syntax_highlight_id {
if !highlight_id.is_default() {
result.push((offset..end_offset, highlight_id));
}
if let Some(highlight_id) = chunk.syntax_highlight_id
&& !highlight_id.is_default()
{
result.push((offset..end_offset, highlight_id));
}
offset = end_offset;
}
@@ -1796,11 +1795,11 @@ impl Language {
}
pub fn set_theme(&self, theme: &SyntaxTheme) {
if let Some(grammar) = self.grammar.as_ref() {
if let Some(highlights_query) = &grammar.highlights_query {
*grammar.highlight_map.lock() =
HighlightMap::new(highlights_query.capture_names(), theme);
}
if let Some(grammar) = self.grammar.as_ref()
&& let Some(highlights_query) = &grammar.highlights_query
{
*grammar.highlight_map.lock() =
HighlightMap::new(highlights_query.capture_names(), theme);
}
}
@@ -1920,11 +1919,11 @@ impl LanguageScope {
.enumerate()
.map(move |(ix, bracket)| {
let mut is_enabled = true;
if let Some(next_disabled_ix) = disabled_ids.first() {
if ix == *next_disabled_ix as usize {
disabled_ids = &disabled_ids[1..];
is_enabled = false;
}
if let Some(next_disabled_ix) = disabled_ids.first()
&& ix == *next_disabled_ix as usize
{
disabled_ids = &disabled_ids[1..];
is_enabled = false;
}
(bracket, is_enabled)
})

View file

@@ -414,42 +414,42 @@ impl SyntaxSnapshot {
.collect::<Vec<_>>();
self.reparse_with_ranges(text, root_language.clone(), edit_ranges, registry.as_ref());
if let Some(registry) = registry {
if registry.version() != self.language_registry_version {
let mut resolved_injection_ranges = Vec::new();
let mut cursor = self
.layers
.filter::<_, ()>(text, |summary| summary.contains_unknown_injections);
if let Some(registry) = registry
&& registry.version() != self.language_registry_version
{
let mut resolved_injection_ranges = Vec::new();
let mut cursor = self
.layers
.filter::<_, ()>(text, |summary| summary.contains_unknown_injections);
cursor.next();
while let Some(layer) = cursor.item() {
let SyntaxLayerContent::Pending { language_name } = &layer.content else {
unreachable!()
};
if registry
.language_for_name_or_extension(language_name)
.now_or_never()
.and_then(|language| language.ok())
.is_some()
{
let range = layer.range.to_offset(text);
log::trace!("reparse range {range:?} for language {language_name:?}");
resolved_injection_ranges.push(range);
}
cursor.next();
while let Some(layer) = cursor.item() {
let SyntaxLayerContent::Pending { language_name } = &layer.content else {
unreachable!()
};
if registry
.language_for_name_or_extension(language_name)
.now_or_never()
.and_then(|language| language.ok())
.is_some()
{
let range = layer.range.to_offset(text);
log::trace!("reparse range {range:?} for language {language_name:?}");
resolved_injection_ranges.push(range);
}
cursor.next();
}
drop(cursor);
if !resolved_injection_ranges.is_empty() {
self.reparse_with_ranges(
text,
root_language,
resolved_injection_ranges,
Some(&registry),
);
}
self.language_registry_version = registry.version();
}
drop(cursor);
if !resolved_injection_ranges.is_empty() {
self.reparse_with_ranges(
text,
root_language,
resolved_injection_ranges,
Some(&registry),
);
}
self.language_registry_version = registry.version();
}
self.update_count += 1;
@@ -1065,10 +1065,10 @@ impl<'a> SyntaxMapCaptures<'a> {
pub fn set_byte_range(&mut self, range: Range<usize>) {
for layer in &mut self.layers {
layer.captures.set_byte_range(range.clone());
if let Some(capture) = &layer.next_capture {
if capture.node.end_byte() > range.start {
continue;
}
if let Some(capture) = &layer.next_capture
&& capture.node.end_byte() > range.start
{
continue;
}
layer.advance();
}
@@ -1277,11 +1277,11 @@ fn join_ranges(
(None, None) => break,
};
if let Some(last) = result.last_mut() {
if range.start <= last.end {
last.end = last.end.max(range.end);
continue;
}
if let Some(last) = result.last_mut()
&& range.start <= last.end
{
last.end = last.end.max(range.end);
continue;
}
result.push(range);
}
@@ -1330,14 +1330,13 @@ fn get_injections(
// if there currently no matches for that injection.
combined_injection_ranges.clear();
for pattern in &config.patterns {
if let (Some(language_name), true) = (pattern.language.as_ref(), pattern.combined) {
if let Some(language) = language_registry
if let (Some(language_name), true) = (pattern.language.as_ref(), pattern.combined)
&& let Some(language) = language_registry
.language_for_name_or_extension(language_name)
.now_or_never()
.and_then(|language| language.ok())
{
combined_injection_ranges.insert(language.id, (language, Vec::new()));
}
{
combined_injection_ranges.insert(language.id, (language, Vec::new()));
}
}
@@ -1357,10 +1356,11 @@ fn get_injections(
content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte;
// Avoid duplicate matches if two changed ranges intersect the same injection.
if let Some((prev_pattern_ix, prev_range)) = &prev_match {
if mat.pattern_index == *prev_pattern_ix && content_range == *prev_range {
continue;
}
if let Some((prev_pattern_ix, prev_range)) = &prev_match
&& mat.pattern_index == *prev_pattern_ix
&& content_range == *prev_range
{
continue;
}
prev_match = Some((mat.pattern_index, content_range.clone()));

View file

@@ -189,11 +189,11 @@ fn tokenize(text: &str, language_scope: Option<LanguageScope>) -> impl Iterator<
while let Some((ix, c)) = chars.next() {
let mut token = None;
let kind = classifier.kind(c);
if let Some((prev_char, prev_kind)) = prev {
if kind != prev_kind || (kind == CharKind::Punctuation && c != prev_char) {
token = Some(&text[start_ix..ix]);
start_ix = ix;
}
if let Some((prev_char, prev_kind)) = prev
&& (kind != prev_kind || (kind == CharKind::Punctuation && c != prev_char))
{
token = Some(&text[start_ix..ix]);
start_ix = ix;
}
prev = Some((c, kind));
if token.is_some() {