chore: Fix several style lints (#17488)
It's not comprehensive enough to start linting on the `style` group yet, but hey, it's a start.

Release Notes:

- N/A
parent 93249fc82b
commit e6c1c51b37

361 changed files with 3530 additions and 3587 deletions
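For context, the clippy `style` group mentioned in the commit message can be opted into per crate with a crate-level attribute. The sketch below is illustrative only; the attribute form is just one option, and the project may configure its lints differently (e.g. via Cargo metadata or CI flags):

    // Illustrative sketch: opting a crate into clippy's `style` lint group.
    #![warn(clippy::style)]

    use std::collections::HashMap;

    fn main() {
        // With the group enabled, clippy nudges code toward the idioms applied in
        // the hunks below, e.g. `entry(..).or_default()` over `or_insert(Vec::new())`.
        let mut map: HashMap<u32, Vec<u32>> = HashMap::new();
        map.entry(1).or_default().push(42);
        println!("{map:?}");
    }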
@@ -595,7 +595,7 @@ impl MultiBuffer {
             if start_excerpt.id == end_excerpt.id {
                 buffer_edits
                     .entry(start_excerpt.buffer_id)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push(BufferEdit {
                         range: buffer_start..buffer_end,
                         new_text,
@@ -618,7 +618,7 @@ impl MultiBuffer {
                     ..buffer_end;
                 buffer_edits
                     .entry(start_excerpt.buffer_id)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push(BufferEdit {
                         range: start_excerpt_range,
                         new_text: new_text.clone(),
@@ -627,7 +627,7 @@ impl MultiBuffer {
                     });
                 buffer_edits
                     .entry(end_excerpt.buffer_id)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push(BufferEdit {
                         range: end_excerpt_range,
                         new_text: new_text.clone(),
@@ -643,7 +643,7 @@ impl MultiBuffer {
                 }
                 buffer_edits
                     .entry(excerpt.buffer_id)
-                    .or_insert(Vec::new())
+                    .or_default()
                     .push(BufferEdit {
                         range: excerpt.range.context.to_offset(&excerpt.buffer),
                         new_text: new_text.clone(),
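The four hunks above are the same one-line cleanup: on a map's entry API, `.or_insert(Vec::new())` becomes `.or_default()`. A minimal standalone sketch of the pattern, using a hypothetical `Edit` struct rather than Zed's real `BufferEdit` type:

    use std::collections::HashMap;
    use std::ops::Range;

    // Hypothetical stand-in for the edit payload pushed in the hunks above.
    struct Edit {
        range: Range<usize>,
        new_text: String,
    }

    fn main() {
        let mut buffer_edits: HashMap<u64, Vec<Edit>> = HashMap::new();

        // Before: passes an explicit empty Vec to fill a vacant entry.
        buffer_edits.entry(1).or_insert(Vec::new()).push(Edit {
            range: 0..5,
            new_text: "hello".into(),
        });

        // After: `or_default()` says the same thing via `Vec::default()`,
        // which is what the lint fix switches to.
        buffer_edits.entry(2).or_default().push(Edit {
            range: 5..10,
            new_text: "world".into(),
        });

        assert_eq!(buffer_edits.len(), 2);
    }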
@@ -844,7 +844,7 @@ impl MultiBuffer {
         let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>();

         for (buffer_id, buffer_transaction) in &transaction.buffer_transactions {
-            let Some(buffer_state) = buffers.get(&buffer_id) else {
+            let Some(buffer_state) = buffers.get(buffer_id) else {
                 continue;
             };

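This hunk drops a needless borrow: `buffer_id` already comes out of the iterator as a reference, so `buffers.get(buffer_id)` is enough. A small sketch of the same situation with hypothetical maps (not Zed's types):

    use std::collections::HashMap;

    fn main() {
        let transactions: HashMap<u64, &str> = HashMap::from([(1, "tx-a"), (2, "tx-b")]);
        let buffers: HashMap<u64, &str> = HashMap::from([(1, "buffer one")]);

        // Iterating `&transactions` already yields `&u64` keys.
        for (buffer_id, _transaction) in &transactions {
            // Writing `buffers.get(&buffer_id)` here would add a redundant `&`
            // (it still compiles, which is why it is a lint, not an error).
            let Some(buffer_state) = buffers.get(buffer_id) else {
                continue;
            };
            println!("{buffer_id}: {buffer_state}");
        }
    }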
@@ -891,26 +891,24 @@ impl MultiBuffer {
                 buffer.update(cx, |buffer, _| {
                     buffer.merge_transactions(transaction, destination)
                 });
-            } else {
-                if let Some(transaction) = self.history.forget(transaction) {
-                    if let Some(destination) = self.history.transaction_mut(destination) {
-                        for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions {
-                            if let Some(destination_buffer_transaction_id) =
-                                destination.buffer_transactions.get(&buffer_id)
-                            {
-                                if let Some(state) = self.buffers.borrow().get(&buffer_id) {
-                                    state.buffer.update(cx, |buffer, _| {
-                                        buffer.merge_transactions(
-                                            buffer_transaction_id,
-                                            *destination_buffer_transaction_id,
-                                        )
-                                    });
-                                }
-                            } else {
-                                destination
-                                    .buffer_transactions
-                                    .insert(buffer_id, buffer_transaction_id);
-                            }
+            } else if let Some(transaction) = self.history.forget(transaction) {
+                if let Some(destination) = self.history.transaction_mut(destination) {
+                    for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions {
+                        if let Some(destination_buffer_transaction_id) =
+                            destination.buffer_transactions.get(&buffer_id)
+                        {
+                            if let Some(state) = self.buffers.borrow().get(&buffer_id) {
+                                state.buffer.update(cx, |buffer, _| {
+                                    buffer.merge_transactions(
+                                        buffer_transaction_id,
+                                        *destination_buffer_transaction_id,
+                                    )
+                                });
+                            }
+                        } else {
+                            destination
+                                .buffer_transactions
+                                .insert(buffer_id, buffer_transaction_id);
                         }
                     }
                 }
@@ -2202,10 +2200,8 @@ impl MultiBuffer {
                 if excerpt.locator <= Locator::min() {
                     panic!("invalid first excerpt locator {:?}", excerpt.locator);
                 }
-            } else {
-                if excerpt.locator <= excerpts[ix - 1].locator {
-                    panic!("excerpts are out-of-order: {:?}", excerpts);
-                }
-            }
+            } else if excerpt.locator <= excerpts[ix - 1].locator {
+                panic!("excerpts are out-of-order: {:?}", excerpts);
+            }
         }

@@ -2214,10 +2210,8 @@ impl MultiBuffer {
                 if entry.id.cmp(&ExcerptId::min(), &snapshot).is_le() {
                     panic!("invalid first excerpt id {:?}", entry.id);
                 }
-            } else {
-                if entry.id <= excerpt_ids[ix - 1].id {
-                    panic!("excerpt ids are out-of-order: {:?}", excerpt_ids);
-                }
-            }
+            } else if entry.id <= excerpt_ids[ix - 1].id {
+                panic!("excerpt ids are out-of-order: {:?}", excerpt_ids);
+            }
         }
     }
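The three hunks above (the transaction-merging branch and the two invariant checks) all collapse an `else { if … }` into `else if`, removing one level of nesting without changing behaviour. A minimal sketch of the shape of the change, with made-up condition and function names:

    fn check(ix: usize, value: i32, previous: i32) {
        // Before: an `if` as the sole statement of an `else` block.
        if ix == 0 {
            assert!(value >= 0, "invalid first value {value}");
        } else {
            if value <= previous {
                panic!("values are out-of-order: {value} <= {previous}");
            }
        }
    }

    fn check_collapsed(ix: usize, value: i32, previous: i32) {
        // After: the same logic as `else if`, one indentation level shallower.
        if ix == 0 {
            assert!(value >= 0, "invalid first value {value}");
        } else if value <= previous {
            panic!("values are out-of-order: {value} <= {previous}");
        }
    }

    fn main() {
        check(0, 1, 0);
        check_collapsed(1, 2, 1);
    }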
@@ -3336,28 +3330,25 @@ impl MultiBufferSnapshot {
     ) -> impl Iterator<Item = Range<usize>> + 'a {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         self.excerpts_for_range(range.clone())
-            .filter_map(move |(excerpt, excerpt_offset)| {
-                redaction_enabled(excerpt.buffer.file()).then(move || {
-                    let excerpt_buffer_start =
-                        excerpt.range.context.start.to_offset(&excerpt.buffer);
+            .filter(move |&(excerpt, _)| redaction_enabled(excerpt.buffer.file()))
+            .flat_map(move |(excerpt, excerpt_offset)| {
+                let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer);

-                    excerpt
-                        .buffer
-                        .redacted_ranges(excerpt.range.context.clone())
-                        .map(move |mut redacted_range| {
-                            // Re-base onto the excerpts coordinates in the multibuffer
-                            redacted_range.start = excerpt_offset
-                                + redacted_range.start.saturating_sub(excerpt_buffer_start);
-                            redacted_range.end = excerpt_offset
-                                + redacted_range.end.saturating_sub(excerpt_buffer_start);
+                excerpt
+                    .buffer
+                    .redacted_ranges(excerpt.range.context.clone())
+                    .map(move |mut redacted_range| {
+                        // Re-base onto the excerpts coordinates in the multibuffer
+                        redacted_range.start = excerpt_offset
+                            + redacted_range.start.saturating_sub(excerpt_buffer_start);
+                        redacted_range.end = excerpt_offset
+                            + redacted_range.end.saturating_sub(excerpt_buffer_start);

-                            redacted_range
-                        })
-                        .skip_while(move |redacted_range| redacted_range.end < range.start)
-                        .take_while(move |redacted_range| redacted_range.start < range.end)
-                })
+                        redacted_range
+                    })
+                    .skip_while(move |redacted_range| redacted_range.end < range.start)
+                    .take_while(move |redacted_range| redacted_range.start < range.end)
             })
-            .flatten()
     }

     pub fn runnable_ranges(
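This hunk reshapes the redaction iterator: instead of `filter_map` returning an `Option` of an inner iterator (via `bool::then`) followed by `.flatten()`, it filters first and uses `flat_map`, which drops a closure level and a level of indentation. A toy sketch of the same reshaping, with numbers standing in for excerpts:

    fn main() {
        let groups: Vec<(bool, Vec<u32>)> =
            vec![(true, vec![1, 2]), (false, vec![3]), (true, vec![4])];

        // Before-shaped: `filter_map` + `then` + trailing `flatten`.
        let a: Vec<u32> = groups
            .iter()
            .filter_map(|(enabled, items)| enabled.then(|| items.iter().map(|n| n * 10)))
            .flatten()
            .collect();

        // After-shaped: `filter` first, then `flat_map`, no trailing `flatten`.
        let b: Vec<u32> = groups
            .iter()
            .filter(|&&(enabled, _)| enabled)
            .flat_map(|(_, items)| items.iter().map(|n| n * 10))
            .collect();

        assert_eq!(a, b);
        assert_eq!(a, vec![10, 20, 40]);
    }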
@@ -4467,9 +4458,9 @@ impl ExcerptId {
     }
 }

-impl Into<usize> for ExcerptId {
-    fn into(self) -> usize {
-        self.0
+impl From<ExcerptId> for usize {
+    fn from(val: ExcerptId) -> Self {
+        val.0
     }
 }

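This hunk swaps a manual `Into<usize>` impl for `From<ExcerptId>`: implementing `From` also provides the matching `Into` through the standard library's blanket impl, so existing `.into()` callers keep working. A standalone sketch with a hypothetical `Id` newtype:

    // Hypothetical newtype standing in for `ExcerptId`.
    #[derive(Clone, Copy, Debug)]
    struct Id(usize);

    // Preferred direction: implement `From`; `Into` comes for free via the
    // blanket `impl<T, U> Into<U> for T where U: From<T>`.
    impl From<Id> for usize {
        fn from(val: Id) -> Self {
            val.0
        }
    }

    fn main() {
        let id = Id(7);
        let a = usize::from(id); // available because of the `From` impl
        let b: usize = id.into(); // still works through the blanket impl
        assert_eq!(a, b);
    }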
@@ -4758,7 +4749,6 @@ impl<'a> ReversedMultiBufferBytes<'a> {
                     self.excerpt_bytes = Some(excerpt_bytes);
                 }
             }
-        } else {
         }
     }
 }
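Here the fix is simply deleting an empty `else` branch that did nothing. A trivial sketch with a hypothetical helper:

    // Before: an empty `else` branch that adds nothing.
    fn log_if_positive_before(n: i32) {
        if n > 0 {
            println!("{n} is positive");
        } else {
        }
    }

    // After: the empty branch is deleted, matching the hunk above.
    fn log_if_positive_after(n: i32) {
        if n > 0 {
            println!("{n} is positive");
        }
    }

    fn main() {
        log_if_positive_before(1);
        log_if_positive_after(2);
    }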
@@ -5842,7 +5832,7 @@ mod tests {
             );
         }
         let snapshot = multibuffer.read(cx).read(cx);
-        ids_to_remove.sort_unstable_by(|a, b| a.cmp(&b, &snapshot));
+        ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot));
         drop(snapshot);
         multibuffer.update(cx, |multibuffer, cx| {
             multibuffer.remove_excerpts(ids_to_remove, cx)
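Same needless-borrow family as the earlier `buffers.get` hunk: `sort_unstable_by` already hands the closure references, so `a.cmp(b, …)` needs no extra `&`. A small sketch with plain integers and the standard `Ord::cmp` (Zed's `ExcerptId::cmp` additionally takes a snapshot argument):

    fn main() {
        let mut ids = vec![3_u32, 1, 2];

        // `a` and `b` are already `&u32` here, so `a.cmp(&b)` would borrow twice;
        // passing `b` through unchanged is the lint-clean form.
        ids.sort_unstable_by(|a, b| a.cmp(b));

        assert_eq!(ids, vec![1, 2, 3]);
    }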
@@ -6396,13 +6386,13 @@ mod tests {
     }

     fn validate_excerpts(
-        actual: &Vec<(ExcerptId, BufferId, Range<Anchor>)>,
+        actual: &[(ExcerptId, BufferId, Range<Anchor>)],
         expected: &Vec<(ExcerptId, BufferId, Range<Anchor>)>,
     ) {
         assert_eq!(actual.len(), expected.len());

         actual
-            .into_iter()
+            .iter()
             .zip(expected)
             .map(|(actual, expected)| {
                 assert_eq!(actual.0, expected.0);
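Two related cleanups in this test helper: taking `&[T]` instead of `&Vec<T>` (a slice accepts more callers and avoids double indirection), and calling `.iter()` rather than `.into_iter()` on a reference, which would have iterated by reference anyway. A sketch with a hypothetical `validate` helper, not the real `validate_excerpts`:

    // Hypothetical helper mirroring the shape of `validate_excerpts` above.
    fn validate(actual: &[(u32, &str)], expected: &[(u32, &str)]) {
        assert_eq!(actual.len(), expected.len());

        // `.iter()` makes the by-reference iteration explicit.
        actual
            .iter()
            .zip(expected)
            .for_each(|(actual, expected)| assert_eq!(actual.0, expected.0));
    }

    fn main() {
        let expected = vec![(1, "one"), (2, "two")];
        let actual = [(1, "uno"), (2, "dos")];
        // A `&Vec<T>` coerces to `&[T]`, so both callers work with the slice parameter.
        validate(&actual, &expected);
    }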
@@ -6437,7 +6427,7 @@ mod tests {
         (
             excerpt_id,
             buffer.read(cx).remote_id(),
-            map_range_from_excerpt(&snapshot, excerpt_id, &buffer.read(cx).snapshot(), range),
+            map_range_from_excerpt(snapshot, excerpt_id, &buffer.read(cx).snapshot(), range),
         )
     }
