One big cleanup pass of clippy lints
Co-authored-by: Mikayla <mikayla@zed.dev>
parent e7540d2833
commit 8ba2f77148
138 changed files with 1328 additions and 1366 deletions
@@ -737,9 +737,7 @@ impl Buffer {
                 this.parsing_in_background = false;
                 this.did_finish_parsing(new_tree, parsed_version, cx);
 
-                if parse_again && this.reparse(cx) {
-                    return;
-                }
+                if parse_again && this.reparse(cx) {}
             });
         })
         .detach();
@@ -933,10 +931,12 @@ impl Buffer {
             indent_sizes.entry(row).or_insert_with(|| {
                 let mut size = snapshot.indent_size_for_line(row);
                 if size.kind == new_indent.kind {
-                    if delta > 0 {
-                        size.len = size.len + delta as u32;
-                    } else if delta < 0 {
-                        size.len = size.len.saturating_sub(-delta as u32);
+                    match delta.cmp(&0) {
+                        Ordering::Greater => size.len += delta as u32,
+                        Ordering::Less => {
+                            size.len = size.len.saturating_sub(-delta as u32)
+                        }
+                        Ordering::Equal => {}
                     }
                 }
                 size
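The hunk above is clippy's comparison_chain fix: an `if delta > 0 { … } else if delta < 0 { … }` chain becomes a single `match` on `Ordering`, which also forces the `Equal` case to be spelled out. A minimal self-contained sketch of the same shape, using a hypothetical `adjust_len` helper rather than the buffer code itself:

use std::cmp::Ordering;

// Hypothetical helper: apply a signed delta to an unsigned length,
// saturating at zero instead of underflowing.
fn adjust_len(len: u32, delta: i64) -> u32 {
    match delta.cmp(&0) {
        Ordering::Greater => len + delta as u32,
        Ordering::Less => len.saturating_sub(-delta as u32),
        Ordering::Equal => len,
    }
}

fn main() {
    assert_eq!(adjust_len(4, 2), 6);
    assert_eq!(adjust_len(4, -10), 0);
}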
@@ -961,7 +961,7 @@ impl Buffer {
         let edits: Vec<_> = indent_sizes
             .into_iter()
             .filter_map(|(row, indent_size)| {
-                let current_size = indent_size_for_line(&self, row);
+                let current_size = indent_size_for_line(self, row);
                 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
             })
             .collect();
@@ -978,21 +978,23 @@ impl Buffer {
         return None;
         }
 
-        if new_size.len > current_size.len {
-            let point = Point::new(row, 0);
-            Some((
-                point..point,
-                iter::repeat(new_size.char())
-                    .take((new_size.len - current_size.len) as usize)
-                    .collect::<String>(),
-            ))
-        } else if new_size.len < current_size.len {
-            Some((
+        match new_size.len.cmp(&current_size.len) {
+            Ordering::Greater => {
+                let point = Point::new(row, 0);
+                Some((
+                    point..point,
+                    iter::repeat(new_size.char())
+                        .take((new_size.len - current_size.len) as usize)
+                        .collect::<String>(),
+                ))
+            }
+
+            Ordering::Less => Some((
                 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
                 String::new(),
-            ))
-        } else {
-            None
+            )),
+
+            Ordering::Equal => None,
         }
     }
 
@@ -1599,7 +1601,7 @@ impl Deref for Buffer {
 
 impl BufferSnapshot {
     pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
-        indent_size_for_line(&self, row)
+        indent_size_for_line(self, row)
     }
 
     pub fn single_indent_size(&self, cx: &AppContext) -> IndentSize {
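Several hunks in this file are the needless_borrow fix: `self` is already a `&BufferSnapshot`, so writing `indent_size_for_line(&self, row)` passes a `&&BufferSnapshot` that the compiler immediately has to peel back off. A standalone sketch with made-up types (`Snapshot` and `line_len` are illustrative, not the real API):

struct Snapshot {
    text: String,
}

// Free function that wants a shared reference, like `indent_size_for_line`.
fn line_len(snapshot: &Snapshot) -> usize {
    snapshot.text.lines().next().map_or(0, str::len)
}

impl Snapshot {
    fn first_line_len(&self) -> usize {
        // `line_len(&self)` also compiles (the extra `&` is dereferenced away),
        // but that extra borrow is exactly what clippy::needless_borrow flags.
        line_len(self)
    }
}

fn main() {
    let snapshot = Snapshot { text: "hello\nworld".into() };
    assert_eq!(snapshot.first_line_len(), 5);
}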
@@ -1643,10 +1645,10 @@ impl BufferSnapshot {
         result
     }
 
-    fn suggest_autoindents<'a>(
-        &'a self,
+    fn suggest_autoindents(
+        &self,
         row_range: Range<u32>,
-    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + 'a> {
+    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
         let language = self.language.as_ref()?;
         let grammar = language.grammar.as_ref()?;
         let config = &language.config;
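This is the needless_lifetimes cleanup: when the only borrow in play is `&self`, the named `'a` parameter adds nothing, and the returned iterator can tie itself to `self` with the anonymous `'_` lifetime instead. A sketch over a hypothetical `Lines` wrapper (not the real BufferSnapshot API):

struct Lines {
    text: String,
}

impl Lines {
    // Before: fn non_empty<'a>(&'a self) -> impl Iterator<Item = &'a str> + 'a
    // After: the elided form below means exactly the same thing.
    fn non_empty(&self) -> impl Iterator<Item = &str> + '_ {
        self.text.lines().filter(|line| !line.is_empty())
    }
}

fn main() {
    let lines = Lines { text: "a\n\nb".into() };
    assert_eq!(lines.non_empty().count(), 2);
}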
@@ -1675,7 +1677,7 @@ impl BufferSnapshot {
                     start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                     end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                 } else if Some(capture.index) == end_capture_ix {
-                    end = Some(Point::from_ts_point(capture.node.start_position().into()));
+                    end = Some(Point::from_ts_point(capture.node.start_position()));
                 }
             }
 
@@ -1733,15 +1735,17 @@ impl BufferSnapshot {
             let mut outdent_to_row = u32::MAX;
 
             while let Some((indent_row, delta)) = indent_changes.peek() {
-                if *indent_row == row {
-                    match delta {
+                match indent_row.cmp(&row) {
+                    Ordering::Equal => match delta {
                         Ordering::Less => outdent_from_prev_row = true,
                         Ordering::Greater => indent_from_prev_row = true,
                         _ => {}
-                    }
-                } else if *indent_row > row {
-                    break;
+                    },
+
+                    Ordering::Greater => break,
+                    Ordering::Less => {}
                 }
+
                 indent_changes.next();
             }
 
@@ -1805,11 +1809,7 @@ impl BufferSnapshot {
         None
     }
 
-    pub fn chunks<'a, T: ToOffset>(
-        &'a self,
-        range: Range<T>,
-        language_aware: bool,
-    ) -> BufferChunks<'a> {
+    pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
 
         let mut tree = None;
@@ -1843,7 +1843,7 @@ impl BufferSnapshot {
         )
     }
 
-    pub fn for_each_line<'a>(&'a self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
+    pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
         let mut line = String::new();
         let mut row = range.start.row;
         for chunk in self
@@ -1969,7 +1969,7 @@ impl BufferSnapshot {
         position: T,
         theme: Option<&SyntaxTheme>,
     ) -> Option<Vec<OutlineItem<Anchor>>> {
-        let position = position.to_offset(&self);
+        let position = position.to_offset(self);
         let mut items =
             self.outline_items_containing(position.saturating_sub(1)..position + 1, theme)?;
         let mut prev_depth = None;
@@ -2050,7 +2050,7 @@ impl BufferSnapshot {
 
         let mut offset = range.start;
         chunks.seek(offset);
-        while let Some(mut chunk) = chunks.next() {
+        for mut chunk in chunks.by_ref() {
             if chunk.text.len() > range.end - offset {
                 chunk.text = &chunk.text[0..(range.end - offset)];
                 offset = range.end;
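This is the while_let_on_iterator fix: `while let Some(x) = iter.next()` is just a `for` loop in disguise, and `by_ref()` keeps ownership of the iterator so it can still be used after the loop. A small sketch with a plain `Chars` iterator rather than `BufferChunks`:

fn main() {
    let mut chars = "abcdef".chars();

    // Instead of `while let Some(c) = chars.next() { ... }`,
    // loop over a mutable borrow of the iterator.
    let mut prefix = String::new();
    for c in chars.by_ref() {
        if prefix.len() == 3 {
            break;
        }
        prefix.push(c);
    }

    assert_eq!(prefix, "abc");
    // `chars` was only borrowed, so the unconsumed tail is still available.
    assert_eq!(chars.as_str(), "ef");
}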
@@ -2105,7 +2105,7 @@ impl BufferSnapshot {
         let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
         let mut cursor = QueryCursorHandle::new();
         let matches = cursor.set_byte_range(range).matches(
-            &brackets_query,
+            brackets_query,
             tree.root_node(),
             TextProvider(self.as_rope()),
         );
@@ -2120,17 +2120,17 @@ impl BufferSnapshot {
             .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
     }
 
-    pub fn remote_selections_in_range<'a>(
-        &'a self,
+    #[allow(clippy::type_complexity)]
+    pub fn remote_selections_in_range(
+        &self,
         range: Range<Anchor>,
-    ) -> impl 'a
-    + Iterator<
+    ) -> impl Iterator<
         Item = (
             ReplicaId,
             bool,
-            impl 'a + Iterator<Item = &'a Selection<Anchor>>,
+            impl Iterator<Item = &Selection<Anchor>> + '_,
         ),
-    > {
+    > + '_ {
         self.remote_selections
             .iter()
             .filter(|(replica_id, set)| {
@@ -2165,8 +2165,7 @@ impl BufferSnapshot {
         T: 'a + Clone + ToOffset,
         O: 'a + FromAnchor,
     {
-        self.diagnostics
-            .range(search_range.clone(), self, true, reversed)
+        self.diagnostics.range(search_range, self, true, reversed)
     }
 
     pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
@@ -2469,10 +2468,7 @@ impl<'a> Iterator for BufferChunks<'a> {
 
 impl QueryCursorHandle {
     pub(crate) fn new() -> Self {
-        let mut cursor = QUERY_CURSORS
-            .lock()
-            .pop()
-            .unwrap_or_else(|| QueryCursor::new());
+        let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
         cursor.set_match_limit(64);
         QueryCursorHandle(Some(cursor))
     }
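This is the redundant_closure fix: a closure whose whole body is a call to another function with the same arguments can be replaced by that function. A sketch with a pooled `String` standing in for the pooled `QueryCursor`:

fn main() {
    let mut pool: Vec<String> = Vec::new();

    // `pool.pop().unwrap_or_else(|| String::new())` works, but the closure only
    // forwards to `String::new`, so clippy prefers passing the function itself.
    let scratch = pool.pop().unwrap_or_else(String::new);

    assert_eq!(scratch, "");
    assert!(pool.is_empty());
}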
@@ -2614,7 +2610,7 @@ pub fn contiguous_ranges(
     values: impl Iterator<Item = u32>,
     max_len: usize,
 ) -> impl Iterator<Item = Range<u32>> {
-    let mut values = values.into_iter();
+    let mut values = values;
     let mut current_range: Option<Range<u32>> = None;
     std::iter::from_fn(move || loop {
         if let Some(value) = values.next() {

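And this one is the useless_conversion fix: `values` is already declared as `impl Iterator<Item = u32>`, so `.into_iter()` on it is the identity; all the function needs is a mutable rebinding so the closure can call `.next()`. A sketch in the same spirit as `contiguous_ranges`, with a hypothetical `pairs` helper:

use std::iter;

// The parameter is already an iterator, so calling `.into_iter()` on it again
// would be the identity conversion that clippy::useless_conversion flags.
fn pairs(values: impl Iterator<Item = u32>) -> impl Iterator<Item = (u32, u32)> {
    let mut values = values; // was the `values.into_iter()` shape in the old code
    iter::from_fn(move || {
        let first = values.next()?;
        let second = values.next()?;
        Some((first, second))
    })
}

fn main() {
    let result: Vec<_> = pairs([1, 2, 3, 4, 5].into_iter()).collect();
    assert_eq!(result, vec![(1, 2), (3, 4)]);
}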
@@ -8,7 +8,7 @@ use std::{
 use sum_tree::{self, Bias, SumTree};
 use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct DiagnosticSet {
     diagnostics: SumTree<DiagnosticEntry<Anchor>>,
 }
@@ -167,24 +167,15 @@ impl DiagnosticSet {
             .map(|entry| entry.resolve(buffer))
     }
 }
 
-impl Default for DiagnosticSet {
-    fn default() -> Self {
-        Self {
-            diagnostics: Default::default(),
-        }
-    }
-}
-
 impl sum_tree::Item for DiagnosticEntry<Anchor> {
     type Summary = Summary;
 
     fn summary(&self) -> Self::Summary {
         Summary {
-            start: self.range.start.clone(),
-            end: self.range.end.clone(),
-            min_start: self.range.start.clone(),
-            max_end: self.range.end.clone(),
+            start: self.range.start,
+            end: self.range.end,
+            min_start: self.range.start,
+            max_end: self.range.end,
             count: 1,
         }
     }
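Together with the `#[derive(Clone, Debug, Default)]` hunk above, this is the derivable_impls fix: a `Default` impl whose body only fills each field with `Default::default()` carries no information, so the derive replaces it. Sketch with a hypothetical counts struct:

#[derive(Debug, Default)]
struct DiagnosticCounts {
    errors: usize,
    warnings: usize,
}

// The derive above replaces a hand-written impl of this shape:
//
// impl Default for DiagnosticCounts {
//     fn default() -> Self {
//         Self { errors: Default::default(), warnings: Default::default() }
//     }
// }

fn main() {
    let counts = DiagnosticCounts::default();
    assert_eq!(counts.errors, 0);
    assert_eq!(counts.warnings, 0);
}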
@@ -217,13 +208,13 @@ impl sum_tree::Summary for Summary {
 
     fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
         if other.min_start.cmp(&self.min_start, buffer).is_lt() {
-            self.min_start = other.min_start.clone();
+            self.min_start = other.min_start;
         }
         if other.max_end.cmp(&self.max_end, buffer).is_gt() {
-            self.max_end = other.max_end.clone();
+            self.max_end = other.max_end;
         }
-        self.start = other.start.clone();
-        self.end = other.end.clone();
+        self.start = other.start;
+        self.end = other.end;
         self.count += other.count;
     }
 }

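The `.clone()` removals here are the clone_on_copy fix: `Anchor` is `Copy`, so cloning it is just a noisier way of copying it. Sketch with a small `Copy` position type standing in for `text::Anchor`:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos {
    row: u32,
    column: u32,
}

fn main() {
    let start = Pos { row: 2, column: 7 };

    // `start.clone()` compiles, but for a `Copy` type a plain assignment
    // already copies the value, which is what clippy::clone_on_copy flags.
    let copied = start;

    assert_eq!(copied, start);
    assert_eq!((copied.row, copied.column), (2, 7));
}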
@@ -56,10 +56,7 @@ impl HighlightId {
     }
 
     pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
-        theme
-            .highlights
-            .get(self.0 as usize)
-            .map(|entry| entry.1.clone())
+        theme.highlights.get(self.0 as usize).map(|entry| entry.1)
     }
 
     #[cfg(any(test, feature = "test-support"))]

@@ -308,6 +308,7 @@ pub struct LanguageRegistry {
     lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
     lsp_binary_statuses_rx: async_broadcast::Receiver<(Arc<Language>, LanguageServerBinaryStatus)>,
     login_shell_env_loaded: Shared<Task<()>>,
+    #[allow(clippy::type_complexity)]
     lsp_binary_paths: Mutex<
         HashMap<
             LanguageServerName,
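Rather than rewriting the nested `Mutex<HashMap<…>>` type, the commit scopes an `#[allow(clippy::type_complexity)]` to the one field (and, earlier, to one method) that trips the lint. A sketch of the same pattern with a made-up cache type:

use std::collections::HashMap;
use std::sync::Mutex;

struct BinaryCache {
    // Nesting like this is what clippy::type_complexity flags; the attribute
    // silences it for this field only, instead of crate-wide.
    #[allow(clippy::type_complexity)]
    paths: Mutex<HashMap<String, HashMap<String, Result<std::path::PathBuf, String>>>>,
}

fn main() {
    let cache = BinaryCache {
        paths: Mutex::new(HashMap::new()),
    };
    assert!(cache.paths.lock().unwrap().is_empty());
}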
@@ -342,7 +343,7 @@ impl LanguageRegistry {
         if let Some(theme) = self.theme.read().clone() {
             language.set_theme(&theme.editor.syntax);
         }
-        self.languages.write().push(language.clone());
+        self.languages.write().push(language);
         *self.subscription.write().0.borrow_mut() = ();
     }
 
@@ -409,7 +410,7 @@ impl LanguageRegistry {
     ) -> Option<Task<Result<lsp::LanguageServer>>> {
         #[cfg(any(test, feature = "test-support"))]
         if language.fake_adapter.is_some() {
-            let language = language.clone();
+            let language = language;
             return Some(cx.spawn(|cx| async move {
                 let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap();
                 let (server, mut fake_server) = lsp::LanguageServer::fake(
@@ -474,7 +475,7 @@ impl LanguageRegistry {
             let server = lsp::LanguageServer::new(
                 server_id,
                 &server_binary_path,
-                &server_args,
+                server_args,
                 &root_path,
                 cx,
             )?;

@@ -408,10 +408,12 @@ pub async fn deserialize_completion(
     Ok(Completion {
         old_range: old_start..old_end,
         new_text: completion.new_text,
-        label: label.unwrap_or(CodeLabel::plain(
-            lsp_completion.label.clone(),
-            lsp_completion.filter_text.as_deref(),
-        )),
+        label: label.unwrap_or_else(|| {
+            CodeLabel::plain(
+                lsp_completion.label.clone(),
+                lsp_completion.filter_text.as_deref(),
+            )
+        }),
         lsp_completion,
     })
 }
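This is the or_fun_call fix: the argument of `unwrap_or` is built eagerly even when the `Option` is `Some`, so a fallback that allocates (here, `CodeLabel::plain`) belongs in `unwrap_or_else`. A sketch with a hypothetical `plain_label` builder instead of the real `CodeLabel` API:

// Hypothetical stand-in for `CodeLabel::plain`: building the fallback allocates.
fn plain_label(text: &str) -> String {
    format!("plain:{text}")
}

fn main() {
    let custom: Option<String> = None;

    // `custom.unwrap_or(plain_label("len"))` would construct the fallback even
    // when `custom` is `Some`; `unwrap_or_else` defers that work to the `None` case.
    let label = custom.unwrap_or_else(|| plain_label("len"));

    assert_eq!(label, "plain:len");
}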
@@ -465,7 +467,7 @@ pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transa
             .into_iter()
             .map(deserialize_local_timestamp)
             .collect(),
-        start: deserialize_version(transaction.start.into()),
+        start: deserialize_version(transaction.start),
     })
 }
 
@@ -107,7 +107,7 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) {
         let buffer_1_events = buffer_1_events.clone();
         cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
             Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
-            event @ _ => buffer_1_events.borrow_mut().push(event),
+            event => buffer_1_events.borrow_mut().push(event),
         })
         .detach();
         let buffer_2_events = buffer_2_events.clone();
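This is the redundant_pattern fix: `event @ _` binds the whole value and then matches it against the wildcard, which is exactly what a bare `event` binding already does. Sketch with a hypothetical event enum:

#[derive(Debug)]
enum Event {
    Operation(u32),
    Edited,
    Dirtied,
}

fn describe(event: Event) -> String {
    match event {
        Event::Operation(id) => format!("operation {id}"),
        // was the shape `event @ _ => ...`; the `@ _` adds nothing to the binding.
        event => format!("buffer event: {event:?}"),
    }
}

fn main() {
    assert_eq!(describe(Event::Operation(1)), "operation 1");
    assert_eq!(describe(Event::Edited), "buffer event: Edited");
    assert_eq!(describe(Event::Dirtied), "buffer event: Dirtied");
}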
@@ -190,7 +190,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
     buffer.update(cx, |buffer, cx| {
         buffer.apply_diff(diff, cx).unwrap();
         assert_eq!(buffer.text(), text);
-        assert_eq!(anchor.to_point(&buffer), Point::new(2, 3));
+        assert_eq!(anchor.to_point(buffer), Point::new(2, 3));
     });
 
     let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
@@ -198,7 +198,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
     buffer.update(cx, |buffer, cx| {
         buffer.apply_diff(diff, cx).unwrap();
         assert_eq!(buffer.text(), text);
-        assert_eq!(anchor.to_point(&buffer), Point::new(4, 4));
+        assert_eq!(anchor.to_point(buffer), Point::new(4, 4));
     });
 }
 
@@ -209,11 +209,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
         cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
 
     // Wait for the initial text to parse
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
     assert_eq!(
-        get_tree_sexp(&buffer, &cx),
+        get_tree_sexp(&buffer, cx),
         concat!(
             "(source_file (function_item name: (identifier) ",
             "parameters: (parameters) ",
@@ -230,11 +228,11 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
     buffer.update(cx, |buf, cx| {
         buf.start_transaction();
 
-        let offset = buf.text().find(")").unwrap();
+        let offset = buf.text().find(')').unwrap();
         buf.edit([(offset..offset, "b: C")], None, cx);
         assert!(!buf.is_parsing());
 
-        let offset = buf.text().find("}").unwrap();
+        let offset = buf.text().find('}').unwrap();
         buf.edit([(offset..offset, " d; ")], None, cx);
         assert!(!buf.is_parsing());
 
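The quote changes in these test hunks are the single_char_pattern fix: `find(")")` searches for a one-character substring, while `find(')')` uses the cheaper `char` pattern and says more directly what is being looked for. Sketch:

fn main() {
    let text = "fn a(b: C) {}";

    // `text.find(")")` also works; clippy prefers the char pattern.
    let close_paren = text.find(')').expect("no closing paren");
    let open_brace = text.find('{').expect("no opening brace");

    assert_eq!(&text[..close_paren], "fn a(b: C");
    assert!(open_brace > close_paren);
}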
@@ -242,11 +240,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
         assert_eq!(buf.text(), "fn a(b: C) { d; }");
         assert!(buf.is_parsing());
     });
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
     assert_eq!(
-        get_tree_sexp(&buffer, &cx),
+        get_tree_sexp(&buffer, cx),
         concat!(
             "(source_file (function_item name: (identifier) ",
             "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
@@ -259,13 +255,13 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
     // * turn field expression into a method call
    // * add a turbofish to the method call
     buffer.update(cx, |buf, cx| {
-        let offset = buf.text().find(";").unwrap();
+        let offset = buf.text().find(';').unwrap();
         buf.edit([(offset..offset, ".e")], None, cx);
         assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
         assert!(buf.is_parsing());
     });
     buffer.update(cx, |buf, cx| {
-        let offset = buf.text().find(";").unwrap();
+        let offset = buf.text().find(';').unwrap();
         buf.edit([(offset..offset, "(f)")], None, cx);
         assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
         assert!(buf.is_parsing());
@@ -276,11 +272,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
         assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
         assert!(buf.is_parsing());
     });
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
     assert_eq!(
-        get_tree_sexp(&buffer, &cx),
+        get_tree_sexp(&buffer, cx),
         concat!(
             "(source_file (function_item name: (identifier) ",
             "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
@@ -297,11 +291,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
         assert_eq!(buf.text(), "fn a() {}");
         assert!(buf.is_parsing());
     });
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
     assert_eq!(
-        get_tree_sexp(&buffer, &cx),
+        get_tree_sexp(&buffer, cx),
         concat!(
             "(source_file (function_item name: (identifier) ",
             "parameters: (parameters) ",
@@ -314,11 +306,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
         assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
         assert!(buf.is_parsing());
     });
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
     assert_eq!(
-        get_tree_sexp(&buffer, &cx),
+        get_tree_sexp(&buffer, cx),
         concat!(
             "(source_file (function_item name: (identifier) ",
             "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
@@ -340,21 +330,17 @@ async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
     });
 
     // Wait for the initial text to parse
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
     assert_eq!(
-        get_tree_sexp(&buffer, &cx),
+        get_tree_sexp(&buffer, cx),
         "(source_file (expression_statement (block)))"
     );
 
     buffer.update(cx, |buffer, cx| {
         buffer.set_language(Some(Arc::new(json_lang())), cx)
     });
-    buffer
-        .condition(&cx, |buffer, _| !buffer.is_parsing())
-        .await;
-    assert_eq!(get_tree_sexp(&buffer, &cx), "(document (object))");
+    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
+    assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
 }
 
 #[gpui::test]
@@ -417,7 +403,7 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
 
     // Without space, we only match on names
     assert_eq!(
-        search(&outline, "oon", &cx).await,
+        search(&outline, "oon", cx).await,
         &[
             ("mod module", vec![]), // included as the parent of a match
             ("enum LoginState", vec![]), // included as the parent of a match
@@ -427,18 +413,18 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
     );
 
     assert_eq!(
-        search(&outline, "dp p", &cx).await,
+        search(&outline, "dp p", cx).await,
         &[
             ("impl Drop for Person", vec![5, 8, 9, 14]),
             ("fn drop", vec![]),
         ]
     );
     assert_eq!(
-        search(&outline, "dpn", &cx).await,
+        search(&outline, "dpn", cx).await,
         &[("impl Drop for Person", vec![5, 14, 19])]
     );
     assert_eq!(
-        search(&outline, "impl ", &cx).await,
+        search(&outline, "impl ", cx).await,
         &[
             ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
             ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
@@ -530,9 +516,9 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
         ]
     );
 
-    fn symbols_containing<'a>(
+    fn symbols_containing(
         position: Point,
-        snapshot: &'a BufferSnapshot,
+        snapshot: &BufferSnapshot,
     ) -> Vec<(String, Range<Point>)> {
         snapshot
             .symbols_containing(position, None)
@@ -799,7 +785,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
             Ok(())
         }
         "
-        .replace("|", "") // included in the string to preserve trailing whites
+        .replace('|', "") // included in the string to preserve trailing whites
         .unindent()
     );
 
@@ -971,7 +957,7 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
         buffer.undo(cx);
         buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
         buffer.edit(
-            [(Point::new(2, 8)..Point::new(2, 8), inserted_text.clone())],
+            [(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
             Some(AutoindentMode::Block {
                 original_indent_columns: vec![0],
             }),
@@ -1098,7 +1084,7 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
             if let Event::Operation(op) = event {
                 network
                     .borrow_mut()
-                    .broadcast(buffer.replica_id(), vec![proto::serialize_operation(&op)]);
+                    .broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
             }
         })
         .detach();
@@ -1202,7 +1188,7 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
             if let Event::Operation(op) = event {
                 network.borrow_mut().broadcast(
                     buffer.replica_id(),
-                    vec![proto::serialize_operation(&op)],
+                    vec![proto::serialize_operation(op)],
                 );
             }
         })