chore: Fix several style lints (#17488)

It's not comprehensive enough to start linting on `style` group, but
hey, it's a start.

Release Notes:

- N/A
This commit is contained in:
Piotr Osiewicz 2024-09-06 11:58:39 +02:00 committed by GitHub
parent 93249fc82b
commit e6c1c51b37
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
361 changed files with 3530 additions and 3587 deletions

View file

@ -68,7 +68,7 @@ pub use lsp::DiagnosticSeverity;
/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(|| TaskLabel::new());
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
@ -1105,7 +1105,6 @@ impl Buffer {
{
Ok(new_syntax_snapshot) => {
self.did_finish_parsing(new_syntax_snapshot, cx);
return;
}
Err(parse_task) => {
self.parsing_in_background = true;
@ -1938,25 +1937,23 @@ impl Buffer {
);
}
if space_above {
if position.row > 0 && !self.is_line_blank(position.row - 1) {
self.edit(
[(position..position, "\n")],
Some(AutoindentMode::EachLine),
cx,
);
position.row += 1;
}
if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
self.edit(
[(position..position, "\n")],
Some(AutoindentMode::EachLine),
cx,
);
position.row += 1;
}
if space_below {
if position.row == self.max_point().row || !self.is_line_blank(position.row + 1) {
self.edit(
[(position..position, "\n")],
Some(AutoindentMode::EachLine),
cx,
);
}
if space_below
&& (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
{
self.edit(
[(position..position, "\n")],
Some(AutoindentMode::EachLine),
cx,
);
}
self.end_transaction(cx);
@ -2094,7 +2091,7 @@ impl Buffer {
) {
if lamport_timestamp > self.diagnostics_timestamp {
let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
if diagnostics.len() == 0 {
if diagnostics.is_empty() {
if let Ok(ix) = ix {
self.diagnostics.remove(ix);
}
@ -2582,7 +2579,7 @@ impl BufferSnapshot {
});
let highlight_maps = captures
.grammars()
.into_iter()
.iter()
.map(|grammar| grammar.highlight_map())
.collect();
(captures, highlight_maps)
@ -3422,31 +3419,35 @@ impl BufferSnapshot {
current_depth
};
if depth < current_depth {
for _ in 0..(current_depth - depth) {
let mut indent = indent_stack.pop().unwrap();
if last_row != first_row {
// In this case, we landed on an empty row, had to seek forward,
// and discovered that the indent we were on is ending.
// This means that the last display row must
// be on the line that ends this indent range, so we
// should display the range up to the first non-empty line
indent.end_row = first_row.saturating_sub(1);
}
match depth.cmp(&current_depth) {
Ordering::Less => {
for _ in 0..(current_depth - depth) {
let mut indent = indent_stack.pop().unwrap();
if last_row != first_row {
// In this case, we landed on an empty row, had to seek forward,
// and discovered that the indent we were on is ending.
// This means that the last display row must
// be on the line that ends this indent range, so we
// should display the range up to the first non-empty line
indent.end_row = first_row.saturating_sub(1);
}
result_vec.push(indent)
result_vec.push(indent)
}
}
} else if depth > current_depth {
for next_depth in current_depth..depth {
indent_stack.push(IndentGuide {
buffer_id: self.remote_id(),
start_row: first_row,
end_row: last_row,
depth: next_depth,
tab_size,
settings,
});
Ordering::Greater => {
for next_depth in current_depth..depth {
indent_stack.push(IndentGuide {
buffer_id: self.remote_id(),
start_row: first_row,
end_row: last_row,
depth: next_depth,
tab_size,
settings,
});
}
}
_ => {}
}
for indent in indent_stack.iter_mut() {

View file

@ -110,6 +110,10 @@ impl DiagnosticSet {
pub fn len(&self) -> usize {
self.diagnostics.summary().count
}
/// Returns true when there are no diagnostics in this diagnostic set
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns an iterator over the diagnostic entries in the set.
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {

View file

@ -2,7 +2,7 @@
//! features (the other big contributors being project and lsp crates that revolve around LSP features).
//! Namely, this crate:
//! - Provides [`Language`], [`Grammar`] and [`LanguageRegistry`] types that
//! use Tree-sitter to provide syntax highlighting to the editor; note though that `language` doesn't perform the highlighting by itself. It only maps ranges in a buffer to colors. Treesitter is also used for buffer outlines (lists of symbols in a buffer)
//! use Tree-sitter to provide syntax highlighting to the editor; note though that `language` doesn't perform the highlighting by itself. It only maps ranges in a buffer to colors. Treesitter is also used for buffer outlines (lists of symbols in a buffer)
//! - Exposes [`LanguageConfig`] that describes how constructs (like brackets or line comments) should be handled by the editor for a source file of a particular language.
//!
//! Notably we do *not* assign a single language to a single file; in real world a single file can consist of multiple programming languages - HTML is a good example of that - and `language` crate tends to reflect that status quo in its API.
@ -433,7 +433,7 @@ pub trait LspAdapter: 'static + Send + Sync {
language: &Arc<Language>,
) -> Result<Vec<Option<CodeLabel>>> {
let mut labels = Vec::new();
for (ix, completion) in completions.into_iter().enumerate() {
for (ix, completion) in completions.iter().enumerate() {
let label = self.label_for_completion(completion, language).await;
if let Some(label) = label {
labels.resize(ix + 1, None);
@ -457,7 +457,7 @@ pub trait LspAdapter: 'static + Send + Sync {
language: &Arc<Language>,
) -> Result<Vec<Option<CodeLabel>>> {
let mut labels = Vec::new();
for (ix, (name, kind)) in symbols.into_iter().enumerate() {
for (ix, (name, kind)) in symbols.iter().enumerate() {
let label = self.label_for_symbol(name, *kind, language).await;
if let Some(label) = label {
labels.resize(ix + 1, None);

View file

@ -668,7 +668,7 @@ impl LanguageRegistry {
.ok_or_else(|| anyhow!("invalid grammar filename"))?;
anyhow::Ok(with_parser(|parser| {
let mut store = parser.take_wasm_store().unwrap();
let grammar = store.load_language(&grammar_name, &wasm_bytes);
let grammar = store.load_language(grammar_name, &wasm_bytes);
parser.set_wasm_store(store).unwrap();
grammar
})?)
@ -699,7 +699,7 @@ impl LanguageRegistry {
}
pub fn to_vec(&self) -> Vec<Arc<Language>> {
self.state.read().languages.iter().cloned().collect()
self.state.read().languages.to_vec()
}
pub fn lsp_adapters(&self, language: &Arc<Language>) -> Vec<Arc<CachedLspAdapter>> {
@ -971,7 +971,7 @@ impl LanguageRegistryState {
self.available_languages
.retain(|language| !languages_to_remove.contains(&language.name));
self.grammars
.retain(|name, _| !grammars_to_remove.contains(&name));
.retain(|name, _| !grammars_to_remove.contains(name));
self.version += 1;
self.reload_count += 1;
*self.subscription.0.borrow_mut() = ();

View file

@ -20,11 +20,11 @@ use settings::{add_references_to_properties, Settings, SettingsLocation, Setting
use std::{num::NonZeroU32, path::Path, sync::Arc};
use util::serde::default_true;
impl<'a> Into<SettingsLocation<'a>> for &'a dyn File {
fn into(self) -> SettingsLocation<'a> {
impl<'a> From<&'a dyn File> for SettingsLocation<'a> {
fn from(val: &'a dyn File) -> Self {
SettingsLocation {
worktree_id: self.worktree_id(),
path: self.path().as_ref(),
worktree_id: val.worktree_id(),
path: val.path().as_ref(),
}
}
}
@ -155,10 +155,10 @@ impl LanguageSettings {
);
let rest = available_language_servers
.into_iter()
.iter()
.filter(|&available_language_server| {
!disabled_language_servers.contains(&&available_language_server.0)
&& !enabled_language_servers.contains(&&available_language_server.0)
!disabled_language_servers.contains(&available_language_server.0)
&& !enabled_language_servers.contains(&available_language_server.0)
})
.cloned()
.collect::<Vec<_>>();
@ -414,13 +414,15 @@ impl JsonSchema for FormatOnSave {
.into(),
);
let mut valid_raw_values = SchemaObject::default();
valid_raw_values.enum_values = Some(vec![
Value::String("on".into()),
Value::String("off".into()),
Value::String("prettier".into()),
Value::String("language_server".into()),
]);
let valid_raw_values = SchemaObject {
enum_values: Some(vec![
Value::String("on".into()),
Value::String("off".into()),
Value::String("prettier".into()),
Value::String("language_server".into()),
]),
..Default::default()
};
let mut nested_values = SchemaObject::default();
nested_values.array().items = Some(formatter_schema.clone().into());
@ -545,12 +547,15 @@ impl JsonSchema for SelectedFormatter {
.into(),
);
let mut valid_raw_values = SchemaObject::default();
valid_raw_values.enum_values = Some(vec![
Value::String("auto".into()),
Value::String("prettier".into()),
Value::String("language_server".into()),
]);
let valid_raw_values = SchemaObject {
enum_values: Some(vec![
Value::String("auto".into()),
Value::String("prettier".into()),
Value::String("language_server".into()),
]),
..Default::default()
};
let mut nested_values = SchemaObject::default();
nested_values.array().items = Some(formatter_schema.clone().into());
@ -633,7 +638,7 @@ impl AsRef<[Formatter]> for FormatterList {
fn as_ref(&self) -> &[Formatter] {
match &self.0 {
SingleOrVec::Single(single) => slice::from_ref(single),
SingleOrVec::Vec(v) => &v,
SingleOrVec::Vec(v) => v,
}
}
}
@ -1145,7 +1150,7 @@ mod tests {
pub fn test_resolve_language_servers() {
fn language_server_names(names: &[&str]) -> Vec<LanguageServerName> {
names
.into_iter()
.iter()
.copied()
.map(|name| LanguageServerName(name.into()))
.collect::<Vec<_>>()

View file

@ -191,7 +191,7 @@ pub async fn parse_markdown_block(
style.strikethrough = true;
}
if let Some(link) = link_url.clone().and_then(|u| Link::identify(u)) {
if let Some(link) = link_url.clone().and_then(Link::identify) {
region_ranges.push(prev_len..text.len());
regions.push(ParsedRegion {
code: false,
@ -222,7 +222,7 @@ pub async fn parse_markdown_block(
text.push_str(t.as_ref());
region_ranges.push(prev_len..text.len());
let link = link_url.clone().and_then(|u| Link::identify(u));
let link = link_url.clone().and_then(Link::identify);
if link.is_some() {
highlights.push((
prev_len..text.len(),
@ -336,7 +336,7 @@ pub fn highlight_code(
}
/// Appends a new paragraph to the provided `text` buffer.
pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
pub fn new_paragraph(text: &mut String, list_stack: &mut [(Option<u64>, bool)]) {
let mut is_subsequent_paragraph_of_list = false;
if let Some((_, has_content)) = list_stack.last_mut() {
if *has_content {

View file

@ -198,8 +198,10 @@ pub fn render_item<T>(
match_ranges: impl IntoIterator<Item = Range<usize>>,
cx: &AppContext,
) -> StyledText {
let mut highlight_style = HighlightStyle::default();
highlight_style.background_color = Some(color_alpha(cx.theme().colors().text_accent, 0.3));
let highlight_style = HighlightStyle {
background_color: Some(color_alpha(cx.theme().colors().text_accent, 0.3)),
..Default::default()
};
let custom_highlights = match_ranges
.into_iter()
.map(|range| (range, highlight_style));

View file

@ -734,19 +734,24 @@ impl SyntaxSnapshot {
let mut max_depth = 0;
let mut prev_range: Option<Range<Anchor>> = None;
for layer in self.layers.iter() {
if layer.depth == max_depth {
if let Some(prev_range) = prev_range {
match layer.range.start.cmp(&prev_range.start, text) {
Ordering::Less => panic!("layers out of order"),
Ordering::Equal => {
assert!(layer.range.end.cmp(&prev_range.end, text).is_ge())
match Ord::cmp(&layer.depth, &max_depth) {
Ordering::Less => {
panic!("layers out of order")
}
Ordering::Equal => {
if let Some(prev_range) = prev_range {
match layer.range.start.cmp(&prev_range.start, text) {
Ordering::Less => panic!("layers out of order"),
Ordering::Equal => {
assert!(layer.range.end.cmp(&prev_range.end, text).is_ge())
}
Ordering::Greater => {}
}
Ordering::Greater => {}
}
}
} else if layer.depth < max_depth {
panic!("layers out of order")
Ordering::Greater => {}
}
max_depth = layer.depth;
prev_range = Some(layer.range.clone());
}
@ -1343,7 +1348,7 @@ pub(crate) fn splice_included_ranges(
new_ranges: &[tree_sitter::Range],
) -> (Vec<tree_sitter::Range>, Range<usize>) {
let mut removed_ranges = removed_ranges.iter().cloned().peekable();
let mut new_ranges = new_ranges.into_iter().cloned().peekable();
let mut new_ranges = new_ranges.iter().cloned().peekable();
let mut ranges_ix = 0;
let mut changed_portion: Option<Range<usize>> = None;
loop {
@ -1709,7 +1714,7 @@ impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary>
{
fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
if self.change.cmp(cursor_location, buffer).is_le() {
return Ordering::Less;
Ordering::Less
} else {
self.position.cmp(cursor_location, buffer)
}
@ -1759,7 +1764,7 @@ impl<'a> Iterator for ByteChunks<'a> {
impl QueryCursorHandle {
pub fn new() -> Self {
let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_default();
cursor.set_match_limit(64);
QueryCursorHandle(Some(cursor))
}

View file

@ -958,7 +958,7 @@ fn check_interpolation(
new_buffer: &BufferSnapshot,
) {
let edits = new_buffer
.edits_since::<usize>(&old_buffer.version())
.edits_since::<usize>(old_buffer.version())
.collect::<Vec<_>>();
for (old_layer, new_layer) in old_syntax_map
@ -1086,7 +1086,7 @@ fn test_edit_sequence(
mutated_syntax_map.set_language_registry(registry.clone());
mutated_syntax_map.reparse(language.clone(), &buffer);
for (i, marked_string) in steps.into_iter().enumerate() {
for (i, marked_string) in steps.iter().enumerate() {
let marked_string = marked_string.unindent();
log::info!("incremental parse {i}: {marked_string:?}");
buffer.edit_via_marked_text(&marked_string);
@ -1302,7 +1302,7 @@ fn assert_layers_for_range(
expected_layers: &[&str],
) {
let layers = syntax_map
.layers_for_range(range, &buffer, true)
.layers_for_range(range, buffer, true)
.collect::<Vec<_>>();
assert_eq!(
layers.len(),
@ -1338,7 +1338,7 @@ fn assert_capture_ranges(
.collect::<Vec<_>>();
for capture in captures {
let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
if highlight_query_capture_names.contains(&name) {
if highlight_query_capture_names.contains(name) {
actual_ranges.push(capture.node.byte_range());
}
}