commit 87ba68e3ea
27 changed files with 1229 additions and 277 deletions

Cargo.lock (generated, 10 changes)
@@ -5297,6 +5297,15 @@ dependencies = [
 "tree-sitter",
 ]

+[[package]]
+name = "tree-sitter-go"
+version = "0.19.1"
+source = "git+https://github.com/tree-sitter/tree-sitter-go?rev=aeb2f33b366fd78d5789ff104956ce23508b85db#aeb2f33b366fd78d5789ff104956ce23508b85db"
+dependencies = [
+"cc",
+"tree-sitter",
+]
+
 [[package]]
 name = "tree-sitter-json"
 version = "0.19.0"
@@ -6048,6 +6057,7 @@ dependencies = [
 "tree-sitter",
 "tree-sitter-c",
 "tree-sitter-cpp",
+"tree-sitter-go",
 "tree-sitter-json 0.20.0",
 "tree-sitter-markdown",
 "tree-sitter-rust",
@@ -32,7 +32,8 @@ use gpui::{
 pub use language::{char_kind, CharKind};
 use language::{
 BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticSeverity,
-Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId,
+IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal,
+TransactionId,
 };
 use multi_buffer::MultiBufferChunks;
 pub use multi_buffer::{
@@ -51,7 +52,7 @@ use std::{
 any::TypeId,
 borrow::Cow,
 cmp::{self, Ordering, Reverse},
-iter, mem,
+mem,
 ops::{Deref, DerefMut, Range, RangeInclusive},
 sync::Arc,
 time::{Duration, Instant},
@@ -1932,9 +1933,8 @@ impl Editor {
 .iter()
 .map(|selection| {
 let start_point = selection.start.to_point(&buffer);
-let indent = buffer
-.indent_column_for_line(start_point.row)
-.min(start_point.column);
+let mut indent = buffer.indent_size_for_line(start_point.row);
+indent.len = cmp::min(indent.len, start_point.column);
 let start = selection.start;
 let end = selection.end;

@@ -1967,9 +1967,9 @@ impl Editor {
 });
 }

-let mut new_text = String::with_capacity(1 + indent as usize);
+let mut new_text = String::with_capacity(1 + indent.len as usize);
 new_text.push('\n');
-new_text.extend(iter::repeat(' ').take(indent as usize));
+new_text.extend(indent.chars());
 if insert_extra_newline {
 new_text = new_text.repeat(2);
 }
@@ -3070,14 +3070,21 @@ impl Editor {
 .buffer_snapshot
 .buffer_line_for_row(old_head.row)
 {
-let indent_column =
-buffer.indent_column_for_line(line_buffer_range.start.row);
+let indent_size = buffer.indent_size_for_line(line_buffer_range.start.row);
 let language_name = buffer.language().map(|language| language.name());
-let indent = cx.global::<Settings>().tab_size(language_name.as_deref());
-if old_head.column <= indent_column && old_head.column > 0 {
+let indent_len = match indent_size.kind {
+IndentKind::Space => {
+cx.global::<Settings>().tab_size(language_name.as_deref())
+}
+IndentKind::Tab => 1,
+};
+if old_head.column <= indent_size.len && old_head.column > 0 {
 new_head = cmp::min(
 new_head,
-Point::new(old_head.row, ((old_head.column - 1) / indent) * indent),
+Point::new(
+old_head.row,
+((old_head.column - 1) / indent_len) * indent_len,
+),
 );
 }
 }
@@ -3128,21 +3135,27 @@ impl Editor {
 for selection in &mut selections {
 let language_name =
 buffer.language_at(selection.start, cx).map(|l| l.name());
-let tab_size = cx.global::<Settings>().tab_size(language_name.as_deref());
+let settings = cx.global::<Settings>();
+let tab_size = if settings.hard_tabs(language_name.as_deref()) {
+IndentSize::tab()
+} else {
+let tab_size = settings.tab_size(language_name.as_deref());
 let char_column = buffer
 .read(cx)
 .text_for_range(Point::new(selection.start.row, 0)..selection.start)
 .flat_map(str::chars)
 .count();
 let chars_to_next_tab_stop = tab_size - (char_column as u32 % tab_size);
+IndentSize::spaces(chars_to_next_tab_stop)
+};
 buffer.edit(
 [(
 selection.start..selection.start,
-" ".repeat(chars_to_next_tab_stop as usize),
+tab_size.chars().collect::<String>(),
 )],
 cx,
 );
-selection.start.column += chars_to_next_tab_stop;
+selection.start.column += tab_size.len;
 selection.end = selection.start;
 }
 });
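
Note: the Tab handler above now produces an IndentSize rather than inserting raw spaces. A standalone sketch of the soft-tab arithmetic it relies on (hypothetical helper name, not the editor's API):

    // From the cursor's character column, how many spaces reach the next tab stop.
    fn chars_to_next_tab_stop(char_column: u32, tab_size: u32) -> u32 {
        // A full tab_size when already on a stop, otherwise just the remainder.
        tab_size - (char_column % tab_size)
    }

    fn main() {
        assert_eq!(chars_to_next_tab_stop(0, 4), 4);
        assert_eq!(chars_to_next_tab_stop(3, 4), 1);
        assert_eq!(chars_to_next_tab_stop(4, 4), 4);
    }
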
@@ -3163,7 +3176,14 @@ impl Editor {
 let snapshot = buffer.snapshot(cx);
 for selection in &mut selections {
 let language_name = buffer.language_at(selection.start, cx).map(|l| l.name());
-let tab_size = cx.global::<Settings>().tab_size(language_name.as_deref());
+let settings = &cx.global::<Settings>();
+let tab_size = settings.tab_size(language_name.as_deref());
+let indent_kind = if settings.hard_tabs(language_name.as_deref()) {
+IndentKind::Tab
+} else {
+IndentKind::Space
+};
+
 let mut start_row = selection.start.row;
 let mut end_row = selection.end.row + 1;

@@ -3187,26 +3207,35 @@ impl Editor {
 }

 for row in start_row..end_row {
-let indent_column = snapshot.indent_column_for_line(row);
-let columns_to_next_tab_stop = tab_size - (indent_column % tab_size);
+let current_indent = snapshot.indent_size_for_line(row);
+let indent_delta = match (current_indent.kind, indent_kind) {
+(IndentKind::Space, IndentKind::Space) => {
+let columns_to_next_tab_stop =
+tab_size - (current_indent.len % tab_size);
+IndentSize::spaces(columns_to_next_tab_stop)
+}
+(IndentKind::Tab, IndentKind::Space) => IndentSize::spaces(tab_size),
+(_, IndentKind::Tab) => IndentSize::tab(),
+};
+
 let row_start = Point::new(row, 0);
 buffer.edit(
 [(
 row_start..row_start,
-" ".repeat(columns_to_next_tab_stop as usize),
+indent_delta.chars().collect::<String>(),
 )],
 cx,
 );

 // Update this selection's endpoints to reflect the indentation.
 if row == selection.start.row {
-selection.start.column += columns_to_next_tab_stop as u32;
+selection.start.column += indent_delta.len;
 }
 if row == selection.end.row {
-selection.end.column += columns_to_next_tab_stop as u32;
+selection.end.column += indent_delta.len as u32;
 }

-last_indent = Some((row, columns_to_next_tab_stop as u32));
+last_indent = Some((row, indent_delta.len));
 }
 }
 });
@@ -3239,12 +3268,19 @@ impl Editor {
 }

 for row in rows {
-let column = snapshot.indent_column_for_line(row);
-if column > 0 {
-let mut deletion_len = column % tab_size;
-if deletion_len == 0 {
-deletion_len = tab_size;
+let indent_size = snapshot.indent_size_for_line(row);
+if indent_size.len > 0 {
+let deletion_len = match indent_size.kind {
+IndentKind::Space => {
+let columns_to_prev_tab_stop = indent_size.len % tab_size;
+if columns_to_prev_tab_stop == 0 {
+tab_size
+} else {
+columns_to_prev_tab_stop
 }
+}
+IndentKind::Tab => 1,
+};
 deletion_ranges.push(Point::new(row, 0)..Point::new(row, deletion_len));
 last_outdent = Some(row);
 }
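
Note: the outdent rule introduced above differs by indent kind: with spaces it deletes back to the previous tab stop, with hard tabs it deletes exactly one tab character. A standalone sketch mirroring the match in the hunk (the enum is a local copy):

    #[derive(Clone, Copy)]
    enum IndentKind { Space, Tab } // local copy of the kind added in this commit

    // Number of characters the Outdent (tab_prev) action removes at column 0.
    fn outdent_deletion_len(kind: IndentKind, indent_len: u32, tab_size: u32) -> u32 {
        match kind {
            IndentKind::Space => {
                let columns_to_prev_tab_stop = indent_len % tab_size;
                if columns_to_prev_tab_stop == 0 {
                    tab_size
                } else {
                    columns_to_prev_tab_stop
                }
            }
            IndentKind::Tab => 1,
        }
    }

    fn main() {
        assert_eq!(outdent_deletion_len(IndentKind::Space, 6, 4), 2); // 6 -> 4
        assert_eq!(outdent_deletion_len(IndentKind::Space, 8, 4), 4); // 8 -> 4
        assert_eq!(outdent_deletion_len(IndentKind::Tab, 3, 4), 1);   // one '\t'
    }
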
@@ -4558,7 +4594,7 @@ impl Editor {
 continue;
 }

-let start = Point::new(row, snapshot.indent_column_for_line(row));
+let start = Point::new(row, snapshot.indent_size_for_line(row).len);
 let mut line_bytes = snapshot
 .bytes_in_range(start..snapshot.max_point())
 .flatten()
@@ -7712,6 +7748,88 @@ mod tests {
 four"});
 }

+#[gpui::test]
+async fn test_indent_outdent_with_hard_tabs(cx: &mut gpui::TestAppContext) {
+let mut cx = EditorTestContext::new(cx).await;
+cx.update(|cx| {
+cx.update_global::<Settings, _, _>(|settings, _| {
+settings.hard_tabs = true;
+});
+});
+
+// select two ranges on one line
+cx.set_state(indoc! {"
+[one} [two}
+three
+four"});
+cx.update_editor(|e, cx| e.tab(&Tab, cx));
+cx.assert_editor_state(indoc! {"
+\t[one} [two}
+three
+four"});
+cx.update_editor(|e, cx| e.tab(&Tab, cx));
+cx.assert_editor_state(indoc! {"
+\t\t[one} [two}
+three
+four"});
+cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+cx.assert_editor_state(indoc! {"
+\t[one} [two}
+three
+four"});
+cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+cx.assert_editor_state(indoc! {"
+[one} [two}
+three
+four"});
+
+// select across a line ending
+cx.set_state(indoc! {"
+one two
+t[hree
+}four"});
+cx.update_editor(|e, cx| e.tab(&Tab, cx));
+cx.assert_editor_state(indoc! {"
+one two
+\tt[hree
+}four"});
+cx.update_editor(|e, cx| e.tab(&Tab, cx));
+cx.assert_editor_state(indoc! {"
+one two
+\t\tt[hree
+}four"});
+cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+cx.assert_editor_state(indoc! {"
+one two
+\tt[hree
+}four"});
+cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+cx.assert_editor_state(indoc! {"
+one two
+t[hree
+}four"});
+
+// Ensure that indenting/outdenting works when the cursor is at column 0.
+cx.set_state(indoc! {"
+one two
+|three
+four"});
+cx.assert_editor_state(indoc! {"
+one two
+|three
+four"});
+cx.update_editor(|e, cx| e.tab(&Tab, cx));
+cx.assert_editor_state(indoc! {"
+one two
+\t|three
+four"});
+cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+cx.assert_editor_state(indoc! {"
+one two
+|three
+four"});
+}
+
 #[gpui::test]
 fn test_indent_outdent_with_excerpts(cx: &mut gpui::MutableAppContext) {
 cx.set_global(
@@ -106,7 +106,7 @@ pub fn line_beginning(
 let soft_line_start = map.clip_point(DisplayPoint::new(display_point.row(), 0), Bias::Right);
 let indent_start = Point::new(
 point.row,
-map.buffer_snapshot.indent_column_for_line(point.row),
+map.buffer_snapshot.indent_size_for_line(point.row).len,
 )
 .to_display_point(map);
 let line_start = map.prev_line_boundary(point).1;
@@ -8,8 +8,8 @@ use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
 pub use language::Completion;
 use language::{
 char_kind, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, DiagnosticEntry, Event, File,
-Language, OffsetRangeExt, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _,
-ToPointUtf16 as _, TransactionId,
+IndentSize, Language, OffsetRangeExt, Outline, OutlineItem, Selection, ToOffset as _,
+ToPoint as _, ToPointUtf16 as _, TransactionId,
 };
 use settings::Settings;
 use smallvec::SmallVec;
@@ -341,9 +341,14 @@ impl MultiBuffer {

 if let Some(buffer) = self.as_singleton() {
 return buffer.update(cx, |buffer, cx| {
-let language_name = buffer.language().map(|language| language.name());
-let indent_size = cx.global::<Settings>().tab_size(language_name.as_deref());
 if autoindent {
+let language_name = buffer.language().map(|language| language.name());
+let settings = cx.global::<Settings>();
+let indent_size = if settings.hard_tabs(language_name.as_deref()) {
+IndentSize::tab()
+} else {
+IndentSize::spaces(settings.tab_size(language_name.as_deref()))
+};
 buffer.edit_with_autoindent(edits, indent_size, cx);
 } else {
 buffer.edit(edits, cx);
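
Note: this is where the hard_tabs setting first feeds into autoindent: the edit uses a single tab or tab_size spaces. A standalone sketch with simplified stand-ins for the commit's Settings and IndentSize types:

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum IndentKind { Space, Tab }

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct IndentSize { len: u32, kind: IndentKind }

    // Simplified stand-in for the settings lookups used above.
    struct Settings { hard_tabs: bool, tab_size: u32 }

    // One hard tab when hard_tabs is on, otherwise tab_size spaces.
    fn autoindent_size(settings: &Settings) -> IndentSize {
        if settings.hard_tabs {
            IndentSize { len: 1, kind: IndentKind::Tab }
        } else {
            IndentSize { len: settings.tab_size, kind: IndentKind::Space }
        }
    }

    fn main() {
        let hard = Settings { hard_tabs: true, tab_size: 4 };
        let soft = Settings { hard_tabs: false, tab_size: 4 };
        assert_eq!(autoindent_size(&hard), IndentSize { len: 1, kind: IndentKind::Tab });
        assert_eq!(autoindent_size(&soft), IndentSize { len: 4, kind: IndentKind::Space });
    }
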
@@ -462,9 +467,15 @@ impl MultiBuffer {
 }
 }
 let language_name = buffer.language().map(|l| l.name());
-let indent_size = cx.global::<Settings>().tab_size(language_name.as_deref());

 if autoindent {
+let settings = cx.global::<Settings>();
+let indent_size = if settings.hard_tabs(language_name.as_deref()) {
+IndentSize::tab()
+} else {
+IndentSize::spaces(settings.tab_size(language_name.as_deref()))
+};
+
 buffer.edit_with_autoindent(deletions, indent_size, cx);
 buffer.edit_with_autoindent(insertions, indent_size, cx);
 } else {
@@ -1838,14 +1849,16 @@ impl MultiBufferSnapshot {
 }
 }

-pub fn indent_column_for_line(&self, row: u32) -> u32 {
+pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
 if let Some((buffer, range)) = self.buffer_line_for_row(row) {
-buffer
-.indent_column_for_line(range.start.row)
+let mut size = buffer.indent_size_for_line(range.start.row);
+size.len = size
+.len
 .min(range.end.column)
-.saturating_sub(range.start.column)
+.saturating_sub(range.start.column);
+size
 } else {
-0
+IndentSize::spaces(0)
 }
 }

@@ -109,9 +109,10 @@ impl<'a> EditorTestContext<'a> {
 self.editor.update(self.cx, update)
 }

-pub fn editor_text(&mut self) -> String {
-self.editor
-.update(self.cx, |editor, cx| editor.snapshot(cx).text())
+pub fn buffer_text(&mut self) -> String {
+self.editor.read_with(self.cx, |editor, cx| {
+editor.buffer.read(cx).snapshot(cx).text()
+})
 }

 pub fn simulate_keystroke(&mut self, keystroke_text: &str) {
@@ -171,10 +172,10 @@ impl<'a> EditorTestContext<'a> {
 &text,
 vec!['|'.into(), ('[', '}').into(), ('{', ']').into()],
 );
-let editor_text = self.editor_text();
+let buffer_text = self.buffer_text();
 assert_eq!(
-editor_text, unmarked_text,
-"Unmarked text doesn't match editor text"
+buffer_text, unmarked_text,
+"Unmarked text doesn't match buffer text"
 );

 let expected_empty_selections = selection_ranges.remove(&'|'.into()).unwrap_or_default();
@@ -254,7 +255,7 @@ impl<'a> EditorTestContext<'a> {
 let actual_selections =
 self.insert_markers(&empty_selections, &reverse_selections, &forward_selections);

-let unmarked_text = self.editor_text();
+let unmarked_text = self.buffer_text();
 let all_eq: Result<(), SetEqError<String>> =
 set_eq!(expected_empty_selections, empty_selections)
 .map_err(|err| {
@@ -322,7 +323,7 @@ impl<'a> EditorTestContext<'a> {
 reverse_selections: &Vec<Range<usize>>,
 forward_selections: &Vec<Range<usize>>,
 ) -> String {
-let mut editor_text_with_selections = self.editor_text();
+let mut editor_text_with_selections = self.buffer_text();
 let mut selection_marks = BTreeMap::new();
 for range in empty_selections {
 selection_marks.insert(&range.start, '|');
@@ -22,7 +22,7 @@ use std::{
 collections::{BTreeMap, HashMap},
 ffi::OsString,
 future::Future,
-iter::{Iterator, Peekable},
+iter::{self, Iterator, Peekable},
 mem,
 ops::{Deref, DerefMut, Range},
 path::{Path, PathBuf},
@@ -82,6 +82,18 @@ pub struct BufferSnapshot {
 parse_count: usize,
 }

+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct IndentSize {
+pub len: u32,
+pub kind: IndentKind,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum IndentKind {
+Space,
+Tab,
+}
+
 #[derive(Clone, Debug)]
 struct SelectionSet {
 line_mode: bool,
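
Note: these two types are the heart of the change; an indent is now a length plus a kind instead of a bare column count. A minimal standalone illustration mirroring the definitions above:

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct IndentSize {
        pub len: u32,         // number of indent characters on the line
        pub kind: IndentKind, // what those characters are
    }

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub enum IndentKind {
        Space,
        Tab,
    }

    fn main() {
        // Four spaces and one tab may render the same width,
        // but they are now distinct values:
        let soft = IndentSize { len: 4, kind: IndentKind::Space };
        let hard = IndentSize { len: 1, kind: IndentKind::Tab };
        assert_ne!(soft, hard);
    }
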
@@ -215,7 +227,7 @@ struct AutoindentRequest {
 before_edit: BufferSnapshot,
 edited: Vec<Anchor>,
 inserted: Option<Vec<Range<Anchor>>>,
-indent_size: u32,
+indent_size: IndentSize,
 }

 #[derive(Debug)]
@@ -723,18 +735,18 @@ impl Buffer {
 }

 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
-if let Some(indent_columns) = self.compute_autoindents() {
-let indent_columns = cx.background().spawn(indent_columns);
+if let Some(indent_sizes) = self.compute_autoindents() {
+let indent_sizes = cx.background().spawn(indent_sizes);
 match cx
 .background()
-.block_with_timeout(Duration::from_micros(500), indent_columns)
+.block_with_timeout(Duration::from_micros(500), indent_sizes)
 {
-Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
-Err(indent_columns) => {
+Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
+Err(indent_sizes) => {
 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
-let indent_columns = indent_columns.await;
+let indent_sizes = indent_sizes.await;
 this.update(&mut cx, |this, cx| {
-this.apply_autoindents(indent_columns, cx);
+this.apply_autoindents(indent_sizes, cx);
 });
 }));
 }
@@ -742,7 +754,7 @@ impl Buffer {
 }
 }

-fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
+fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
 let max_rows_between_yields = 100;
 let snapshot = self.snapshot();
 if snapshot.language.is_none()
@@ -754,7 +766,7 @@ impl Buffer {

 let autoindent_requests = self.autoindent_requests.clone();
 Some(async move {
-let mut indent_columns = BTreeMap::new();
+let mut indent_sizes = BTreeMap::new();
 for request in autoindent_requests {
 let old_to_new_rows = request
 .edited
@@ -768,7 +780,7 @@ impl Buffer {
 )
 .collect::<BTreeMap<u32, u32>>();

-let mut old_suggestions = HashMap::<u32, u32>::default();
+let mut old_suggestions = HashMap::<u32, IndentSize>::default();
 let old_edited_ranges =
 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
 for old_edited_range in old_edited_ranges {
@@ -778,23 +790,19 @@ impl Buffer {
 .into_iter()
 .flatten();
 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
-let indentation_basis = old_to_new_rows
+let mut suggested_indent = old_to_new_rows
 .get(&suggestion.basis_row)
 .and_then(|from_row| old_suggestions.get(from_row).copied())
 .unwrap_or_else(|| {
 request
 .before_edit
-.indent_column_for_line(suggestion.basis_row)
+.indent_size_for_line(suggestion.basis_row)
 });
-let delta = if suggestion.indent {
-request.indent_size
-} else {
-0
-};
-old_suggestions.insert(
-*old_to_new_rows.get(&old_row).unwrap(),
-indentation_basis + delta,
-);
+if suggestion.indent {
+suggested_indent += request.indent_size;
+}
+old_suggestions
+.insert(*old_to_new_rows.get(&old_row).unwrap(), suggested_indent);
 }
 yield_now().await;
 }
@@ -809,23 +817,18 @@ impl Buffer {
 .into_iter()
 .flatten();
 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
-let delta = if suggestion.indent {
-request.indent_size
-} else {
-0
-};
-let new_indentation = indent_columns
+let mut suggested_indent = indent_sizes
 .get(&suggestion.basis_row)
 .copied()
-.unwrap_or_else(|| {
-snapshot.indent_column_for_line(suggestion.basis_row)
-})
-+ delta;
+.unwrap_or_else(|| snapshot.indent_size_for_line(suggestion.basis_row));
+if suggestion.indent {
+suggested_indent += request.indent_size;
+}
 if old_suggestions
 .get(&new_row)
-.map_or(true, |old_indentation| new_indentation != *old_indentation)
+.map_or(true, |old_indentation| suggested_indent != *old_indentation)
 {
-indent_columns.insert(new_row, new_indentation);
+indent_sizes.insert(new_row, suggested_indent);
 }
 }
 yield_now().await;
@@ -845,56 +848,65 @@ impl Buffer {
 .into_iter()
 .flatten();
 for (row, suggestion) in inserted_row_range.zip(suggestions) {
-let delta = if suggestion.indent {
-request.indent_size
-} else {
-0
-};
-let new_indentation = indent_columns
+let mut suggested_indent = indent_sizes
 .get(&suggestion.basis_row)
 .copied()
 .unwrap_or_else(|| {
-snapshot.indent_column_for_line(suggestion.basis_row)
-})
-+ delta;
-indent_columns.insert(row, new_indentation);
+snapshot.indent_size_for_line(suggestion.basis_row)
+});
+if suggestion.indent {
+suggested_indent += request.indent_size;
+}
+indent_sizes.insert(row, suggested_indent);
 }
 yield_now().await;
 }
 }
 }
-indent_columns
+
+indent_sizes
 })
 }

 fn apply_autoindents(
 &mut self,
-indent_columns: BTreeMap<u32, u32>,
+indent_sizes: BTreeMap<u32, IndentSize>,
 cx: &mut ModelContext<Self>,
 ) {
 self.autoindent_requests.clear();
 self.start_transaction();
-for (row, indent_column) in &indent_columns {
-self.set_indent_column_for_line(*row, *indent_column, cx);
+for (row, indent_size) in &indent_sizes {
+self.set_indent_size_for_line(*row, *indent_size, cx);
 }
 self.end_transaction(cx);
 }

-fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
-let current_column = self.indent_column_for_line(row);
-if column > current_column {
+fn set_indent_size_for_line(
+&mut self,
+row: u32,
+size: IndentSize,
+cx: &mut ModelContext<Self>,
+) {
+let current_size = indent_size_for_line(&self, row);
+if size.kind != current_size.kind && current_size.len > 0 {
+return;
+}
+
+if size.len > current_size.len {
 let offset = Point::new(row, 0).to_offset(&*self);
 self.edit(
 [(
 offset..offset,
-" ".repeat((column - current_column) as usize),
+iter::repeat(size.char())
+.take((size.len - current_size.len) as usize)
+.collect::<String>(),
 )],
 cx,
 );
-} else if column < current_column {
+} else if size.len < current_size.len {
 self.edit(
 [(
-Point::new(row, 0)..Point::new(row, current_column - column),
+Point::new(row, 0)..Point::new(row, current_size.len - size.len),
 "",
 )],
 cx,
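
Note: set_indent_size_for_line above edits only the difference between the current and desired indent, and gives up when the kinds disagree on an already-indented line. A standalone sketch of that decision (hypothetical helper, not the buffer API):

    #[derive(Clone, Copy, PartialEq)]
    enum IndentKind { Space, Tab }

    #[derive(Clone, Copy)]
    struct IndentSize { len: u32, kind: IndentKind }

    enum IndentEdit {
        Keep,             // kinds disagree, or the sizes already match
        Insert(String),   // prepend these characters at column 0
        DeleteFirst(u32), // delete this many characters from column 0
    }

    fn plan_indent_edit(current: IndentSize, desired: IndentSize) -> IndentEdit {
        if desired.kind != current.kind && current.len > 0 {
            return IndentEdit::Keep; // don't mix kinds on a non-empty indent
        }
        if desired.len > current.len {
            let ch = match desired.kind {
                IndentKind::Space => ' ',
                IndentKind::Tab => '\t',
            };
            IndentEdit::Insert(std::iter::repeat(ch).take((desired.len - current.len) as usize).collect())
        } else if desired.len < current.len {
            IndentEdit::DeleteFirst(current.len - desired.len)
        } else {
            IndentEdit::Keep
        }
    }

    fn main() {
        let current = IndentSize { len: 2, kind: IndentKind::Space };
        let desired = IndentSize { len: 4, kind: IndentKind::Space };
        match plan_indent_edit(current, desired) {
            IndentEdit::Insert(s) => assert_eq!(s, "  "), // grow by exactly two spaces
            _ => unreachable!(),
        }
    }
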
@@ -1084,7 +1096,7 @@ impl Buffer {
 pub fn edit_with_autoindent<I, S, T>(
 &mut self,
 edits_iter: I,
-indent_size: u32,
+indent_size: IndentSize,
 cx: &mut ModelContext<Self>,
 ) -> Option<clock::Local>
 where
@@ -1098,7 +1110,7 @@ impl Buffer {
 pub fn edit_internal<I, S, T>(
 &mut self,
 edits_iter: I,
-autoindent_size: Option<u32>,
+autoindent_size: Option<IndentSize>,
 cx: &mut ModelContext<Self>,
 ) -> Option<clock::Local>
 where
@@ -1500,33 +1512,34 @@ impl Deref for Buffer {
 }

 impl BufferSnapshot {
+pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
+indent_size_for_line(&self, row)
+}
+
 fn suggest_autoindents<'a>(
 &'a self,
 row_range: Range<u32>,
 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
-let mut query_cursor = QueryCursorHandle::new();
-if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
-let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
-
 // Get the "indentation ranges" that intersect this row range.
+let grammar = self.grammar()?;
+let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
+let mut query_cursor = QueryCursorHandle::new();
 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
 query_cursor.set_point_range(
 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
 ..Point::new(row_range.end, 0).to_ts_point(),
 );
-let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
+let mut indentation_ranges = Vec::<Range<Point>>::new();
 for mat in query_cursor.matches(
 &grammar.indents_query,
-tree.root_node(),
+self.tree.as_ref()?.root_node(),
 TextProvider(self.as_rope()),
 ) {
-let mut node_kind = "";
 let mut start: Option<Point> = None;
 let mut end: Option<Point> = None;
 for capture in mat.captures {
 if Some(capture.index) == indent_capture_ix {
-node_kind = capture.node.kind();
 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
 } else if Some(capture.index) == end_capture_ix {
@@ -1540,11 +1553,11 @@ impl BufferSnapshot {
 }

 let range = start..end;
-match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
-Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
+match indentation_ranges.binary_search_by_key(&range.start, |r| r.start) {
+Err(ix) => indentation_ranges.insert(ix, range),
 Ok(ix) => {
 let prev_range = &mut indentation_ranges[ix];
-prev_range.0.end = prev_range.0.end.max(range.end);
+prev_range.end = prev_range.end.max(range.end);
 }
 }
 }
@@ -1552,11 +1565,11 @@ impl BufferSnapshot {

 let mut prev_row = prev_non_blank_row.unwrap_or(0);
 Some(row_range.map(move |row| {
-let row_start = Point::new(row, self.indent_column_for_line(row));
+let row_start = Point::new(row, self.indent_size_for_line(row).len);

 let mut indent_from_prev_row = false;
 let mut outdent_to_row = u32::MAX;
-for (range, _node_kind) in &indentation_ranges {
+for range in &indentation_ranges {
 if range.start.row >= row {
 break;
 }
@@ -1594,9 +1607,6 @@ impl BufferSnapshot {
 prev_row = row;
 suggestion
 }))
-} else {
-None
-}
 }

 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
@@ -1989,6 +1999,22 @@ impl BufferSnapshot {
 }
 }

+pub fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
+let mut result = IndentSize::spaces(0);
+for c in text.chars_at(Point::new(row, 0)) {
+let kind = match c {
+' ' => IndentKind::Space,
+'\t' => IndentKind::Tab,
+_ => break,
+};
+if result.len == 0 {
+result.kind = kind;
+}
+result.len += 1;
+}
+result
+}
+
 impl Clone for BufferSnapshot {
 fn clone(&self) -> Self {
 Self {
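
Note: the free function added above scans a line's leading whitespace; the kind comes from the first indent character and len counts every leading space or tab. The same scan over a plain &str (the real function walks the buffer snapshot):

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum IndentKind { Space, Tab }

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct IndentSize { len: u32, kind: IndentKind }

    fn indent_size_of(line: &str) -> IndentSize {
        let mut result = IndentSize { len: 0, kind: IndentKind::Space };
        for c in line.chars() {
            let kind = match c {
                ' ' => IndentKind::Space,
                '\t' => IndentKind::Tab,
                _ => break,
            };
            if result.len == 0 {
                result.kind = kind; // the first indent character decides the kind
            }
            result.len += 1;
        }
        result
    }

    fn main() {
        assert_eq!(indent_size_of("    foo"), IndentSize { len: 4, kind: IndentKind::Space });
        assert_eq!(indent_size_of("\t\tfoo"), IndentSize { len: 2, kind: IndentKind::Tab });
        // Mixed indentation: the kind is taken from the first character,
        // but every leading space or tab still counts toward len.
        assert_eq!(indent_size_of("\t  foo"), IndentSize { len: 3, kind: IndentKind::Tab });
    }
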
@@ -2311,6 +2337,43 @@ impl Default for Diagnostic {
 }
 }

+impl IndentSize {
+pub fn spaces(len: u32) -> Self {
+Self {
+len,
+kind: IndentKind::Space,
+}
+}
+
+pub fn tab() -> Self {
+Self {
+len: 1,
+kind: IndentKind::Tab,
+}
+}
+
+pub fn chars(&self) -> impl Iterator<Item = char> {
+iter::repeat(self.char()).take(self.len as usize)
+}
+
+pub fn char(&self) -> char {
+match self.kind {
+IndentKind::Space => ' ',
+IndentKind::Tab => '\t',
+}
+}
+}
+
+impl std::ops::AddAssign for IndentSize {
+fn add_assign(&mut self, other: IndentSize) {
+if self.len == 0 {
+*self = other;
+} else if self.kind == other.kind {
+self.len += other.len;
+}
+}
+}
+
 impl Completion {
 pub fn sort_key(&self) -> (usize, &str) {
 let kind_key = match self.lsp_completion.kind {
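
Note: the AddAssign impl above is what lets autoindent stack one indent level on top of a basis line: an empty indent adopts the added value wholesale, the same kind accumulates, and a mismatched kind on a non-empty indent is ignored. A standalone mirror of that behavior:

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum IndentKind { Space, Tab }

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct IndentSize { len: u32, kind: IndentKind }

    impl std::ops::AddAssign for IndentSize {
        fn add_assign(&mut self, other: IndentSize) {
            if self.len == 0 {
                *self = other; // empty indent takes on the added size and kind
            } else if self.kind == other.kind {
                self.len += other.len; // same kind: lengths accumulate
            }
            // different kind on a non-empty indent: no change
        }
    }

    fn main() {
        let spaces = |len| IndentSize { len, kind: IndentKind::Space };
        let tab = IndentSize { len: 1, kind: IndentKind::Tab };

        let mut a = spaces(0);
        a += tab;
        assert_eq!(a, tab); // empty indent adopts the tab

        let mut b = spaces(4);
        b += spaces(4);
        assert_eq!(b, spaces(8)); // same kind adds up

        let mut c = spaces(4);
        c += tab;
        assert_eq!(c, spaces(4)); // mismatched kind is ignored
    }
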
@@ -75,9 +75,10 @@ pub trait LspAdapter: 'static + Send + Sync {
 &self,
 version: Box<dyn 'static + Send + Any>,
 http: Arc<dyn HttpClient>,
-container_dir: PathBuf,
+container_dir: Arc<Path>,
 ) -> BoxFuture<'static, Result<PathBuf>>;
-fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>>;
+fn cached_server_binary(&self, container_dir: Arc<Path>)
+-> BoxFuture<'static, Option<PathBuf>>;

 fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}

@@ -366,7 +367,7 @@ async fn get_server_binary_path(
 download_dir: Arc<Path>,
 statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
 ) -> Result<PathBuf> {
-let container_dir = download_dir.join(adapter.name().0.as_ref());
+let container_dir: Arc<Path> = download_dir.join(adapter.name().0.as_ref()).into();
 if !container_dir.exists() {
 smol::fs::create_dir_all(&container_dir)
 .await
@@ -403,6 +404,7 @@ async fn fetch_latest_server_binary_path(
 container_dir: &Path,
 lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
 ) -> Result<PathBuf> {
+let container_dir: Arc<Path> = container_dir.into();
 lsp_binary_statuses_tx
 .broadcast((
 language.clone(),
@@ -416,7 +418,7 @@ async fn fetch_latest_server_binary_path(
 .broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
 .await?;
 let path = adapter
-.fetch_server_binary(version_info, http_client, container_dir.to_path_buf())
+.fetch_server_binary(version_info, http_client, container_dir.clone())
 .await?;
 lsp_binary_statuses_tx
 .broadcast((language.clone(), LanguageServerBinaryStatus::Downloaded))
@@ -661,12 +663,12 @@ impl LspAdapter for FakeLspAdapter {
 &self,
 _: Box<dyn 'static + Send + Any>,
 _: Arc<dyn HttpClient>,
-_: PathBuf,
+_: Arc<Path>,
 ) -> BoxFuture<'static, Result<PathBuf>> {
 unreachable!();
 }

-fn cached_server_binary(&self, _: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
+fn cached_server_binary(&self, _: Arc<Path>) -> BoxFuture<'static, Option<PathBuf>> {
 unreachable!();
 }

@@ -571,20 +571,77 @@ fn test_range_for_syntax_ancestor(cx: &mut MutableAppContext) {
 }

 #[gpui::test]
-fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
+fn test_autoindent_with_soft_tabs(cx: &mut MutableAppContext) {
 cx.add_model(|cx| {
 let text = "fn a() {}";
 let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

-buffer.edit_with_autoindent([(8..8, "\n\n")], 4, cx);
+buffer.edit_with_autoindent([(8..8, "\n\n")], IndentSize::spaces(4), cx);
 assert_eq!(buffer.text(), "fn a() {\n    \n}");

-buffer.edit_with_autoindent([(Point::new(1, 4)..Point::new(1, 4), "b()\n")], 4, cx);
+buffer.edit_with_autoindent(
+[(Point::new(1, 4)..Point::new(1, 4), "b()\n")],
+IndentSize::spaces(4),
+cx,
+);
 assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

-buffer.edit_with_autoindent([(Point::new(2, 4)..Point::new(2, 4), ".c")], 4, cx);
+// Create a field expression on a new line, causing that line
+// to be indented.
+buffer.edit_with_autoindent(
+[(Point::new(2, 4)..Point::new(2, 4), ".c")],
+IndentSize::spaces(4),
+cx,
+);
 assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");
+
+// Remove the dot so that the line is no longer a field expression,
+// causing the line to be outdented.
+buffer.edit_with_autoindent(
+[(Point::new(2, 8)..Point::new(2, 9), "")],
+IndentSize::spaces(4),
+cx,
+);
+assert_eq!(buffer.text(), "fn a() {\n    b()\n    c\n}");
+
+buffer
+});
+}
+
+#[gpui::test]
+fn test_autoindent_with_hard_tabs(cx: &mut MutableAppContext) {
+cx.add_model(|cx| {
+let text = "fn a() {}";
+let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
+
+buffer.edit_with_autoindent([(8..8, "\n\n")], IndentSize::tab(), cx);
+assert_eq!(buffer.text(), "fn a() {\n\t\n}");
+
+buffer.edit_with_autoindent(
+[(Point::new(1, 1)..Point::new(1, 1), "b()\n")],
+IndentSize::tab(),
+cx,
+);
+assert_eq!(buffer.text(), "fn a() {\n\tb()\n\t\n}");
+
+// Create a field expression on a new line, causing that line
+// to be indented.
+buffer.edit_with_autoindent(
+[(Point::new(2, 1)..Point::new(2, 1), ".c")],
+IndentSize::tab(),
+cx,
+);
+assert_eq!(buffer.text(), "fn a() {\n\tb()\n\t\t.c\n}");
+
+// Remove the dot so that the line is no longer a field expression,
+// causing the line to be outdented.
+buffer.edit_with_autoindent(
+[(Point::new(2, 2)..Point::new(2, 3), "")],
+IndentSize::tab(),
+cx,
+);
+assert_eq!(buffer.text(), "fn a() {\n\tb()\n\tc\n}");

 buffer
 });
 }
@@ -609,7 +666,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
 (empty(Point::new(1, 1)), "()"),
 (empty(Point::new(2, 1)), "()"),
 ],
-4,
+IndentSize::spaces(4),
 cx,
 );
 assert_eq!(
@@ -630,7 +687,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
 (empty(Point::new(1, 1)), "\n.f\n.g"),
 (empty(Point::new(2, 1)), "\n.f\n.g"),
 ],
-4,
+IndentSize::spaces(4),
 cx,
 );
 assert_eq!(
@@ -653,13 +710,21 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
 cx.add_model(|cx| {
 let text = "fn a() {\n    {\n        b()?\n    }\n\n    Ok(())\n}";
 let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
-buffer.edit_with_autoindent([(Point::new(3, 4)..Point::new(3, 5), "")], 4, cx);
+buffer.edit_with_autoindent(
+[(Point::new(3, 4)..Point::new(3, 5), "")],
+IndentSize::spaces(4),
+cx,
+);
 assert_eq!(
 buffer.text(),
 "fn a() {\n {\n b()?\n \n\n Ok(())\n}"
 );

-buffer.edit_with_autoindent([(Point::new(3, 0)..Point::new(3, 12), "")], 4, cx);
+buffer.edit_with_autoindent(
+[(Point::new(3, 0)..Point::new(3, 12), "")],
+IndentSize::spaces(4),
+cx,
+);
 assert_eq!(
 buffer.text(),
 "fn a() {\n {\n b()?\n\n\n Ok(())\n}"
@@ -678,7 +743,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {

 let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

-buffer.edit_with_autoindent([(5..5, "\nb")], 4, cx);
+buffer.edit_with_autoindent([(5..5, "\nb")], IndentSize::spaces(4), cx);
 assert_eq!(
 buffer.text(),
 "
@@ -690,7 +755,11 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {

 // The indentation suggestion changed because `@end` node (a close paren)
 // is now at the beginning of the line.
-buffer.edit_with_autoindent([(Point::new(1, 4)..Point::new(1, 5), "")], 4, cx);
+buffer.edit_with_autoindent(
+[(Point::new(1, 4)..Point::new(1, 5), "")],
+IndentSize::spaces(4),
+cx,
+);
 assert_eq!(
 buffer.text(),
 "
@@ -709,7 +778,7 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut MutableAppContext) {
 cx.add_model(|cx| {
 let text = "a\nb";
 let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
-buffer.edit_with_autoindent([(0..1, "\n"), (2..3, "\n")], 4, cx);
+buffer.edit_with_autoindent([(0..1, "\n"), (2..3, "\n")], IndentSize::spaces(4), cx);
 assert_eq!(buffer.text(), "\n\n\n");
 buffer
 });
@@ -1990,6 +1990,20 @@ impl Project {
 })
 .detach();

+// Even though we don't have handling for these requests, respond to them to
+// avoid stalling any language server like `gopls` which waits for a response
+// to these requests when initializing.
+language_server
+.on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
+Ok(())
+})
+.detach();
+language_server
+.on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
+Ok(())
+})
+.detach();
+
 language_server
 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
 let this = this.downgrade();
@@ -2247,7 +2261,7 @@ impl Project {
 return;
 };
 match progress {
-lsp::WorkDoneProgress::Begin(_) => {
+lsp::WorkDoneProgress::Begin(report) => {
 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
 language_server_status.pending_diagnostic_updates += 1;
 if language_server_status.pending_diagnostic_updates == 1 {
@@ -2260,11 +2274,22 @@ impl Project {
 );
 }
 } else {
-self.on_lsp_work_start(server_id, token.clone(), cx);
+self.on_lsp_work_start(
+server_id,
+token.clone(),
+LanguageServerProgress {
+message: report.message.clone(),
+percentage: report.percentage.map(|p| p as usize),
+last_update_at: Instant::now(),
+},
+cx,
+);
 self.broadcast_language_server_update(
 server_id,
 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
 token,
+message: report.message,
+percentage: report.percentage.map(|p| p as u32),
 }),
 );
 }
@@ -2322,17 +2347,11 @@ impl Project {
 &mut self,
 language_server_id: usize,
 token: String,
+progress: LanguageServerProgress,
 cx: &mut ModelContext<Self>,
 ) {
 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
-status.pending_work.insert(
-token,
-LanguageServerProgress {
-message: None,
-percentage: None,
-last_update_at: Instant::now(),
-},
-);
+status.pending_work.insert(token, progress);
 cx.notify();
 }
 }
@@ -2345,7 +2364,21 @@ impl Project {
 cx: &mut ModelContext<Self>,
 ) {
 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
-status.pending_work.insert(token, progress);
+let entry = status
+.pending_work
+.entry(token)
+.or_insert(LanguageServerProgress {
+message: Default::default(),
+percentage: Default::default(),
+last_update_at: progress.last_update_at,
+});
+if progress.message.is_some() {
+entry.message = progress.message;
+}
+if progress.percentage.is_some() {
+entry.percentage = progress.percentage;
+}
+entry.last_update_at = progress.last_update_at;
 cx.notify();
 }
 }
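
Note: on_lsp_work_progress above now merges partial $/progress reports into the stored entry, so a report carrying only a percentage no longer erases the previous message. A standalone sketch of the merge rule with a simplified stand-in for LanguageServerProgress:

    #[derive(Clone, Debug, PartialEq)]
    struct Progress {
        message: Option<String>,
        percentage: Option<usize>,
    }

    // Only the fields present in the incoming report replace the stored ones.
    fn merge(entry: &mut Progress, report: Progress) {
        if report.message.is_some() {
            entry.message = report.message;
        }
        if report.percentage.is_some() {
            entry.percentage = report.percentage;
        }
    }

    fn main() {
        let mut entry = Progress { message: Some("indexing".into()), percentage: Some(10) };
        merge(&mut entry, Progress { message: None, percentage: Some(60) });
        // The message survives; the percentage is updated.
        assert_eq!(entry, Progress { message: Some("indexing".into()), percentage: Some(60) });
    }
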
@@ -3109,6 +3142,16 @@ impl Project {
 Ok(completions
 .into_iter()
 .filter_map(|lsp_completion| {
+// For now, we can only handle additional edits if they are returned
+// when resolving the completion, not if they are present initially.
+if lsp_completion
+.additional_text_edits
+.as_ref()
+.map_or(false, |edits| !edits.is_empty())
+{
+return None;
+}
+
 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
 // If the language server provides a range to overwrite, then
 // check that the range is valid.
@@ -4560,7 +4603,16 @@ impl Project {
 {
 proto::update_language_server::Variant::WorkStart(payload) => {
 this.update(&mut cx, |this, cx| {
-this.on_lsp_work_start(language_server_id, payload.token, cx);
+this.on_lsp_work_start(
+language_server_id,
+payload.token,
+LanguageServerProgress {
+message: payload.message,
+percentage: payload.percentage.map(|p| p as usize),
+last_update_at: Instant::now(),
+},
+cx,
+);
 })
 }
 proto::update_language_server::Variant::WorkProgress(payload) => {
@@ -555,6 +555,8 @@ message UpdateLanguageServer {

 message LspWorkStart {
 string token = 1;
+optional string message = 2;
+optional uint32 percentage = 3;
 }

 message LspWorkProgress {
@@ -25,6 +25,7 @@ pub struct Settings {
 pub default_buffer_font_size: f32,
 pub vim_mode: bool,
 pub tab_size: u32,
+pub hard_tabs: bool,
 pub soft_wrap: SoftWrap,
 pub preferred_line_length: u32,
 pub format_on_save: bool,
@@ -36,6 +37,7 @@ pub struct Settings {
 #[derive(Clone, Debug, Default, Deserialize, JsonSchema)]
 pub struct LanguageOverride {
 pub tab_size: Option<u32>,
+pub hard_tabs: Option<bool>,
 pub soft_wrap: Option<SoftWrap>,
 pub preferred_line_length: Option<u32>,
 pub format_on_save: Option<bool>,
@@ -84,6 +86,7 @@ impl Settings {
 default_buffer_font_size: 15.,
 vim_mode: false,
 tab_size: 4,
+hard_tabs: false,
 soft_wrap: SoftWrap::None,
 preferred_line_length: 80,
 language_overrides: Default::default(),
@@ -111,6 +114,13 @@ impl Settings {
 .unwrap_or(self.tab_size)
 }

+pub fn hard_tabs(&self, language: Option<&str>) -> bool {
+language
+.and_then(|language| self.language_overrides.get(language))
+.and_then(|settings| settings.hard_tabs)
+.unwrap_or(self.hard_tabs)
+}
+
 pub fn soft_wrap(&self, language: Option<&str>) -> SoftWrap {
 language
 .and_then(|language| self.language_overrides.get(language))
|
||||||
|
@ -147,6 +157,7 @@ impl Settings {
|
||||||
default_buffer_font_size: 14.,
|
default_buffer_font_size: 14.,
|
||||||
vim_mode: false,
|
vim_mode: false,
|
||||||
tab_size: 4,
|
tab_size: 4,
|
||||||
|
hard_tabs: false,
|
||||||
soft_wrap: SoftWrap::None,
|
soft_wrap: SoftWrap::None,
|
||||||
preferred_line_length: 80,
|
preferred_line_length: 80,
|
||||||
format_on_save: true,
|
format_on_save: true,
|
||||||
|
|
|
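Aside (not part of the diff): how the new `hard_tabs` lookup resolves, assuming a `Settings` value built from the defaults above plus a "Go" override with `hard_tabs: Some(true)` (as added to the default language overrides later in this commit).

    let go_tabs = settings.hard_tabs(Some("Go"));     // true: the language override wins
    let rust_tabs = settings.hard_tabs(Some("Rust")); // false: no override, falls back to the global flag
    let any_tabs = settings.hard_tabs(None);          // false: the global `hard_tabs` field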
@@ -1642,18 +1642,6 @@ impl BufferSnapshot {
             .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
     }

-    pub fn indent_column_for_line(&self, row: u32) -> u32 {
-        let mut result = 0;
-        for c in self.chars_at(Point::new(row, 0)) {
-            if c == ' ' {
-                result += 1;
-            } else {
-                break;
-            }
-        }
-        result
-    }
-
     pub fn text_summary_for_range<'a, D, O: ToOffset>(&'a self, range: Range<O>) -> D
     where
         D: TextDimension,
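Aside (not part of the diff): the removed helper only counted leading spaces. Elsewhere in this commit it is replaced by an indent size that also records whether the indentation is spaces or tabs; a rough sketch of that shape, with names and details illustrative:

    struct IndentSize {
        len: u32,
        kind: IndentKind,
    }

    enum IndentKind {
        Space,
        Tab,
    }

    impl IndentSize {
        // Characters needed to reproduce the indent, e.g. when inserting a
        // newline that should match the current line's indentation.
        fn chars(&self) -> impl Iterator<Item = char> {
            let ch = match self.kind {
                IndentKind::Space => ' ',
                IndentKind::Tab => '\t',
            };
            std::iter::repeat(ch).take(self.len as usize)
        }
    }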
@@ -202,7 +202,7 @@ mod test {
         cx.enable_vim();
         assert_eq!(cx.mode(), Mode::Normal);
         cx.simulate_keystrokes(["h", "h", "h", "l"]);
-        assert_eq!(cx.editor_text(), "hjkl".to_owned());
+        assert_eq!(cx.buffer_text(), "hjkl".to_owned());
         cx.assert_editor_state("h|jkl");
         cx.simulate_keystrokes(["i", "T", "e", "s", "t"]);
         cx.assert_editor_state("hTest|jkl");
@@ -89,6 +89,7 @@ toml = "0.5"
 tree-sitter = "0.20.6"
 tree-sitter-c = "0.20.1"
 tree-sitter-cpp = "0.20.0"
+tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
 tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "137e1ce6a02698fc246cdb9c6b886ed1de9a1ed8" }
 tree-sitter-rust = "0.20.1"
 tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
@@ -4,6 +4,7 @@ use rust_embed::RustEmbed;
 use std::{borrow::Cow, str, sync::Arc};

 mod c;
+mod go;
 mod installation;
 mod json;
 mod rust;
@@ -27,6 +28,11 @@ pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegi
             tree_sitter_cpp::language(),
             Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
         ),
+        (
+            "go",
+            tree_sitter_go::language(),
+            Some(Arc::new(go::GoLspAdapter) as Arc<dyn LspAdapter>),
+        ),
         (
             "json",
             tree_sitter_json::language(),
@@ -4,7 +4,11 @@ use client::http::HttpClient;
 use futures::{future::BoxFuture, FutureExt, StreamExt};
 pub use language::*;
 use smol::fs::{self, File};
-use std::{any::Any, path::PathBuf, sync::Arc};
+use std::{
+    any::Any,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 use util::{ResultExt, TryFutureExt};

 pub struct CLspAdapter;
@@ -19,10 +23,17 @@ impl super::LspAdapter for CLspAdapter {
         http: Arc<dyn HttpClient>,
     ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
         async move {
-            let version = latest_github_release("clangd/clangd", http, |release_name| {
-                format!("clangd-mac-{release_name}.zip")
-            })
-            .await?;
+            let release = latest_github_release("clangd/clangd", http).await?;
+            let asset_name = format!("clangd-mac-{}.zip", release.name);
+            let asset = release
+                .assets
+                .iter()
+                .find(|asset| asset.name == asset_name)
+                .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
+            let version = GitHubLspBinaryVersion {
+                name: release.name,
+                url: asset.browser_download_url.clone(),
+            };
             Ok(Box::new(version) as Box<_>)
         }
         .boxed()
@@ -32,7 +43,7 @@ impl super::LspAdapter for CLspAdapter {
         &self,
         version: Box<dyn 'static + Send + Any>,
         http: Arc<dyn HttpClient>,
-        container_dir: PathBuf,
+        container_dir: Arc<Path>,
     ) -> BoxFuture<'static, Result<PathBuf>> {
         let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
         async move {
@@ -81,7 +92,10 @@ impl super::LspAdapter for CLspAdapter {
         .boxed()
     }

-    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
+    fn cached_server_binary(
+        &self,
+        container_dir: Arc<Path>,
+    ) -> BoxFuture<'static, Option<PathBuf>> {
         async move {
             let mut last_clangd_dir = None;
             let mut entries = fs::read_dir(&container_dir).await?;
crates/zed/src/languages/go.rs (new file, 401 additions)
@@ -0,0 +1,401 @@
+use super::installation::latest_github_release;
+use anyhow::{anyhow, Result};
+use client::http::HttpClient;
+use futures::{future::BoxFuture, FutureExt, StreamExt};
+pub use language::*;
+use lazy_static::lazy_static;
+use regex::Regex;
+use smol::{fs, process};
+use std::{
+    any::Any,
+    ops::Range,
+    path::{Path, PathBuf},
+    str,
+    sync::Arc,
+};
+use util::{ResultExt, TryFutureExt};
+
+#[derive(Copy, Clone)]
+pub struct GoLspAdapter;
+
+lazy_static! {
+    static ref GOPLS_VERSION_REGEX: Regex = Regex::new(r"\d+\.\d+\.\d+").unwrap();
+}
+
+impl super::LspAdapter for GoLspAdapter {
+    fn name(&self) -> LanguageServerName {
+        LanguageServerName("gopls".into())
+    }
+
+    fn server_args(&self) -> &[&str] {
+        &["-mode=stdio"]
+    }
+
+    fn fetch_latest_server_version(
+        &self,
+        http: Arc<dyn HttpClient>,
+    ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
+        async move {
+            let release = latest_github_release("golang/tools", http).await?;
+            let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
+            if version.is_none() {
+                log::warn!(
+                    "couldn't infer gopls version from github release name '{}'",
+                    release.name
+                );
+            }
+            Ok(Box::new(version) as Box<_>)
+        }
+        .boxed()
+    }
+
+    fn fetch_server_binary(
+        &self,
+        version: Box<dyn 'static + Send + Any>,
+        _: Arc<dyn HttpClient>,
+        container_dir: Arc<Path>,
+    ) -> BoxFuture<'static, Result<PathBuf>> {
+        let version = version.downcast::<Option<String>>().unwrap();
+        let this = *self;
+
+        async move {
+            if let Some(version) = *version {
+                let binary_path = container_dir.join(&format!("gopls_{version}"));
+                if let Ok(metadata) = fs::metadata(&binary_path).await {
+                    if metadata.is_file() {
+                        if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+                            while let Some(entry) = entries.next().await {
+                                if let Some(entry) = entry.log_err() {
+                                    let entry_path = entry.path();
+                                    if entry_path.as_path() != binary_path
+                                        && entry.file_name() != "gobin"
+                                    {
+                                        fs::remove_file(&entry_path).await.log_err();
+                                    }
+                                }
+                            }
+                        }
+
+                        return Ok(binary_path.to_path_buf());
+                    }
+                }
+            } else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
+                return Ok(path.to_path_buf());
+            }
+
+            let gobin_dir = container_dir.join("gobin");
+            fs::create_dir_all(&gobin_dir).await?;
+            let install_output = process::Command::new("go")
+                .env("GO111MODULE", "on")
+                .env("GOBIN", &gobin_dir)
+                .args(["install", "golang.org/x/tools/gopls@latest"])
+                .output()
+                .await?;
+            if !install_output.status.success() {
+                Err(anyhow!("failed to install gopls"))?;
+            }
+
+            let installed_binary_path = gobin_dir.join("gopls");
+            let version_output = process::Command::new(&installed_binary_path)
+                .arg("version")
+                .output()
+                .await
+                .map_err(|e| anyhow!("failed to run installed gopls binary {:?}", e))?;
+            let version_stdout = str::from_utf8(&version_output.stdout)
+                .map_err(|_| anyhow!("gopls version produced invalid utf8"))?;
+            let version = GOPLS_VERSION_REGEX
+                .find(version_stdout)
+                .ok_or_else(|| anyhow!("failed to parse gopls version output"))?
+                .as_str();
+            let binary_path = container_dir.join(&format!("gopls_{version}"));
+            fs::rename(&installed_binary_path, &binary_path).await?;
+
+            Ok(binary_path.to_path_buf())
+        }
+        .boxed()
+    }
+
+    fn cached_server_binary(
+        &self,
+        container_dir: Arc<Path>,
+    ) -> BoxFuture<'static, Option<PathBuf>> {
+        async move {
+            let mut last_binary_path = None;
+            let mut entries = fs::read_dir(&container_dir).await?;
+            while let Some(entry) = entries.next().await {
+                let entry = entry?;
+                if entry.file_type().await?.is_file()
+                    && entry
+                        .file_name()
+                        .to_str()
+                        .map_or(false, |name| name.starts_with("gopls_"))
+                {
+                    last_binary_path = Some(entry.path());
+                }
+            }
+
+            if let Some(path) = last_binary_path {
+                Ok(path.to_path_buf())
+            } else {
+                Err(anyhow!("no cached binary"))
+            }
+        }
+        .log_err()
+        .boxed()
+    }
+
+    fn label_for_completion(
+        &self,
+        completion: &lsp::CompletionItem,
+        language: &Language,
+    ) -> Option<CodeLabel> {
+        let label = &completion.label;
+
+        // Gopls returns nested fields and methods as completions.
+        // To syntax highlight these, combine their final component
+        // with their detail.
+        let name_offset = label.rfind(".").unwrap_or(0);
+
+        match completion.kind.zip(completion.detail.as_ref()) {
+            Some((lsp::CompletionItemKind::MODULE, detail)) => {
+                let text = format!("{label} {detail}");
+                let source = Rope::from(format!("import {text}").as_str());
+                let runs = language.highlight_text(&source, 7..7 + text.len());
+                return Some(CodeLabel {
+                    text,
+                    runs,
+                    filter_range: 0..label.len(),
+                });
+            }
+            Some((
+                lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE,
+                detail,
+            )) => {
+                let text = format!("{label} {detail}");
+                let source =
+                    Rope::from(format!("var {} {}", &text[name_offset..], detail).as_str());
+                let runs = adjust_runs(
+                    name_offset,
+                    language.highlight_text(&source, 4..4 + text.len()),
+                );
+                return Some(CodeLabel {
+                    text,
+                    runs,
+                    filter_range: 0..label.len(),
+                });
+            }
+            Some((lsp::CompletionItemKind::STRUCT, _)) => {
+                let text = format!("{label} struct {{}}");
+                let source = Rope::from(format!("type {}", &text[name_offset..]).as_str());
+                let runs = adjust_runs(
+                    name_offset,
+                    language.highlight_text(&source, 5..5 + text.len()),
+                );
+                return Some(CodeLabel {
+                    text,
+                    runs,
+                    filter_range: 0..label.len(),
+                });
+            }
+            Some((lsp::CompletionItemKind::INTERFACE, _)) => {
+                let text = format!("{label} interface {{}}");
+                let source = Rope::from(format!("type {}", &text[name_offset..]).as_str());
+                let runs = adjust_runs(
+                    name_offset,
+                    language.highlight_text(&source, 5..5 + text.len()),
+                );
+                return Some(CodeLabel {
+                    text,
+                    runs,
+                    filter_range: 0..label.len(),
+                });
+            }
+            Some((lsp::CompletionItemKind::FIELD, detail)) => {
+                let text = format!("{label} {detail}");
+                let source =
+                    Rope::from(format!("type T struct {{ {} }}", &text[name_offset..]).as_str());
+                let runs = adjust_runs(
+                    name_offset,
+                    language.highlight_text(&source, 16..16 + text.len()),
+                );
+                return Some(CodeLabel {
+                    text,
+                    runs,
+                    filter_range: 0..label.len(),
+                });
+            }
+            Some((lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD, detail)) => {
+                if let Some(signature) = detail.strip_prefix("func") {
+                    let text = format!("{label}{signature}");
+                    let source = Rope::from(format!("func {} {{}}", &text[name_offset..]).as_str());
+                    let runs = adjust_runs(
+                        name_offset,
+                        language.highlight_text(&source, 5..5 + text.len()),
+                    );
+                    return Some(CodeLabel {
+                        filter_range: 0..label.len(),
+                        text,
+                        runs,
+                    });
+                }
+            }
+            _ => {}
+        }
+        None
+    }
+
+    fn label_for_symbol(
+        &self,
+        name: &str,
+        kind: lsp::SymbolKind,
+        language: &Language,
+    ) -> Option<CodeLabel> {
+        let (text, filter_range, display_range) = match kind {
+            lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
+                let text = format!("func {} () {{}}", name);
+                let filter_range = 5..5 + name.len();
+                let display_range = 0..filter_range.end;
+                (text, filter_range, display_range)
+            }
+            lsp::SymbolKind::STRUCT => {
+                let text = format!("type {} struct {{}}", name);
+                let filter_range = 5..5 + name.len();
+                let display_range = 0..text.len();
+                (text, filter_range, display_range)
+            }
+            lsp::SymbolKind::INTERFACE => {
+                let text = format!("type {} interface {{}}", name);
+                let filter_range = 5..5 + name.len();
+                let display_range = 0..text.len();
+                (text, filter_range, display_range)
+            }
+            lsp::SymbolKind::CLASS => {
+                let text = format!("type {} T", name);
+                let filter_range = 5..5 + name.len();
+                let display_range = 0..filter_range.end;
+                (text, filter_range, display_range)
+            }
+            lsp::SymbolKind::CONSTANT => {
+                let text = format!("const {} = nil", name);
+                let filter_range = 6..6 + name.len();
+                let display_range = 0..filter_range.end;
+                (text, filter_range, display_range)
+            }
+            lsp::SymbolKind::VARIABLE => {
+                let text = format!("var {} = nil", name);
+                let filter_range = 4..4 + name.len();
+                let display_range = 0..filter_range.end;
+                (text, filter_range, display_range)
+            }
+            lsp::SymbolKind::MODULE => {
+                let text = format!("package {}", name);
+                let filter_range = 8..8 + name.len();
+                let display_range = 0..filter_range.end;
+                (text, filter_range, display_range)
+            }
+            _ => return None,
+        };
+
+        Some(CodeLabel {
+            runs: language.highlight_text(&text.as_str().into(), display_range.clone()),
+            text: text[display_range].to_string(),
+            filter_range,
+        })
+    }
+}
+
+fn adjust_runs(
+    delta: usize,
+    mut runs: Vec<(Range<usize>, HighlightId)>,
+) -> Vec<(Range<usize>, HighlightId)> {
+    for (range, _) in &mut runs {
+        range.start += delta;
+        range.end += delta;
+    }
+    runs
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::languages::language;
+    use gpui::color::Color;
+    use theme::SyntaxTheme;
+
+    #[test]
+    fn test_go_label_for_completion() {
+        let language = language(
+            "go",
+            tree_sitter_go::language(),
+            Some(Arc::new(GoLspAdapter)),
+        );
+
+        let theme = SyntaxTheme::new(vec![
+            ("type".into(), Color::green().into()),
+            ("keyword".into(), Color::blue().into()),
+            ("function".into(), Color::red().into()),
+            ("number".into(), Color::yellow().into()),
+            ("property".into(), Color::white().into()),
+        ]);
+        language.set_theme(&theme);
+
+        let grammar = language.grammar().unwrap();
+        let highlight_function = grammar.highlight_id_for_name("function").unwrap();
+        let highlight_type = grammar.highlight_id_for_name("type").unwrap();
+        let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap();
+        let highlight_number = grammar.highlight_id_for_name("number").unwrap();
+        let highlight_field = grammar.highlight_id_for_name("property").unwrap();
+
+        assert_eq!(
+            language.label_for_completion(&lsp::CompletionItem {
+                kind: Some(lsp::CompletionItemKind::FUNCTION),
+                label: "Hello".to_string(),
+                detail: Some("func(a B) c.D".to_string()),
+                ..Default::default()
+            }),
+            Some(CodeLabel {
+                text: "Hello(a B) c.D".to_string(),
+                filter_range: 0..5,
+                runs: vec![
+                    (0..5, highlight_function),
+                    (8..9, highlight_type),
+                    (13..14, highlight_type),
+                ],
+            })
+        );
+
+        // Nested methods
+        assert_eq!(
+            language.label_for_completion(&lsp::CompletionItem {
+                kind: Some(lsp::CompletionItemKind::METHOD),
+                label: "one.two.Three".to_string(),
+                detail: Some("func() [3]interface{}".to_string()),
+                ..Default::default()
+            }),
+            Some(CodeLabel {
+                text: "one.two.Three() [3]interface{}".to_string(),
+                filter_range: 0..13,
+                runs: vec![
+                    (8..13, highlight_function),
+                    (17..18, highlight_number),
+                    (19..28, highlight_keyword),
+                ],
+            })
+        );
+
+        // Nested fields
+        assert_eq!(
+            language.label_for_completion(&lsp::CompletionItem {
+                kind: Some(lsp::CompletionItemKind::FIELD),
+                label: "two.Three".to_string(),
+                detail: Some("a.Bcd".to_string()),
+                ..Default::default()
+            }),
+            Some(CodeLabel {
+                text: "two.Three a.Bcd".to_string(),
+                filter_range: 0..9,
+                runs: vec![(4..9, highlight_field), (12..15, highlight_type)],
+            })
+        );
+    }
+}
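Aside (not part of the diff): a worked example of the nested-completion labelling above. For a gopls completion labelled "one.two.Three" with detail "func() [3]interface{}", only the final path component is re-highlighted, and the resulting runs are shifted back onto the full label (see `adjust_runs` and the test in this file).

    let label = "one.two.Three";
    let name_offset = label.rfind('.').unwrap_or(0); // 7, the byte offset of the last '.'
    assert_eq!(&label[name_offset..], ".Three");

    // Highlight runs are computed against a small synthetic source such as
    // "func .Three() [3]interface{} {}" and then shifted right by `name_offset`,
    // so "Three" ends up highlighted at 8..13 of "one.two.Three() [3]interface{}".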
crates/zed/src/languages/go/brackets.scm (new file, 3 additions)
@@ -0,0 +1,3 @@
+("[" @open "]" @close)
+("{" @open "}" @close)
+("\"" @open "\"" @close)
crates/zed/src/languages/go/config.toml (new file, 11 additions)
@@ -0,0 +1,11 @@
+name = "Go"
+path_suffixes = ["go"]
+line_comment = "// "
+autoclose_before = ";:.,=}])>"
+brackets = [
+    { start = "{", end = "}", close = true, newline = true },
+    { start = "[", end = "]", close = true, newline = true },
+    { start = "(", end = ")", close = true, newline = true },
+    { start = "\"", end = "\"", close = true, newline = false },
+    { start = "/*", end = " */", close = true, newline = false },
+]
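Aside (not part of the diff): a sketch of the shape each bracket entry above deserializes into, with field names taken directly from the TOML keys; treat the exact type as an assumption.

    let curly = BracketPair {
        start: "{".to_string(),
        end: "}".to_string(),
        close: true,   // typing "{" auto-inserts the matching "}"
        newline: true, // pressing enter between the pair adds an extra indented line
    };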
crates/zed/src/languages/go/highlights.scm (new file, 107 additions)
@@ -0,0 +1,107 @@
+(identifier) @variable
+(type_identifier) @type
+(field_identifier) @property
+
+(call_expression
+  function: (identifier) @function)
+
+(call_expression
+  function: (selector_expression
+    field: (field_identifier) @function.method))
+
+(function_declaration
+  name: (identifier) @function)
+
+(method_declaration
+  name: (field_identifier) @function.method)
+
+[
+  "--"
+  "-"
+  "-="
+  ":="
+  "!"
+  "!="
+  "..."
+  "*"
+  "*"
+  "*="
+  "/"
+  "/="
+  "&"
+  "&&"
+  "&="
+  "%"
+  "%="
+  "^"
+  "^="
+  "+"
+  "++"
+  "+="
+  "<-"
+  "<"
+  "<<"
+  "<<="
+  "<="
+  "="
+  "=="
+  ">"
+  ">="
+  ">>"
+  ">>="
+  "|"
+  "|="
+  "||"
+  "~"
+] @operator
+
+[
+  "break"
+  "case"
+  "chan"
+  "const"
+  "continue"
+  "default"
+  "defer"
+  "else"
+  "fallthrough"
+  "for"
+  "func"
+  "go"
+  "goto"
+  "if"
+  "import"
+  "interface"
+  "map"
+  "package"
+  "range"
+  "return"
+  "select"
+  "struct"
+  "switch"
+  "type"
+  "var"
+] @keyword
+
+[
+  (interpreted_string_literal)
+  (raw_string_literal)
+  (rune_literal)
+] @string
+
+(escape_sequence) @escape
+
+[
+  (int_literal)
+  (float_literal)
+  (imaginary_literal)
+] @number
+
+[
+  (true)
+  (false)
+  (nil)
+  (iota)
+] @constant.builtin
+
+(comment) @comment
crates/zed/src/languages/go/indents.scm (new file, 9 additions)
@@ -0,0 +1,9 @@
+[
+  (assignment_statement)
+  (call_expression)
+  (selector_expression)
+] @indent
+
+(_ "[" "]" @end) @indent
+(_ "{" "}" @end) @indent
+(_ "(" ")" @end) @indent
crates/zed/src/languages/go/outline.scm (new file, 44 additions)
@@ -0,0 +1,44 @@
+(type_declaration
+    "type" @context
+    (type_spec
+        name: (_) @name)) @item
+
+(function_declaration
+    "func" @context
+    name: (identifier) @name
+    parameters: (parameter_list
+        "(" @context
+        ")" @context)) @item
+
+(method_declaration
+    "func" @context
+    receiver: (parameter_list
+        "(" @context
+        (parameter_declaration
+            type: (_) @context)
+        ")" @context)
+    name: (field_identifier) @name
+    parameters: (parameter_list
+        "(" @context
+        ")" @context)) @item
+
+(const_declaration
+    "const" @context
+    (const_spec
+        name: (identifier) @name) @item)
+
+(source_file
+    (var_declaration
+        "var" @context
+        (var_spec
+            name: (identifier) @name) @item))
+
+(method_spec
+    name: (_) @name
+    parameters: (parameter_list
+        "(" @context
+        ")" @context)) @item
+
+(field_declaration
+    name: (_) @name
+    type: (_) @context) @item
@@ -25,14 +25,14 @@ struct NpmInfoDistTags {

 #[derive(Deserialize)]
 pub(crate) struct GithubRelease {
-    name: String,
-    assets: Vec<GithubReleaseAsset>,
+    pub name: String,
+    pub assets: Vec<GithubReleaseAsset>,
 }

 #[derive(Deserialize)]
 pub(crate) struct GithubReleaseAsset {
-    name: String,
-    browser_download_url: String,
+    pub name: String,
+    pub browser_download_url: String,
 }

 pub async fn npm_package_latest_version(name: &str) -> Result<String> {
@@ -78,11 +78,10 @@ pub async fn npm_install_packages(
     Ok(())
 }

-pub async fn latest_github_release(
+pub(crate) async fn latest_github_release(
     repo_name_with_owner: &str,
     http: Arc<dyn HttpClient>,
-    asset_name: impl Fn(&str) -> String,
-) -> Result<GitHubLspBinaryVersion> {
+) -> Result<GithubRelease, anyhow::Error> {
     let mut response = http
         .get(
             &format!("https://api.github.com/repos/{repo_name_with_owner}/releases/latest"),
@@ -91,24 +90,13 @@ pub async fn latest_github_release(
         )
         .await
         .context("error fetching latest release")?;

     let mut body = Vec::new();
     response
         .body_mut()
         .read_to_end(&mut body)
         .await
         .context("error reading latest release")?;

     let release: GithubRelease =
         serde_json::from_slice(body.as_slice()).context("error deserializing latest release")?;
-    let asset_name = asset_name(&release.name);
-    let asset = release
-        .assets
-        .iter()
-        .find(|asset| asset.name == asset_name)
-        .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
-    Ok(GitHubLspBinaryVersion {
-        name: release.name,
-        url: asset.browser_download_url.clone(),
-    })
+    Ok(release)
 }
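Aside (not part of the diff): the call pattern each adapter now follows against the slimmed-down helper, as seen in the clangd and rust-analyzer hunks above; "owner/repo" and the asset name are placeholders.

    let release = latest_github_release("owner/repo", http).await?;
    let asset_name = format!("some-binary-{}.zip", release.name); // adapter-specific
    let asset = release
        .assets
        .iter()
        .find(|asset| asset.name == asset_name)
        .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
    let version = GitHubLspBinaryVersion {
        name: release.name,
        url: asset.browser_download_url.clone(),
    };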
@@ -5,7 +5,11 @@ use language::{LanguageServerName, LspAdapter};
 use serde::Deserialize;
 use serde_json::json;
 use smol::fs;
-use std::{any::Any, path::PathBuf, sync::Arc};
+use std::{
+    any::Any,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 use util::{ResultExt, TryFutureExt};

 pub struct JsonLspAdapter;
@@ -56,7 +60,7 @@ impl LspAdapter for JsonLspAdapter {
         &self,
         version: Box<dyn 'static + Send + Any>,
         _: Arc<dyn HttpClient>,
-        container_dir: PathBuf,
+        container_dir: Arc<Path>,
     ) -> BoxFuture<'static, Result<PathBuf>> {
         let version = version.downcast::<String>().unwrap();
         async move {
@@ -95,7 +99,10 @@ impl LspAdapter for JsonLspAdapter {
         .boxed()
     }

-    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
+    fn cached_server_binary(
+        &self,
+        container_dir: Arc<Path>,
+    ) -> BoxFuture<'static, Option<PathBuf>> {
         async move {
             let mut last_version_dir = None;
             let mut entries = fs::read_dir(&container_dir).await?;
@@ -7,7 +7,14 @@ pub use language::*;
 use lazy_static::lazy_static;
 use regex::Regex;
 use smol::fs::{self, File};
-use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
+use std::{
+    any::Any,
+    borrow::Cow,
+    env::consts,
+    path::{Path, PathBuf},
+    str,
+    sync::Arc,
+};
 use util::{ResultExt, TryFutureExt};

 pub struct RustLspAdapter;
@@ -22,10 +29,17 @@ impl LspAdapter for RustLspAdapter {
         http: Arc<dyn HttpClient>,
     ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
         async move {
-            let version = latest_github_release("rust-analyzer/rust-analyzer", http, |_| {
-                format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH)
-            })
-            .await?;
+            let release = latest_github_release("rust-analyzer/rust-analyzer", http).await?;
+            let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
+            let asset = release
+                .assets
+                .iter()
+                .find(|asset| asset.name == asset_name)
+                .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
+            let version = GitHubLspBinaryVersion {
+                name: release.name,
+                url: asset.browser_download_url.clone(),
+            };
             Ok(Box::new(version) as Box<_>)
         }
         .boxed()
@@ -35,7 +49,7 @@ impl LspAdapter for RustLspAdapter {
         &self,
         version: Box<dyn 'static + Send + Any>,
         http: Arc<dyn HttpClient>,
-        container_dir: PathBuf,
+        container_dir: Arc<Path>,
     ) -> BoxFuture<'static, Result<PathBuf>> {
         async move {
             let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
@@ -72,7 +86,10 @@ impl LspAdapter for RustLspAdapter {
         .boxed()
     }

-    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
+    fn cached_server_binary(
+        &self,
+        container_dir: Arc<Path>,
+    ) -> BoxFuture<'static, Option<PathBuf>> {
         async move {
             let mut last = None;
             let mut entries = fs::read_dir(&container_dir).await?;
@@ -5,7 +5,11 @@ use futures::{future::BoxFuture, FutureExt, StreamExt};
 use language::{LanguageServerName, LspAdapter};
 use serde_json::json;
 use smol::fs;
-use std::{any::Any, path::PathBuf, sync::Arc};
+use std::{
+    any::Any,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 use util::{ResultExt, TryFutureExt};

 pub struct TypeScriptLspAdapter;
@@ -45,7 +49,7 @@ impl LspAdapter for TypeScriptLspAdapter {
         &self,
         versions: Box<dyn 'static + Send + Any>,
         _: Arc<dyn HttpClient>,
-        container_dir: PathBuf,
+        container_dir: Arc<Path>,
     ) -> BoxFuture<'static, Result<PathBuf>> {
         let versions = versions.downcast::<Versions>().unwrap();
         async move {
@@ -88,7 +92,10 @@ impl LspAdapter for TypeScriptLspAdapter {
         .boxed()
     }

-    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
+    fn cached_server_binary(
+        &self,
+        container_dir: Arc<Path>,
+    ) -> BoxFuture<'static, Option<PathBuf>> {
         async move {
             let mut last_version_dir = None;
             let mut entries = fs::read_dir(&container_dir).await?;
@@ -94,6 +94,14 @@ fn main() {
                 ..Default::default()
             },
         )
+        .with_overrides(
+            "Go",
+            settings::LanguageOverride {
+                tab_size: Some(4),
+                hard_tabs: Some(true),
+                ..Default::default()
+            },
+        )
         .with_overrides(
             "Markdown",
             settings::LanguageOverride {