Merge pull request #1404 from zed-industries/html-support

Basic html support
Max Brunsfeld, 2022-10-06 10:32:44 -07:00, committed via GitHub
commit 51fa06cc8d
29 changed files with 1538 additions and 623 deletions

Cargo.lock (generated, 35 changes)
View file

@ -1722,6 +1722,8 @@ dependencies = [
"text", "text",
"theme", "theme",
"tree-sitter", "tree-sitter",
"tree-sitter-html",
"tree-sitter-javascript",
"tree-sitter-rust", "tree-sitter-rust",
"unindent", "unindent",
"util", "util",
@ -2896,6 +2898,8 @@ dependencies = [
"text", "text",
"theme", "theme",
"tree-sitter", "tree-sitter",
"tree-sitter-html",
"tree-sitter-javascript",
"tree-sitter-json 0.19.0", "tree-sitter-json 0.19.0",
"tree-sitter-python", "tree-sitter-python",
"tree-sitter-rust", "tree-sitter-rust",
@ -6075,6 +6079,15 @@ dependencies = [
"tree-sitter", "tree-sitter",
] ]
[[package]]
name = "tree-sitter-css"
version = "0.19.0"
source = "git+https://github.com/tree-sitter/tree-sitter-css?rev=769203d0f9abe1a9a691ac2b9fe4bb4397a73c51#769203d0f9abe1a9a691ac2b9fe4bb4397a73c51"
dependencies = [
"cc",
"tree-sitter",
]
[[package]] [[package]]
name = "tree-sitter-elixir" name = "tree-sitter-elixir"
version = "0.19.0" version = "0.19.0"
@ -6093,6 +6106,26 @@ dependencies = [
"tree-sitter", "tree-sitter",
] ]
[[package]]
name = "tree-sitter-html"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "184e6b77953a354303dc87bf5fe36558c83569ce92606e7b382a0dc1b7443443"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-javascript"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2490fab08630b2c8943c320f7b63473cbf65511c8d83aec551beb9b4375906ed"
dependencies = [
"cc",
"tree-sitter",
]
[[package]] [[package]]
name = "tree-sitter-json" name = "tree-sitter-json"
version = "0.19.0" version = "0.19.0"
@ -7279,8 +7312,10 @@ dependencies = [
"tree-sitter", "tree-sitter",
"tree-sitter-c", "tree-sitter-c",
"tree-sitter-cpp", "tree-sitter-cpp",
"tree-sitter-css",
"tree-sitter-elixir", "tree-sitter-elixir",
"tree-sitter-go", "tree-sitter-go",
"tree-sitter-html",
"tree-sitter-json 0.20.0", "tree-sitter-json 0.20.0",
"tree-sitter-markdown", "tree-sitter-markdown",
"tree-sitter-python", "tree-sitter-python",

View file

@ -52,6 +52,8 @@ serde = { version = "1.0", features = ["derive", "rc"] }
smallvec = { version = "1.6", features = ["union"] } smallvec = { version = "1.6", features = ["union"] }
smol = "1.2" smol = "1.2"
tree-sitter-rust = { version = "*", optional = true } tree-sitter-rust = { version = "*", optional = true }
tree-sitter-html = { version = "*", optional = true }
tree-sitter-javascript = { version = "*", optional = true }
[dev-dependencies] [dev-dependencies]
text = { path = "../text", features = ["test-support"] } text = { path = "../text", features = ["test-support"] }
@ -68,3 +70,5 @@ rand = "0.8"
unindent = "0.1.7" unindent = "0.1.7"
tree-sitter = "0.20" tree-sitter = "0.20"
tree-sitter-rust = "0.20" tree-sitter-rust = "0.20"
tree-sitter-html = "0.19"
tree-sitter-javascript = "0.20"

File diff suppressed because it is too large.

View file

@ -1228,9 +1228,9 @@ impl MultiBuffer {
&self, &self,
point: T, point: T,
cx: &'a AppContext, cx: &'a AppContext,
) -> Option<&'a Arc<Language>> { ) -> Option<Arc<Language>> {
self.point_to_buffer_offset(point, cx) self.point_to_buffer_offset(point, cx)
.and_then(|(buffer, _)| buffer.read(cx).language()) .and_then(|(buffer, offset)| buffer.read(cx).language_at(offset))
} }
pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a dyn File; 2]> { pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a dyn File; 2]> {
@ -1966,6 +1966,24 @@ impl MultiBufferSnapshot {
} }
} }
pub fn point_to_buffer_offset<T: ToOffset>(
&self,
point: T,
) -> Option<(&BufferSnapshot, usize)> {
let offset = point.to_offset(&self);
let mut cursor = self.excerpts.cursor::<usize>();
cursor.seek(&offset, Bias::Right, &());
if cursor.item().is_none() {
cursor.prev(&());
}
cursor.item().map(|excerpt| {
let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
let buffer_point = excerpt_start + offset - *cursor.start();
(&excerpt.buffer, buffer_point)
})
}
pub fn suggested_indents( pub fn suggested_indents(
&self, &self,
rows: impl IntoIterator<Item = u32>, rows: impl IntoIterator<Item = u32>,
@ -1975,8 +1993,10 @@ impl MultiBufferSnapshot {
let mut rows_for_excerpt = Vec::new(); let mut rows_for_excerpt = Vec::new();
let mut cursor = self.excerpts.cursor::<Point>(); let mut cursor = self.excerpts.cursor::<Point>();
let mut rows = rows.into_iter().peekable(); let mut rows = rows.into_iter().peekable();
let mut prev_row = u32::MAX;
let mut prev_language_indent_size = IndentSize::default();
while let Some(row) = rows.next() { while let Some(row) = rows.next() {
cursor.seek(&Point::new(row, 0), Bias::Right, &()); cursor.seek(&Point::new(row, 0), Bias::Right, &());
let excerpt = match cursor.item() { let excerpt = match cursor.item() {
@ -1984,7 +2004,17 @@ impl MultiBufferSnapshot {
_ => continue, _ => continue,
}; };
let single_indent_size = excerpt.buffer.single_indent_size(cx); // Retrieve the language and indent size once for each disjoint region being indented.
let single_indent_size = if row.saturating_sub(1) == prev_row {
prev_language_indent_size
} else {
excerpt
.buffer
.language_indent_size_at(Point::new(row, 0), cx)
};
prev_language_indent_size = single_indent_size;
prev_row = row;
let start_buffer_row = excerpt.range.context.start.to_point(&excerpt.buffer).row; let start_buffer_row = excerpt.range.context.start.to_point(&excerpt.buffer).row;
let start_multibuffer_row = cursor.start().row; let start_multibuffer_row = cursor.start().row;
@ -2513,11 +2543,9 @@ impl MultiBufferSnapshot {
self.trailing_excerpt_update_count self.trailing_excerpt_update_count
} }
pub fn language(&self) -> Option<&Arc<Language>> { pub fn language_at<'a, T: ToOffset>(&'a self, point: T) -> Option<&'a Arc<Language>> {
self.excerpts self.point_to_buffer_offset(point)
.iter() .and_then(|(buffer, offset)| buffer.language_at(offset))
.next()
.and_then(|excerpt| excerpt.buffer.language())
} }
pub fn is_dirty(&self) -> bool { pub fn is_dirty(&self) -> bool {

View file

@ -64,6 +64,8 @@ util = { path = "../util", features = ["test-support"] }
ctor = "0.1" ctor = "0.1"
env_logger = "0.9" env_logger = "0.9"
rand = "0.8.3" rand = "0.8.3"
tree-sitter-html = "*"
tree-sitter-javascript = "*"
tree-sitter-json = "*" tree-sitter-json = "*"
tree-sitter-rust = "*" tree-sitter-rust = "*"
tree-sitter-python = "*" tree-sitter-python = "*"

View file

@ -95,14 +95,15 @@ pub struct BufferSnapshot {
parse_count: usize, parse_count: usize,
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub struct IndentSize { pub struct IndentSize {
pub len: u32, pub len: u32,
pub kind: IndentKind, pub kind: IndentKind,
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum IndentKind { pub enum IndentKind {
#[default]
Space, Space,
Tab, Tab,
} }
@ -247,7 +248,6 @@ pub enum AutoindentMode {
struct AutoindentRequest { struct AutoindentRequest {
before_edit: BufferSnapshot, before_edit: BufferSnapshot,
entries: Vec<AutoindentRequestEntry>, entries: Vec<AutoindentRequestEntry>,
indent_size: IndentSize,
is_block_mode: bool, is_block_mode: bool,
} }
@ -260,6 +260,7 @@ struct AutoindentRequestEntry {
/// only be adjusted if the suggested indentation level has *changed* /// only be adjusted if the suggested indentation level has *changed*
/// since the edit was made. /// since the edit was made.
first_line_is_new: bool, first_line_is_new: bool,
indent_size: IndentSize,
original_indent_column: Option<u32>, original_indent_column: Option<u32>,
} }
@ -719,6 +720,16 @@ impl Buffer {
self.language.as_ref() self.language.as_ref()
} }
pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
let offset = position.to_offset(self);
self.syntax_map
.lock()
.layers_for_range(offset..offset, &self.text)
.last()
.map(|info| info.language.clone())
.or_else(|| self.language.clone())
}
pub fn parse_count(&self) -> usize { pub fn parse_count(&self) -> usize {
self.parse_count self.parse_count
} }
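(Annotation, not part of the diff: the new `Buffer::language_at` prefers the innermost syntax-map layer at the given position and only falls back to the buffer's base language. A minimal sketch of a caller inside the crate, assuming `Buffer` is in scope; `log_language_at` is a hypothetical helper, not an API added by this change.)
// Hypothetical sketch: resolve the innermost language at `offset`, e.g. the
// injected JavaScript inside an HTML <script> element rather than the base
// HTML language, and log it via the new name-only Debug impl for Language.
fn log_language_at(buffer: &Buffer, offset: usize) {
    match buffer.language_at(offset) {
        Some(language) => println!("language at {offset}: {language:?}"),
        None => println!("no language at {offset}"),
    }
}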
@ -866,10 +877,13 @@ impl Buffer {
// buffer before this batch of edits. // buffer before this batch of edits.
let mut row_ranges = Vec::new(); let mut row_ranges = Vec::new();
let mut old_to_new_rows = BTreeMap::new(); let mut old_to_new_rows = BTreeMap::new();
let mut language_indent_sizes_by_new_row = Vec::new();
for entry in &request.entries { for entry in &request.entries {
let position = entry.range.start; let position = entry.range.start;
let new_row = position.to_point(&snapshot).row; let new_row = position.to_point(&snapshot).row;
let new_end_row = entry.range.end.to_point(&snapshot).row + 1; let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
if !entry.first_line_is_new { if !entry.first_line_is_new {
let old_row = position.to_point(&request.before_edit).row; let old_row = position.to_point(&request.before_edit).row;
old_to_new_rows.insert(old_row, new_row); old_to_new_rows.insert(old_row, new_row);
@ -883,6 +897,8 @@ impl Buffer {
let mut old_suggestions = BTreeMap::<u32, IndentSize>::default(); let mut old_suggestions = BTreeMap::<u32, IndentSize>::default();
let old_edited_ranges = let old_edited_ranges =
contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
let mut language_indent_size = IndentSize::default();
for old_edited_range in old_edited_ranges { for old_edited_range in old_edited_ranges {
let suggestions = request let suggestions = request
.before_edit .before_edit
@ -891,6 +907,17 @@ impl Buffer {
.flatten(); .flatten();
for (old_row, suggestion) in old_edited_range.zip(suggestions) { for (old_row, suggestion) in old_edited_range.zip(suggestions) {
if let Some(suggestion) = suggestion { if let Some(suggestion) = suggestion {
let new_row = *old_to_new_rows.get(&old_row).unwrap();
// Find the indent size based on the language for this row.
while let Some((row, size)) = language_indent_sizes.peek() {
if *row > new_row {
break;
}
language_indent_size = *size;
language_indent_sizes.next();
}
let suggested_indent = old_to_new_rows let suggested_indent = old_to_new_rows
.get(&suggestion.basis_row) .get(&suggestion.basis_row)
.and_then(|from_row| old_suggestions.get(from_row).copied()) .and_then(|from_row| old_suggestions.get(from_row).copied())
@ -899,9 +926,8 @@ impl Buffer {
.before_edit .before_edit
.indent_size_for_line(suggestion.basis_row) .indent_size_for_line(suggestion.basis_row)
}) })
.with_delta(suggestion.delta, request.indent_size); .with_delta(suggestion.delta, language_indent_size);
old_suggestions old_suggestions.insert(new_row, suggested_indent);
.insert(*old_to_new_rows.get(&old_row).unwrap(), suggested_indent);
} }
} }
yield_now().await; yield_now().await;
@ -922,6 +948,8 @@ impl Buffer {
// Compute new suggestions for each line, but only include them in the result // Compute new suggestions for each line, but only include them in the result
// if they differ from the old suggestion for that line. // if they differ from the old suggestion for that line.
let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
let mut language_indent_size = IndentSize::default();
for new_edited_row_range in new_edited_row_ranges { for new_edited_row_range in new_edited_row_ranges {
let suggestions = snapshot let suggestions = snapshot
.suggest_autoindents(new_edited_row_range.clone()) .suggest_autoindents(new_edited_row_range.clone())
@ -929,13 +957,22 @@ impl Buffer {
.flatten(); .flatten();
for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
if let Some(suggestion) = suggestion { if let Some(suggestion) = suggestion {
// Find the indent size based on the language for this row.
while let Some((row, size)) = language_indent_sizes.peek() {
if *row > new_row {
break;
}
language_indent_size = *size;
language_indent_sizes.next();
}
let suggested_indent = indent_sizes let suggested_indent = indent_sizes
.get(&suggestion.basis_row) .get(&suggestion.basis_row)
.copied() .copied()
.unwrap_or_else(|| { .unwrap_or_else(|| {
snapshot.indent_size_for_line(suggestion.basis_row) snapshot.indent_size_for_line(suggestion.basis_row)
}) })
.with_delta(suggestion.delta, request.indent_size); .with_delta(suggestion.delta, language_indent_size);
if old_suggestions if old_suggestions
.get(&new_row) .get(&new_row)
.map_or(true, |old_indentation| { .map_or(true, |old_indentation| {
@ -1266,7 +1303,6 @@ impl Buffer {
let edit_id = edit_operation.local_timestamp(); let edit_id = edit_operation.local_timestamp();
if let Some((before_edit, mode)) = autoindent_request { if let Some((before_edit, mode)) = autoindent_request {
let indent_size = before_edit.single_indent_size(cx);
let (start_columns, is_block_mode) = match mode { let (start_columns, is_block_mode) = match mode {
AutoindentMode::Block { AutoindentMode::Block {
original_indent_columns: start_columns, original_indent_columns: start_columns,
@ -1315,6 +1351,7 @@ impl Buffer {
AutoindentRequestEntry { AutoindentRequestEntry {
first_line_is_new, first_line_is_new,
original_indent_column: start_column, original_indent_column: start_column,
indent_size: before_edit.language_indent_size_at(range.start, cx),
range: self.anchor_before(new_start + range_of_insertion_to_indent.start) range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
..self.anchor_after(new_start + range_of_insertion_to_indent.end), ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
} }
@ -1324,7 +1361,6 @@ impl Buffer {
self.autoindent_requests.push(Arc::new(AutoindentRequest { self.autoindent_requests.push(Arc::new(AutoindentRequest {
before_edit, before_edit,
entries, entries,
indent_size,
is_block_mode, is_block_mode,
})); }));
} }
@ -1642,8 +1678,8 @@ impl BufferSnapshot {
indent_size_for_line(self, row) indent_size_for_line(self, row)
} }
pub fn single_indent_size(&self, cx: &AppContext) -> IndentSize { pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
let language_name = self.language().map(|language| language.name()); let language_name = self.language_at(position).map(|language| language.name());
let settings = cx.global::<Settings>(); let settings = cx.global::<Settings>();
if settings.hard_tabs(language_name.as_deref()) { if settings.hard_tabs(language_name.as_deref()) {
IndentSize::tab() IndentSize::tab()
@ -1713,6 +1749,8 @@ impl BufferSnapshot {
if capture.index == config.indent_capture_ix { if capture.index == config.indent_capture_ix {
start.get_or_insert(Point::from_ts_point(capture.node.start_position())); start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
end.get_or_insert(Point::from_ts_point(capture.node.end_position())); end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
} else if Some(capture.index) == config.start_capture_ix {
start = Some(Point::from_ts_point(capture.node.end_position()));
} else if Some(capture.index) == config.end_capture_ix { } else if Some(capture.index) == config.end_capture_ix {
end = Some(Point::from_ts_point(capture.node.start_position())); end = Some(Point::from_ts_point(capture.node.start_position()));
} }
@ -1902,8 +1940,14 @@ impl BufferSnapshot {
} }
} }
pub fn language(&self) -> Option<&Arc<Language>> { pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
self.language.as_ref() let offset = position.to_offset(self);
self.syntax
.layers_for_range(offset..offset, &self.text)
.filter(|l| l.node.end_byte() > offset)
.last()
.map(|info| info.language)
.or(self.language.as_ref())
} }
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) { pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
@ -1938,8 +1982,8 @@ impl BufferSnapshot {
pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> { pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
let range = range.start.to_offset(self)..range.end.to_offset(self); let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut result: Option<Range<usize>> = None; let mut result: Option<Range<usize>> = None;
'outer: for (_, _, node) in self.syntax.layers_for_range(range.clone(), &self.text) { 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
let mut cursor = node.walk(); let mut cursor = layer.node.walk();
// Descend to the first leaf that touches the start of the range, // Descend to the first leaf that touches the start of the range,
// and if the range is non-empty, extends beyond the start. // and if the range is non-empty, extends beyond the start.

View file

@ -26,6 +26,7 @@ use serde_json::Value;
use std::{ use std::{
any::Any, any::Any,
cell::RefCell, cell::RefCell,
fmt::Debug,
mem, mem,
ops::Range, ops::Range,
path::{Path, PathBuf}, path::{Path, PathBuf},
@ -135,7 +136,7 @@ impl CachedLspAdapter {
pub async fn label_for_completion( pub async fn label_for_completion(
&self, &self,
completion_item: &lsp::CompletionItem, completion_item: &lsp::CompletionItem,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
self.adapter self.adapter
.label_for_completion(completion_item, language) .label_for_completion(completion_item, language)
@ -146,7 +147,7 @@ impl CachedLspAdapter {
&self, &self,
name: &str, name: &str,
kind: lsp::SymbolKind, kind: lsp::SymbolKind,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
self.adapter.label_for_symbol(name, kind, language).await self.adapter.label_for_symbol(name, kind, language).await
} }
@ -175,7 +176,7 @@ pub trait LspAdapter: 'static + Send + Sync {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
_: &lsp::CompletionItem, _: &lsp::CompletionItem,
_: &Language, _: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
None None
} }
@ -184,7 +185,7 @@ pub trait LspAdapter: 'static + Send + Sync {
&self, &self,
_: &str, _: &str,
_: lsp::SymbolKind, _: lsp::SymbolKind,
_: &Language, _: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
None None
} }
@ -230,7 +231,10 @@ pub struct LanguageConfig {
pub decrease_indent_pattern: Option<Regex>, pub decrease_indent_pattern: Option<Regex>,
#[serde(default)] #[serde(default)]
pub autoclose_before: String, pub autoclose_before: String,
pub line_comment: Option<String>, #[serde(default)]
pub line_comment: Option<Arc<str>>,
#[serde(default)]
pub block_comment: Option<(Arc<str>, Arc<str>)>,
} }
impl Default for LanguageConfig { impl Default for LanguageConfig {
@ -244,6 +248,7 @@ impl Default for LanguageConfig {
decrease_indent_pattern: Default::default(), decrease_indent_pattern: Default::default(),
autoclose_before: Default::default(), autoclose_before: Default::default(),
line_comment: Default::default(), line_comment: Default::default(),
block_comment: Default::default(),
} }
} }
} }
@ -270,7 +275,7 @@ pub struct FakeLspAdapter {
pub disk_based_diagnostics_sources: Vec<String>, pub disk_based_diagnostics_sources: Vec<String>,
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Default, Deserialize)]
pub struct BracketPair { pub struct BracketPair {
pub start: String, pub start: String,
pub end: String, pub end: String,
@ -304,6 +309,7 @@ pub struct Grammar {
struct IndentConfig { struct IndentConfig {
query: Query, query: Query,
indent_capture_ix: u32, indent_capture_ix: u32,
start_capture_ix: Option<u32>,
end_capture_ix: Option<u32>, end_capture_ix: Option<u32>,
} }
@ -661,11 +667,13 @@ impl Language {
let grammar = self.grammar_mut(); let grammar = self.grammar_mut();
let query = Query::new(grammar.ts_language, source)?; let query = Query::new(grammar.ts_language, source)?;
let mut indent_capture_ix = None; let mut indent_capture_ix = None;
let mut start_capture_ix = None;
let mut end_capture_ix = None; let mut end_capture_ix = None;
get_capture_indices( get_capture_indices(
&query, &query,
&mut [ &mut [
("indent", &mut indent_capture_ix), ("indent", &mut indent_capture_ix),
("start", &mut start_capture_ix),
("end", &mut end_capture_ix), ("end", &mut end_capture_ix),
], ],
); );
@ -673,6 +681,7 @@ impl Language {
grammar.indents_config = Some(IndentConfig { grammar.indents_config = Some(IndentConfig {
query, query,
indent_capture_ix, indent_capture_ix,
start_capture_ix,
end_capture_ix, end_capture_ix,
}); });
} }
@ -763,8 +772,15 @@ impl Language {
self.config.name.clone() self.config.name.clone()
} }
pub fn line_comment_prefix(&self) -> Option<&str> { pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
self.config.line_comment.as_deref() self.config.line_comment.as_ref()
}
pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
self.config
.block_comment
.as_ref()
.map(|(start, end)| (start, end))
} }
pub async fn disk_based_diagnostic_sources(&self) -> &[String] { pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
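(Annotation, not part of the diff: with `line_comment` and the new `block_comment` both stored as shared `Arc<str>` values, a comment-toggling feature can use whichever delimiters a language provides. A rough sketch under that assumption; `comment_out` is a hypothetical helper, not an API added by this change.)
// Hypothetical sketch: wrap `text` in the language's comment syntax, preferring
// a line comment (e.g. "// " for Rust) and falling back to block delimiters
// (e.g. "<!-- " / " -->" for HTML, as configured later in this diff).
fn comment_out(language: &Language, text: &str) -> Option<String> {
    if let Some(prefix) = language.line_comment_prefix() {
        Some(format!("{prefix}{text}"))
    } else if let Some((start, end)) = language.block_comment_delimiters() {
        Some(format!("{start}{text}{end}"))
    } else {
        None
    }
}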
@ -789,7 +805,7 @@ impl Language {
} }
pub async fn label_for_completion( pub async fn label_for_completion(
&self, self: &Arc<Self>,
completion: &lsp::CompletionItem, completion: &lsp::CompletionItem,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
self.adapter self.adapter
@ -798,7 +814,11 @@ impl Language {
.await .await
} }
pub async fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option<CodeLabel> { pub async fn label_for_symbol(
self: &Arc<Self>,
name: &str,
kind: lsp::SymbolKind,
) -> Option<CodeLabel> {
self.adapter self.adapter
.as_ref()? .as_ref()?
.label_for_symbol(name, kind, self) .label_for_symbol(name, kind, self)
@ -806,20 +826,17 @@ impl Language {
} }
pub fn highlight_text<'a>( pub fn highlight_text<'a>(
&'a self, self: &'a Arc<Self>,
text: &'a Rope, text: &'a Rope,
range: Range<usize>, range: Range<usize>,
) -> Vec<(Range<usize>, HighlightId)> { ) -> Vec<(Range<usize>, HighlightId)> {
let mut result = Vec::new(); let mut result = Vec::new();
if let Some(grammar) = &self.grammar { if let Some(grammar) = &self.grammar {
let tree = grammar.parse_text(text, None); let tree = grammar.parse_text(text, None);
let captures = SyntaxSnapshot::single_tree_captures( let captures =
range.clone(), SyntaxSnapshot::single_tree_captures(range.clone(), text, &tree, self, |grammar| {
text, grammar.highlights_query.as_ref()
&tree, });
grammar,
|grammar| grammar.highlights_query.as_ref(),
);
let highlight_maps = vec![grammar.highlight_map()]; let highlight_maps = vec![grammar.highlight_map()];
let mut offset = 0; let mut offset = 0;
for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) { for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) {
@ -861,6 +878,14 @@ impl Language {
} }
} }
impl Debug for Language {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Language")
.field("name", &self.config.name)
.finish()
}
}
impl Grammar { impl Grammar {
pub fn id(&self) -> usize { pub fn id(&self) -> usize {
self.id self.id

View file

@ -92,6 +92,13 @@ struct SyntaxLayer {
language: Arc<Language>, language: Arc<Language>,
} }
#[derive(Debug)]
pub struct SyntaxLayerInfo<'a> {
pub depth: usize,
pub node: Node<'a>,
pub language: &'a Arc<Language>,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct SyntaxLayerSummary { struct SyntaxLayerSummary {
min_depth: usize, min_depth: usize,
@ -473,13 +480,18 @@ impl SyntaxSnapshot {
range: Range<usize>, range: Range<usize>,
text: &'a Rope, text: &'a Rope,
tree: &'a Tree, tree: &'a Tree,
grammar: &'a Grammar, language: &'a Arc<Language>,
query: fn(&Grammar) -> Option<&Query>, query: fn(&Grammar) -> Option<&Query>,
) -> SyntaxMapCaptures<'a> { ) -> SyntaxMapCaptures<'a> {
SyntaxMapCaptures::new( SyntaxMapCaptures::new(
range.clone(), range.clone(),
text, text,
[(grammar, 0, tree.root_node())].into_iter(), [SyntaxLayerInfo {
language,
depth: 0,
node: tree.root_node(),
}]
.into_iter(),
query, query,
) )
} }
@ -513,19 +525,19 @@ impl SyntaxSnapshot {
} }
#[cfg(test)] #[cfg(test)]
pub fn layers(&self, buffer: &BufferSnapshot) -> Vec<(&Grammar, usize, Node)> { pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
self.layers_for_range(0..buffer.len(), buffer) self.layers_for_range(0..buffer.len(), buffer).collect()
} }
pub fn layers_for_range<'a, T: ToOffset>( pub fn layers_for_range<'a, T: ToOffset>(
&self, &'a self,
range: Range<T>, range: Range<T>,
buffer: &BufferSnapshot, buffer: &'a BufferSnapshot,
) -> Vec<(&Grammar, usize, Node)> { ) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
let start = buffer.anchor_before(range.start.to_offset(buffer)); let start = buffer.anchor_before(range.start.to_offset(buffer));
let end = buffer.anchor_after(range.end.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer));
let mut cursor = self.layers.filter::<_, ()>(|summary| { let mut cursor = self.layers.filter::<_, ()>(move |summary| {
if summary.max_depth > summary.min_depth { if summary.max_depth > summary.min_depth {
true true
} else { } else {
@ -535,23 +547,26 @@ impl SyntaxSnapshot {
} }
}); });
let mut result = Vec::new(); // let mut result = Vec::new();
cursor.next(buffer); cursor.next(buffer);
while let Some(layer) = cursor.item() { std::iter::from_fn(move || {
if let Some(grammar) = &layer.language.grammar { if let Some(layer) = cursor.item() {
result.push(( let info = SyntaxLayerInfo {
grammar.as_ref(), language: &layer.language,
layer.depth, depth: layer.depth,
layer.tree.root_node_with_offset( node: layer.tree.root_node_with_offset(
layer.range.start.to_offset(buffer), layer.range.start.to_offset(buffer),
layer.range.start.to_point(buffer).to_ts_point(), layer.range.start.to_point(buffer).to_ts_point(),
), ),
)); };
cursor.next(buffer);
Some(info)
} else {
None
} }
cursor.next(buffer) })
}
result // result
} }
} }
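(Annotation, not part of the diff: `layers_for_range` now yields `SyntaxLayerInfo` values lazily instead of building a `Vec<(&Grammar, usize, Node)>`. A small sketch of iterating the layers covering a byte range, assuming a `SyntaxSnapshot` and its matching `BufferSnapshot` are in scope; `describe_layers` is illustrative only.)
// Illustrative sketch: print every syntax layer intersecting `range`,
// using the fields of the new SyntaxLayerInfo struct.
fn describe_layers(syntax: &SyntaxSnapshot, buffer: &BufferSnapshot, range: std::ops::Range<usize>) {
    for layer in syntax.layers_for_range(range, buffer) {
        println!(
            "depth {}, language {:?}, bytes {:?}",
            layer.depth,
            layer.language, // Language's new Debug impl prints only its name
            layer.node.byte_range()
        );
    }
}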
@ -559,7 +574,7 @@ impl<'a> SyntaxMapCaptures<'a> {
fn new( fn new(
range: Range<usize>, range: Range<usize>,
text: &'a Rope, text: &'a Rope,
layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>, layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
query: fn(&Grammar) -> Option<&Query>, query: fn(&Grammar) -> Option<&Query>,
) -> Self { ) -> Self {
let mut result = Self { let mut result = Self {
@ -567,11 +582,19 @@ impl<'a> SyntaxMapCaptures<'a> {
grammars: Vec::new(), grammars: Vec::new(),
active_layer_count: 0, active_layer_count: 0,
}; };
for (grammar, depth, node) in layers { for SyntaxLayerInfo {
let query = if let Some(query) = query(grammar) { language,
query depth,
} else { node,
continue; } in layers
{
let grammar = match &language.grammar {
Some(grammer) => grammer,
None => continue,
};
let query = match query(&grammar) {
Some(query) => query,
None => continue,
}; };
let mut query_cursor = QueryCursorHandle::new(); let mut query_cursor = QueryCursorHandle::new();
@ -678,15 +701,23 @@ impl<'a> SyntaxMapMatches<'a> {
fn new( fn new(
range: Range<usize>, range: Range<usize>,
text: &'a Rope, text: &'a Rope,
layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>, layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
query: fn(&Grammar) -> Option<&Query>, query: fn(&Grammar) -> Option<&Query>,
) -> Self { ) -> Self {
let mut result = Self::default(); let mut result = Self::default();
for (grammar, depth, node) in layers { for SyntaxLayerInfo {
let query = if let Some(query) = query(grammar) { language,
query depth,
} else { node,
continue; } in layers
{
let grammar = match &language.grammar {
Some(grammer) => grammer,
None => continue,
};
let query = match query(&grammar) {
Some(query) => query,
None => continue,
}; };
let mut query_cursor = QueryCursorHandle::new(); let mut query_cursor = QueryCursorHandle::new();
@ -1624,8 +1655,8 @@ mod tests {
let reference_layers = reference_syntax_map.layers(&buffer); let reference_layers = reference_syntax_map.layers(&buffer);
for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter())
{ {
assert_eq!(edited_layer.2.to_sexp(), reference_layer.2.to_sexp()); assert_eq!(edited_layer.node.to_sexp(), reference_layer.node.to_sexp());
assert_eq!(edited_layer.2.range(), reference_layer.2.range()); assert_eq!(edited_layer.node.range(), reference_layer.node.range());
} }
} }
@ -1770,13 +1801,13 @@ mod tests {
mutated_layers.into_iter().zip(reference_layers.into_iter()) mutated_layers.into_iter().zip(reference_layers.into_iter())
{ {
assert_eq!( assert_eq!(
edited_layer.2.to_sexp(), edited_layer.node.to_sexp(),
reference_layer.2.to_sexp(), reference_layer.node.to_sexp(),
"different layer at step {i}" "different layer at step {i}"
); );
assert_eq!( assert_eq!(
edited_layer.2.range(), edited_layer.node.range(),
reference_layer.2.range(), reference_layer.node.range(),
"different layer at step {i}" "different layer at step {i}"
); );
} }
@ -1822,13 +1853,15 @@ mod tests {
range: Range<Point>, range: Range<Point>,
expected_layers: &[&str], expected_layers: &[&str],
) { ) {
let layers = syntax_map.layers_for_range(range, &buffer); let layers = syntax_map
.layers_for_range(range, &buffer)
.collect::<Vec<_>>();
assert_eq!( assert_eq!(
layers.len(), layers.len(),
expected_layers.len(), expected_layers.len(),
"wrong number of layers" "wrong number of layers"
); );
for (i, ((_, _, node), expected_s_exp)) in for (i, (SyntaxLayerInfo { node, .. }, expected_s_exp)) in
layers.iter().zip(expected_layers.iter()).enumerate() layers.iter().zip(expected_layers.iter()).enumerate()
{ {
let actual_s_exp = node.to_sexp(); let actual_s_exp = node.to_sexp();

View file

@ -14,7 +14,7 @@ use std::{
}; };
use text::network::Network; use text::network::Network;
use unindent::Unindent as _; use unindent::Unindent as _;
use util::post_inc; use util::{post_inc, test::marked_text_ranges};
#[cfg(test)] #[cfg(test)]
#[ctor::ctor] #[ctor::ctor]
@ -1035,6 +1035,120 @@ fn test_autoindent_language_without_indents_query(cx: &mut MutableAppContext) {
}); });
} }
#[gpui::test]
fn test_autoindent_with_injected_languages(cx: &mut MutableAppContext) {
cx.set_global({
let mut settings = Settings::test(cx);
settings.language_overrides.extend([
(
"HTML".into(),
settings::EditorSettings {
tab_size: Some(2.try_into().unwrap()),
..Default::default()
},
),
(
"JavaScript".into(),
settings::EditorSettings {
tab_size: Some(8.try_into().unwrap()),
..Default::default()
},
),
]);
settings
});
let html_language = Arc::new(
Language::new(
LanguageConfig {
name: "HTML".into(),
..Default::default()
},
Some(tree_sitter_html::language()),
)
.with_indents_query(
"
(element
(start_tag) @start
(end_tag)? @end) @indent
",
)
.unwrap()
.with_injection_query(
r#"
(script_element
(raw_text) @content
(#set! "language" "javascript"))
"#,
)
.unwrap(),
);
let javascript_language = Arc::new(
Language::new(
LanguageConfig {
name: "JavaScript".into(),
..Default::default()
},
Some(tree_sitter_javascript::language()),
)
.with_indents_query(
r#"
(object "}" @end) @indent
"#,
)
.unwrap(),
);
let language_registry = Arc::new(LanguageRegistry::test());
language_registry.add(html_language.clone());
language_registry.add(javascript_language.clone());
cx.add_model(|cx| {
let (text, ranges) = marked_text_ranges(
&"
<div>ˇ
</div>
<script>
init({ˇ
})
</script>
<span>ˇ
</span>
"
.unindent(),
false,
);
let mut buffer = Buffer::new(0, text, cx);
buffer.set_language_registry(language_registry);
buffer.set_language(Some(html_language), cx);
buffer.edit(
ranges.into_iter().map(|range| (range, "\na")),
Some(AutoindentMode::EachLine),
cx,
);
assert_eq!(
buffer.text(),
"
<div>
a
</div>
<script>
init({
a
})
</script>
<span>
a
</span>
"
.unindent()
);
buffer
});
}
#[gpui::test] #[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) { fn test_serialization(cx: &mut gpui::MutableAppContext) {
let mut now = Instant::now(); let mut now = Instant::now();
@ -1449,7 +1563,7 @@ fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> Str
buffer.read_with(cx, |buffer, _| { buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot(); let snapshot = buffer.snapshot();
let layers = snapshot.syntax.layers(buffer.as_text_snapshot()); let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
layers[0].2.to_sexp() layers[0].node.to_sexp()
}) })
} }

View file

@ -92,6 +92,7 @@ toml = "0.5"
tree-sitter = "0.20" tree-sitter = "0.20"
tree-sitter-c = "0.20.1" tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.0" tree-sitter-cpp = "0.20.0"
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "05e3631c6a0701c1fa518b0fee7be95a2ceef5e2" } tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "05e3631c6a0701c1fa518b0fee7be95a2ceef5e2" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" } tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "137e1ce6a02698fc246cdb9c6b886ed1de9a1ed8" } tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "137e1ce6a02698fc246cdb9c6b886ed1de9a1ed8" }
@ -100,6 +101,7 @@ tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown",
tree-sitter-python = "0.20.2" tree-sitter-python = "0.20.2"
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" } tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = "0.20.1" tree-sitter-typescript = "0.20.1"
tree-sitter-html = "0.19.0"
url = "2.2" url = "2.2"
[dev-dependencies] [dev-dependencies]

View file

@ -7,6 +7,7 @@ use std::{borrow::Cow, str, sync::Arc};
mod c; mod c;
mod elixir; mod elixir;
mod go; mod go;
mod html;
mod installation; mod installation;
mod json; mod json;
mod language_plugin; mod language_plugin;
@ -46,6 +47,11 @@ pub async fn init(languages: Arc<LanguageRegistry>, _executor: Arc<Background>)
tree_sitter_cpp::language(), tree_sitter_cpp::language(),
Some(CachedLspAdapter::new(c::CLspAdapter).await), Some(CachedLspAdapter::new(c::CLspAdapter).await),
), ),
(
"css",
tree_sitter_css::language(),
None, //
),
( (
"elixir", "elixir",
tree_sitter_elixir::language(), tree_sitter_elixir::language(),
@ -96,8 +102,13 @@ pub async fn init(languages: Arc<LanguageRegistry>, _executor: Arc<Background>)
tree_sitter_typescript::language_tsx(), tree_sitter_typescript::language_tsx(),
Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await), Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
), ),
(
"html",
tree_sitter_html::language(),
Some(CachedLspAdapter::new(html::HtmlLspAdapter).await),
),
] { ] {
languages.add(Arc::new(language(name, grammar, lsp_adapter))); languages.add(language(name, grammar, lsp_adapter));
} }
} }
@ -105,7 +116,7 @@ pub(crate) fn language(
name: &str, name: &str,
grammar: tree_sitter::Language, grammar: tree_sitter::Language,
lsp_adapter: Option<Arc<CachedLspAdapter>>, lsp_adapter: Option<Arc<CachedLspAdapter>>,
) -> Language { ) -> Arc<Language> {
let config = toml::from_slice( let config = toml::from_slice(
&LanguageDir::get(&format!("{}/config.toml", name)) &LanguageDir::get(&format!("{}/config.toml", name))
.unwrap() .unwrap()
@ -142,7 +153,7 @@ pub(crate) fn language(
if let Some(lsp_adapter) = lsp_adapter { if let Some(lsp_adapter) = lsp_adapter {
language = language.with_lsp_adapter(lsp_adapter) language = language.with_lsp_adapter(lsp_adapter)
} }
language Arc::new(language)
} }
fn load_query(name: &str, filename_prefix: &str) -> Option<Cow<'static, str>> { fn load_query(name: &str, filename_prefix: &str) -> Option<Cow<'static, str>> {

View file

@ -112,7 +112,7 @@ impl super::LspAdapter for CLspAdapter {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
completion: &lsp::CompletionItem, completion: &lsp::CompletionItem,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
let label = completion let label = completion
.label .label
@ -190,7 +190,7 @@ impl super::LspAdapter for CLspAdapter {
&self, &self,
name: &str, name: &str,
kind: lsp::SymbolKind, kind: lsp::SymbolKind,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind { let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@ -251,7 +251,6 @@ mod tests {
use gpui::MutableAppContext; use gpui::MutableAppContext;
use language::{AutoindentMode, Buffer}; use language::{AutoindentMode, Buffer};
use settings::Settings; use settings::Settings;
use std::sync::Arc;
#[gpui::test] #[gpui::test]
fn test_c_autoindent(cx: &mut MutableAppContext) { fn test_c_autoindent(cx: &mut MutableAppContext) {
@ -262,7 +261,7 @@ mod tests {
let language = crate::languages::language("c", tree_sitter_c::language(), None); let language = crate::languages::language("c", tree_sitter_c::language(), None);
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx); let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
// empty function // empty function
buffer.edit([(0..0, "int main() {}")], None, cx); buffer.edit([(0..0, "int main() {}")], None, cx);

View file

@ -0,0 +1,3 @@
("(" @open ")" @close)
("[" @open "]" @close)
("{" @open "}" @close)

View file

@ -0,0 +1,9 @@
name = "CSS"
path_suffixes = ["css"]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false }
]

View file

@ -0,0 +1,76 @@
(comment) @comment
[
(tag_name)
(nesting_selector)
(universal_selector)
] @tag
[
"~"
">"
"+"
"-"
"*"
"/"
"="
"^="
"|="
"~="
"$="
"*="
"and"
"or"
"not"
"only"
] @operator
(attribute_selector (plain_value) @string)
(attribute_name) @attribute
(pseudo_element_selector (tag_name) @attribute)
(pseudo_class_selector (class_name) @attribute)
[
(class_name)
(id_name)
(namespace_name)
(property_name)
(feature_name)
] @property
(function_name) @function
((property_name) @variable
(#match? @variable "^--"))
((plain_value) @variable
(#match? @variable "^--"))
[
"@media"
"@import"
"@charset"
"@namespace"
"@supports"
"@keyframes"
(at_keyword)
(to)
(from)
(important)
] @keyword
(string_value) @string
(color_value) @string.special
[
(integer_value)
(float_value)
] @number
(unit) @type
[
"#"
","
":"
] @punctuation.delimiter

View file

@ -0,0 +1 @@
(_ "{" "}" @end) @indent

View file

@ -113,7 +113,7 @@ impl LspAdapter for ElixirLspAdapter {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
completion: &lsp::CompletionItem, completion: &lsp::CompletionItem,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
match completion.kind.zip(completion.detail.as_ref()) { match completion.kind.zip(completion.detail.as_ref()) {
Some((_, detail)) if detail.starts_with("(function)") => { Some((_, detail)) if detail.starts_with("(function)") => {
@ -168,7 +168,7 @@ impl LspAdapter for ElixirLspAdapter {
&self, &self,
name: &str, name: &str,
kind: SymbolKind, kind: SymbolKind,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind { let (text, filter_range, display_range) = match kind {
SymbolKind::METHOD | SymbolKind::FUNCTION => { SymbolKind::METHOD | SymbolKind::FUNCTION => {

View file

@ -134,7 +134,7 @@ impl super::LspAdapter for GoLspAdapter {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
completion: &lsp::CompletionItem, completion: &lsp::CompletionItem,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
let label = &completion.label; let label = &completion.label;
@ -235,7 +235,7 @@ impl super::LspAdapter for GoLspAdapter {
&self, &self,
name: &str, name: &str,
kind: lsp::SymbolKind, kind: lsp::SymbolKind,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind { let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {

View file

@ -0,0 +1,101 @@
use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::http::HttpClient;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter};
use serde_json::json;
use smol::fs;
use std::{any::Any, path::PathBuf, sync::Arc};
use util::ResultExt;
pub struct HtmlLspAdapter;
impl HtmlLspAdapter {
const BIN_PATH: &'static str =
"node_modules/vscode-langservers-extracted/bin/vscode-html-language-server";
}
#[async_trait]
impl LspAdapter for HtmlLspAdapter {
async fn name(&self) -> LanguageServerName {
LanguageServerName("vscode-html-language-server".into())
}
async fn server_args(&self) -> Vec<String> {
vec!["--stdio".into()]
}
async fn fetch_latest_server_version(
&self,
_: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(npm_package_latest_version("vscode-langservers-extracted").await?) as Box<_>)
}
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
container_dir: PathBuf,
) -> Result<PathBuf> {
let version = version.downcast::<String>().unwrap();
let version_dir = container_dir.join(version.as_str());
fs::create_dir_all(&version_dir)
.await
.context("failed to create version directory")?;
let binary_path = version_dir.join(Self::BIN_PATH);
if fs::metadata(&binary_path).await.is_err() {
npm_install_packages(
[("vscode-langservers-extracted", version.as_str())],
&version_dir,
)
.await?;
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
while let Some(entry) = entries.next().await {
if let Some(entry) = entry.log_err() {
let entry_path = entry.path();
if entry_path.as_path() != version_dir {
fs::remove_dir_all(&entry_path).await.log_err();
}
}
}
}
}
Ok(binary_path)
}
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
let entry = entry?;
if entry.file_type().await?.is_dir() {
last_version_dir = Some(entry.path());
}
}
let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let bin_path = last_version_dir.join(Self::BIN_PATH);
if bin_path.exists() {
Ok(bin_path)
} else {
Err(anyhow!(
"missing executable in directory {:?}",
last_version_dir
))
}
})()
.await
.log_err()
}
async fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))
}
}

View file

@ -0,0 +1,2 @@
("<" @open ">" @close)
("\"" @open "\"" @close)

View file

@ -0,0 +1,12 @@
name = "HTML"
path_suffixes = ["html"]
autoclose_before = ">})"
brackets = [
{ start = "<", end = ">", close = true, newline = true },
{ start = "{", end = "}", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false },
{ start = "!--", end = " --", close = true, newline = false },
]
block_comment = ["<!-- ", " -->"]

View file

@ -0,0 +1,15 @@
(tag_name) @keyword
(erroneous_end_tag_name) @keyword
(doctype) @constant
(attribute_name) @property
(attribute_value) @string
(comment) @comment
"=" @operator
[
"<"
">"
"</"
"/>"
] @punctuation.bracket

View file

@ -0,0 +1,6 @@
(start_tag ">" @end) @indent
(self_closing_tag "/>" @end) @indent
(element
(start_tag) @start
(end_tag)? @end) @indent
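(Annotation, not part of the diff: together with the new `@start` capture handling in `suggest_autoindents`, this query indents an element's content starting where the start tag ends and stops indenting where the end tag begins. A standalone sketch that runs the same pattern against the published crates pinned in this PR, tree-sitter 0.20 and tree-sitter-html 0.19, and prints what each capture matches.)
use tree_sitter::{Parser, Query, QueryCursor};

fn main() {
    let mut parser = Parser::new();
    parser.set_language(tree_sitter_html::language()).unwrap();

    let source = "<div>\n  <span></span>\n</div>";
    let tree = parser.parse(source, None).unwrap();

    // Same shape as the element rule above: the range to indent begins where
    // the @start capture (the start tag) ends and ends where the @end capture
    // (the end tag) begins; @indent spans the whole element.
    let query = Query::new(
        tree_sitter_html::language(),
        "(element (start_tag) @start (end_tag)? @end) @indent",
    )
    .unwrap();

    let mut cursor = QueryCursor::new();
    for m in cursor.matches(&query, tree.root_node(), source.as_bytes()) {
        for capture in m.captures {
            let name = &query.capture_names()[capture.index as usize];
            println!("{name}: {:?}", capture.node.byte_range());
        }
    }
}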

View file

@ -0,0 +1,7 @@
(script_element
(raw_text) @content
(#set! "language" "javascript"))
(style_element
(raw_text) @content
(#set! "language" "css"))

View file

@ -90,7 +90,7 @@ impl LspAdapter for PythonLspAdapter {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
item: &lsp::CompletionItem, item: &lsp::CompletionItem,
language: &language::Language, language: &Arc<language::Language>,
) -> Option<language::CodeLabel> { ) -> Option<language::CodeLabel> {
let label = &item.label; let label = &item.label;
let grammar = language.grammar()?; let grammar = language.grammar()?;
@ -112,7 +112,7 @@ impl LspAdapter for PythonLspAdapter {
&self, &self,
name: &str, name: &str,
kind: lsp::SymbolKind, kind: lsp::SymbolKind,
language: &language::Language, language: &Arc<language::Language>,
) -> Option<language::CodeLabel> { ) -> Option<language::CodeLabel> {
let (text, filter_range, display_range) = match kind { let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@ -149,7 +149,6 @@ mod tests {
use gpui::{ModelContext, MutableAppContext}; use gpui::{ModelContext, MutableAppContext};
use language::{AutoindentMode, Buffer}; use language::{AutoindentMode, Buffer};
use settings::Settings; use settings::Settings;
use std::sync::Arc;
#[gpui::test] #[gpui::test]
fn test_python_autoindent(cx: &mut MutableAppContext) { fn test_python_autoindent(cx: &mut MutableAppContext) {
@ -160,7 +159,7 @@ mod tests {
cx.set_global(settings); cx.set_global(settings);
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx); let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| { let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| {
let ix = buffer.len(); let ix = buffer.len();
buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx); buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx);

View file

@ -119,7 +119,7 @@ impl LspAdapter for RustLspAdapter {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
completion: &lsp::CompletionItem, completion: &lsp::CompletionItem,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
match completion.kind { match completion.kind {
Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => { Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => {
@ -196,7 +196,7 @@ impl LspAdapter for RustLspAdapter {
&self, &self,
name: &str, name: &str,
kind: lsp::SymbolKind, kind: lsp::SymbolKind,
language: &Language, language: &Arc<Language>,
) -> Option<CodeLabel> { ) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind { let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@ -439,7 +439,7 @@ mod tests {
cx.set_global(settings); cx.set_global(settings);
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx); let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
// indent between braces // indent between braces
buffer.set_text("fn a() {}", cx); buffer.set_text("fn a() {}", cx);

View file

@ -115,7 +115,7 @@ impl LspAdapter for TypeScriptLspAdapter {
async fn label_for_completion( async fn label_for_completion(
&self, &self,
item: &lsp::CompletionItem, item: &lsp::CompletionItem,
language: &language::Language, language: &Arc<language::Language>,
) -> Option<language::CodeLabel> { ) -> Option<language::CodeLabel> {
use lsp::CompletionItemKind as Kind; use lsp::CompletionItemKind as Kind;
let len = item.label.len(); let len = item.label.len();
@ -144,7 +144,6 @@ impl LspAdapter for TypeScriptLspAdapter {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::sync::Arc;
use gpui::MutableAppContext; use gpui::MutableAppContext;
use unindent::Unindent; use unindent::Unindent;
@ -172,9 +171,8 @@ mod tests {
"# "#
.unindent(); .unindent();
let buffer = cx.add_model(|cx| { let buffer =
language::Buffer::new(0, text, cx).with_language(Arc::new(language), cx) cx.add_model(|cx| language::Buffer::new(0, text, cx).with_language(language, cx));
});
let outline = buffer.read(cx).snapshot().outline(None).unwrap(); let outline = buffer.read(cx).snapshot().outline(None).unwrap();
assert_eq!( assert_eq!(
outline outline

View file

@ -1133,7 +1133,7 @@ mod tests {
assert!(!editor.is_dirty(cx)); assert!(!editor.is_dirty(cx));
assert_eq!(editor.title(cx), "untitled"); assert_eq!(editor.title(cx), "untitled");
assert!(Arc::ptr_eq( assert!(Arc::ptr_eq(
editor.language_at(0, cx).unwrap(), &editor.language_at(0, cx).unwrap(),
&languages::PLAIN_TEXT &languages::PLAIN_TEXT
)); ));
editor.handle_input("hi", cx); editor.handle_input("hi", cx);
@ -1220,7 +1220,7 @@ mod tests {
editor.update(cx, |editor, cx| { editor.update(cx, |editor, cx| {
assert!(Arc::ptr_eq( assert!(Arc::ptr_eq(
editor.language_at(0, cx).unwrap(), &editor.language_at(0, cx).unwrap(),
&languages::PLAIN_TEXT &languages::PLAIN_TEXT
)); ));
editor.handle_input("hi", cx); editor.handle_input("hi", cx);