Merge branch 'main' into pixel-columns
Commit 9d07561d99 · 217 changed files with 7640 additions and 12657 deletions
@@ -45,6 +45,7 @@ lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
+pulldown-cmark = { version = "0.9.2", default-features = false }
regex.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -1,11 +1,13 @@
pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
+    markdown::ParsedMarkdown,
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
};
use crate::{
    diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
    language_settings::{language_settings, LanguageSettings},
+    markdown::parse_markdown,
    outline::OutlineItem,
    syntax_map::{
        SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
@@ -143,11 +145,51 @@ pub struct Diagnostic {
    pub is_unnecessary: bool,
}

+pub async fn prepare_completion_documentation(
+    documentation: &lsp::Documentation,
+    language_registry: &Arc<LanguageRegistry>,
+    language: Option<Arc<Language>>,
+) -> Documentation {
+    match documentation {
+        lsp::Documentation::String(text) => {
+            if text.lines().count() <= 1 {
+                Documentation::SingleLine(text.clone())
+            } else {
+                Documentation::MultiLinePlainText(text.clone())
+            }
+        }
+
+        lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
+            lsp::MarkupKind::PlainText => {
+                if value.lines().count() <= 1 {
+                    Documentation::SingleLine(value.clone())
+                } else {
+                    Documentation::MultiLinePlainText(value.clone())
+                }
+            }
+
+            lsp::MarkupKind::Markdown => {
+                let parsed = parse_markdown(value, language_registry, language).await;
+                Documentation::MultiLineMarkdown(parsed)
+            }
+        },
+    }
+}
+
+#[derive(Clone, Debug)]
+pub enum Documentation {
+    Undocumented,
+    SingleLine(String),
+    MultiLinePlainText(String),
+    MultiLineMarkdown(ParsedMarkdown),
+}
+
#[derive(Clone, Debug)]
pub struct Completion {
    pub old_range: Range<Anchor>,
    pub new_text: String,
    pub label: CodeLabel,
+    pub documentation: Option<Documentation>,
    pub server_id: LanguageServerId,
    pub lsp_completion: lsp::CompletionItem,
}
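The new prepare_completion_documentation helper keeps one-line documentation as a plain string and only invokes the markdown parser for MarkupKind::Markdown. A minimal standalone sketch of the same line-count dispatch, using plain strings instead of lsp::Documentation (classify and its return labels are illustrative, not Zed API):

fn classify(text: &str) -> &'static str {
    // Mirrors the check above: a single line stays "single line",
    // anything longer is treated as multi-line plain text.
    if text.lines().count() <= 1 {
        "SingleLine"
    } else {
        "MultiLinePlainText"
    }
}

fn main() {
    assert_eq!(classify("Returns the length of the string."), "SingleLine");
    assert_eq!(
        classify("Returns the length.\n\nSee also `len`."),
        "MultiLinePlainText"
    );
}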
@@ -1406,82 +1448,95 @@ impl Buffer {
            return None;
        }

-        self.start_transaction();
-        self.pending_autoindent.take();
-        let autoindent_request = autoindent_mode
-            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
+        // Non-generic part hoisted out to reduce LLVM IR size.
+        fn tail(
+            this: &mut Buffer,
+            edits: Vec<(Range<usize>, Arc<str>)>,
+            autoindent_mode: Option<AutoindentMode>,
+            cx: &mut ModelContext<Buffer>,
+        ) -> Option<clock::Lamport> {
+            this.start_transaction();
+            this.pending_autoindent.take();
+            let autoindent_request = autoindent_mode
+                .and_then(|mode| this.language.as_ref().map(|_| (this.snapshot(), mode)));

-        let edit_operation = self.text.edit(edits.iter().cloned());
-        let edit_id = edit_operation.timestamp();
+            let edit_operation = this.text.edit(edits.iter().cloned());
+            let edit_id = edit_operation.timestamp();

-        if let Some((before_edit, mode)) = autoindent_request {
-            let mut delta = 0isize;
-            let entries = edits
-                .into_iter()
-                .enumerate()
-                .zip(&edit_operation.as_edit().unwrap().new_text)
-                .map(|((ix, (range, _)), new_text)| {
-                    let new_text_length = new_text.len();
-                    let old_start = range.start.to_point(&before_edit);
-                    let new_start = (delta + range.start as isize) as usize;
-                    delta += new_text_length as isize - (range.end as isize - range.start as isize);
+            if let Some((before_edit, mode)) = autoindent_request {
+                let mut delta = 0isize;
+                let entries = edits
+                    .into_iter()
+                    .enumerate()
+                    .zip(&edit_operation.as_edit().unwrap().new_text)
+                    .map(|((ix, (range, _)), new_text)| {
+                        let new_text_length = new_text.len();
+                        let old_start = range.start.to_point(&before_edit);
+                        let new_start = (delta + range.start as isize) as usize;
+                        delta +=
+                            new_text_length as isize - (range.end as isize - range.start as isize);

-                    let mut range_of_insertion_to_indent = 0..new_text_length;
-                    let mut first_line_is_new = false;
-                    let mut original_indent_column = None;
+                        let mut range_of_insertion_to_indent = 0..new_text_length;
+                        let mut first_line_is_new = false;
+                        let mut original_indent_column = None;

-                    // When inserting an entire line at the beginning of an existing line,
-                    // treat the insertion as new.
-                    if new_text.contains('\n')
-                        && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
-                    {
-                        first_line_is_new = true;
-                    }
+                        // When inserting an entire line at the beginning of an existing line,
+                        // treat the insertion as new.
+                        if new_text.contains('\n')
+                            && old_start.column
+                                <= before_edit.indent_size_for_line(old_start.row).len
+                        {
+                            first_line_is_new = true;
+                        }

-                    // When inserting text starting with a newline, avoid auto-indenting the
-                    // previous line.
-                    if new_text.starts_with('\n') {
-                        range_of_insertion_to_indent.start += 1;
-                        first_line_is_new = true;
-                    }
+                        // When inserting text starting with a newline, avoid auto-indenting the
+                        // previous line.
+                        if new_text.starts_with('\n') {
+                            range_of_insertion_to_indent.start += 1;
+                            first_line_is_new = true;
+                        }

-                    // Avoid auto-indenting after the insertion.
-                    if let AutoindentMode::Block {
-                        original_indent_columns,
-                    } = &mode
-                    {
-                        original_indent_column =
-                            Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
-                                indent_size_for_text(
-                                    new_text[range_of_insertion_to_indent.clone()].chars(),
-                                )
-                                .len
-                            }));
-                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
-                            range_of_insertion_to_indent.end -= 1;
-                        }
-                    }
+                        // Avoid auto-indenting after the insertion.
+                        if let AutoindentMode::Block {
+                            original_indent_columns,
+                        } = &mode
+                        {
+                            original_indent_column = Some(
+                                original_indent_columns.get(ix).copied().unwrap_or_else(|| {
+                                    indent_size_for_text(
+                                        new_text[range_of_insertion_to_indent.clone()].chars(),
+                                    )
+                                    .len
+                                }),
+                            );
+                            if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
+                                range_of_insertion_to_indent.end -= 1;
+                            }
+                        }

-                    AutoindentRequestEntry {
-                        first_line_is_new,
-                        original_indent_column,
-                        indent_size: before_edit.language_indent_size_at(range.start, cx),
-                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
-                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
-                    }
-                })
-                .collect();
+                        AutoindentRequestEntry {
+                            first_line_is_new,
+                            original_indent_column,
+                            indent_size: before_edit.language_indent_size_at(range.start, cx),
+                            range: this
+                                .anchor_before(new_start + range_of_insertion_to_indent.start)
+                                ..this.anchor_after(new_start + range_of_insertion_to_indent.end),
+                        }
+                    })
+                    .collect();

-            self.autoindent_requests.push(Arc::new(AutoindentRequest {
-                before_edit,
-                entries,
-                is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
-            }));
-        }
+                this.autoindent_requests.push(Arc::new(AutoindentRequest {
+                    before_edit,
+                    entries,
+                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
+                }));
+            }

-        self.end_transaction(cx);
-        self.send_operation(Operation::Buffer(edit_operation), cx);
-        Some(edit_id)
+            this.end_transaction(cx);
+            this.send_operation(Operation::Buffer(edit_operation), cx);
+            Some(edit_id)
+        }
+        tail(self, edits, autoindent_mode, cx)
    }

    fn did_edit(
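The change above hoists the body of Buffer::edit into an inner, non-generic fn tail and makes the generic outer function convert its arguments and delegate, so the bulk of the code is compiled once instead of once per monomorphization. A self-contained sketch of that pattern (log_all is an invented example, not Zed code):

fn log_all<I, S>(items: I)
where
    I: IntoIterator<Item = S>,
    S: Into<String>,
{
    // Generic shim: normalize the inputs, then hand off.
    let items: Vec<String> = items.into_iter().map(Into::into).collect();

    // Non-generic part hoisted out to reduce LLVM IR size.
    fn tail(items: Vec<String>) {
        for item in items {
            println!("{item}");
        }
    }

    tail(items)
}

fn main() {
    log_all(["a", "b"]);
    log_all(vec![String::from("c")]);
}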
@@ -2,6 +2,7 @@ mod buffer;
mod diagnostic_set;
mod highlight_map;
pub mod language_settings;
+pub mod markdown;
mod outline;
pub mod proto;
mod syntax_map;
@@ -110,7 +111,6 @@ pub struct LanguageServerName(pub Arc<str>);
pub struct CachedLspAdapter {
    pub name: LanguageServerName,
    pub short_name: &'static str,
    pub initialization_options: Option<Value>,
    pub disk_based_diagnostic_sources: Vec<String>,
    pub disk_based_diagnostics_progress_token: Option<String>,
    pub language_ids: HashMap<String, String>,
@@ -121,7 +121,6 @@ impl CachedLspAdapter {
    pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
        let name = adapter.name().await;
        let short_name = adapter.short_name();
        let initialization_options = adapter.initialization_options().await;
        let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
        let disk_based_diagnostics_progress_token =
            adapter.disk_based_diagnostics_progress_token().await;
@@ -130,7 +129,6 @@ impl CachedLspAdapter {
        Arc::new(CachedLspAdapter {
            name,
            short_name,
            initialization_options,
            disk_based_diagnostic_sources,
            disk_based_diagnostics_progress_token,
            language_ids,
@@ -227,6 +225,10 @@ impl CachedLspAdapter {
    ) -> Option<CodeLabel> {
        self.adapter.label_for_symbol(name, kind, language).await
    }
+
+    pub fn enabled_formatters(&self) -> Vec<BundledFormatter> {
+        self.adapter.enabled_formatters()
+    }
}

pub trait LspAdapterDelegate: Send + Sync {
@@ -333,6 +335,33 @@ pub trait LspAdapter: 'static + Send + Sync {
    async fn language_ids(&self) -> HashMap<String, String> {
        Default::default()
    }
+
+    fn enabled_formatters(&self) -> Vec<BundledFormatter> {
+        Vec::new()
+    }
}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum BundledFormatter {
+    Prettier {
+        // See https://prettier.io/docs/en/options.html#parser for a list of valid values.
+        // Usually, every language has a single parser (standard or plugin-provided), hence `Some("parser_name")` can be used.
+        // There can not be multiple parsers for a single language, in case of a conflict, we would attempt to select the one with most plugins.
+        //
+        // But exceptions like Tailwind CSS exist, which uses standard parsers for CSS/JS/HTML/etc. but require an extra plugin to be installed.
+        // For those cases, `None` will install the plugin but apply other, regular parser defined for the language, and this would not be a conflict.
+        parser_name: Option<&'static str>,
+        plugin_names: Vec<&'static str>,
+    },
+}
+
+impl BundledFormatter {
+    pub fn prettier(parser_name: &'static str) -> Self {
+        Self::Prettier {
+            parser_name: Some(parser_name),
+            plugin_names: Vec::new(),
+        }
+    }
+}

#[derive(Clone, Debug, PartialEq, Eq)]
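For reference, a compilable sketch of how the new BundledFormatter::prettier constructor is used; the enum and constructor are copied from the hunk above (minus pub), and "typescript" is only an illustrative parser name, see the prettier docs link in the comments for the real list:

#[derive(Clone, Debug, PartialEq, Eq)]
enum BundledFormatter {
    Prettier {
        parser_name: Option<&'static str>,
        plugin_names: Vec<&'static str>,
    },
}

impl BundledFormatter {
    fn prettier(parser_name: &'static str) -> Self {
        Self::Prettier {
            parser_name: Some(parser_name),
            plugin_names: Vec::new(),
        }
    }
}

fn main() {
    // Illustrative parser name; not taken from this diff.
    let formatter = BundledFormatter::prettier("typescript");
    assert_eq!(
        formatter,
        BundledFormatter::Prettier {
            parser_name: Some("typescript"),
            plugin_names: Vec::new(),
        }
    );
}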
@@ -467,6 +496,7 @@ pub struct FakeLspAdapter {
    pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
    pub disk_based_diagnostics_progress_token: Option<String>,
    pub disk_based_diagnostics_sources: Vec<String>,
+    pub enabled_formatters: Vec<BundledFormatter>,
}

#[derive(Clone, Debug, Default)]
@@ -1729,6 +1759,7 @@ impl Default for FakeLspAdapter {
            disk_based_diagnostics_progress_token: None,
            initialization_options: None,
            disk_based_diagnostics_sources: Vec::new(),
+            enabled_formatters: Vec::new(),
        }
    }
}
@@ -1785,6 +1816,10 @@ impl LspAdapter for Arc<FakeLspAdapter> {
    async fn initialization_options(&self) -> Option<Value> {
        self.initialization_options.clone()
    }
+
+    fn enabled_formatters(&self) -> Vec<BundledFormatter> {
+        self.enabled_formatters.clone()
+    }
}

fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
@@ -50,6 +50,7 @@ pub struct LanguageSettings {
    pub remove_trailing_whitespace_on_save: bool,
    pub ensure_final_newline_on_save: bool,
    pub formatter: Formatter,
+    pub prettier: HashMap<String, serde_json::Value>,
    pub enable_language_server: bool,
    pub show_copilot_suggestions: bool,
    pub show_whitespaces: ShowWhitespaceSetting,
@@ -98,6 +99,8 @@ pub struct LanguageSettingsContent {
    #[serde(default)]
    pub formatter: Option<Formatter>,
+    #[serde(default)]
+    pub prettier: Option<HashMap<String, serde_json::Value>>,
    #[serde(default)]
    pub enable_language_server: Option<bool>,
    #[serde(default)]
    pub show_copilot_suggestions: Option<bool>,
@@ -149,10 +152,13 @@ pub enum ShowWhitespaceSetting {
    All,
}

-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum Formatter {
+    #[default]
+    Auto,
    LanguageServer,
+    Prettier,
    External {
        command: Arc<str>,
        arguments: Arc<[String]>
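The derive now includes Default with Auto as the #[default] variant, and serde's snake_case renaming maps settings strings onto variants. A minimal sketch of what that buys, assuming serde (with the derive feature) and serde_json are available; the External variant is omitted here so the example does not need serde's "rc" feature for Arc<str>:

use serde::Deserialize;

#[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
enum Formatter {
    #[default]
    Auto,
    LanguageServer,
    Prettier,
}

fn main() {
    // "prettier" in settings JSON maps to Formatter::Prettier...
    let parsed: Formatter = serde_json::from_str("\"prettier\"").unwrap();
    assert_eq!(parsed, Formatter::Prettier);
    // ...and an omitted value can fall back to Formatter::Auto.
    assert_eq!(Formatter::default(), Formatter::Auto);
}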
@@ -392,6 +398,7 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
        src.preferred_line_length,
    );
    merge(&mut settings.formatter, src.formatter.clone());
+    merge(&mut settings.prettier, src.prettier.clone());
    merge(&mut settings.format_on_save, src.format_on_save.clone());
    merge(
        &mut settings.remove_trailing_whitespace_on_save,
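The merge calls above go through a small helper in language_settings whose definition is not part of this diff; the version below is only an assumed sketch of the usual shape (overwrite the target when the settings layer provides a value, otherwise leave it alone):

// Assumed shape, not copied from Zed.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

fn main() {
    let mut preferred_line_length: u32 = 80;
    merge(&mut preferred_line_length, None); // user did not set it
    assert_eq!(preferred_line_length, 80);
    merge(&mut preferred_line_length, Some(100)); // user override wins
    assert_eq!(preferred_line_length, 100);
}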
crates/language/src/markdown.rs (new file, 301 lines)

@@ -0,0 +1,301 @@
use std::sync::Arc;
use std::{ops::Range, path::PathBuf};

use crate::{HighlightId, Language, LanguageRegistry};
use gpui::fonts::{self, HighlightStyle, Weight};
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};

#[derive(Debug, Clone)]
pub struct ParsedMarkdown {
    pub text: String,
    pub highlights: Vec<(Range<usize>, MarkdownHighlight)>,
    pub region_ranges: Vec<Range<usize>>,
    pub regions: Vec<ParsedRegion>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MarkdownHighlight {
    Style(MarkdownHighlightStyle),
    Code(HighlightId),
}

impl MarkdownHighlight {
    pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option<HighlightStyle> {
        match self {
            MarkdownHighlight::Style(style) => {
                let mut highlight = HighlightStyle::default();

                if style.italic {
                    highlight.italic = Some(true);
                }

                if style.underline {
                    highlight.underline = Some(fonts::Underline {
                        thickness: 1.0.into(),
                        ..Default::default()
                    });
                }

                if style.weight != fonts::Weight::default() {
                    highlight.weight = Some(style.weight);
                }

                Some(highlight)
            }

            MarkdownHighlight::Code(id) => id.style(theme),
        }
    }
}

#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct MarkdownHighlightStyle {
    pub italic: bool,
    pub underline: bool,
    pub weight: Weight,
}

#[derive(Debug, Clone)]
pub struct ParsedRegion {
    pub code: bool,
    pub link: Option<Link>,
}

#[derive(Debug, Clone)]
pub enum Link {
    Web { url: String },
    Path { path: PathBuf },
}

impl Link {
    fn identify(text: String) -> Option<Link> {
        if text.starts_with("http") {
            return Some(Link::Web { url: text });
        }

        let path = PathBuf::from(text);
        if path.is_absolute() {
            return Some(Link::Path { path });
        }

        None
    }
}

pub async fn parse_markdown(
    markdown: &str,
    language_registry: &Arc<LanguageRegistry>,
    language: Option<Arc<Language>>,
) -> ParsedMarkdown {
    let mut text = String::new();
    let mut highlights = Vec::new();
    let mut region_ranges = Vec::new();
    let mut regions = Vec::new();

    parse_markdown_block(
        markdown,
        language_registry,
        language,
        &mut text,
        &mut highlights,
        &mut region_ranges,
        &mut regions,
    )
    .await;

    ParsedMarkdown {
        text,
        highlights,
        region_ranges,
        regions,
    }
}

pub async fn parse_markdown_block(
    markdown: &str,
    language_registry: &Arc<LanguageRegistry>,
    language: Option<Arc<Language>>,
    text: &mut String,
    highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
    region_ranges: &mut Vec<Range<usize>>,
    regions: &mut Vec<ParsedRegion>,
) {
    let mut bold_depth = 0;
    let mut italic_depth = 0;
    let mut link_url = None;
    let mut current_language = None;
    let mut list_stack = Vec::new();

    for event in Parser::new_ext(&markdown, Options::all()) {
        let prev_len = text.len();
        match event {
            Event::Text(t) => {
                if let Some(language) = &current_language {
                    highlight_code(text, highlights, t.as_ref(), language);
                } else {
                    text.push_str(t.as_ref());

                    let mut style = MarkdownHighlightStyle::default();

                    if bold_depth > 0 {
                        style.weight = Weight::BOLD;
                    }

                    if italic_depth > 0 {
                        style.italic = true;
                    }

                    if let Some(link) = link_url.clone().and_then(|u| Link::identify(u)) {
                        region_ranges.push(prev_len..text.len());
                        regions.push(ParsedRegion {
                            code: false,
                            link: Some(link),
                        });
                        style.underline = true;
                    }

                    if style != MarkdownHighlightStyle::default() {
                        let mut new_highlight = true;
                        if let Some((last_range, MarkdownHighlight::Style(last_style))) =
                            highlights.last_mut()
                        {
                            if last_range.end == prev_len && last_style == &style {
                                last_range.end = text.len();
                                new_highlight = false;
                            }
                        }
                        if new_highlight {
                            let range = prev_len..text.len();
                            highlights.push((range, MarkdownHighlight::Style(style)));
                        }
                    }
                }
            }

            Event::Code(t) => {
                text.push_str(t.as_ref());
                region_ranges.push(prev_len..text.len());

                let link = link_url.clone().and_then(|u| Link::identify(u));
                if link.is_some() {
                    highlights.push((
                        prev_len..text.len(),
                        MarkdownHighlight::Style(MarkdownHighlightStyle {
                            underline: true,
                            ..Default::default()
                        }),
                    ));
                }
                regions.push(ParsedRegion { code: true, link });
            }

            Event::Start(tag) => match tag {
                Tag::Paragraph => new_paragraph(text, &mut list_stack),

                Tag::Heading(_, _, _) => {
                    new_paragraph(text, &mut list_stack);
                    bold_depth += 1;
                }

                Tag::CodeBlock(kind) => {
                    new_paragraph(text, &mut list_stack);
                    current_language = if let CodeBlockKind::Fenced(language) = kind {
                        language_registry
                            .language_for_name(language.as_ref())
                            .await
                            .ok()
                    } else {
                        language.clone()
                    }
                }

                Tag::Emphasis => italic_depth += 1,

                Tag::Strong => bold_depth += 1,

                Tag::Link(_, url, _) => link_url = Some(url.to_string()),

                Tag::List(number) => {
                    list_stack.push((number, false));
                }

                Tag::Item => {
                    let len = list_stack.len();
                    if let Some((list_number, has_content)) = list_stack.last_mut() {
                        *has_content = false;
                        if !text.is_empty() && !text.ends_with('\n') {
                            text.push('\n');
                        }
                        for _ in 0..len - 1 {
                            text.push_str(" ");
                        }
                        if let Some(number) = list_number {
                            text.push_str(&format!("{}. ", number));
                            *number += 1;
                            *has_content = false;
                        } else {
                            text.push_str("- ");
                        }
                    }
                }

                _ => {}
            },

            Event::End(tag) => match tag {
                Tag::Heading(_, _, _) => bold_depth -= 1,
                Tag::CodeBlock(_) => current_language = None,
                Tag::Emphasis => italic_depth -= 1,
                Tag::Strong => bold_depth -= 1,
                Tag::Link(_, _, _) => link_url = None,
                Tag::List(_) => drop(list_stack.pop()),
                _ => {}
            },

            Event::HardBreak => text.push('\n'),

            Event::SoftBreak => text.push(' '),

            _ => {}
        }
    }
}

pub fn highlight_code(
    text: &mut String,
    highlights: &mut Vec<(Range<usize>, MarkdownHighlight)>,
    content: &str,
    language: &Arc<Language>,
) {
    let prev_len = text.len();
    text.push_str(content);
    for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
        let highlight = MarkdownHighlight::Code(highlight_id);
        highlights.push((prev_len + range.start..prev_len + range.end, highlight));
    }
}

pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
    let mut is_subsequent_paragraph_of_list = false;
    if let Some((_, has_content)) = list_stack.last_mut() {
        if *has_content {
            is_subsequent_paragraph_of_list = true;
        } else {
            *has_content = true;
            return;
        }
    }

    if !text.is_empty() {
        if !text.ends_with('\n') {
            text.push('\n');
        }
        text.push('\n');
    }
    for _ in 0..list_stack.len().saturating_sub(1) {
        text.push_str(" ");
    }
    if is_subsequent_paragraph_of_list {
        text.push_str(" ");
    }
}
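parse_markdown_block is driven by pulldown-cmark's event stream: Start and End tags adjust state such as bold_depth or the current code-block language, and Text events append to the flattened string while recording highlight ranges. A trimmed, self-contained sketch of that loop that only tracks bold spans (bold_ranges is an invented helper, not part of markdown.rs):

use pulldown_cmark::{Event, Options, Parser, Tag};
use std::ops::Range;

fn bold_ranges(markdown: &str) -> (String, Vec<Range<usize>>) {
    let mut text = String::new();
    let mut ranges = Vec::new();
    let mut bold_depth = 0;
    for event in Parser::new_ext(markdown, Options::all()) {
        let prev_len = text.len();
        match event {
            // Append the text and remember the span if we are inside **strong**.
            Event::Text(t) => {
                text.push_str(t.as_ref());
                if bold_depth > 0 {
                    ranges.push(prev_len..text.len());
                }
            }
            Event::Start(Tag::Strong) => bold_depth += 1,
            Event::End(Tag::Strong) => bold_depth -= 1,
            _ => {}
        }
    }
    (text, ranges)
}

fn main() {
    let (text, ranges) = bold_ranges("plain **bold** plain");
    assert_eq!(text, "plain bold plain");
    assert_eq!(ranges, vec![6..10]);
}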
@@ -482,6 +482,7 @@ pub async fn deserialize_completion(
                lsp_completion.filter_text.as_deref(),
            )
        }),
+        documentation: None,
        server_id: LanguageServerId(completion.server_id as usize),
        lsp_completion,
    })