Merge branch 'main' into multi-server-completions-tailwind

Julia 2023-08-30 22:41:12 -04:00
commit ff3865a4ad
427 changed files with 43123 additions and 12861 deletions


@ -1,25 +1,27 @@
use crate::{
DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel,
InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink,
MarkupContent, Project, ProjectTransaction,
MarkupContent, Project, ProjectTransaction, ResolveState,
};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::proto::{self, PeerId};
use fs::LineEnding;
use futures::future;
use gpui::{AppContext, AsyncAppContext, ModelHandle};
use language::{
language_settings::{language_settings, InlayHintKind},
point_from_lsp, point_to_lsp,
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped,
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind,
CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction,
Unclipped,
};
use lsp::{
CompletionListItemDefaultsEditRange, DocumentHighlightKind, LanguageServer, LanguageServerId,
ServerCapabilities,
OneOf, ServerCapabilities,
};
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
use text::LineEnding;
pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions {
lsp::FormattingOptions {
@ -1467,7 +1469,7 @@ impl LspCommand for GetCompletions {
})
});
Ok(futures::future::join_all(completions).await)
Ok(future::join_all(completions).await)
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
@ -1535,7 +1537,7 @@ impl LspCommand for GetCompletions {
let completions = message.completions.into_iter().map(|completion| {
language::proto::deserialize_completion(completion, language.clone())
});
futures::future::try_join_all(completions).await
future::try_join_all(completions).await
}
fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 {
@ -1689,7 +1691,11 @@ impl LspCommand for OnTypeFormatting {
type ProtoRequest = proto::OnTypeFormatting;
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
let Some(on_type_formatting_options) = &server_capabilities.document_on_type_formatting_provider else { return false };
let Some(on_type_formatting_options) =
&server_capabilities.document_on_type_formatting_provider
else {
return false;
};
on_type_formatting_options
.first_trigger_character
.contains(&self.trigger)
@ -1803,7 +1809,9 @@ impl LspCommand for OnTypeFormatting {
_: ModelHandle<Buffer>,
_: AsyncAppContext,
) -> Result<Option<Transaction>> {
let Some(transaction) = message.transaction else { return Ok(None) };
let Some(transaction) = message.transaction else {
return Ok(None);
};
Ok(Some(language::proto::deserialize_transaction(transaction)?))
}
@ -1812,6 +1820,377 @@ impl LspCommand for OnTypeFormatting {
}
}
impl InlayHints {
pub async fn lsp_to_project_hint(
lsp_hint: lsp::InlayHint,
buffer_handle: &ModelHandle<Buffer>,
server_id: LanguageServerId,
resolve_state: ResolveState,
force_no_type_left_padding: bool,
cx: &mut AsyncAppContext,
) -> anyhow::Result<InlayHint> {
let kind = lsp_hint.kind.and_then(|kind| match kind {
lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
_ => None,
});
let position = cx.update(|cx| {
let buffer = buffer_handle.read(cx);
let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
if kind == Some(InlayHintKind::Parameter) {
buffer.anchor_before(position)
} else {
buffer.anchor_after(position)
}
});
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
.await
.context("lsp to project inlay hint conversion")?;
let padding_left = if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
false
} else {
lsp_hint.padding_left.unwrap_or(false)
};
Ok(InlayHint {
position,
padding_left,
padding_right: lsp_hint.padding_right.unwrap_or(false),
label,
kind,
tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
lsp::InlayHintTooltip::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: match markup_content.kind {
lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
},
value: markup_content.value,
})
}
}),
resolve_state,
})
}
async fn lsp_inlay_label_to_project(
lsp_label: lsp::InlayHintLabel,
server_id: LanguageServerId,
) -> anyhow::Result<InlayHintLabel> {
let label = match lsp_label {
lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
lsp::InlayHintLabel::LabelParts(lsp_parts) => {
let mut parts = Vec::with_capacity(lsp_parts.len());
for lsp_part in lsp_parts {
parts.push(InlayHintLabelPart {
value: lsp_part.value,
tooltip: lsp_part.tooltip.map(|tooltip| match tooltip {
lsp::InlayHintLabelPartTooltip::String(s) => {
InlayHintLabelPartTooltip::String(s)
}
lsp::InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
kind: match markup_content.kind {
lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
},
value: markup_content.value,
})
}
}),
location: Some(server_id).zip(lsp_part.location),
});
}
InlayHintLabel::LabelParts(parts)
}
};
Ok(label)
}
pub fn project_to_proto_hint(response_hint: InlayHint) -> proto::InlayHint {
let (state, lsp_resolve_state) = match response_hint.resolve_state {
ResolveState::Resolved => (0, None),
ResolveState::CanResolve(server_id, resolve_data) => (
1,
resolve_data
.map(|json_data| {
serde_json::to_string(&json_data)
.expect("failed to serialize resolve json data")
})
.map(|value| proto::resolve_state::LspResolveState {
server_id: server_id.0 as u64,
value,
}),
),
ResolveState::Resolving => (2, None),
};
let resolve_state = Some(proto::ResolveState {
state,
lsp_resolve_state,
});
proto::InlayHint {
position: Some(language::proto::serialize_anchor(&response_hint.position)),
padding_left: response_hint.padding_left,
padding_right: response_hint.padding_right,
label: Some(proto::InlayHintLabel {
label: Some(match response_hint.label {
InlayHintLabel::String(s) => proto::inlay_hint_label::Label::Value(s),
InlayHintLabel::LabelParts(label_parts) => {
proto::inlay_hint_label::Label::LabelParts(proto::InlayHintLabelParts {
parts: label_parts.into_iter().map(|label_part| {
let location_url = label_part.location.as_ref().map(|(_, location)| location.uri.to_string());
let location_range_start = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.start).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
let location_range_end = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.end).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
proto::InlayHintLabelPart {
value: label_part.value,
tooltip: label_part.tooltip.map(|tooltip| {
let proto_tooltip = match tooltip {
InlayHintLabelPartTooltip::String(s) => proto::inlay_hint_label_part_tooltip::Content::Value(s),
InlayHintLabelPartTooltip::MarkupContent(markup_content) => proto::inlay_hint_label_part_tooltip::Content::MarkupContent(proto::MarkupContent {
is_markdown: markup_content.kind == HoverBlockKind::Markdown,
value: markup_content.value,
}),
};
proto::InlayHintLabelPartTooltip {content: Some(proto_tooltip)}
}),
location_url,
location_range_start,
location_range_end,
language_server_id: label_part.location.as_ref().map(|(server_id, _)| server_id.0 as u64),
}}).collect()
})
}
}),
}),
kind: response_hint.kind.map(|kind| kind.name().to_string()),
tooltip: response_hint.tooltip.map(|response_tooltip| {
let proto_tooltip = match response_tooltip {
InlayHintTooltip::String(s) => proto::inlay_hint_tooltip::Content::Value(s),
InlayHintTooltip::MarkupContent(markup_content) => {
proto::inlay_hint_tooltip::Content::MarkupContent(proto::MarkupContent {
is_markdown: markup_content.kind == HoverBlockKind::Markdown,
value: markup_content.value,
})
}
};
proto::InlayHintTooltip {
content: Some(proto_tooltip),
}
}),
resolve_state,
}
}
pub fn proto_to_project_hint(message_hint: proto::InlayHint) -> anyhow::Result<InlayHint> {
let resolve_state = message_hint.resolve_state.as_ref().unwrap_or_else(|| {
panic!("incorrect proto inlay hint message: no resolve state in hint {message_hint:?}",)
});
let resolve_state_data = resolve_state
.lsp_resolve_state.as_ref()
.map(|lsp_resolve_state| {
serde_json::from_str::<Option<lsp::LSPAny>>(&lsp_resolve_state.value)
.with_context(|| format!("incorrect proto inlay hint message: non-json resolve state {lsp_resolve_state:?}"))
.map(|state| (LanguageServerId(lsp_resolve_state.server_id as usize), state))
})
.transpose()?;
let resolve_state = match resolve_state.state {
0 => ResolveState::Resolved,
1 => {
let (server_id, lsp_resolve_state) = resolve_state_data.with_context(|| {
format!(
"No lsp resolve data for the hint that can be resolved: {message_hint:?}"
)
})?;
ResolveState::CanResolve(server_id, lsp_resolve_state)
}
2 => ResolveState::Resolving,
invalid => {
anyhow::bail!("Unexpected resolve state {invalid} for hint {message_hint:?}")
}
};
Ok(InlayHint {
position: message_hint
.position
.and_then(language::proto::deserialize_anchor)
.context("invalid position")?,
label: match message_hint
.label
.and_then(|label| label.label)
.context("missing label")?
{
proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
proto::inlay_hint_label::Label::LabelParts(parts) => {
let mut label_parts = Vec::new();
for part in parts.parts {
label_parts.push(InlayHintLabelPart {
value: part.value,
tooltip: part.tooltip.map(|tooltip| match tooltip.content {
Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => {
InlayHintLabelPartTooltip::String(s)
}
Some(
proto::inlay_hint_label_part_tooltip::Content::MarkupContent(
markup_content,
),
) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
kind: if markup_content.is_markdown {
HoverBlockKind::Markdown
} else {
HoverBlockKind::PlainText
},
value: markup_content.value,
}),
None => InlayHintLabelPartTooltip::String(String::new()),
}),
location: {
match part
.location_url
.zip(
part.location_range_start.and_then(|start| {
Some(start..part.location_range_end?)
}),
)
.zip(part.language_server_id)
{
Some(((uri, range), server_id)) => Some((
LanguageServerId(server_id as usize),
lsp::Location {
uri: lsp::Url::parse(&uri)
.context("invalid uri in hint part {part:?}")?,
range: lsp::Range::new(
point_to_lsp(PointUtf16::new(
range.start.row,
range.start.column,
)),
point_to_lsp(PointUtf16::new(
range.end.row,
range.end.column,
)),
),
},
)),
None => None,
}
},
});
}
InlayHintLabel::LabelParts(label_parts)
}
},
padding_left: message_hint.padding_left,
padding_right: message_hint.padding_right,
kind: message_hint
.kind
.as_deref()
.and_then(InlayHintKind::from_name),
tooltip: message_hint.tooltip.and_then(|tooltip| {
Some(match tooltip.content? {
proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: if markup_content.is_markdown {
HoverBlockKind::Markdown
} else {
HoverBlockKind::PlainText
},
value: markup_content.value,
})
}
})
}),
resolve_state,
})
}
pub fn project_to_lsp_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp::InlayHint {
lsp::InlayHint {
position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
kind: hint.kind.map(|kind| match kind {
InlayHintKind::Type => lsp::InlayHintKind::TYPE,
InlayHintKind::Parameter => lsp::InlayHintKind::PARAMETER,
}),
text_edits: None,
tooltip: hint.tooltip.and_then(|tooltip| {
Some(match tooltip {
InlayHintTooltip::String(s) => lsp::InlayHintTooltip::String(s),
InlayHintTooltip::MarkupContent(markup_content) => {
lsp::InlayHintTooltip::MarkupContent(lsp::MarkupContent {
kind: match markup_content.kind {
HoverBlockKind::PlainText => lsp::MarkupKind::PlainText,
HoverBlockKind::Markdown => lsp::MarkupKind::Markdown,
HoverBlockKind::Code { .. } => return None,
},
value: markup_content.value,
})
}
})
}),
label: match hint.label {
InlayHintLabel::String(s) => lsp::InlayHintLabel::String(s),
InlayHintLabel::LabelParts(label_parts) => lsp::InlayHintLabel::LabelParts(
label_parts
.into_iter()
.map(|part| lsp::InlayHintLabelPart {
value: part.value,
tooltip: part.tooltip.and_then(|tooltip| {
Some(match tooltip {
InlayHintLabelPartTooltip::String(s) => {
lsp::InlayHintLabelPartTooltip::String(s)
}
InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
lsp::InlayHintLabelPartTooltip::MarkupContent(
lsp::MarkupContent {
kind: match markup_content.kind {
HoverBlockKind::PlainText => {
lsp::MarkupKind::PlainText
}
HoverBlockKind::Markdown => {
lsp::MarkupKind::Markdown
}
HoverBlockKind::Code { .. } => return None,
},
value: markup_content.value,
},
)
}
})
}),
location: part.location.map(|(_, location)| location),
command: None,
})
.collect(),
),
},
padding_left: Some(hint.padding_left),
padding_right: Some(hint.padding_right),
data: match hint.resolve_state {
ResolveState::CanResolve(_, data) => data,
ResolveState::Resolving | ResolveState::Resolved => None,
},
}
}
pub fn can_resolve_inlays(capabilities: &ServerCapabilities) -> bool {
capabilities
.inlay_hint_provider
.as_ref()
.and_then(|options| match options {
OneOf::Left(_is_supported) => None,
OneOf::Right(capabilities) => match capabilities {
lsp::InlayHintServerCapabilities::Options(o) => o.resolve_provider,
lsp::InlayHintServerCapabilities::RegistrationOptions(o) => {
o.inlay_hint_options.resolve_provider
}
},
})
.unwrap_or(false)
}
}
#[async_trait(?Send)]
impl LspCommand for InlayHints {
type Response = Vec<InlayHint>;
@ -1819,7 +2198,9 @@ impl LspCommand for InlayHints {
type ProtoRequest = proto::InlayHints;
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else { return false };
let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else {
return false;
};
match inlay_hint_provider {
lsp::OneOf::Left(enabled) => *enabled,
lsp::OneOf::Right(inlay_hint_capabilities) => match inlay_hint_capabilities {
@ -1852,8 +2233,9 @@ impl LspCommand for InlayHints {
buffer: ModelHandle<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<Vec<InlayHint>> {
let (lsp_adapter, _) = language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
) -> anyhow::Result<Vec<InlayHint>> {
let (lsp_adapter, lsp_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
// `typescript-language-server` adds padding to the left for type hints, turning
// `const foo: boolean` into `const foo : boolean` which looks odd.
// `rust-analyzer` does not add the padding for this case, and we have to accommodate both.
@ -1863,93 +2245,32 @@ impl LspCommand for InlayHints {
// Hence let's use a heuristic first to handle the most awkward case and look for more.
let force_no_type_left_padding =
lsp_adapter.name.0.as_ref() == "typescript-language-server";
cx.read(|cx| {
let origin_buffer = buffer.read(cx);
Ok(message
.unwrap_or_default()
.into_iter()
.map(|lsp_hint| {
let kind = lsp_hint.kind.and_then(|kind| match kind {
lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
_ => None,
});
let position = origin_buffer
.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
let padding_left =
if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
false
} else {
lsp_hint.padding_left.unwrap_or(false)
};
InlayHint {
buffer_id: origin_buffer.remote_id(),
position: if kind == Some(InlayHintKind::Parameter) {
origin_buffer.anchor_before(position)
} else {
origin_buffer.anchor_after(position)
},
padding_left,
padding_right: lsp_hint.padding_right.unwrap_or(false),
label: match lsp_hint.label {
lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
lsp::InlayHintLabel::LabelParts(lsp_parts) => {
InlayHintLabel::LabelParts(
lsp_parts
.into_iter()
.map(|label_part| InlayHintLabelPart {
value: label_part.value,
tooltip: label_part.tooltip.map(
|tooltip| {
match tooltip {
lsp::InlayHintLabelPartTooltip::String(s) => {
InlayHintLabelPartTooltip::String(s)
}
lsp::InlayHintLabelPartTooltip::MarkupContent(
markup_content,
) => InlayHintLabelPartTooltip::MarkupContent(
MarkupContent {
kind: format!("{:?}", markup_content.kind),
value: markup_content.value,
},
),
}
},
),
location: label_part.location.map(|lsp_location| {
let target_start = origin_buffer.clip_point_utf16(
point_from_lsp(lsp_location.range.start),
Bias::Left,
);
let target_end = origin_buffer.clip_point_utf16(
point_from_lsp(lsp_location.range.end),
Bias::Left,
);
Location {
buffer: buffer.clone(),
range: origin_buffer.anchor_after(target_start)
..origin_buffer.anchor_before(target_end),
}
}),
})
.collect(),
)
}
},
kind,
tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
lsp::InlayHintTooltip::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: format!("{:?}", markup_content.kind),
value: markup_content.value,
})
}
}),
}
})
.collect())
})
let hints = message.unwrap_or_default().into_iter().map(|lsp_hint| {
let resolve_state = if InlayHints::can_resolve_inlays(lsp_server.capabilities()) {
ResolveState::CanResolve(lsp_server.server_id(), lsp_hint.data.clone())
} else {
ResolveState::Resolved
};
let buffer = buffer.clone();
cx.spawn(|mut cx| async move {
InlayHints::lsp_to_project_hint(
lsp_hint,
&buffer,
server_id,
resolve_state,
force_no_type_left_padding,
&mut cx,
)
.await
})
});
future::join_all(hints)
.await
.into_iter()
.collect::<anyhow::Result<_>>()
.context("lsp to project inlay hints conversion")
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints {
@ -1995,23 +2316,7 @@ impl LspCommand for InlayHints {
proto::InlayHintsResponse {
hints: response
.into_iter()
.map(|response_hint| proto::InlayHint {
position: Some(language::proto::serialize_anchor(&response_hint.position)),
padding_left: response_hint.padding_left,
padding_right: response_hint.padding_right,
kind: response_hint.kind.map(|kind| kind.name().to_string()),
// Do not pass extra data such as tooltips to clients: the host can supply tooltip data from its cache during resolution.
tooltip: None,
// Similarly, do not pass label parts to clients: the host can return the detailed list during resolution.
label: Some(proto::InlayHintLabel {
label: Some(proto::inlay_hint_label::Label::Value(
match response_hint.label {
InlayHintLabel::String(s) => s,
InlayHintLabel::LabelParts(_) => response_hint.text(),
},
)),
}),
})
.map(|response_hint| InlayHints::project_to_proto_hint(response_hint))
.collect(),
version: serialize_version(buffer_version),
}
@ -2020,10 +2325,10 @@ impl LspCommand for InlayHints {
async fn response_from_proto(
self,
message: proto::InlayHintsResponse,
project: ModelHandle<Project>,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<InlayHint>> {
) -> anyhow::Result<Vec<InlayHint>> {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
@ -2032,82 +2337,7 @@ impl LspCommand for InlayHints {
let mut hints = Vec::new();
for message_hint in message.hints {
let buffer_id = message_hint
.position
.as_ref()
.and_then(|location| location.buffer_id)
.context("missing buffer id")?;
let hint = InlayHint {
buffer_id,
position: message_hint
.position
.and_then(language::proto::deserialize_anchor)
.context("invalid position")?,
label: match message_hint
.label
.and_then(|label| label.label)
.context("missing label")?
{
proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
proto::inlay_hint_label::Label::LabelParts(parts) => {
let mut label_parts = Vec::new();
for part in parts.parts {
label_parts.push(InlayHintLabelPart {
value: part.value,
tooltip: part.tooltip.map(|tooltip| match tooltip.content {
Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => InlayHintLabelPartTooltip::String(s),
Some(proto::inlay_hint_label_part_tooltip::Content::MarkupContent(markup_content)) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
kind: markup_content.kind,
value: markup_content.value,
}),
None => InlayHintLabelPartTooltip::String(String::new()),
}),
location: match part.location {
Some(location) => {
let target_buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(location.buffer_id, cx)
})
.await?;
Some(Location {
range: location
.start
.and_then(language::proto::deserialize_anchor)
.context("invalid start")?
..location
.end
.and_then(language::proto::deserialize_anchor)
.context("invalid end")?,
buffer: target_buffer,
})},
None => None,
},
});
}
InlayHintLabel::LabelParts(label_parts)
}
},
padding_left: message_hint.padding_left,
padding_right: message_hint.padding_right,
kind: message_hint
.kind
.as_deref()
.and_then(InlayHintKind::from_name),
tooltip: message_hint.tooltip.and_then(|tooltip| {
Some(match tooltip.content? {
proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: markup_content.kind,
value: markup_content.value,
})
}
})
}),
};
hints.push(hint);
hints.push(InlayHints::proto_to_project_hint(message_hint)?);
}
Ok(hints)


@ -11,7 +11,7 @@ mod project_tests;
mod worktree_tests;
use anyhow::{anyhow, Context, Result};
use client::{proto, Client, TypedEnvelope, UserStore};
use client::{proto, Client, TypedEnvelope, UserId, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use copilot::Copilot;
@ -26,8 +26,8 @@ use futures::{
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity, ModelContext,
ModelHandle, Task, WeakModelHandle,
executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
ModelContext, ModelHandle, Task, WeakModelHandle,
};
use itertools::Itertools;
use language::{
@ -37,11 +37,11 @@ use language::{
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
serialize_anchor, serialize_version,
},
range_from_lsp, range_to_lsp, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
ToPointUtf16, Transaction, Unclipped,
range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
@ -57,8 +57,8 @@ use serde::Serialize;
use settings::SettingsStore;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use smol::channel::{Receiver, Sender};
use std::{
cell::RefCell,
cmp::{self, Ordering},
convert::TryInto,
hash::Hash,
@ -67,7 +67,6 @@ use std::{
ops::Range,
path::{self, Component, Path, PathBuf},
process::Stdio,
rc::Rc,
str,
sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
@ -250,6 +249,7 @@ enum ProjectClientState {
pub struct Collaborator {
pub peer_id: proto::PeerId,
pub replica_id: ReplicaId,
pub user_id: UserId,
}
#[derive(Clone, Debug, PartialEq)]
@ -281,8 +281,9 @@ pub enum Event {
old_peer_id: proto::PeerId,
new_peer_id: proto::PeerId,
},
CollaboratorJoined(proto::PeerId),
CollaboratorLeft(proto::PeerId),
RefreshInlays,
RefreshInlayHints,
}
pub enum LanguageServerState {
@ -331,15 +332,22 @@ pub struct Location {
pub range: Range<language::Anchor>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
pub buffer_id: u64,
pub position: language::Anchor,
pub label: InlayHintLabel,
pub kind: Option<InlayHintKind>,
pub padding_left: bool,
pub padding_right: bool,
pub tooltip: Option<InlayHintTooltip>,
pub resolve_state: ResolveState,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
Resolved,
CanResolve(LanguageServerId, Option<lsp::LSPAny>),
Resolving,
}
impl InlayHint {
@ -351,34 +359,34 @@ impl InlayHint {
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
String(String),
LabelParts(Vec<InlayHintLabelPart>),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
pub value: String,
pub tooltip: Option<InlayHintLabelPartTooltip>,
pub location: Option<Location>,
pub location: Option<(LanguageServerId, lsp::Location)>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
String(String),
MarkupContent(MarkupContent),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
String(String),
MarkupContent(MarkupContent),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
pub kind: String,
pub kind: HoverBlockKind,
pub value: String,
}
@ -412,7 +420,7 @@ pub struct HoverBlock {
pub kind: HoverBlockKind,
}
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
PlainText,
Markdown,
@ -516,6 +524,28 @@ impl FormatTrigger {
}
}
}
#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
OpenBuffer {
buffer: ModelHandle<Buffer>,
// This might be an unnamed file without a representation on the filesystem
path: Option<Arc<Path>>,
},
Path {
worktree_id: WorktreeId,
path: Arc<Path>,
},
}
type SearchMatchCandidateIndex = usize;
impl SearchMatchCandidate {
fn path(&self) -> Option<Arc<Path>> {
match self {
SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
}
}
}
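
A quick illustrative note on ordering (a sketch, not part of this commit; `sort_candidates` is a hypothetical helper): `path()` returns an `Option`, and `Option` orders `None` before `Some(_)`, so when candidates are later sorted with `sort_by_key(|candidate| candidate.path())` (as `sort_candidates_and_open_buffers` does below), unnamed buffers end up ahead of path-backed files.

fn sort_candidates(mut candidates: Vec<SearchMatchCandidate>) -> Vec<SearchMatchCandidate> {
    // `Option<Arc<Path>>` sorts `None` first, so untitled buffers precede
    // path-backed candidates, which are then ordered by their paths.
    candidates.sort_by_key(|candidate| candidate.path());
    candidates
}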
impl Project {
pub fn init_settings(cx: &mut AppContext) {
@ -549,6 +579,7 @@ impl Project {
client.add_model_request_handler(Self::handle_apply_code_action);
client.add_model_request_handler(Self::handle_on_type_formatting);
client.add_model_request_handler(Self::handle_inlay_hints);
client.add_model_request_handler(Self::handle_resolve_inlay_hint);
client.add_model_request_handler(Self::handle_refresh_inlay_hints);
client.add_model_request_handler(Self::handle_reload_buffers);
client.add_model_request_handler(Self::handle_synchronize_buffers);
@ -1537,9 +1568,9 @@ impl Project {
if self.is_remote() {
return Err(anyhow!("creating buffers as a guest is not supported yet"));
}
let id = post_inc(&mut self.next_buffer_id);
let buffer = cx.add_model(|cx| {
Buffer::new(self.replica_id(), text, cx)
Buffer::new(self.replica_id(), id, text)
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
});
self.register_buffer(&buffer, cx)?;
@ -1677,7 +1708,7 @@ impl Project {
}
/// LanguageServerName is owned, because it is inserted into a map
fn open_local_buffer_via_lsp(
pub fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
language_server_id: LanguageServerId,
@ -2872,7 +2903,7 @@ impl Project {
.upgrade(&cx)
.ok_or_else(|| anyhow!("project dropped"))?;
this.update(&mut cx, |project, cx| {
cx.emit(Event::RefreshInlays);
cx.emit(Event::RefreshInlayHints);
project.remote_id().map(|project_id| {
project.client.send(proto::RefreshInlayHints { project_id })
})
@ -3438,7 +3469,7 @@ impl Project {
cx: &mut ModelContext<Self>,
) {
if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
cx.emit(Event::RefreshInlays);
cx.emit(Event::RefreshInlayHints);
status.pending_work.remove(&token);
cx.notify();
}
@ -4496,10 +4527,20 @@ impl Project {
};
cx.spawn(|this, mut cx| async move {
let additional_text_edits = lang_server
.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
.await?
.additional_text_edits;
let can_resolve = lang_server
.capabilities()
.completion_provider
.as_ref()
.and_then(|options| options.resolve_provider)
.unwrap_or(false);
let additional_text_edits = if can_resolve {
lang_server
.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
.await?
.additional_text_edits
} else {
completion.lsp_completion.additional_text_edits
};
if let Some(edits) = additional_text_edits {
let edits = this
.update(&mut cx, |this, cx| {
@ -4999,7 +5040,7 @@ impl Project {
buffer_handle: ModelHandle<Buffer>,
range: Range<T>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<InlayHint>>> {
) -> Task<anyhow::Result<Vec<InlayHint>>> {
let buffer = buffer_handle.read(cx);
let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
let range_start = range.start;
@ -5049,192 +5090,79 @@ impl Project {
}
}
pub fn resolve_inlay_hint(
&self,
hint: InlayHint,
buffer_handle: ModelHandle<Buffer>,
server_id: LanguageServerId,
cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<InlayHint>> {
if self.is_local() {
let buffer = buffer_handle.read(cx);
let (_, lang_server) = if let Some((adapter, server)) =
self.language_server_for_buffer(buffer, server_id, cx)
{
(adapter.clone(), server.clone())
} else {
return Task::ready(Ok(hint));
};
if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
return Task::ready(Ok(hint));
}
let buffer_snapshot = buffer.snapshot();
cx.spawn(|_, mut cx| async move {
let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
);
let resolved_hint = resolve_task
.await
.context("inlay hint resolve LSP request")?;
let resolved_hint = InlayHints::lsp_to_project_hint(
resolved_hint,
&buffer_handle,
server_id,
ResolveState::Resolved,
false,
&mut cx,
)
.await?;
Ok(resolved_hint)
})
} else if let Some(project_id) = self.remote_id() {
let client = self.client.clone();
let request = proto::ResolveInlayHint {
project_id,
buffer_id: buffer_handle.read(cx).remote_id(),
language_server_id: server_id.0 as u64,
hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
};
cx.spawn(|_, _| async move {
let response = client
.request(request)
.await
.context("inlay hints proto request")?;
match response.hint {
Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
.context("inlay hints proto resolve response conversion"),
None => Ok(hint),
}
})
} else {
Task::ready(Err(anyhow!("project does not have a remote id")))
}
}
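
A hypothetical call-site sketch (not part of this commit) for driving `resolve_inlay_hint` lazily, e.g. when a cached hint with `ResolveState::CanResolve` is first hovered; `hint`, `buffer_handle`, `server_id`, and the surrounding `cx` are assumed to be available at the caller:

let resolve_task = project.update(cx, |project, cx| {
    project.resolve_inlay_hint(hint, buffer_handle.clone(), server_id, cx)
});
cx.spawn(|_, _| async move {
    match resolve_task.await {
        // On success, the caller would replace its cached hint with the resolved one.
        Ok(_resolved_hint) => {}
        Err(error) => log::error!("inlay hint resolution failed: {error:#}"),
    }
})
.detach();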
#[allow(clippy::type_complexity)]
pub fn search(
&self,
query: SearchQuery,
cx: &mut ModelContext<Self>,
) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
if self.is_local() {
let snapshots = self
.visible_worktrees(cx)
.filter_map(|tree| {
let tree = tree.read(cx).as_local()?;
Some(tree.snapshot())
})
.collect::<Vec<_>>();
let background = cx.background().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
if path_count == 0 {
return Task::ready(Ok(Default::default()));
}
let workers = background.num_cpus().min(path_count);
let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
cx.background()
.spawn({
let fs = self.fs.clone();
let background = cx.background().clone();
let query = query.clone();
async move {
let fs = &fs;
let query = &query;
let matching_paths_tx = &matching_paths_tx;
let paths_per_worker = (path_count + workers - 1) / workers;
let snapshots = &snapshots;
background
.scoped(|scope| {
for worker_ix in 0..workers {
let worker_start_ix = worker_ix * paths_per_worker;
let worker_end_ix = worker_start_ix + paths_per_worker;
scope.spawn(async move {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
for snapshot in snapshots {
let snapshot_end_ix =
snapshot_start_ix + snapshot.visible_file_count();
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
snapshot_start_ix = snapshot_end_ix;
continue;
} else {
let start_in_snapshot = worker_start_ix
.saturating_sub(snapshot_start_ix);
let end_in_snapshot =
cmp::min(worker_end_ix, snapshot_end_ix)
- snapshot_start_ix;
for entry in snapshot
.files(false, start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if matching_paths_tx.is_closed() {
break;
}
let matches = if query
.file_matches(Some(&entry.path))
{
abs_path.clear();
abs_path.push(&snapshot.abs_path());
abs_path.push(&entry.path);
if let Some(file) =
fs.open_sync(&abs_path).await.log_err()
{
query.detect(file).unwrap_or(false)
} else {
false
}
} else {
false
};
if matches {
let project_path =
(snapshot.id(), entry.path.clone());
if matching_paths_tx
.send(project_path)
.await
.is_err()
{
break;
}
}
}
snapshot_start_ix = snapshot_end_ix;
}
}
});
}
})
.await;
}
})
.detach();
let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
let open_buffers = self
.opened_buffers
.values()
.filter_map(|b| b.upgrade(cx))
.collect::<HashSet<_>>();
cx.spawn(|this, cx| async move {
for buffer in &open_buffers {
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
buffers_tx.send((buffer.clone(), snapshot)).await?;
}
let open_buffers = Rc::new(RefCell::new(open_buffers));
while let Some(project_path) = matching_paths_rx.next().await {
if buffers_tx.is_closed() {
break;
}
let this = this.clone();
let open_buffers = open_buffers.clone();
let buffers_tx = buffers_tx.clone();
cx.spawn(|mut cx| async move {
if let Some(buffer) = this
.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
.await
.log_err()
{
if open_buffers.borrow_mut().insert(buffer.clone()) {
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
buffers_tx.send((buffer, snapshot)).await?;
}
}
Ok::<_, anyhow::Error>(())
})
.detach();
}
Ok::<_, anyhow::Error>(())
})
.detach_and_log_err(cx);
let background = cx.background().clone();
cx.background().spawn(async move {
let query = &query;
let mut matched_buffers = Vec::new();
for _ in 0..workers {
matched_buffers.push(HashMap::default());
}
background
.scoped(|scope| {
for worker_matched_buffers in matched_buffers.iter_mut() {
let mut buffers_rx = buffers_rx.clone();
scope.spawn(async move {
while let Some((buffer, snapshot)) = buffers_rx.next().await {
let buffer_matches = if query.file_matches(
snapshot.file().map(|file| file.path().as_ref()),
) {
query
.search(&snapshot, None)
.await
.iter()
.map(|range| {
snapshot.anchor_before(range.start)
..snapshot.anchor_after(range.end)
})
.collect()
} else {
Vec::new()
};
if !buffer_matches.is_empty() {
worker_matched_buffers
.insert(buffer.clone(), buffer_matches);
}
}
});
}
})
.await;
Ok(matched_buffers.into_iter().flatten().collect())
})
self.search_local(query, cx)
} else if let Some(project_id) = self.remote_id() {
let (tx, rx) = smol::channel::unbounded();
let request = self.client.request(query.to_proto(project_id));
cx.spawn(|this, mut cx| async move {
let response = request.await?;
@ -5258,13 +5186,303 @@ impl Project {
.or_insert(Vec::new())
.push(start..end)
}
Ok(result)
for (buffer, ranges) in result {
let _ = tx.send((buffer, ranges)).await;
}
Result::<(), anyhow::Error>::Ok(())
})
.detach_and_log_err(cx);
rx
} else {
Task::ready(Ok(Default::default()))
unimplemented!();
}
}
pub fn search_local(
&self,
query: SearchQuery,
cx: &mut ModelContext<Self>,
) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
// Local search is split into several phases.
// TL;DR: we do two passes; an initial pass to pick files that contain at least one match,
// and a second pass that finds the positions of all matches within those candidate files.
// The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
//
// It gets a bit hairy though, because we must account for files that do not have a persistent representation
// on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
//
// 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
// Then, we go through the worktrees and check for files that match the predicate. If a file has an opened version, we skip the scan
// of the FS version for that file altogether - after all, what we have in memory is more up-to-date than what's on the FS.
// 2. At this point, we have a list of all potentially matching buffers/files.
// We sort that list by buffer path - this list is retained for later use.
// We ensure that all buffers are now opened and available in project.
// 3. We run a scan over all the candidate buffers on multiple background threads.
// We cannot assume that there will even be a match - while at least one match
// is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
// There is also an auxiliary background thread responsible for result gathering.
// This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
// it records that status. It reports matches in sorted order, even though it may receive them out of order.
// As soon as the match info on next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
// entry - which might already be available thanks to out-of-order processing.
//
// We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
// This, however, would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
// Sadly this isn't as straightforward as running an insertion sort, and it would also mean having to maintain the match index
// in the face of a constantly updating list of sorted matches.
// Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
let snapshots = self
.visible_worktrees(cx)
.filter_map(|tree| {
let tree = tree.read(cx).as_local()?;
Some(tree.snapshot())
})
.collect::<Vec<_>>();
let background = cx.background().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024);
return rx;
}
let workers = background.num_cpus().min(path_count);
let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
let mut unnamed_files = vec![];
let opened_buffers = self
.opened_buffers
.iter()
.filter_map(|(_, b)| {
let buffer = b.upgrade(cx)?;
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
if let Some(path) = snapshot.file().map(|file| file.path()) {
Some((path.clone(), (buffer, snapshot)))
} else {
unnamed_files.push(buffer);
None
}
})
.collect();
cx.background()
.spawn(Self::background_search(
unnamed_files,
opened_buffers,
cx.background().clone(),
self.fs.clone(),
workers,
query.clone(),
path_count,
snapshots,
matching_paths_tx,
))
.detach();
let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
let background = cx.background().clone();
let (result_tx, result_rx) = smol::channel::bounded(1024);
cx.background()
.spawn(async move {
let Ok(buffers) = buffers.await else {
return;
};
let buffers_len = buffers.len();
if buffers_len == 0 {
return;
}
let query = &query;
let (finished_tx, mut finished_rx) = smol::channel::unbounded();
background
.scoped(|scope| {
#[derive(Clone)]
struct FinishedStatus {
entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
buffer_index: SearchMatchCandidateIndex,
}
for _ in 0..workers {
let finished_tx = finished_tx.clone();
let mut buffers_rx = buffers_rx.clone();
scope.spawn(async move {
while let Some((entry, buffer_index)) = buffers_rx.next().await {
let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
{
if query.file_matches(
snapshot.file().map(|file| file.path().as_ref()),
) {
query
.search(&snapshot, None)
.await
.iter()
.map(|range| {
snapshot.anchor_before(range.start)
..snapshot.anchor_after(range.end)
})
.collect()
} else {
Vec::new()
}
} else {
Vec::new()
};
let status = if !buffer_matches.is_empty() {
let entry = if let Some((buffer, _)) = entry.as_ref() {
Some((buffer.clone(), buffer_matches))
} else {
None
};
FinishedStatus {
entry,
buffer_index,
}
} else {
FinishedStatus {
entry: None,
buffer_index,
}
};
if finished_tx.send(status).await.is_err() {
break;
}
}
});
}
// Report sorted matches
scope.spawn(async move {
let mut current_index = 0;
let mut scratch = vec![None; buffers_len];
while let Some(status) = finished_rx.next().await {
debug_assert!(
scratch[status.buffer_index].is_none(),
"Got match status of position {} twice",
status.buffer_index
);
let index = status.buffer_index;
scratch[index] = Some(status);
while current_index < buffers_len {
let Some(current_entry) = scratch[current_index].take() else {
// We intentionally **do not** increment `current_index` here. When next element arrives
// from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
// this time.
break;
};
if let Some(entry) = current_entry.entry {
result_tx.send(entry).await.log_err();
}
current_index += 1;
}
if current_index == buffers_len {
break;
}
}
});
})
.await;
})
.detach();
result_rx
}
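
For reference, a minimal consumer of the returned receiver (a sketch assuming `futures::StreamExt` is in scope; it mirrors the test helper further down in this diff). Matches arrive already sorted by buffer path, so the caller only needs to drain the channel:

async fn collect_search_matches(
    project: ModelHandle<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut results = HashMap::default();
    // Drain the receiver; buffers are yielded in path-sorted order.
    while let Some((buffer, ranges)) = search_rx.next().await {
        results.entry(buffer).or_insert(ranges);
    }
    results
}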
/// Pick paths that might potentially contain a match of a given search query.
async fn background_search(
unnamed_buffers: Vec<ModelHandle<Buffer>>,
opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
background: Arc<Background>,
fs: Arc<dyn Fs>,
workers: usize,
query: SearchQuery,
path_count: usize,
snapshots: Vec<LocalSnapshot>,
matching_paths_tx: Sender<SearchMatchCandidate>,
) {
let fs = &fs;
let query = &query;
let matching_paths_tx = &matching_paths_tx;
let snapshots = &snapshots;
let paths_per_worker = (path_count + workers - 1) / workers;
for buffer in unnamed_buffers {
matching_paths_tx
.send(SearchMatchCandidate::OpenBuffer {
buffer: buffer.clone(),
path: None,
})
.await
.log_err();
}
for (path, (buffer, _)) in opened_buffers.iter() {
matching_paths_tx
.send(SearchMatchCandidate::OpenBuffer {
buffer: buffer.clone(),
path: Some(path.clone()),
})
.await
.log_err();
}
background
.scoped(|scope| {
for worker_ix in 0..workers {
let worker_start_ix = worker_ix * paths_per_worker;
let worker_end_ix = worker_start_ix + paths_per_worker;
let unnamed_buffers = opened_buffers.clone();
scope.spawn(async move {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
for snapshot in snapshots {
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
snapshot_start_ix = snapshot_end_ix;
continue;
} else {
let start_in_snapshot =
worker_start_ix.saturating_sub(snapshot_start_ix);
let end_in_snapshot =
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
for entry in snapshot
.files(false, start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if matching_paths_tx.is_closed() {
break;
}
if unnamed_buffers.contains_key(&entry.path) {
continue;
}
let matches = if query.file_matches(Some(&entry.path)) {
abs_path.clear();
abs_path.push(&snapshot.abs_path());
abs_path.push(&entry.path);
if let Some(file) = fs.open_sync(&abs_path).await.log_err()
{
query.detect(file).unwrap_or(false)
} else {
false
}
} else {
false
};
if matches {
let project_path = SearchMatchCandidate::Path {
worktree_id: snapshot.id(),
path: entry.path.clone(),
};
if matching_paths_tx.send(project_path).await.is_err() {
break;
}
}
}
snapshot_start_ix = snapshot_end_ix;
}
}
});
}
})
.await;
}
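
To make the partitioning above concrete (illustrative numbers and a hypothetical helper): the global file index space is split into contiguous per-worker ranges using ceiling division, and each worker intersects its range with every snapshot's span of visible files.

fn worker_path_range(worker_ix: usize, path_count: usize, workers: usize) -> std::ops::Range<usize> {
    // Ceiling division, as in `background_search` above.
    let paths_per_worker = (path_count + workers - 1) / workers;
    let start = worker_ix * paths_per_worker;
    start..start + paths_per_worker
}
// With path_count = 10 and workers = 4, paths_per_worker = 3 and the worker ranges are
// 0..3, 3..6, 6..9, and 9..12; any out-of-range tail is clamped by the per-snapshot
// bounds checks while iterating visible files.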
fn request_primary_lsp<R: LspCommand>(
&self,
buffer_handle: ModelHandle<Buffer>,
@ -5359,6 +5577,61 @@ impl Project {
Task::ready(Ok(Default::default()))
}
fn sort_candidates_and_open_buffers(
mut matching_paths_rx: Receiver<SearchMatchCandidate>,
cx: &mut ModelContext<Self>,
) -> (
futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
Receiver<(
Option<(ModelHandle<Buffer>, BufferSnapshot)>,
SearchMatchCandidateIndex,
)>,
) {
let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
cx.spawn(|this, cx| async move {
let mut buffers = vec![];
while let Some(entry) = matching_paths_rx.next().await {
buffers.push(entry);
}
buffers.sort_by_key(|candidate| candidate.path());
let matching_paths = buffers.clone();
let _ = sorted_buffers_tx.send(buffers);
for (index, candidate) in matching_paths.into_iter().enumerate() {
if buffers_tx.is_closed() {
break;
}
let this = this.clone();
let buffers_tx = buffers_tx.clone();
cx.spawn(|mut cx| async move {
let buffer = match candidate {
SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
SearchMatchCandidate::Path { worktree_id, path } => this
.update(&mut cx, |this, cx| {
this.open_buffer((worktree_id, path), cx)
})
.await
.log_err(),
};
if let Some(buffer) = buffer {
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
buffers_tx
.send((Some((buffer, snapshot)), index))
.await
.log_err();
} else {
buffers_tx.send((None, index)).await.log_err();
}
Ok::<_, anyhow::Error>(())
})
.detach();
}
})
.detach();
(sorted_buffers_rx, buffers_rx)
}
pub fn find_or_create_local_worktree(
&mut self,
abs_path: impl AsRef<Path>,
@ -5982,6 +6255,7 @@ impl Project {
let collaborator = Collaborator::from_proto(collaborator)?;
this.update(&mut cx, |this, cx| {
this.shared_buffers.remove(&collaborator.peer_id);
cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
this.collaborators
.insert(collaborator.peer_id, collaborator);
cx.notify();
@ -6865,6 +7139,40 @@ impl Project {
}))
}
async fn handle_resolve_inlay_hint(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::ResolveInlayHint>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<proto::ResolveInlayHintResponse> {
let proto_hint = envelope
.payload
.hint
.expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
let hint = InlayHints::proto_to_project_hint(proto_hint)
.context("resolved proto inlay hint conversion")?;
let buffer = this.update(&mut cx, |this, cx| {
this.opened_buffers
.get(&envelope.payload.buffer_id)
.and_then(|buffer| buffer.upgrade(cx))
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
})?;
let response_hint = this
.update(&mut cx, |project, cx| {
project.resolve_inlay_hint(
hint,
buffer,
LanguageServerId(envelope.payload.language_server_id as usize),
cx,
)
})
.await
.context("inlay hints fetch")?;
Ok(proto::ResolveInlayHintResponse {
hint: Some(InlayHints::project_to_proto_hint(response_hint)),
})
}
async fn handle_refresh_inlay_hints(
this: ModelHandle<Self>,
_: TypedEnvelope<proto::RefreshInlayHints>,
@ -6872,7 +7180,7 @@ impl Project {
mut cx: AsyncAppContext,
) -> Result<proto::Ack> {
this.update(&mut cx, |_, cx| {
cx.emit(Event::RefreshInlays);
cx.emit(Event::RefreshInlayHints);
});
Ok(proto::Ack {})
}
@ -6943,17 +7251,17 @@ impl Project {
) -> Result<proto::SearchProjectResponse> {
let peer_id = envelope.original_sender_id()?;
let query = SearchQuery::from_proto(envelope.payload)?;
let result = this
.update(&mut cx, |this, cx| this.search(query, cx))
.await?;
let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
this.update(&mut cx, |this, cx| {
cx.spawn(|mut cx| async move {
let mut locations = Vec::new();
for (buffer, ranges) in result {
while let Some((buffer, ranges)) = result.next().await {
for range in ranges {
let start = serialize_anchor(&range.start);
let end = serialize_anchor(&range.end);
let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
let buffer_id = this.update(&mut cx, |this, cx| {
this.create_buffer_for_peer(&buffer, peer_id, cx)
});
locations.push(proto::Location {
buffer_id,
start: Some(start),
@ -6963,6 +7271,7 @@ impl Project {
}
Ok(proto::SearchProjectResponse { locations })
})
.await
}
async fn handle_open_buffer_for_symbol(
@ -7628,7 +7937,7 @@ impl Project {
self.language_servers_for_buffer(buffer, cx).next()
}
fn language_server_for_buffer(
pub fn language_server_for_buffer(
&self,
buffer: &Buffer,
server_id: LanguageServerId,
@ -7808,6 +8117,7 @@ impl Collaborator {
Ok(Self {
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
replica_id: message.replica_id as ReplicaId,
user_id: message.user_id as UserId,
})
}
}


@ -1,11 +1,11 @@
use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent},
tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
OffsetRangeExt, Point, ToPoint,
LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
@ -3953,11 +3953,12 @@ async fn search(
query: SearchQuery,
cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
let results = project
.update(cx, |project, cx| project.search(query, cx))
.await?;
Ok(results
let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
let mut result = HashMap::default();
while let Some((buffer, range)) = search_rx.next().await {
result.entry(buffer).or_insert(range);
}
Ok(result
.into_iter()
.map(|(buffer, ranges)| {
buffer.read_with(cx, |buffer, _| {


@ -13,25 +13,40 @@ use std::{
sync::Arc,
};
#[derive(Clone, Debug)]
pub struct SearchInputs {
query: Arc<str>,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
}
impl SearchInputs {
pub fn as_str(&self) -> &str {
self.query.as_ref()
}
pub fn files_to_include(&self) -> &[PathMatcher] {
&self.files_to_include
}
pub fn files_to_exclude(&self) -> &[PathMatcher] {
&self.files_to_exclude
}
}
#[derive(Clone, Debug)]
pub enum SearchQuery {
Text {
search: Arc<AhoCorasick<usize>>,
query: Arc<str>,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
inner: SearchInputs,
},
Regex {
regex: Regex,
query: Arc<str>,
multiline: bool,
whole_word: bool,
case_sensitive: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
inner: SearchInputs,
},
}
@ -73,13 +88,16 @@ impl SearchQuery {
.auto_configure(&[&query])
.ascii_case_insensitive(!case_sensitive)
.build(&[&query]);
let inner = SearchInputs {
query: query.into(),
files_to_exclude,
files_to_include,
};
Self::Text {
search: Arc::new(search),
query: Arc::from(query),
whole_word,
case_sensitive,
files_to_include,
files_to_exclude,
inner,
}
}
@ -105,14 +123,17 @@ impl SearchQuery {
.case_insensitive(!case_sensitive)
.multi_line(multiline)
.build()?;
let inner = SearchInputs {
query: initial_query,
files_to_exclude,
files_to_include,
};
Ok(Self::Regex {
regex,
query: initial_query,
multiline,
whole_word,
case_sensitive,
files_to_include,
files_to_exclude,
inner,
})
}
@ -284,10 +305,7 @@ impl SearchQuery {
}
pub fn as_str(&self) -> &str {
match self {
Self::Text { query, .. } => query.as_ref(),
Self::Regex { query, .. } => query.as_ref(),
}
self.as_inner().as_str()
}
pub fn whole_word(&self) -> bool {
@ -309,25 +327,11 @@ impl SearchQuery {
}
pub fn files_to_include(&self) -> &[PathMatcher] {
match self {
Self::Text {
files_to_include, ..
} => files_to_include,
Self::Regex {
files_to_include, ..
} => files_to_include,
}
self.as_inner().files_to_include()
}
pub fn files_to_exclude(&self) -> &[PathMatcher] {
match self {
Self::Text {
files_to_exclude, ..
} => files_to_exclude,
Self::Regex {
files_to_exclude, ..
} => files_to_exclude,
}
self.as_inner().files_to_exclude()
}
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
@ -346,6 +350,11 @@ impl SearchQuery {
None => self.files_to_include().is_empty(),
}
}
pub fn as_inner(&self) -> &SearchInputs {
match self {
Self::Regex { inner, .. } | Self::Text { inner, .. } => inner,
}
}
}
fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {


@ -1,7 +1,13 @@
use crate::Project;
use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
use std::path::PathBuf;
use terminal::{Terminal, TerminalBuilder, TerminalSettings};
use std::path::{Path, PathBuf};
use terminal::{
terminal_settings::{self, TerminalSettings, VenvSettingsContent},
Terminal, TerminalBuilder,
};
#[cfg(target_os = "macos")]
use std::os::unix::ffi::OsStrExt;
pub struct Terminals {
pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
@ -20,10 +26,12 @@ impl Project {
));
} else {
let settings = settings::get::<TerminalSettings>(cx);
let python_settings = settings.detect_venv.clone();
let shell = settings.shell.clone();
let terminal = TerminalBuilder::new(
working_directory.clone(),
settings.shell.clone(),
shell.clone(),
settings.env.clone(),
Some(settings.blinking.clone()),
settings.alternate_scroll,
@ -47,6 +55,15 @@ impl Project {
})
.detach();
if let Some(python_settings) = &python_settings.as_option() {
let activate_script_path =
self.find_activate_script_path(&python_settings, working_directory);
self.activate_python_virtual_environment(
activate_script_path,
&terminal_handle,
cx,
);
}
terminal_handle
});
@ -54,6 +71,50 @@ impl Project {
}
}
pub fn find_activate_script_path(
&mut self,
settings: &VenvSettingsContent,
working_directory: Option<PathBuf>,
) -> Option<PathBuf> {
// When we are unable to resolve the working directory, the terminal builder
// defaults to '/'. We should probably encode this directly somewhere, but for
// now, let's just hard code it here.
let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
let activate_script_name = match settings.activate_script {
terminal_settings::ActivateScript::Default => "activate",
terminal_settings::ActivateScript::Csh => "activate.csh",
terminal_settings::ActivateScript::Fish => "activate.fish",
};
for virtual_environment_name in settings.directories {
let mut path = working_directory.join(virtual_environment_name);
path.push("bin/");
path.push(activate_script_name);
if path.exists() {
return Some(path);
}
}
None
}
fn activate_python_virtual_environment(
&mut self,
activate_script: Option<PathBuf>,
terminal_handle: &ModelHandle<Terminal>,
cx: &mut ModelContext<Project>,
) {
if let Some(activate_script) = activate_script {
// Paths are not strings, so we need to jump through some hoops to format the command without `format!`.
let mut command = Vec::from("source ".as_bytes());
command.extend_from_slice(activate_script.as_os_str().as_bytes());
command.push(b'\n');
terminal_handle.update(cx, |this, _| this.input_bytes(command));
}
}
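
A standalone sketch of the byte-level command construction used above (hypothetical helper; assumes a Unix target where `OsStrExt::as_bytes` is available), equivalent to `format!("source {}\n", path)` but without requiring the path to be valid UTF-8:

#[cfg(unix)]
fn build_activate_command(activate_script: &std::path::Path) -> Vec<u8> {
    use std::os::unix::ffi::OsStrExt;
    // Operate on raw OS-string bytes instead of requiring a valid UTF-8 path.
    let mut command = Vec::from("source ".as_bytes());
    command.extend_from_slice(activate_script.as_os_str().as_bytes());
    command.push(b'\n');
    command
}
// For "/project/.venv/bin/activate" this yields the bytes of
// "source /project/.venv/bin/activate\n", which are sent to the terminal as input.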
pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
&self.terminals.local_handles
}


@ -8,7 +8,7 @@ use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use fs::{
repository::{GitFileStatus, GitRepository, RepoPath},
Fs, LineEnding,
Fs,
};
use futures::{
channel::{
@ -27,7 +27,7 @@ use language::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
serialize_version,
},
Buffer, DiagnosticEntry, File as _, PointUtf16, Rope, RopeFingerprint, Unclipped,
Buffer, DiagnosticEntry, File as _, LineEnding, PointUtf16, Rope, RopeFingerprint, Unclipped,
};
use lsp::LanguageServerId;
use parking_lot::Mutex;
@ -2317,9 +2317,10 @@ impl BackgroundScannerState {
for changed_path in changed_paths {
let Some(dot_git_dir) = changed_path
.ancestors()
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT)) else {
continue;
};
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
else {
continue;
};
// Avoid processing the same repository multiple times, if multiple paths
// within it have changed.
@ -2348,7 +2349,10 @@ impl BackgroundScannerState {
let Some(work_dir) = self
.snapshot
.entry_for_id(entry_id)
.map(|entry| RepositoryWorkDirectory(entry.path.clone())) else { continue };
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))
else {
continue;
};
log::info!("reload git repository {:?}", dot_git_dir);
let repository = repository.repo_ptr.lock();
@ -4026,7 +4030,7 @@ struct UpdateIgnoreStatusJob {
scan_queue: Sender<ScanJob>,
}
pub trait WorktreeHandle {
pub trait WorktreeModelHandle {
#[cfg(any(test, feature = "test-support"))]
fn flush_fs_events<'a>(
&self,
@ -4034,7 +4038,7 @@ pub trait WorktreeHandle {
) -> futures::future::LocalBoxFuture<'a, ()>;
}
impl WorktreeHandle for ModelHandle<Worktree> {
impl WorktreeModelHandle for ModelHandle<Worktree> {
// The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
// occurred before the worktree was constructed. These events can cause the worktree to perform
// extra directory scans, and emit extra scan-state notifications.


@ -1,5 +1,5 @@
use crate::{
worktree::{Event, Snapshot, WorktreeHandle},
worktree::{Event, Snapshot, WorktreeModelHandle},
Entry, EntryKind, PathChange, Worktree,
};
use anyhow::Result;